From 8a09d526609cf002619058c4f5c303be910d6933 Mon Sep 17 00:00:00 2001 From: Johannes Schindelin Date: Wed, 24 Mar 2021 21:20:54 +0100 Subject: [PATCH] npm run build && npm run package Signed-off-by: Johannes Schindelin --- dist/index.js | 267 ++++++++++++++++++---------------------------- dist/index.js.map | 2 +- 2 files changed, 105 insertions(+), 164 deletions(-) diff --git a/dist/index.js b/dist/index.js index d0a60ae0..42e1b08e 100644 --- a/dist/index.js +++ b/dist/index.js @@ -35,70 +35,63 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); const core = __importStar(__nccwpck_require__(2186)); -const process_1 = __importDefault(__nccwpck_require__(1765)); const downloader_1 = __nccwpck_require__(7511); const cache_1 = __nccwpck_require__(7799); +const fs_1 = __nccwpck_require__(5747); function run() { return __awaiter(this, void 0, void 0, function* () { try { - if (process_1.default.platform !== 'win32') { - core.warning(`Skipping this Action because it only works on Windows, not on ${process_1.default.platform}`); - return; - } - const flavor = core.getInput('flavor'); - const architecture = core.getInput('architecture'); - const verbose = core.getInput('verbose'); - const { artifactName, download, id } = yield downloader_1.get(flavor, architecture); - const outputDirectory = core.getInput('path') || `C:/${artifactName}`; - let useCache; - switch (core.getInput('cache')) { - case 'true': - useCache = true; - break; - case 'auto': - useCache = flavor !== 'full'; - break; - default: - useCache = false; - } + const { artifactName, stripPrefix, download, cacheId } = yield downloader_1.get(core.getInput('repository'), core.getInput('definitionId'), core.getInput('artifact'), core.getInput('stripPrefix')); + const outputDirectory = core.getInput('path') || artifactName; + let useCache = core.getInput('cache') === 'true'; + const verbose = ((input) => input && input.match(/^\d+$/) ? parseInt(input) : input === 'true')(core.getInput('verbose')); + const isDirectoryEmpty = (path) => { + try { + return fs_1.readdirSync(path).length === 0; + } + catch (e) { + return e && e.code === 'ENOENT'; + } + }; let needToDownload = true; - try { - if (useCache && (yield cache_1.restoreCache([outputDirectory], id))) { - core.info(`Cached ${id} was successfully restored`); - needToDownload = false; + let storeZipAs; + if (useCache) { + try { + if (!isDirectoryEmpty(outputDirectory)) { + storeZipAs = `${outputDirectory}/.${cacheId}.zip`; + if (yield cache_1.restoreCache([storeZipAs], cacheId)) { + yield downloader_1.unzip(`file:${storeZipAs}`, stripPrefix, outputDirectory, verbose); + core.info(`Cached ${cacheId} was successfully restored`); + needToDownload = false; + } + } + else if (yield cache_1.restoreCache([outputDirectory], cacheId)) { + core.info(`Cached ${cacheId} was successfully restored`); + needToDownload = false; + } + } + catch (e) { + core.warning(`Cannot use @actions/cache (${e})`); + useCache = false; } - } - catch (e) { - core.warning(`Cannot use @actions/cache (${e})`); - useCache = false; } if (needToDownload) { core.info(`Downloading ${artifactName}`); - yield download(outputDirectory, verbose.match(/^\d+$/) ? 
parseInt(verbose) : verbose === 'true'); + yield download(outputDirectory, verbose, storeZipAs); try { - if (useCache && !(yield cache_1.saveCache([outputDirectory], id))) { - core.warning(`Failed to cache ${id}`); + if (useCache && + !(yield cache_1.saveCache([storeZipAs || outputDirectory], cacheId))) { + core.warning(`Failed to cache ${cacheId}`); } } catch (e) { - core.warning(`Failed to cache ${id}: ${e.message}`); + core.warning(`Failed to cache ${cacheId}: ${e.message}`); + } + if (storeZipAs) { + fs_1.unlinkSync(storeZipAs); } - } - // Set up PATH so that Git for Windows' SDK's `bash.exe`, `prove` and `gcc` are found - core.addPath(`${outputDirectory}/usr/bin/core_perl`); - core.addPath(`${outputDirectory}/usr/bin`); - const msystem = architecture === 'i686' ? 'MINGW32' : 'MINGW64'; - core.addPath(`${outputDirectory}/${msystem.toLocaleLowerCase()}/bin`); - core.exportVariable('MSYSTEM', msystem); - if (!('LANG' in process_1.default.env) && - !('LC_ALL' in process_1.default.env) && - !('LC_CTYPE' in process_1.default.env)) { - core.exportVariable('LC_CTYPE', 'C.UTF-8'); } } catch (error) { @@ -129,15 +122,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.get = void 0; +exports.get = exports.unzip = void 0; const fs_1 = __importDefault(__nccwpck_require__(5747)); const https_1 = __importDefault(__nccwpck_require__(7211)); const unzipper_1 = __importDefault(__nccwpck_require__(1639)); -const child_process_1 = __nccwpck_require__(3129); -const path_1 = __nccwpck_require__(5622); const node_fetch_retry_1 = __importDefault(__nccwpck_require__(3006)); -const gitForWindowsUsrBinPath = 'C:/Program Files/Git/usr/bin'; -const gitForWindowsMINGW64BinPath = 'C:/Program Files/Git/mingw64/bin'; function fetchJSONFromURL(url) { return __awaiter(this, void 0, void 0, function* () { const res = yield node_fetch_retry_1.default(url); @@ -162,31 +151,39 @@ function mkdirp(directoryPath) { } fs_1.default.mkdirSync(directoryPath, { recursive: true }); } -function unzip(url, stripPrefix, outputDirectory, verbose, downloader) { +function unzip(url, stripPrefix, outputDirectory, verbose, storeZipAs) { return __awaiter(this, void 0, void 0, function* () { + const files = []; let progress = verbose === false - ? () => { } + ? (path) => { + if (path !== undefined) { + files.push(path); + } + } : (path) => { - path === undefined || process.stderr.write(`${path}\n`); + if (path !== undefined) { + files.push(path); + process.stderr.write(`${path}\n`); + } }; if (typeof verbose === 'number') { let counter = 0; progress = (path) => { - if (path === undefined || ++counter % verbose === 0) { - process.stderr.write(`${counter} items extracted\n`); + if (path !== undefined) { + files.push(path); + if (++counter % verbose === 0) { + process.stderr.write(`${counter} items extracted\n`); + } } }; } mkdirp(outputDirectory); - if (downloader) { - // `https.get()` seems to have performance problems that cause frequent - // ECONNRESET problems with larger payloads. Let's (ab-)use Git for Windows' - // `curl.exe` to do the downloading for us in that case. 
- return yield downloader(url, outputDirectory, verbose); - } return new Promise((resolve, reject) => { - https_1.default - .get(url, (res) => { + const callback = (res) => { + if (storeZipAs) { + process.stderr.write(`Writing ${storeZipAs}\n`); + res.pipe(fs_1.default.createWriteStream(storeZipAs)).on('error', reject); + } res .on('error', reject) .pipe(unzipper_1.default.Parse()) @@ -195,121 +192,72 @@ function unzip(url, stripPrefix, outputDirectory, verbose, downloader) { process.stderr.write(`warning: skipping ${entry.path} because it does not start with ${stripPrefix}\n`); } const entryPath = `${outputDirectory}/${entry.path.substring(stripPrefix.length)}`; - progress(entryPath); if (entryPath.endsWith('/')) { mkdirp(entryPath.replace(/\/$/, '')); entry.autodrain(); } else { + progress(entryPath); entry.pipe(fs_1.default.createWriteStream(`${entryPath}`)); } }) .on('error', reject) .on('finish', progress) - .on('finish', resolve); - }) - .on('error', reject); - }); - }); -} -/* We're (ab-)using Git for Windows' `tar.exe` and `xz.exe` to do the job */ -function unpackTarXZInZipFromURL(url, outputDirectory, verbose = false) { - return __awaiter(this, void 0, void 0, function* () { - const tmp = yield fs_1.default.promises.mkdtemp(`${outputDirectory}/tmp`); - const zipPath = `${tmp}/artifacts.zip`; - const curl = child_process_1.spawn(`${gitForWindowsMINGW64BinPath}/curl.exe`, [ - '--retry', - '16', - '--retry-all-errors', - '--retry-connrefused', - '-o', - zipPath, - url - ], { stdio: [undefined, 'inherit', 'inherit'] }); - yield new Promise((resolve, reject) => { - curl - .on('close', code => code === 0 ? resolve() : reject(new Error(`${code}`))) - .on('error', e => reject(new Error(`${e}`))); - }); - const zipContents = (yield unzipper_1.default.Open.file(zipPath)).files.filter(e => !e.path.endsWith('/')); - if (zipContents.length !== 1) { - throw new Error(`${zipPath} does not contain exactly one file (${zipContents.map(e => e.path)})`); - } - // eslint-disable-next-line no-console - console.log(`unzipping ${zipPath}\n`); - const tarXZ = child_process_1.spawn(`${gitForWindowsUsrBinPath}/bash.exe`, [ - '-lc', - `unzip -p "${zipPath}" ${zipContents[0].path} | tar ${verbose === true ? 
'xJvf' : 'xJf'} -` - ], { - cwd: outputDirectory, - env: { - CHERE_INVOKING: '1', - MSYSTEM: 'MINGW64', - PATH: `${gitForWindowsUsrBinPath}${path_1.delimiter}${process.env.PATH}` - }, - stdio: [undefined, 'inherit', 'inherit'] - }); - yield new Promise((resolve, reject) => { - tarXZ.on('close', code => { - if (code === 0) { - resolve(); - } - else { - reject(new Error(`tar: exited with code ${code}`)); - } - }); + .on('finish', () => resolve(files)); + }; + if (url.startsWith('file:')) { + callback(fs_1.default.createReadStream(url.substring('file:'.length))); + } + else { + https_1.default.get(url, callback).on('error', reject); + } }); - yield fs_1.default.promises.rmdir(tmp, { recursive: true }); }); } -function get(flavor, architecture) { +exports.unzip = unzip; +function get(repository, definitionId, artifactName, stripPrefix) { return __awaiter(this, void 0, void 0, function* () { - if (!['x86_64', 'i686'].includes(architecture)) { - throw new Error(`Unsupported architecture: ${architecture}`); - } - let definitionId; - let artifactName; - switch (flavor) { - case 'minimal': - if (architecture === 'i686') { - throw new Error(`Flavor "minimal" is only available for x86_64`); - } - definitionId = 22; - artifactName = 'git-sdk-64-minimal'; - break; - case 'makepkg-git': - if (architecture === 'i686') { - throw new Error(`Flavor "makepkg-git" is only available for x86_64`); - } - definitionId = 29; - artifactName = 'git-sdk-64-makepkg-git'; - break; - case 'build-installers': - case 'full': - definitionId = architecture === 'i686' ? 30 : 29; - artifactName = `git-sdk-${architecture === 'i686' ? 32 : 64}-${flavor === 'full' ? 'full-sdk' : flavor}`; - break; - default: - throw new Error(`Unknown flavor: '${flavor}`); + if (!repository || !definitionId) { + throw new Error(`Need repository and definitionId (got ${repository} and ${definitionId})`); } - const baseURL = 'https://dev.azure.com/git-for-windows/git/_apis/build/builds'; + const baseURL = `https://dev.azure.com/${repository}/_apis/build/builds`; const data = yield fetchJSONFromURL(`${baseURL}?definitions=${definitionId}&statusFilter=completed&resultFilter=succeeded&$top=1`); if (data.count !== 1) { throw new Error(`Unexpected number of builds: ${data.count}`); } - const id = `${artifactName}-${data.value[0].id}`; - const download = (outputDirectory, verbose = false) => __awaiter(this, void 0, void 0, function* () { + let url; + const getURL = () => __awaiter(this, void 0, void 0, function* () { const data2 = yield fetchJSONFromURL(`${baseURL}/${data.value[0].id}/artifacts`); + if (!artifactName) { + if (data2.value.length !== 1) { + throw new Error(`Cannot deduce artifact name (candidates: ${data2.value + .map(e => e.name) + .join(', ')})`); + } + artifactName = data2.value[0].name; + } const filtered = data2.value.filter(e => e.name === artifactName); if (filtered.length !== 1) { - throw new Error(`Could not find ${artifactName} in ${JSON.stringify(data2, null, 4)}`); + throw new Error(`Could not find ${artifactName} in ${data2.value + .map(e => e.name) + .join(', ')}`); + } + return filtered[0].resource.downloadUrl; + }); + if (!artifactName) { + url = yield getURL(); + } + if (!stripPrefix) { + stripPrefix = `${artifactName}/`; + } + const download = (outputDirectory, verbose = false, storeZipAs) => __awaiter(this, void 0, void 0, function* () { + if (!url) { + url = yield getURL(); } - const url = filtered[0].resource.downloadUrl; let delayInSeconds = 1; for (;;) { try { - yield unzip(url, `${artifactName}/`, outputDirectory, 
verbose, flavor === 'full' ? unpackTarXZInZipFromURL : undefined); - break; + return yield unzip(url, stripPrefix || `${artifactName}/`, outputDirectory, verbose, storeZipAs); } catch (e) { delayInSeconds *= 2; @@ -321,7 +269,8 @@ function get(flavor, architecture) { } } }); - return { artifactName, download, id }; + const cacheId = `${repository}-${definitionId}-${artifactName}-${data.value[0].id}`.replace('/', '.'); + return { artifactName, stripPrefix, download, cacheId }; }); } exports.get = get; @@ -76471,14 +76420,6 @@ module.exports = require("path");; /***/ }), -/***/ 1765: -/***/ ((module) => { - -"use strict"; -module.exports = require("process");; - -/***/ }), - /***/ 4213: /***/ ((module) => { diff --git a/dist/index.js.map b/dist/index.js.map index 19b9b67c..2a0c020a 100644 --- a/dist/index.js.map +++ b/dist/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../webpack://setup-git-for-windows-sdk/./lib/main.js","../webpack://setup-git-for-windows-sdk/./lib/src/downloader.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/cache/lib/cache.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/cache/lib/internal/cacheHttpClient.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/cache/lib/internal/cacheUtils.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/cache/lib/internal/constants.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/cache/lib/internal/downloadUtils.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/cache/lib/internal/requestUtils.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/cache/lib/internal/tar.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/cache/lib/options.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/core/lib/command.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/core/lib/core.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/core/lib/file-command.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/core/lib/utils.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/exec/lib/exec.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/glob.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/internal-glob-options-helper.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/internal-globber.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/internal-match-kind.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/internal-path-helper.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/internal-path.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/internal-pattern-helper.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/internal-pattern.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/glob/lib/internal-search-state.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/http-client/auth.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/http-client/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/http-client/proxy.js","../webpack://setup-git-for-windows-sdk/./node_modules/@actions/io/lib/io-util.js","../webpack://setup-git-for-windows-sdk/./node_modules/@action
s/io/lib/io.js","../webpack://setup-git-for-windows-sdk/./node_modules/@adobe/node-fetch-retry/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/abort-controller/dist/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-asynciterator-polyfill/dist-esm/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-auth/dist/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/dist/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/form-data/lib/populate.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/tslib/tslib.es6.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/validate.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/stringify.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v1.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/parse.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v35.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/md5.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v3.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v4.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/sha1.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v5.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/nil.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/version.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-lro/dist/index.js","../webpack://setup-git-for-windows-s
dk/./node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-paging/dist/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-tracing/dist/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/logger/dist/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/logger/node_modules/tslib/tslib.es6.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/storage-blob/dist/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/api/context.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/api/global-utils.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/api/metrics.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/api/propagation.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/api/trace.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/common/Logger.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/common/Time.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/context/propagation/HttpTextPropagator.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/context/propagation/NoopHttpTextPropagator.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/context/propagation/getter.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/context/propagation/setter.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/correlation_context/CorrelationContext.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/correlation_context/EntryValue.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/BatchObserverResult.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/BoundInstrument.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/Meter.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/Metric.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/Observation.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/metrics/ObserverResult.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/platform/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/sr
c/platform/node/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/Event.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/NoopSpan.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/Sampler.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/TimedEvent.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/attributes.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/instrumentation/Plugin.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/link.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/link_context.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/span.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/span_context.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/span_kind.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/status.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/trace_flags.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/trace_state.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/tracer.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/context-base/build/src/NoopContextManager.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/context-base/build/src/context.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/context-base/build/src/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@opentelemetry/context-base/build/src/types.js","../webpack://setup-git-for-windows-sdk/./node_modules/abort-controller/dist/abort-controller.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/lib/abort.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/lib/async.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/lib/defer.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/lib/iterate.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/lib/state.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/lib/terminator.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/parallel.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/serial.js","../webpack://setup-git-for-windows-sdk/./node_modules/asynckit/serialOrdered.js","../webpack://setup-git-for-windows-sdk/./node_modules/balanced-match/index.js","../webpack://setup-git-for-windows-
sdk/./node_modules/big-integer/BigInteger.js","../webpack://setup-git-for-windows-sdk/./node_modules/binary/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/binary/lib/vars.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/any.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/async.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/bind.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/bluebird.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/call_get.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/cancel.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/catch_filter.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/context.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/debuggability.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/direct_resolve.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/each.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/errors.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/es5.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/filter.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/finally.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/generators.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/join.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/map.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/method.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/nodeback.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/nodeify.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/promise.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/promise_array.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/promisify.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/props.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/queue.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/race.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/reduce.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/schedule.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/settle.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/some.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/synchronous_inspection.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/thenables.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/timers.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/using.js","../webpack://setup-git-for-windows-sdk/./node_modules/bluebird/js/release/util.js","../webpack://setup-git-for-windows-sdk/./node_modules/brace-expansion/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/buffer-indexof-polyfill/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/
buffer-indexof-polyfill/init-buffer.js","../webpack://setup-git-for-windows-sdk/./node_modules/buffers/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/chainsaw/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/combined-stream/lib/combined_stream.js","../webpack://setup-git-for-windows-sdk/./node_modules/concat-map/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/core-util-is/lib/util.js","../webpack://setup-git-for-windows-sdk/./node_modules/delayed-stream/lib/delayed_stream.js","../webpack://setup-git-for-windows-sdk/./node_modules/duplexer2/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/event-target-shim/dist/event-target-shim.js","../webpack://setup-git-for-windows-sdk/./node_modules/fs.realpath/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/fs.realpath/old.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/fstream.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/abstract.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/collect.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/dir-reader.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/dir-writer.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/file-reader.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/file-writer.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/get-type.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/link-reader.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/link-writer.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/proxy-reader.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/proxy-writer.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/reader.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/socket-reader.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/lib/writer.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/node_modules/mkdirp/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/fstream/node_modules/rimraf/rimraf.js","../webpack://setup-git-for-windows-sdk/./node_modules/glob/common.js","../webpack://setup-git-for-windows-sdk/./node_modules/glob/glob.js","../webpack://setup-git-for-windows-sdk/./node_modules/glob/sync.js","../webpack://setup-git-for-windows-sdk/./node_modules/graceful-fs/clone.js","../webpack://setup-git-for-windows-sdk/./node_modules/graceful-fs/graceful-fs.js","../webpack://setup-git-for-windows-sdk/./node_modules/graceful-fs/legacy-streams.js","../webpack://setup-git-for-windows-sdk/./node_modules/graceful-fs/polyfills.js","../webpack://setup-git-for-windows-sdk/./node_modules/inflight/inflight.js","../webpack://setup-git-for-windows-sdk/./node_modules/inherits/inherits.js","../webpack://setup-git-for-windows-sdk/./node_modules/inherits/inherits_browser.js","../webpack://setup-git-for-windows-sdk/./node_modules/isarray/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/listenercount/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/mime-db/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/mime-types/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/minimatch/minimatch.js","../webpack://setup-git-for-windows-sdk/./node_modules/node-fetch/lib/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/
once/once.js","../webpack://setup-git-for-windows-sdk/./node_modules/path-is-absolute/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/process-nextick-args/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/psl/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/lib/_stream_duplex.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/lib/_stream_passthrough.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/lib/_stream_readable.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/lib/_stream_transform.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/lib/_stream_writable.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/lib/internal/streams/BufferList.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/lib/internal/streams/destroy.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/lib/internal/streams/stream.js","../webpack://setup-git-for-windows-sdk/./node_modules/readable-stream/readable.js","../webpack://setup-git-for-windows-sdk/./node_modules/safe-buffer/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/sax/lib/sax.js","../webpack://setup-git-for-windows-sdk/./node_modules/semver/semver.js","../webpack://setup-git-for-windows-sdk/./node_modules/setimmediate/setImmediate.js","../webpack://setup-git-for-windows-sdk/./node_modules/string_decoder/lib/string_decoder.js","../webpack://setup-git-for-windows-sdk/./node_modules/traverse/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/tunnel/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/tunnel/lib/tunnel.js","../webpack://setup-git-for-windows-sdk/./node_modules/universalify/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/Buffer.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/BufferStream.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/Decrypt.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/NoopStream.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/Open/directory.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/Open/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/Open/unzip.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/PullStream.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/extract.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/parse.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/parseDateTime.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/parseExtraField.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/lib/parseOne.js","../webpack://setup-git-for-windows-sdk/./node_modules/unzipper/unzip.js","../webpack://setup-git-for-windows-sdk/./node_modules/util-deprecate/node.js","../webpack://setup-git-for-windows-sdk/./node_modules/uuid/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/uuid/lib/bytesToUuid.js","../webpack://setup-git-for-windows-sdk/./node_modules/uuid/lib/rng.js","../webpack://setup-git-for-windows-sdk/./node_modules/uuid/v1.js","../webpack://setup-git-for-windows-sdk/./node_modules/uuid/v4.js","../webpack://setup-git-for-windows-sdk/./node_modules/wrappy/wrappy.js","../webpack://setup-git-for-windows-sdk/./node_modules/xml2js/lib/
bom.js","../webpack://setup-git-for-windows-sdk/./node_modules/xml2js/lib/builder.js","../webpack://setup-git-for-windows-sdk/./node_modules/xml2js/lib/defaults.js","../webpack://setup-git-for-windows-sdk/./node_modules/xml2js/lib/parser.js","../webpack://setup-git-for-windows-sdk/./node_modules/xml2js/lib/processors.js","../webpack://setup-git-for-windows-sdk/./node_modules/xml2js/lib/xml2js.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/DocumentPosition.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/NodeType.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/Utility.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/WriterState.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLAttribute.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLCData.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLCharacterData.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLComment.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDOMConfiguration.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDOMImplementation.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDOMStringList.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDTDAttList.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDTDElement.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDTDEntity.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDTDNotation.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDeclaration.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDocType.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDocument.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDocumentCB.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLDummy.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLElement.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLNamedNodeMap.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLNode.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLNodeList.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLProcessingInstruction.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLRaw.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLStreamWriter.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLStringWriter.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLStringifier.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLText.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/XMLWriterBase.js","../webpack://setup-git-for-windows-sdk/./node_modules/xmlbuilder/lib/index.js","../webpack://setup-git-for-windows-sdk/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://setup-git-for-windows-sdk/external \"assert\"","../webpack://setup-git-for-windows-sdk/external \"buffer\"","../webpack://setup-git-for-windows-sdk/external 
\"child_process\"","../webpack://setup-git-for-windows-sdk/external \"constants\"","../webpack://setup-git-for-windows-sdk/external \"crypto\"","../webpack://setup-git-for-windows-sdk/external \"events\"","../webpack://setup-git-for-windows-sdk/external \"fs\"","../webpack://setup-git-for-windows-sdk/external \"http\"","../webpack://setup-git-for-windows-sdk/external \"https\"","../webpack://setup-git-for-windows-sdk/external \"net\"","../webpack://setup-git-for-windows-sdk/external \"os\"","../webpack://setup-git-for-windows-sdk/external \"path\"","../webpack://setup-git-for-windows-sdk/external \"process\"","../webpack://setup-git-for-windows-sdk/external \"punycode\"","../webpack://setup-git-for-windows-sdk/external \"stream\"","../webpack://setup-git-for-windows-sdk/external \"string_decoder\"","../webpack://setup-git-for-windows-sdk/external \"timers\"","../webpack://setup-git-for-windows-sdk/external \"tls\"","../webpack://setup-git-for-windows-sdk/external \"url\"","../webpack://setup-git-for-windows-sdk/external \"util\"","../webpack://setup-git-for-windows-sdk/external \"zlib\"","../webpack://setup-git-for-windows-sdk/webpack/bootstrap","../webpack://setup-git-for-windows-sdk/webpack/runtime/compat get default export","../webpack://setup-git-for-windows-sdk/webpack/runtime/define property getters","../webpack://setup-git-for-windows-sdk/webpack/runtime/hasOwnProperty shorthand","../webpack://setup-git-for-windows-sdk/webpack/runtime/make namespace object","../webpack://setup-git-for-windows-sdk/webpack/runtime/node module decorator","../webpack://setup-git-for-windows-sdk/webpack/runtime/compat","../webpack://setup-git-for-windows-sdk/webpack/startup"],"sourcesContent":["\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst process_1 = __importDefault(require(\"process\"));\r\nconst downloader_1 = require(\"./src/downloader\");\r\nconst cache_1 = require(\"@actions/cache\");\r\nfunction run() {\r\n return __awaiter(this, void 0, void 0, function* () {\r\n try {\r\n if (process_1.default.platform !== 'win32') {\r\n core.warning(`Skipping this Action because it only works on Windows, not on ${process_1.default.platform}`);\r\n return;\r\n }\r\n const flavor = core.getInput('flavor');\r\n const architecture = core.getInput('architecture');\r\n const verbose = core.getInput('verbose');\r\n const { artifactName, download, id } = yield downloader_1.get(flavor, architecture);\r\n const outputDirectory = core.getInput('path') || `C:/${artifactName}`;\r\n let useCache;\r\n switch (core.getInput('cache')) {\r\n case 'true':\r\n useCache = true;\r\n break;\r\n case 'auto':\r\n useCache = flavor !== 'full';\r\n break;\r\n default:\r\n useCache = false;\r\n }\r\n let needToDownload = true;\r\n try {\r\n if (useCache && (yield cache_1.restoreCache([outputDirectory], id))) {\r\n core.info(`Cached ${id} was successfully restored`);\r\n needToDownload = false;\r\n }\r\n }\r\n catch (e) {\r\n core.warning(`Cannot use @actions/cache (${e})`);\r\n useCache = false;\r\n }\r\n if (needToDownload) {\r\n core.info(`Downloading ${artifactName}`);\r\n yield download(outputDirectory, verbose.match(/^\\d+$/) ? parseInt(verbose) : verbose === 'true');\r\n try {\r\n if (useCache && !(yield cache_1.saveCache([outputDirectory], id))) {\r\n core.warning(`Failed to cache ${id}`);\r\n }\r\n }\r\n catch (e) {\r\n core.warning(`Failed to cache ${id}: ${e.message}`);\r\n }\r\n }\r\n // Set up PATH so that Git for Windows' SDK's `bash.exe`, `prove` and `gcc` are found\r\n core.addPath(`${outputDirectory}/usr/bin/core_perl`);\r\n core.addPath(`${outputDirectory}/usr/bin`);\r\n const msystem = architecture === 'i686' ? 'MINGW32' : 'MINGW64';\r\n core.addPath(`${outputDirectory}/${msystem.toLocaleLowerCase()}/bin`);\r\n core.exportVariable('MSYSTEM', msystem);\r\n if (!('LANG' in process_1.default.env) &&\r\n !('LC_ALL' in process_1.default.env) &&\r\n !('LC_CTYPE' in process_1.default.env)) {\r\n core.exportVariable('LC_CTYPE', 'C.UTF-8');\r\n }\r\n }\r\n catch (error) {\r\n core.setFailed(error.message);\r\n }\r\n });\r\n}\r\nrun();\r\n","\"use strict\";\r\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.get = void 0;\r\nconst fs_1 = __importDefault(require(\"fs\"));\r\nconst https_1 = __importDefault(require(\"https\"));\r\nconst unzipper_1 = __importDefault(require(\"unzipper\"));\r\nconst child_process_1 = require(\"child_process\");\r\nconst path_1 = require(\"path\");\r\nconst node_fetch_retry_1 = __importDefault(require(\"@adobe/node-fetch-retry\"));\r\nconst gitForWindowsUsrBinPath = 'C:/Program Files/Git/usr/bin';\r\nconst gitForWindowsMINGW64BinPath = 'C:/Program Files/Git/mingw64/bin';\r\nfunction fetchJSONFromURL(url) {\r\n return __awaiter(this, void 0, void 0, function* () {\r\n const res = yield node_fetch_retry_1.default(url);\r\n if (res.status !== 200) {\r\n throw new Error(`Got code ${res.status}, URL: ${url}, message: ${res.statusText}`);\r\n }\r\n return res.json();\r\n });\r\n}\r\nfunction mkdirp(directoryPath) {\r\n try {\r\n const stat = fs_1.default.statSync(directoryPath);\r\n if (stat.isDirectory()) {\r\n return;\r\n }\r\n throw new Error(`${directoryPath} exists, but is not a directory`);\r\n }\r\n catch (e) {\r\n if (!e || e.code !== 'ENOENT') {\r\n throw e;\r\n }\r\n }\r\n fs_1.default.mkdirSync(directoryPath, { recursive: true });\r\n}\r\nfunction unzip(url, stripPrefix, outputDirectory, verbose, downloader) {\r\n return __awaiter(this, void 0, void 0, function* () {\r\n let progress = verbose === false\r\n ? () => { }\r\n : (path) => {\r\n path === undefined || process.stderr.write(`${path}\\n`);\r\n };\r\n if (typeof verbose === 'number') {\r\n let counter = 0;\r\n progress = (path) => {\r\n if (path === undefined || ++counter % verbose === 0) {\r\n process.stderr.write(`${counter} items extracted\\n`);\r\n }\r\n };\r\n }\r\n mkdirp(outputDirectory);\r\n if (downloader) {\r\n // `https.get()` seems to have performance problems that cause frequent\r\n // ECONNRESET problems with larger payloads. 
Let's (ab-)use Git for Windows'\r\n // `curl.exe` to do the downloading for us in that case.\r\n return yield downloader(url, outputDirectory, verbose);\r\n }\r\n return new Promise((resolve, reject) => {\r\n https_1.default\r\n .get(url, (res) => {\r\n res\r\n .on('error', reject)\r\n .pipe(unzipper_1.default.Parse())\r\n .on('entry', entry => {\r\n if (!entry.path.startsWith(stripPrefix)) {\r\n process.stderr.write(`warning: skipping ${entry.path} because it does not start with ${stripPrefix}\\n`);\r\n }\r\n const entryPath = `${outputDirectory}/${entry.path.substring(stripPrefix.length)}`;\r\n progress(entryPath);\r\n if (entryPath.endsWith('/')) {\r\n mkdirp(entryPath.replace(/\\/$/, ''));\r\n entry.autodrain();\r\n }\r\n else {\r\n entry.pipe(fs_1.default.createWriteStream(`${entryPath}`));\r\n }\r\n })\r\n .on('error', reject)\r\n .on('finish', progress)\r\n .on('finish', resolve);\r\n })\r\n .on('error', reject);\r\n });\r\n });\r\n}\r\n/* We're (ab-)using Git for Windows' `tar.exe` and `xz.exe` to do the job */\r\nfunction unpackTarXZInZipFromURL(url, outputDirectory, verbose = false) {\r\n return __awaiter(this, void 0, void 0, function* () {\r\n const tmp = yield fs_1.default.promises.mkdtemp(`${outputDirectory}/tmp`);\r\n const zipPath = `${tmp}/artifacts.zip`;\r\n const curl = child_process_1.spawn(`${gitForWindowsMINGW64BinPath}/curl.exe`, [\r\n '--retry',\r\n '16',\r\n '--retry-all-errors',\r\n '--retry-connrefused',\r\n '-o',\r\n zipPath,\r\n url\r\n ], { stdio: [undefined, 'inherit', 'inherit'] });\r\n yield new Promise((resolve, reject) => {\r\n curl\r\n .on('close', code => code === 0 ? resolve() : reject(new Error(`${code}`)))\r\n .on('error', e => reject(new Error(`${e}`)));\r\n });\r\n const zipContents = (yield unzipper_1.default.Open.file(zipPath)).files.filter(e => !e.path.endsWith('/'));\r\n if (zipContents.length !== 1) {\r\n throw new Error(`${zipPath} does not contain exactly one file (${zipContents.map(e => e.path)})`);\r\n }\r\n // eslint-disable-next-line no-console\r\n console.log(`unzipping ${zipPath}\\n`);\r\n const tarXZ = child_process_1.spawn(`${gitForWindowsUsrBinPath}/bash.exe`, [\r\n '-lc',\r\n `unzip -p \"${zipPath}\" ${zipContents[0].path} | tar ${verbose === true ? 
'xJvf' : 'xJf'} -`\r\n ], {\r\n cwd: outputDirectory,\r\n env: {\r\n CHERE_INVOKING: '1',\r\n MSYSTEM: 'MINGW64',\r\n PATH: `${gitForWindowsUsrBinPath}${path_1.delimiter}${process.env.PATH}`\r\n },\r\n stdio: [undefined, 'inherit', 'inherit']\r\n });\r\n yield new Promise((resolve, reject) => {\r\n tarXZ.on('close', code => {\r\n if (code === 0) {\r\n resolve();\r\n }\r\n else {\r\n reject(new Error(`tar: exited with code ${code}`));\r\n }\r\n });\r\n });\r\n yield fs_1.default.promises.rmdir(tmp, { recursive: true });\r\n });\r\n}\r\nfunction get(flavor, architecture) {\r\n return __awaiter(this, void 0, void 0, function* () {\r\n if (!['x86_64', 'i686'].includes(architecture)) {\r\n throw new Error(`Unsupported architecture: ${architecture}`);\r\n }\r\n let definitionId;\r\n let artifactName;\r\n switch (flavor) {\r\n case 'minimal':\r\n if (architecture === 'i686') {\r\n throw new Error(`Flavor \"minimal\" is only available for x86_64`);\r\n }\r\n definitionId = 22;\r\n artifactName = 'git-sdk-64-minimal';\r\n break;\r\n case 'makepkg-git':\r\n if (architecture === 'i686') {\r\n throw new Error(`Flavor \"makepkg-git\" is only available for x86_64`);\r\n }\r\n definitionId = 29;\r\n artifactName = 'git-sdk-64-makepkg-git';\r\n break;\r\n case 'build-installers':\r\n case 'full':\r\n definitionId = architecture === 'i686' ? 30 : 29;\r\n artifactName = `git-sdk-${architecture === 'i686' ? 32 : 64}-${flavor === 'full' ? 'full-sdk' : flavor}`;\r\n break;\r\n default:\r\n throw new Error(`Unknown flavor: '${flavor}`);\r\n }\r\n const baseURL = 'https://dev.azure.com/git-for-windows/git/_apis/build/builds';\r\n const data = yield fetchJSONFromURL(`${baseURL}?definitions=${definitionId}&statusFilter=completed&resultFilter=succeeded&$top=1`);\r\n if (data.count !== 1) {\r\n throw new Error(`Unexpected number of builds: ${data.count}`);\r\n }\r\n const id = `${artifactName}-${data.value[0].id}`;\r\n const download = (outputDirectory, verbose = false) => __awaiter(this, void 0, void 0, function* () {\r\n const data2 = yield fetchJSONFromURL(`${baseURL}/${data.value[0].id}/artifacts`);\r\n const filtered = data2.value.filter(e => e.name === artifactName);\r\n if (filtered.length !== 1) {\r\n throw new Error(`Could not find ${artifactName} in ${JSON.stringify(data2, null, 4)}`);\r\n }\r\n const url = filtered[0].resource.downloadUrl;\r\n let delayInSeconds = 1;\r\n for (;;) {\r\n try {\r\n yield unzip(url, `${artifactName}/`, outputDirectory, verbose, flavor === 'full' ? unpackTarXZInZipFromURL : undefined);\r\n break;\r\n }\r\n catch (e) {\r\n delayInSeconds *= 2;\r\n if (delayInSeconds >= 60) {\r\n throw e;\r\n }\r\n process.stderr.write(`Encountered problem downloading/extracting ${url}: ${e}; Retrying in ${delayInSeconds} seconds...\\n`);\r\n yield new Promise((resolve, _reject) => setTimeout(resolve, delayInSeconds * 1000));\r\n }\r\n }\r\n });\r\n return { artifactName, download, id };\r\n });\r\n}\r\nexports.get = get;\r\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst path = __importStar(require(\"path\"));\nconst utils = __importStar(require(\"./internal/cacheUtils\"));\nconst cacheHttpClient = __importStar(require(\"./internal/cacheHttpClient\"));\nconst tar_1 = require(\"./internal/tar\");\nclass ValidationError extends Error {\n constructor(message) {\n super(message);\n this.name = 'ValidationError';\n Object.setPrototypeOf(this, ValidationError.prototype);\n }\n}\nexports.ValidationError = ValidationError;\nclass ReserveCacheError extends Error {\n constructor(message) {\n super(message);\n this.name = 'ReserveCacheError';\n Object.setPrototypeOf(this, ReserveCacheError.prototype);\n }\n}\nexports.ReserveCacheError = ReserveCacheError;\nfunction checkPaths(paths) {\n if (!paths || paths.length === 0) {\n throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);\n }\n}\nfunction checkKey(key) {\n if (key.length > 512) {\n throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`);\n }\n const regex = /^[^,]*$/;\n if (!regex.test(key)) {\n throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);\n }\n}\n/**\n * Restores cache from keys\n *\n * @param paths a list of file paths to restore from the cache\n * @param primaryKey an explicit key for restoring the cache\n * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key\n * @param downloadOptions cache download options\n * @returns string returns the key for the cache hit, otherwise returns undefined\n */\nfunction restoreCache(paths, primaryKey, restoreKeys, options) {\n return __awaiter(this, void 0, void 0, function* () {\n checkPaths(paths);\n restoreKeys = restoreKeys || [];\n const keys = [primaryKey, ...restoreKeys];\n core.debug('Resolved Keys:');\n core.debug(JSON.stringify(keys));\n if (keys.length > 10) {\n throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);\n }\n for (const key of keys) {\n checkKey(key);\n }\n const compressionMethod = yield utils.getCompressionMethod();\n // path are needed to compute version\n const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {\n compressionMethod\n });\n if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) {\n // Cache not found\n return undefined;\n }\n const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));\n core.debug(`Archive Path: ${archivePath}`);\n try {\n // Download the cache from the cache entry\n yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);\n if (core.isDebug()) {\n yield tar_1.listTar(archivePath, compressionMethod);\n }\n const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);\n core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);\n yield tar_1.extractTar(archivePath, compressionMethod);\n core.info('Cache restored successfully');\n }\n finally {\n // Try to delete the archive to save space\n try {\n yield utils.unlinkFile(archivePath);\n }\n catch (error) {\n core.debug(`Failed to delete archive: ${error}`);\n }\n }\n return cacheEntry.cacheKey;\n });\n}\nexports.restoreCache = restoreCache;\n/**\n * Saves a list of files with the specified key\n *\n * @param paths a list of file paths to be cached\n * @param key an explicit key for restoring the cache\n * @param options cache upload options\n * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails\n */\nfunction saveCache(paths, key, options) {\n return __awaiter(this, void 0, void 0, function* () {\n checkPaths(paths);\n checkKey(key);\n const compressionMethod = yield utils.getCompressionMethod();\n core.debug('Reserving Cache');\n const cacheId = yield cacheHttpClient.reserveCache(key, paths, {\n compressionMethod\n });\n if (cacheId === -1) {\n throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);\n }\n core.debug(`Cache ID: ${cacheId}`);\n const cachePaths = yield utils.resolvePaths(paths);\n core.debug('Cache Paths:');\n core.debug(`${JSON.stringify(cachePaths)}`);\n const archiveFolder = yield utils.createTempDirectory();\n const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));\n core.debug(`Archive Path: ${archivePath}`);\n yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);\n if (core.isDebug()) {\n yield tar_1.listTar(archivePath, compressionMethod);\n }\n const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit\n const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);\n core.debug(`File Size: ${archiveFileSize}`);\n if (archiveFileSize > fileSizeLimit) {\n throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);\n }\n core.debug(`Saving Cache (ID: ${cacheId})`);\n yield cacheHttpClient.saveCache(cacheId, archivePath, options);\n return cacheId;\n });\n}\nexports.saveCache = saveCache;\n//# sourceMappingURL=cache.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/auth\");\nconst crypto = __importStar(require(\"crypto\"));\nconst fs = __importStar(require(\"fs\"));\nconst url_1 = require(\"url\");\nconst utils = __importStar(require(\"./cacheUtils\"));\nconst constants_1 = require(\"./constants\");\nconst downloadUtils_1 = require(\"./downloadUtils\");\nconst options_1 = require(\"../options\");\nconst requestUtils_1 = require(\"./requestUtils\");\nconst versionSalt = '1.0';\nfunction getCacheApiUrl(resource) {\n // Ideally we just use ACTIONS_CACHE_URL\n const baseUrl = (process.env['ACTIONS_CACHE_URL'] ||\n process.env['ACTIONS_RUNTIME_URL'] ||\n '').replace('pipelines', 'artifactcache');\n if (!baseUrl) {\n throw new Error('Cache Service Url not found, unable to restore cache.');\n }\n const url = `${baseUrl}_apis/artifactcache/${resource}`;\n core.debug(`Resource Url: ${url}`);\n return url;\n}\nfunction createAcceptHeader(type, apiVersion) {\n return `${type};api-version=${apiVersion}`;\n}\nfunction getRequestOptions() {\n const requestOptions = {\n headers: {\n Accept: createAcceptHeader('application/json', '6.0-preview.1')\n }\n };\n return requestOptions;\n}\nfunction createHttpClient() {\n const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';\n const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);\n return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());\n}\nfunction getCacheVersion(paths, compressionMethod) {\n const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip\n ? []\n : [compressionMethod]);\n // Add salt to cache version to support breaking changes in cache entry\n components.push(versionSalt);\n return crypto\n .createHash('sha256')\n .update(components.join('|'))\n .digest('hex');\n}\nexports.getCacheVersion = getCacheVersion;\nfunction getCacheEntry(keys, paths, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const httpClient = createHttpClient();\n const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);\n const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;\n const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));\n if (response.statusCode === 204) {\n return null;\n }\n if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {\n throw new Error(`Cache service responded with ${response.statusCode}`);\n }\n const cacheResult = response.result;\n const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? 
void 0 : cacheResult.archiveLocation;\n if (!cacheDownloadUrl) {\n throw new Error('Cache not found.');\n }\n core.setSecret(cacheDownloadUrl);\n core.debug(`Cache Result:`);\n core.debug(JSON.stringify(cacheResult));\n return cacheResult;\n });\n}\nexports.getCacheEntry = getCacheEntry;\nfunction downloadCache(archiveLocation, archivePath, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const archiveUrl = new url_1.URL(archiveLocation);\n const downloadOptions = options_1.getDownloadOptions(options);\n if (downloadOptions.useAzureSdk &&\n archiveUrl.hostname.endsWith('.blob.core.windows.net')) {\n // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.\n yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions);\n }\n else {\n // Otherwise, download using the Actions http-client.\n yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath);\n }\n });\n}\nexports.downloadCache = downloadCache;\n// Reserve Cache\nfunction reserveCache(key, paths, options) {\n var _a, _b;\n return __awaiter(this, void 0, void 0, function* () {\n const httpClient = createHttpClient();\n const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);\n const reserveCacheRequest = {\n key,\n version\n };\n const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () {\n return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);\n }));\n return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;\n });\n}\nexports.reserveCache = reserveCache;\nfunction getContentRange(start, end) {\n // Format: `bytes start-end/filesize\n // start and end are inclusive\n // filesize can be *\n // For a 200 byte chunk starting at byte 0:\n // Content-Range: bytes 0-199/*\n return `bytes ${start}-${end}/*`;\n}\nfunction uploadChunk(httpClient, resourceUrl, openStream, start, end) {\n return __awaiter(this, void 0, void 0, function* () {\n core.debug(`Uploading chunk of size ${end -\n start +\n 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);\n const additionalHeaders = {\n 'Content-Type': 'application/octet-stream',\n 'Content-Range': getContentRange(start, end)\n };\n const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {\n return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);\n }));\n if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) {\n throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);\n }\n });\n}\nfunction uploadFile(httpClient, cacheId, archivePath, options) {\n return __awaiter(this, void 0, void 0, function* () {\n // Upload Chunks\n const fileSize = fs.statSync(archivePath).size;\n const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);\n const fd = fs.openSync(archivePath, 'r');\n const uploadOptions = options_1.getUploadOptions(options);\n const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);\n const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);\n const parallelUploads = [...new Array(concurrency).keys()];\n 
core.debug('Awaiting all uploads');\n let offset = 0;\n try {\n yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {\n while (offset < fileSize) {\n const chunkSize = Math.min(fileSize - offset, maxChunkSize);\n const start = offset;\n const end = offset + chunkSize - 1;\n offset += maxChunkSize;\n yield uploadChunk(httpClient, resourceUrl, () => fs\n .createReadStream(archivePath, {\n fd,\n start,\n end,\n autoClose: false\n })\n .on('error', error => {\n throw new Error(`Cache upload failed because file read failed with ${error.message}`);\n }), start, end);\n }\n })));\n }\n finally {\n fs.closeSync(fd);\n }\n return;\n });\n}\nfunction commitCache(httpClient, cacheId, filesize) {\n return __awaiter(this, void 0, void 0, function* () {\n const commitCacheRequest = { size: filesize };\n return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () {\n return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);\n }));\n });\n}\nfunction saveCache(cacheId, archivePath, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const httpClient = createHttpClient();\n core.debug('Upload cache');\n yield uploadFile(httpClient, cacheId, archivePath, options);\n // Commit Cache\n core.debug('Commiting cache');\n const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);\n core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);\n const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);\n if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {\n throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);\n }\n core.info('Cache saved successfully');\n });\n}\nexports.saveCache = saveCache;\n//# sourceMappingURL=cacheHttpClient.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nconst glob = __importStar(require(\"@actions/glob\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst fs = __importStar(require(\"fs\"));\nconst path = __importStar(require(\"path\"));\nconst semver = __importStar(require(\"semver\"));\nconst util = __importStar(require(\"util\"));\nconst uuid_1 = require(\"uuid\");\nconst constants_1 = require(\"./constants\");\n// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23\nfunction createTempDirectory() {\n return __awaiter(this, void 0, void 0, function* () {\n const IS_WINDOWS = process.platform === 'win32';\n let tempDirectory = process.env['RUNNER_TEMP'] || '';\n if (!tempDirectory) {\n let baseLocation;\n if (IS_WINDOWS) {\n // On Windows use the USERPROFILE env variable\n baseLocation = process.env['USERPROFILE'] || 'C:\\\\';\n }\n else {\n if (process.platform === 'darwin') {\n baseLocation = '/Users';\n }\n else {\n baseLocation = '/home';\n }\n }\n tempDirectory = path.join(baseLocation, 'actions', 'temp');\n }\n const dest = path.join(tempDirectory, uuid_1.v4());\n yield io.mkdirP(dest);\n return dest;\n });\n}\nexports.createTempDirectory = createTempDirectory;\nfunction getArchiveFileSizeIsBytes(filePath) {\n return fs.statSync(filePath).size;\n}\nexports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;\nfunction resolvePaths(patterns) {\n var e_1, _a;\n var _b;\n return __awaiter(this, void 0, void 0, function* () {\n const paths = [];\n const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? 
_b : process.cwd();\n const globber = yield glob.create(patterns.join('\\n'), {\n implicitDescendants: false\n });\n try {\n for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {\n const file = _d.value;\n const relativeFile = path\n .relative(workspace, file)\n .replace(new RegExp(`\\\\${path.sep}`, 'g'), '/');\n core.debug(`Matched: ${relativeFile}`);\n // Paths are made relative so the tar entries are all relative to the root of the workspace.\n paths.push(`${relativeFile}`);\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);\n }\n finally { if (e_1) throw e_1.error; }\n }\n return paths;\n });\n}\nexports.resolvePaths = resolvePaths;\nfunction unlinkFile(filePath) {\n return __awaiter(this, void 0, void 0, function* () {\n return util.promisify(fs.unlink)(filePath);\n });\n}\nexports.unlinkFile = unlinkFile;\nfunction getVersion(app) {\n return __awaiter(this, void 0, void 0, function* () {\n core.debug(`Checking ${app} --version`);\n let versionOutput = '';\n try {\n yield exec.exec(`${app} --version`, [], {\n ignoreReturnCode: true,\n silent: true,\n listeners: {\n stdout: (data) => (versionOutput += data.toString()),\n stderr: (data) => (versionOutput += data.toString())\n }\n });\n }\n catch (err) {\n core.debug(err.message);\n }\n versionOutput = versionOutput.trim();\n core.debug(versionOutput);\n return versionOutput;\n });\n}\n// Use zstandard if possible to maximize cache performance\nfunction getCompressionMethod() {\n return __awaiter(this, void 0, void 0, function* () {\n if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {\n // Disable zstd due to bug https://github.com/actions/cache/issues/301\n return constants_1.CompressionMethod.Gzip;\n }\n const versionOutput = yield getVersion('zstd');\n const version = semver.clean(versionOutput);\n if (!versionOutput.toLowerCase().includes('zstd command line interface')) {\n // zstd is not installed\n return constants_1.CompressionMethod.Gzip;\n }\n else if (!version || semver.lt(version, 'v1.3.2')) {\n // zstd is installed but using a version earlier than v1.3.2\n // v1.3.2 is required to use the `--long` options in zstd\n return constants_1.CompressionMethod.ZstdWithoutLong;\n }\n else {\n return constants_1.CompressionMethod.Zstd;\n }\n });\n}\nexports.getCompressionMethod = getCompressionMethod;\nfunction getCacheFileName(compressionMethod) {\n return compressionMethod === constants_1.CompressionMethod.Gzip\n ? 
constants_1.CacheFilename.Gzip\n : constants_1.CacheFilename.Zstd;\n}\nexports.getCacheFileName = getCacheFileName;\nfunction isGnuTarInstalled() {\n return __awaiter(this, void 0, void 0, function* () {\n const versionOutput = yield getVersion('tar');\n return versionOutput.toLowerCase().includes('gnu tar');\n });\n}\nexports.isGnuTarInstalled = isGnuTarInstalled;\nfunction assertDefined(name, value) {\n if (value === undefined) {\n throw Error(`Expected ${name} but value was undefiend`);\n }\n return value;\n}\nexports.assertDefined = assertDefined;\n//# sourceMappingURL=cacheUtils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nvar CacheFilename;\n(function (CacheFilename) {\n CacheFilename[\"Gzip\"] = \"cache.tgz\";\n CacheFilename[\"Zstd\"] = \"cache.tzst\";\n})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));\nvar CompressionMethod;\n(function (CompressionMethod) {\n CompressionMethod[\"Gzip\"] = \"gzip\";\n // Long range mode was added to zstd in v1.3.2.\n // This enum is for earlier version of zstd that does not have --long support\n CompressionMethod[\"ZstdWithoutLong\"] = \"zstd-without-long\";\n CompressionMethod[\"Zstd\"] = \"zstd\";\n})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));\n// The default number of retry attempts.\nexports.DefaultRetryAttempts = 2;\n// The default delay in milliseconds between retry attempts.\nexports.DefaultRetryDelay = 5000;\n// Socket timeout in milliseconds during download. If no traffic is received\n// over the socket during this period, the socket is destroyed and the download\n// is aborted.\nexports.SocketTimeout = 5000;\n//# sourceMappingURL=constants.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst http_client_1 = require(\"@actions/http-client\");\nconst storage_blob_1 = require(\"@azure/storage-blob\");\nconst buffer = __importStar(require(\"buffer\"));\nconst fs = __importStar(require(\"fs\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst utils = __importStar(require(\"./cacheUtils\"));\nconst constants_1 = require(\"./constants\");\nconst requestUtils_1 = require(\"./requestUtils\");\n/**\n * Pipes the body of a HTTP response to a stream\n *\n * @param response the HTTP response\n * @param output the writable stream\n */\nfunction pipeResponseToStream(response, output) {\n return __awaiter(this, void 0, void 0, function* () {\n const pipeline = util.promisify(stream.pipeline);\n yield pipeline(response.message, output);\n });\n}\n/**\n * Class for tracking the download state and displaying stats.\n */\nclass DownloadProgress {\n constructor(contentLength) {\n this.contentLength = contentLength;\n this.segmentIndex = 0;\n this.segmentSize = 0;\n this.segmentOffset = 0;\n this.receivedBytes = 0;\n this.displayedComplete = false;\n this.startTime = Date.now();\n }\n /**\n * Progress to the next segment. Only call this method when the previous segment\n * is complete.\n *\n * @param segmentSize the length of the next segment\n */\n nextSegment(segmentSize) {\n this.segmentOffset = this.segmentOffset + this.segmentSize;\n this.segmentIndex = this.segmentIndex + 1;\n this.segmentSize = segmentSize;\n this.receivedBytes = 0;\n core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);\n }\n /**\n * Sets the number of bytes received for the current segment.\n *\n * @param receivedBytes the number of bytes received\n */\n setReceivedBytes(receivedBytes) {\n this.receivedBytes = receivedBytes;\n }\n /**\n * Returns the total number of bytes transferred.\n */\n getTransferredBytes() {\n return this.segmentOffset + this.receivedBytes;\n }\n /**\n * Returns true if the download is complete.\n */\n isDone() {\n return this.getTransferredBytes() === this.contentLength;\n }\n /**\n * Prints the current download stats. 
Once the download completes, this will print one\n * last line and then stop.\n */\n display() {\n if (this.displayedComplete) {\n return;\n }\n const transferredBytes = this.segmentOffset + this.receivedBytes;\n const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);\n const elapsedTime = Date.now() - this.startTime;\n const downloadSpeed = (transferredBytes /\n (1024 * 1024) /\n (elapsedTime / 1000)).toFixed(1);\n core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);\n if (this.isDone()) {\n this.displayedComplete = true;\n }\n }\n /**\n * Returns a function used to handle TransferProgressEvents.\n */\n onProgress() {\n return (progress) => {\n this.setReceivedBytes(progress.loadedBytes);\n };\n }\n /**\n * Starts the timer that displays the stats.\n *\n * @param delayInMs the delay between each write\n */\n startDisplayTimer(delayInMs = 1000) {\n const displayCallback = () => {\n this.display();\n if (!this.isDone()) {\n this.timeoutHandle = setTimeout(displayCallback, delayInMs);\n }\n };\n this.timeoutHandle = setTimeout(displayCallback, delayInMs);\n }\n /**\n * Stops the timer that displays the stats. As this typically indicates the download\n * is complete, this will display one last line, unless the last line has already\n * been written.\n */\n stopDisplayTimer() {\n if (this.timeoutHandle) {\n clearTimeout(this.timeoutHandle);\n this.timeoutHandle = undefined;\n }\n this.display();\n }\n}\nexports.DownloadProgress = DownloadProgress;\n/**\n * Download the cache using the Actions toolkit http-client\n *\n * @param archiveLocation the URL for the cache\n * @param archivePath the local path where the cache is saved\n */\nfunction downloadCacheHttpClient(archiveLocation, archivePath) {\n return __awaiter(this, void 0, void 0, function* () {\n const writeStream = fs.createWriteStream(archivePath);\n const httpClient = new http_client_1.HttpClient('actions/cache');\n const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));\n // Abort download if no traffic received over the socket.\n downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {\n downloadResponse.message.destroy();\n core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);\n });\n yield pipeResponseToStream(downloadResponse, writeStream);\n // Validate download size.\n const contentLengthHeader = downloadResponse.message.headers['content-length'];\n if (contentLengthHeader) {\n const expectedLength = parseInt(contentLengthHeader);\n const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);\n if (actualLength !== expectedLength) {\n throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);\n }\n }\n else {\n core.debug('Unable to validate download, no Content-Length header');\n }\n });\n}\nexports.downloadCacheHttpClient = downloadCacheHttpClient;\n/**\n * Download the cache using the Azure Storage SDK. 
Only call this method if the\n * URL points to an Azure Storage endpoint.\n *\n * @param archiveLocation the URL for the cache\n * @param archivePath the local path where the cache is saved\n * @param options the download options with the defaults set\n */\nfunction downloadCacheStorageSDK(archiveLocation, archivePath, options) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {\n retryOptions: {\n // Override the timeout used when downloading each 4 MB chunk\n // The default is 2 min / MB, which is way too slow\n tryTimeoutInMs: options.timeoutInMs\n }\n });\n const properties = yield client.getProperties();\n const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;\n if (contentLength < 0) {\n // We should never hit this condition, but just in case fall back to downloading the\n // file as one large stream\n core.debug('Unable to determine content length, downloading file with http-client...');\n yield downloadCacheHttpClient(archiveLocation, archivePath);\n }\n else {\n // Use downloadToBuffer for faster downloads, since internally it splits the\n // file into 4 MB chunks which can then be parallelized and retried independently\n //\n // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB\n // on 64-bit systems), split the download into multiple segments\n const maxSegmentSize = buffer.constants.MAX_LENGTH;\n const downloadProgress = new DownloadProgress(contentLength);\n const fd = fs.openSync(archivePath, 'w');\n try {\n downloadProgress.startDisplayTimer();\n while (!downloadProgress.isDone()) {\n const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;\n const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);\n downloadProgress.nextSegment(segmentSize);\n const result = yield client.downloadToBuffer(segmentStart, segmentSize, {\n concurrency: options.downloadConcurrency,\n onProgress: downloadProgress.onProgress()\n });\n fs.writeFileSync(fd, result);\n }\n }\n finally {\n downloadProgress.stopDisplayTimer();\n fs.closeSync(fd);\n }\n }\n });\n}\nexports.downloadCacheStorageSDK = downloadCacheStorageSDK;\n//# sourceMappingURL=downloadUtils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst http_client_1 = require(\"@actions/http-client\");\nconst constants_1 = require(\"./constants\");\nfunction isSuccessStatusCode(statusCode) {\n if (!statusCode) {\n return false;\n }\n return statusCode >= 200 && statusCode < 300;\n}\nexports.isSuccessStatusCode = isSuccessStatusCode;\nfunction isServerErrorStatusCode(statusCode) {\n if (!statusCode) {\n return true;\n }\n return statusCode >= 500;\n}\nexports.isServerErrorStatusCode = isServerErrorStatusCode;\nfunction isRetryableStatusCode(statusCode) {\n if (!statusCode) {\n return false;\n }\n const retryableStatusCodes = [\n http_client_1.HttpCodes.BadGateway,\n http_client_1.HttpCodes.ServiceUnavailable,\n http_client_1.HttpCodes.GatewayTimeout\n ];\n return retryableStatusCodes.includes(statusCode);\n}\nexports.isRetryableStatusCode = isRetryableStatusCode;\nfunction sleep(milliseconds) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise(resolve => setTimeout(resolve, milliseconds));\n });\n}\nfunction retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) {\n return __awaiter(this, void 0, void 0, function* () {\n let errorMessage = '';\n let attempt = 1;\n while (attempt <= maxAttempts) {\n let response = undefined;\n let statusCode = undefined;\n let isRetryable = false;\n try {\n response = yield method();\n }\n catch (error) {\n if (onError) {\n response = onError(error);\n }\n isRetryable = true;\n errorMessage = error.message;\n }\n if (response) {\n statusCode = getStatusCode(response);\n if (!isServerErrorStatusCode(statusCode)) {\n return response;\n }\n }\n if (statusCode) {\n isRetryable = isRetryableStatusCode(statusCode);\n errorMessage = `Cache service responded with ${statusCode}`;\n }\n core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);\n if (!isRetryable) {\n core.debug(`${name} - Error is not retryable`);\n break;\n }\n yield sleep(delay);\n attempt++;\n }\n throw Error(`${name} failed: ${errorMessage}`);\n });\n}\nexports.retry = retry;\nfunction retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, \n // If the error object contains the statusCode property, extract it and return\n // an ITypedResponse so it can be processed by the retry logic.\n (error) => {\n if (error instanceof http_client_1.HttpClientError) {\n return {\n statusCode: error.statusCode,\n result: null,\n headers: {}\n };\n }\n else {\n return undefined;\n }\n });\n });\n}\nexports.retryTypedResponse = retryTypedResponse;\nfunction retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield 
retry(name, method, (response) => response.message.statusCode, maxAttempts, delay);\n });\n}\nexports.retryHttpClientResponse = retryHttpClientResponse;\n//# sourceMappingURL=requestUtils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst exec_1 = require(\"@actions/exec\");\nconst io = __importStar(require(\"@actions/io\"));\nconst fs_1 = require(\"fs\");\nconst path = __importStar(require(\"path\"));\nconst utils = __importStar(require(\"./cacheUtils\"));\nconst constants_1 = require(\"./constants\");\nfunction getTarPath(args, compressionMethod) {\n return __awaiter(this, void 0, void 0, function* () {\n switch (process.platform) {\n case 'win32': {\n const systemTar = `${process.env['windir']}\\\\System32\\\\tar.exe`;\n if (compressionMethod !== constants_1.CompressionMethod.Gzip) {\n // We only use zstandard compression on windows when gnu tar is installed due to\n // a bug with compressing large files with bsdtar + zstd\n args.push('--force-local');\n }\n else if (fs_1.existsSync(systemTar)) {\n return systemTar;\n }\n else if (yield utils.isGnuTarInstalled()) {\n args.push('--force-local');\n }\n break;\n }\n case 'darwin': {\n const gnuTar = yield io.which('gtar', false);\n if (gnuTar) {\n return gnuTar;\n }\n break;\n }\n default:\n break;\n }\n return yield io.which('tar', true);\n });\n}\nfunction execTar(args, compressionMethod, cwd) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exec_1.exec(`\"${yield getTarPath(args, compressionMethod)}\"`, args, { cwd });\n }\n catch (error) {\n throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);\n }\n });\n}\nfunction getWorkingDirectory() {\n var _a;\n return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();\n}\nfunction extractTar(archivePath, compressionMethod) {\n return __awaiter(this, void 0, void 0, function* () {\n // Create directory to extract tar into\n const workingDirectory = getWorkingDirectory();\n yield io.mkdirP(workingDirectory);\n // --d: Decompress.\n // --long=#: Enables long distance matching with # bits. 
Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.\n // Using 30 here because we also support 32-bit self-hosted runners.\n function getCompressionProgram() {\n switch (compressionMethod) {\n case constants_1.CompressionMethod.Zstd:\n return ['--use-compress-program', 'zstd -d --long=30'];\n case constants_1.CompressionMethod.ZstdWithoutLong:\n return ['--use-compress-program', 'zstd -d'];\n default:\n return ['-z'];\n }\n }\n const args = [\n ...getCompressionProgram(),\n '-xf',\n archivePath.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/'),\n '-P',\n '-C',\n workingDirectory.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/')\n ];\n yield execTar(args, compressionMethod);\n });\n}\nexports.extractTar = extractTar;\nfunction createTar(archiveFolder, sourceDirectories, compressionMethod) {\n return __awaiter(this, void 0, void 0, function* () {\n // Write source directories to manifest.txt to avoid command length limits\n const manifestFilename = 'manifest.txt';\n const cacheFileName = utils.getCacheFileName(compressionMethod);\n fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\\n'));\n const workingDirectory = getWorkingDirectory();\n // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.\n // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.\n // Using 30 here because we also support 32-bit self-hosted runners.\n // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.\n function getCompressionProgram() {\n switch (compressionMethod) {\n case constants_1.CompressionMethod.Zstd:\n return ['--use-compress-program', 'zstd -T0 --long=30'];\n case constants_1.CompressionMethod.ZstdWithoutLong:\n return ['--use-compress-program', 'zstd -T0'];\n default:\n return ['-z'];\n }\n }\n const args = [\n '--posix',\n ...getCompressionProgram(),\n '-cf',\n cacheFileName.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/'),\n '-P',\n '-C',\n workingDirectory.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/'),\n '--files-from',\n manifestFilename\n ];\n yield execTar(args, compressionMethod, archiveFolder);\n });\n}\nexports.createTar = createTar;\nfunction listTar(archivePath, compressionMethod) {\n return __awaiter(this, void 0, void 0, function* () {\n // --d: Decompress.\n // --long=#: Enables long distance matching with # bits.\n // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.\n // Using 30 here because we also support 32-bit self-hosted runners.\n function getCompressionProgram() {\n switch (compressionMethod) {\n case constants_1.CompressionMethod.Zstd:\n return ['--use-compress-program', 'zstd -d --long=30'];\n case constants_1.CompressionMethod.ZstdWithoutLong:\n return ['--use-compress-program', 'zstd -d'];\n default:\n return ['-z'];\n }\n }\n const args = [\n ...getCompressionProgram(),\n '-tf',\n archivePath.replace(new RegExp(`\\\\${path.sep}`, 'g'), '/'),\n '-P'\n ];\n yield execTar(args, compressionMethod);\n });\n}\nexports.listTar = listTar;\n//# sourceMappingURL=tar.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = 
__importStar(require(\"@actions/core\"));\n/**\n * Returns a copy of the upload options with defaults filled in.\n *\n * @param copy the original upload options\n */\nfunction getUploadOptions(copy) {\n const result = {\n uploadConcurrency: 4,\n uploadChunkSize: 32 * 1024 * 1024\n };\n if (copy) {\n if (typeof copy.uploadConcurrency === 'number') {\n result.uploadConcurrency = copy.uploadConcurrency;\n }\n if (typeof copy.uploadChunkSize === 'number') {\n result.uploadChunkSize = copy.uploadChunkSize;\n }\n }\n core.debug(`Upload concurrency: ${result.uploadConcurrency}`);\n core.debug(`Upload chunk size: ${result.uploadChunkSize}`);\n return result;\n}\nexports.getUploadOptions = getUploadOptions;\n/**\n * Returns a copy of the download options with defaults filled in.\n *\n * @param copy the original download options\n */\nfunction getDownloadOptions(copy) {\n const result = {\n useAzureSdk: true,\n downloadConcurrency: 8,\n timeoutInMs: 30000\n };\n if (copy) {\n if (typeof copy.useAzureSdk === 'boolean') {\n result.useAzureSdk = copy.useAzureSdk;\n }\n if (typeof copy.downloadConcurrency === 'number') {\n result.downloadConcurrency = copy.downloadConcurrency;\n }\n if (typeof copy.timeoutInMs === 'number') {\n result.timeoutInMs = copy.timeoutInMs;\n }\n }\n core.debug(`Use Azure SDK: ${result.useAzureSdk}`);\n core.debug(`Download concurrency: ${result.downloadConcurrency}`);\n core.debug(`Request timeout (ms): ${result.timeoutInMs}`);\n return result;\n}\nexports.getDownloadOptions = getDownloadOptions;\n//# sourceMappingURL=options.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, 
'%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? 
message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) 
{\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n strBuffer = s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? 
a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. 
this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param options optional exec options. 
See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n const stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n const errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n });\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst internal_globber_1 = require(\"./internal-globber\");\n/**\n * Constructs a globber\n *\n * @param patterns Patterns separated by newlines\n * @param options Glob options\n */\nfunction create(patterns, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield internal_globber_1.DefaultGlobber.create(patterns, options);\n });\n}\nexports.create = create;\n//# sourceMappingURL=glob.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\n/**\n * Returns a copy with defaults filled in.\n */\nfunction getOptions(copy) {\n const result = {\n followSymbolicLinks: true,\n implicitDescendants: true,\n omitBrokenSymbolicLinks: true\n };\n if (copy) {\n if (typeof copy.followSymbolicLinks === 'boolean') {\n result.followSymbolicLinks = copy.followSymbolicLinks;\n core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);\n }\n if (typeof copy.implicitDescendants === 'boolean') {\n result.implicitDescendants = copy.implicitDescendants;\n core.debug(`implicitDescendants '${result.implicitDescendants}'`);\n }\n if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {\n result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;\n core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);\n }\n }\n return result;\n}\nexports.getOptions = getOptions;\n//# sourceMappingURL=internal-glob-options-helper.js.map","\"use strict\";\nvar __awaiter = (this && 
this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nvar __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }\nvar __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\n function fulfill(value) { resume(\"next\", value); }\n function reject(value) { resume(\"throw\", value); }\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst fs = __importStar(require(\"fs\"));\nconst globOptionsHelper = __importStar(require(\"./internal-glob-options-helper\"));\nconst path = __importStar(require(\"path\"));\nconst patternHelper = __importStar(require(\"./internal-pattern-helper\"));\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst internal_pattern_1 = require(\"./internal-pattern\");\nconst internal_search_state_1 = require(\"./internal-search-state\");\nconst IS_WINDOWS = process.platform === 'win32';\nclass DefaultGlobber {\n constructor(options) {\n this.patterns = [];\n this.searchPaths = [];\n this.options = globOptionsHelper.getOptions(options);\n }\n getSearchPaths() {\n // Return a copy\n return this.searchPaths.slice();\n }\n glob() {\n var e_1, _a;\n return __awaiter(this, void 0, void 0, function* () {\n const result = [];\n try {\n for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {\n const itemPath = _c.value;\n result.push(itemPath);\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);\n }\n finally { if (e_1) throw e_1.error; }\n }\n return result;\n });\n }\n globGenerator() {\n return __asyncGenerator(this, arguments, function* globGenerator_1() {\n // Fill in defaults options\n const options = globOptionsHelper.getOptions(this.options);\n // Implicit descendants?\n const patterns = [];\n for (const pattern of this.patterns) {\n patterns.push(pattern);\n if (options.implicitDescendants &&\n (pattern.trailingSeparator ||\n pattern.segments[pattern.segments.length - 1] !== '**')) {\n patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));\n }\n }\n // Push the search paths\n const stack = [];\n for (const searchPath of patternHelper.getSearchPaths(patterns)) {\n core.debug(`Search path '${searchPath}'`);\n // Exists?\n try {\n // Intentionally using lstat. 
Detection for broken symlink\n // will be performed later (if following symlinks).\n yield __await(fs.promises.lstat(searchPath));\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n continue;\n }\n throw err;\n }\n stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));\n }\n // Search\n const traversalChain = []; // used to detect cycles\n while (stack.length) {\n // Pop\n const item = stack.pop();\n // Match?\n const match = patternHelper.match(patterns, item.path);\n const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);\n if (!match && !partialMatch) {\n continue;\n }\n // Stat\n const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)\n // Broken symlink, or symlink cycle detected, or no longer exists\n );\n // Broken symlink, or symlink cycle detected, or no longer exists\n if (!stats) {\n continue;\n }\n // Directory\n if (stats.isDirectory()) {\n // Matched\n if (match & internal_match_kind_1.MatchKind.Directory) {\n yield yield __await(item.path);\n }\n // Descend?\n else if (!partialMatch) {\n continue;\n }\n // Push the child items in reverse\n const childLevel = item.level + 1;\n const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));\n stack.push(...childItems.reverse());\n }\n // File\n else if (match & internal_match_kind_1.MatchKind.File) {\n yield yield __await(item.path);\n }\n }\n });\n }\n /**\n * Constructs a DefaultGlobber\n */\n static create(patterns, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const result = new DefaultGlobber(options);\n if (IS_WINDOWS) {\n patterns = patterns.replace(/\\r\\n/g, '\\n');\n patterns = patterns.replace(/\\r/g, '\\n');\n }\n const lines = patterns.split('\\n').map(x => x.trim());\n for (const line of lines) {\n // Empty or comment\n if (!line || line.startsWith('#')) {\n continue;\n }\n // Pattern\n else {\n result.patterns.push(new internal_pattern_1.Pattern(line));\n }\n }\n result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));\n return result;\n });\n }\n static stat(item, options, traversalChain) {\n return __awaiter(this, void 0, void 0, function* () {\n // Note:\n // `stat` returns info about the target of a symlink (or symlink chain)\n // `lstat` returns info about a symlink itself\n let stats;\n if (options.followSymbolicLinks) {\n try {\n // Use `stat` (following symlinks)\n stats = yield fs.promises.stat(item.path);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n if (options.omitBrokenSymbolicLinks) {\n core.debug(`Broken symlink '${item.path}'`);\n return undefined;\n }\n throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`);\n }\n throw err;\n }\n }\n else {\n // Use `lstat` (not following symlinks)\n stats = yield fs.promises.lstat(item.path);\n }\n // Note, isDirectory() returns false for the lstat of a symlink\n if (stats.isDirectory() && options.followSymbolicLinks) {\n // Get the realpath\n const realPath = yield fs.promises.realpath(item.path);\n // Fixup the traversal chain to match the item level\n while (traversalChain.length >= item.level) {\n traversalChain.pop();\n }\n // Test for a cycle\n if (traversalChain.some((x) => x === realPath)) {\n core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);\n return undefined;\n }\n // Update the traversal chain\n traversalChain.push(realPath);\n }\n return stats;\n });\n }\n}\nexports.DefaultGlobber = DefaultGlobber;\n//# sourceMappingURL=internal-globber.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Indicates whether a pattern matches a path\n */\nvar MatchKind;\n(function (MatchKind) {\n /** Not matched */\n MatchKind[MatchKind[\"None\"] = 0] = \"None\";\n /** Matched if the path is a directory */\n MatchKind[MatchKind[\"Directory\"] = 1] = \"Directory\";\n /** Matched if the path is a regular file */\n MatchKind[MatchKind[\"File\"] = 2] = \"File\";\n /** Matched */\n MatchKind[MatchKind[\"All\"] = 3] = \"All\";\n})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));\n//# sourceMappingURL=internal-match-kind.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst path = __importStar(require(\"path\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.\n *\n * For example, on Linux/macOS:\n * - `/ => /`\n * - `/hello => /`\n *\n * For example, on Windows:\n * - `C:\\ => C:\\`\n * - `C:\\hello => C:\\`\n * - `C: => C:`\n * - `C:hello => C:`\n * - `\\ => \\`\n * - `\\hello => \\`\n * - `\\\\hello => \\\\hello`\n * - `\\\\hello\\world => \\\\hello\\world`\n */\nfunction dirname(p) {\n // Normalize slashes and trim unnecessary trailing slash\n p = safeTrimTrailingSeparator(p);\n // Windows UNC root, e.g. \\\\hello or \\\\hello\\world\n if (IS_WINDOWS && /^\\\\\\\\[^\\\\]+(\\\\[^\\\\]+)?$/.test(p)) {\n return p;\n }\n // Get dirname\n let result = path.dirname(p);\n // Trim trailing slash for Windows UNC root, e.g. \\\\hello\\world\\\n if (IS_WINDOWS && /^\\\\\\\\[^\\\\]+\\\\[^\\\\]+\\\\$/.test(result)) {\n result = safeTrimTrailingSeparator(result);\n }\n return result;\n}\nexports.dirname = dirname;\n/**\n * Roots the path if not already rooted. 
On Windows, relative roots like `\\`\n * or `C:` are expanded based on the current working directory.\n */\nfunction ensureAbsoluteRoot(root, itemPath) {\n assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);\n assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);\n // Already rooted\n if (hasAbsoluteRoot(itemPath)) {\n return itemPath;\n }\n // Windows\n if (IS_WINDOWS) {\n // Check for itemPath like C: or C:foo\n if (itemPath.match(/^[A-Z]:[^\\\\/]|^[A-Z]:$/i)) {\n let cwd = process.cwd();\n assert_1.default(cwd.match(/^[A-Z]:\\\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);\n // Drive letter matches cwd? Expand to cwd\n if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {\n // Drive only, e.g. C:\n if (itemPath.length === 2) {\n // Preserve specified drive letter case (upper or lower)\n return `${itemPath[0]}:\\\\${cwd.substr(3)}`;\n }\n // Drive + path, e.g. C:foo\n else {\n if (!cwd.endsWith('\\\\')) {\n cwd += '\\\\';\n }\n // Preserve specified drive letter case (upper or lower)\n return `${itemPath[0]}:\\\\${cwd.substr(3)}${itemPath.substr(2)}`;\n }\n }\n // Different drive\n else {\n return `${itemPath[0]}:\\\\${itemPath.substr(2)}`;\n }\n }\n // Check for itemPath like \\ or \\foo\n else if (normalizeSeparators(itemPath).match(/^\\\\$|^\\\\[^\\\\]/)) {\n const cwd = process.cwd();\n assert_1.default(cwd.match(/^[A-Z]:\\\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);\n return `${cwd[0]}:\\\\${itemPath.substr(1)}`;\n }\n }\n assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);\n // Otherwise ensure root ends with a separator\n if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\\\'))) {\n // Intentionally empty\n }\n else {\n // Append separator\n root += path.sep;\n }\n return root + itemPath;\n}\nexports.ensureAbsoluteRoot = ensureAbsoluteRoot;\n/**\n * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:\n * `\\\\hello\\share` and `C:\\hello` (and using alternate separator).\n */\nfunction hasAbsoluteRoot(itemPath) {\n assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);\n // Normalize separators\n itemPath = normalizeSeparators(itemPath);\n // Windows\n if (IS_WINDOWS) {\n // E.g. \\\\hello\\share or C:\\hello\n return itemPath.startsWith('\\\\\\\\') || /^[A-Z]:\\\\/i.test(itemPath);\n }\n // E.g. /hello\n return itemPath.startsWith('/');\n}\nexports.hasAbsoluteRoot = hasAbsoluteRoot;\n/**\n * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:\n * `\\`, `\\hello`, `\\\\hello\\share`, `C:`, and `C:\\hello` (and using alternate separator).\n */\nfunction hasRoot(itemPath) {\n assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);\n // Normalize separators\n itemPath = normalizeSeparators(itemPath);\n // Windows\n if (IS_WINDOWS) {\n // E.g. \\ or \\hello or \\\\hello\n // E.g. C: or C:\\hello\n return itemPath.startsWith('\\\\') || /^[A-Z]:/i.test(itemPath);\n }\n // E.g. /hello\n return itemPath.startsWith('/');\n}\nexports.hasRoot = hasRoot;\n/**\n * Removes redundant slashes and converts `/` to `\\` on Windows\n */\nfunction normalizeSeparators(p) {\n p = p || '';\n // Windows\n if (IS_WINDOWS) {\n // Convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // Remove redundant slashes\n const isUnc = /^\\\\\\\\+[^\\\\]/.test(p); // e.g. 
\\\\hello\n return (isUnc ? '\\\\' : '') + p.replace(/\\\\\\\\+/g, '\\\\'); // preserve leading \\\\ for UNC\n }\n // Remove redundant slashes\n return p.replace(/\\/\\/+/g, '/');\n}\nexports.normalizeSeparators = normalizeSeparators;\n/**\n * Normalizes the path separators and trims the trailing separator (when safe).\n * For example, `/foo/ => /foo` but `/ => /`\n */\nfunction safeTrimTrailingSeparator(p) {\n // Short-circuit if empty\n if (!p) {\n return '';\n }\n // Normalize separators\n p = normalizeSeparators(p);\n // No trailing slash\n if (!p.endsWith(path.sep)) {\n return p;\n }\n // Check '/' on Linux/macOS and '\\' on Windows\n if (p === path.sep) {\n return p;\n }\n // On Windows check if drive root. E.g. C:\\\n if (IS_WINDOWS && /^[A-Z]:\\\\$/i.test(p)) {\n return p;\n }\n // Otherwise trim trailing slash\n return p.substr(0, p.length - 1);\n}\nexports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;\n//# sourceMappingURL=internal-path-helper.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst path = __importStar(require(\"path\"));\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Helper class for parsing paths into segments\n */\nclass Path {\n /**\n * Constructs a Path\n * @param itemPath Path or array of segments\n */\n constructor(itemPath) {\n this.segments = [];\n // String\n if (typeof itemPath === 'string') {\n assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n // Not rooted\n if (!pathHelper.hasRoot(itemPath)) {\n this.segments = itemPath.split(path.sep);\n }\n // Rooted\n else {\n // Add all segments, while not at the root\n let remaining = itemPath;\n let dir = pathHelper.dirname(remaining);\n while (dir !== remaining) {\n // Add the segment\n const basename = path.basename(remaining);\n this.segments.unshift(basename);\n // Truncate the last segment\n remaining = dir;\n dir = pathHelper.dirname(remaining);\n }\n // Remainder is the root\n this.segments.unshift(remaining);\n }\n }\n // Array\n else {\n // Must not be empty\n assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);\n // Each segment\n for (let i = 0; i < itemPath.length; i++) {\n let segment = itemPath[i];\n // Must not be empty\n assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);\n // Normalize slashes\n segment = pathHelper.normalizeSeparators(itemPath[i]);\n // Root segment\n if (i === 0 && pathHelper.hasRoot(segment)) {\n segment = pathHelper.safeTrimTrailingSeparator(segment);\n assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);\n this.segments.push(segment);\n }\n // All other segments\n else {\n // Must not contain slash\n assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains 
unexpected path separators`);\n this.segments.push(segment);\n }\n }\n }\n }\n /**\n * Converts the path to it's string representation\n */\n toString() {\n // First segment\n let result = this.segments[0];\n // All others\n let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));\n for (let i = 1; i < this.segments.length; i++) {\n if (skipSlash) {\n skipSlash = false;\n }\n else {\n result += path.sep;\n }\n result += this.segments[i];\n }\n return result;\n }\n}\nexports.Path = Path;\n//# sourceMappingURL=internal-path.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Given an array of patterns, returns an array of paths to search.\n * Duplicates and paths under other included paths are filtered out.\n */\nfunction getSearchPaths(patterns) {\n // Ignore negate patterns\n patterns = patterns.filter(x => !x.negate);\n // Create a map of all search paths\n const searchPathMap = {};\n for (const pattern of patterns) {\n const key = IS_WINDOWS\n ? pattern.searchPath.toUpperCase()\n : pattern.searchPath;\n searchPathMap[key] = 'candidate';\n }\n const result = [];\n for (const pattern of patterns) {\n // Check if already included\n const key = IS_WINDOWS\n ? pattern.searchPath.toUpperCase()\n : pattern.searchPath;\n if (searchPathMap[key] === 'included') {\n continue;\n }\n // Check for an ancestor search path\n let foundAncestor = false;\n let tempKey = key;\n let parent = pathHelper.dirname(tempKey);\n while (parent !== tempKey) {\n if (searchPathMap[parent]) {\n foundAncestor = true;\n break;\n }\n tempKey = parent;\n parent = pathHelper.dirname(tempKey);\n }\n // Include the search pattern in the result\n if (!foundAncestor) {\n result.push(pattern.searchPath);\n searchPathMap[key] = 'included';\n }\n }\n return result;\n}\nexports.getSearchPaths = getSearchPaths;\n/**\n * Matches the patterns against the path\n */\nfunction match(patterns, itemPath) {\n let result = internal_match_kind_1.MatchKind.None;\n for (const pattern of patterns) {\n if (pattern.negate) {\n result &= ~pattern.match(itemPath);\n }\n else {\n result |= pattern.match(itemPath);\n }\n }\n return result;\n}\nexports.match = match;\n/**\n * Checks whether to descend further into the directory\n */\nfunction partialMatch(patterns, itemPath) {\n return patterns.some(x => !x.negate && x.partialMatch(itemPath));\n}\nexports.partialMatch = partialMatch;\n//# sourceMappingURL=internal-pattern-helper.js.map","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst minimatch_1 = require(\"minimatch\");\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst internal_path_1 = require(\"./internal-path\");\nconst IS_WINDOWS = process.platform === 'win32';\nclass Pattern {\n constructor(patternOrNegate, segments, homedir) {\n /**\n * Indicates whether matches should be excluded from the result set\n */\n this.negate = false;\n // Pattern overload\n let pattern;\n if (typeof patternOrNegate === 'string') {\n pattern = patternOrNegate.trim();\n }\n // Segments overload\n else {\n // Convert to pattern\n segments = segments || [];\n assert_1.default(segments.length, `Parameter 'segments' must not empty`);\n const root = Pattern.getLiteral(segments[0]);\n assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);\n pattern = new internal_path_1.Path(segments).toString().trim();\n if (patternOrNegate) {\n pattern = `!${pattern}`;\n }\n }\n // Negate\n while (pattern.startsWith('!')) {\n this.negate = !this.negate;\n pattern = pattern.substr(1).trim();\n }\n // Normalize slashes and ensures absolute root\n pattern = Pattern.fixupPattern(pattern, homedir);\n // Segments\n this.segments = new internal_path_1.Path(pattern).segments;\n // Trailing slash indicates the pattern should only match directories, not regular files\n this.trailingSeparator = pathHelper\n .normalizeSeparators(pattern)\n .endsWith(path.sep);\n pattern = pathHelper.safeTrimTrailingSeparator(pattern);\n // Search path (literal path prior to the first glob segment)\n let foundGlob = false;\n const searchSegments = this.segments\n .map(x => Pattern.getLiteral(x))\n .filter(x => !foundGlob && !(foundGlob = x === ''));\n this.searchPath = new internal_path_1.Path(searchSegments).toString();\n // Root RegExp (required when determining partial match)\n this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');\n // Create minimatch\n const minimatchOptions = {\n dot: true,\n nobrace: true,\n nocase: IS_WINDOWS,\n nocomment: true,\n noext: true,\n nonegate: true\n };\n pattern = IS_WINDOWS ? pattern.replace(/\\\\/g, '/') : pattern;\n this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);\n }\n /**\n * Matches the pattern against the specified path\n */\n match(itemPath) {\n // Last segment is globstar?\n if (this.segments[this.segments.length - 1] === '**') {\n // Normalize slashes\n itemPath = pathHelper.normalizeSeparators(itemPath);\n // Append a trailing slash. Otherwise Minimatch will not match the directory immediately\n // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns\n // false for `/foo` but returns true for `/foo/`. 
Append a trailing slash to handle that quirk.\n if (!itemPath.endsWith(path.sep)) {\n // Note, this is safe because the constructor ensures the pattern has an absolute root.\n // For example, formats like C: and C:foo on Windows are resolved to an absolute root.\n itemPath = `${itemPath}${path.sep}`;\n }\n }\n else {\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n }\n // Match\n if (this.minimatch.match(itemPath)) {\n return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;\n }\n return internal_match_kind_1.MatchKind.None;\n }\n /**\n * Indicates whether the pattern may match descendants of the specified path\n */\n partialMatch(itemPath) {\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n // matchOne does not handle root path correctly\n if (pathHelper.dirname(itemPath) === itemPath) {\n return this.rootRegExp.test(itemPath);\n }\n return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\\\+/ : /\\/+/), this.minimatch.set[0], true);\n }\n /**\n * Escapes glob patterns within a path\n */\n static globEscape(s) {\n return (IS_WINDOWS ? s : s.replace(/\\\\/g, '\\\\\\\\')) // escape '\\' on Linux/macOS\n .replace(/(\\[)(?=[^/]+\\])/g, '[[]') // escape '[' when ']' follows within the path segment\n .replace(/\\?/g, '[?]') // escape '?'\n .replace(/\\*/g, '[*]'); // escape '*'\n }\n /**\n * Normalizes slashes and ensures absolute root\n */\n static fixupPattern(pattern, homedir) {\n // Empty\n assert_1.default(pattern, 'pattern cannot be empty');\n // Must not contain `.` segment, unless first segment\n // Must not contain `..` segment\n const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));\n assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);\n // Must not contain globs in root, e.g. Windows UNC path \\\\foo\\b*r\n assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);\n // Normalize slashes\n pattern = pathHelper.normalizeSeparators(pattern);\n // Replace leading `.` segment\n if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {\n pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);\n }\n // Replace leading `~` segment\n else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {\n homedir = homedir || os.homedir();\n assert_1.default(homedir, 'Unable to determine HOME directory');\n assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);\n pattern = Pattern.globEscape(homedir) + pattern.substr(1);\n }\n // Replace relative drive root, e.g. pattern is C: or C:foo\n else if (IS_WINDOWS &&\n (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\\\]/i))) {\n let root = pathHelper.ensureAbsoluteRoot('C:\\\\dummy-root', pattern.substr(0, 2));\n if (pattern.length > 2 && !root.endsWith('\\\\')) {\n root += '\\\\';\n }\n pattern = Pattern.globEscape(root) + pattern.substr(2);\n }\n // Replace relative root, e.g. 
pattern is \\ or \\foo\n else if (IS_WINDOWS && (pattern === '\\\\' || pattern.match(/^\\\\[^\\\\]/))) {\n let root = pathHelper.ensureAbsoluteRoot('C:\\\\dummy-root', '\\\\');\n if (!root.endsWith('\\\\')) {\n root += '\\\\';\n }\n pattern = Pattern.globEscape(root) + pattern.substr(1);\n }\n // Otherwise ensure absolute root\n else {\n pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);\n }\n return pathHelper.normalizeSeparators(pattern);\n }\n /**\n * Attempts to unescape a pattern segment to create a literal path segment.\n * Otherwise returns empty string.\n */\n static getLiteral(segment) {\n let literal = '';\n for (let i = 0; i < segment.length; i++) {\n const c = segment[i];\n // Escape\n if (c === '\\\\' && !IS_WINDOWS && i + 1 < segment.length) {\n literal += segment[++i];\n continue;\n }\n // Wildcard\n else if (c === '*' || c === '?') {\n return '';\n }\n // Character set\n else if (c === '[' && i + 1 < segment.length) {\n let set = '';\n let closed = -1;\n for (let i2 = i + 1; i2 < segment.length; i2++) {\n const c2 = segment[i2];\n // Escape\n if (c2 === '\\\\' && !IS_WINDOWS && i2 + 1 < segment.length) {\n set += segment[++i2];\n continue;\n }\n // Closed\n else if (c2 === ']') {\n closed = i2;\n break;\n }\n // Otherwise\n else {\n set += c2;\n }\n }\n // Closed?\n if (closed >= 0) {\n // Cannot convert\n if (set.length > 1) {\n return '';\n }\n // Convert to literal\n if (set) {\n literal += set;\n i = closed;\n continue;\n }\n }\n // Otherwise fall thru\n }\n // Append\n literal += c;\n }\n return literal;\n }\n /**\n * Escapes regexp special characters\n * https://javascript.info/regexp-escaping\n */\n static regExpEscape(s) {\n return s.replace(/[[\\\\^$.|?*+()]/g, '\\\\$&');\n }\n}\nexports.Pattern = Pattern;\n//# sourceMappingURL=internal-pattern.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass SearchState {\n constructor(path, level) {\n this.path = path;\n this.level = level;\n }\n}\nexports.SearchState = SearchState;\n//# sourceMappingURL=internal-search-state.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n options.headers['Authorization'] =\n 'Basic ' +\n Buffer.from(this.username + ':' + this.password).toString('base64');\n }\n // This handler cannot handle 401\n canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n options.headers['Authorization'] = 'Bearer ' + this.token;\n }\n // This handler cannot handle 401\n canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n options.headers['Authorization'] =\n 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');\n }\n // This 
handler cannot handle 401\n canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? 
proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a 
typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout 
eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? 
tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = process.env['no_proxy'] || 
process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst assert_1 = require(\"assert\");\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\nexports.IS_WINDOWS = process.platform === 'win32';\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. 
C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Recursively create a directory at `fsPath`.\n *\n * This implementation is optimistic, meaning it attempts to create the full\n * path first, and backs up the path stack from there.\n *\n * @param fsPath The path to create\n * @param maxDepth The maximum recursion depth\n * @param depth The current recursion depth\n */\nfunction mkdirP(fsPath, maxDepth = 1000, depth = 1) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n fsPath = path.resolve(fsPath);\n if (depth >= maxDepth)\n return exports.mkdir(fsPath);\n try {\n yield exports.mkdir(fsPath);\n return;\n }\n catch (err) {\n switch (err.code) {\n case 'ENOENT': {\n yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);\n yield exports.mkdir(fsPath);\n return;\n }\n default: {\n let stats;\n try {\n stats = yield exports.stat(fsPath);\n }\n catch (err2) {\n throw err;\n }\n if (!stats.isDirectory())\n throw err;\n }\n }\n }\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n 
return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst childProcess = require(\"child_process\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst ioUtil = require(\"./io-util\");\nconst exec = util_1.promisify(childProcess.exec);\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory()\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another\n // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.\n try {\n if (yield ioUtil.isDirectory(inputPath, true)) {\n yield exec(`rd /s /q \"${inputPath}\"`);\n }\n else {\n yield exec(`del /f /a \"${inputPath}\"`);\n }\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n // Shelling out fails to remove a symlink folder with missing source, this unlink catches that\n try {\n yield ioUtil.unlink(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n }\n else {\n let isDir = false;\n try {\n isDir = yield ioUtil.isDirectory(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n return;\n }\n if (isDir) {\n yield exec(`rm -rf \"${inputPath}\"`);\n }\n else {\n yield ioUtil.unlink(inputPath);\n }\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n yield ioUtil.mkdirP(fsPath);\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n }\n try {\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {\n for (const extension of process.env.PATHEXT.split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return filePath;\n }\n return '';\n }\n // if any path separators, return empty\n if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\\\'))) {\n return '';\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // return the first match\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);\n if (filePath) {\n return filePath;\n }\n }\n return '';\n }\n catch (err) {\n throw new Error(`which failed with message ${err.message}`);\n }\n });\n}\nexports.which = which;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n return { force, recursive };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","/*\n * Copyright 2020 Adobe. All rights reserved.\n * This file is licensed to you under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License. 
You may obtain a copy\n * of the License at http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software distributed under\n * the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS\n * OF ANY KIND, either express or implied. See the License for the specific language\n * governing permissions and limitations under the License.\n */\n'use strict';\n\nconst AbortController = require('abort-controller');\nconst fetch = require('node-fetch');\nconst {FetchError} = fetch;\n\n/**\n * Retry\n * @param {RetryOptions} retryOptions whether or not to retry on all http error codes or just >500\n * @param {Object} error error object if the fetch request returned an error\n * @param {Object} response fetch call response\n * @returns {Boolean} whether or not to retry the request\n */\nfunction retry(retryOptions, error, response) {\n if (retryOptions) {\n const totalMillisToWait = (retryOptions.retryInitialDelay) + (Date.now() - retryOptions.startTime);\n return ((totalMillisToWait < retryOptions.retryMaxDuration) &&\n (error !== null || (retryOptions.retryOnHttpResponse && (retryOptions.retryOnHttpResponse(response))))\n );\n }\n return false;\n}\n\n/**\n * Retry Init to set up retry options used in `fetch-retry`\n * @param {Options} options object containing fetch options and retry options\n * @returns {RetryOptions|Boolean} object containing specific attributes for retries or `false` if no retries should be performed\n */\nfunction retryInit(options={}) {\n if (options.retryOptions !== false) {\n const retryOptions = options.retryOptions || {};\n checkParameters(retryOptions);\n\n // default settings (environment variables available to help unit testing)\n const DEFAULT_MAX_RETRY = parseInt(process.env.NODE_FETCH_RETRY_MAX_RETRY) || 60000;\n const DEFAULT_INITIAL_WAIT = parseInt(process.env.NODE_FETCH_RETRY_INITIAL_WAIT) || 100;\n const DEFAULT_BACKOFF = parseInt(process.env.NODE_FETCH_RETRY_BACKOFF) || 2.0;\n const DEFAULT_SOCKET_TIMEOUT = parseInt(process.env.NODE_FETCH_RETRY_SOCKET_TIMEOUT) || 30000;\n const DEFAULT_FORCE_TIMEOUT = process.env.NODE_FETCH_RETRY_FORCE_TIMEOUT || false;\n\n let retryMaxDuration = retryOptions.retryMaxDuration || DEFAULT_MAX_RETRY;\n // take into account action timeout if running in the context of an OpenWhisk action\n const timeTillActionTimeout = process.env.__OW_ACTION_DEADLINE && ( process.env.__OW_ACTION_DEADLINE - Date.now()); // duration until action timeout\n if (timeTillActionTimeout && (retryMaxDuration > timeTillActionTimeout) ) {\n retryMaxDuration = timeTillActionTimeout;\n }\n let socketTimeoutValue = retryOptions.socketTimeout || DEFAULT_SOCKET_TIMEOUT;\n if (socketTimeoutValue >= retryMaxDuration) {\n socketTimeoutValue = retryMaxDuration * 0.5; // make socket timeout half of retryMaxDuration to force at least one retry\n }\n if ((retryOptions.forceSocketTimeout || (DEFAULT_FORCE_TIMEOUT === 'true') || DEFAULT_FORCE_TIMEOUT === true)) { // for unit test only - test also for boolean type\n // force the use of set timeout, do not ignore if larger than retryMaxDuration\n console.log('Forced to use socket timeout of (ms):', retryOptions.socketTimeout);\n socketTimeoutValue = retryOptions.socketTimeout;\n }\n\n return {\n startTime: Date.now(),\n retryMaxDuration: retryMaxDuration,\n retryInitialDelay: retryOptions.retryInitialDelay || DEFAULT_INITIAL_WAIT,\n retryBackoff: retryOptions.retryBackoff || DEFAULT_BACKOFF,\n retryOnHttpResponse: ((typeof 
retryOptions.retryOnHttpResponse === 'function') && retryOptions.retryOnHttpResponse) ||\n ((response) => { return response.status >= 500; }),\n socketTimeout: socketTimeoutValue\n };\n }\n return false;\n}\n\n/**\n * Calculate the retry delay\n *\n * @param {RetryOptions|Boolean} retryOptions Retry options\n * @param {Boolean} [random=true] Add randomness\n */\nfunction retryDelay(retryOptions, random = true) {\n return retryOptions.retryInitialDelay +\n (random ? Math.floor(Math.random() * 100) : 99);\n}\n\n/**\n * Check parameters\n * @param {RetryOptions} retryOptions\n * @returns an Error if a parameter is malformed or nothing\n */\n\nfunction checkParameters(retryOptions) {\n if (retryOptions.retryMaxDuration && !(Number.isInteger(retryOptions.retryMaxDuration) && retryOptions.retryMaxDuration >= 0)) {\n throw new Error('`retryMaxDuration` must not be a negative integer');\n }\n if (retryOptions.retryInitialDelay && !(Number.isInteger(retryOptions.retryInitialDelay) && retryOptions.retryInitialDelay >= 0)) {\n throw new Error('`retryInitialDelay` must not be a negative integer');\n }\n if (retryOptions.retryOnHttpResponse && !(typeof retryOptions.retryOnHttpResponse === 'function')) {\n throw new Error(`'retryOnHttpResponse' must be a function: ${retryOptions.retryOnHttpResponse}`);\n }\n if (typeof retryOptions.retryBackoff !== 'undefined'\n && !(Number.isInteger(retryOptions.retryBackoff) && retryOptions.retryBackoff >= 1.0)) {\n throw new Error('`retryBackoff` must be a positive integer >= 1');\n }\n if (retryOptions.socketTimeout && !(Number.isInteger(retryOptions.socketTimeout) && retryOptions.socketTimeout >= 0)) {\n throw new Error('`socketTimeout` must not be a negative integer');\n }\n}\n\n/**\n * @typedef {Object} RetryOptions options for retry or false if want to disable retry\n * @property {Integer} retryMaxDuration time (in milliseconds) to retry until throwing an error\n * @property {Integer} retryInitialDelay time to wait between retries in milliseconds\n * @property {Function} retryOnHttpResponse a function determining whether to retry on a specific HTTP code\n * @property {Integer} retryBackoff backoff factor for wait time between retries (defaults to 2.0)\n * @property {Integer} socketTimeout Optional socket timeout in milliseconds (defaults to 60000ms)\n * @property {Boolean} forceSocketTimeout If true, socket timeout will be forced to use `socketTimeout` property declared (defaults to false)\n */\n/**\n * @typedef {Function} retryOnHttpResponse determines whether to do a retry on the response\n * @property {Number} response response from the http fetch call\n * @returns {Boolean} true if want to retry on this response, false if do not want to retry on the response\n */\n/**\n * @typedef {Object} Options options for fetch-retry\n * @property {Object} RetryOptions options for retry or false if want to disable retry\n * ... other options for fetch call (method, headers, etc...)\n */\n/**\n * Fetch retry that wraps around `node-fetch` library\n * @param {String} url request url\n * @param {Options} options options for fetch request (e.g. 
headers, RetryOptions for retries or `false` if no do not want to perform retries)\n * @returns {Object} json response of calling fetch \n */\nmodule.exports = async function (url, options) {\n options = options || {};\n const retryOptions = retryInit(options); // set up retry options or set to default settings if not set\n delete options.retryOptions; // remove retry options from options passed to actual fetch\n let attempt = 0;\n\n return new Promise(function (resolve, reject) {\n const wrappedFetch = async () => {\n ++attempt;\n\n let timeoutHandler;\n if (retryOptions.socketTimeout) {\n const controller = new AbortController();\n timeoutHandler = setTimeout(() => controller.abort(), retryOptions.socketTimeout);\n options.signal = controller.signal;\n }\n\n try {\n const response = await fetch(url, options);\n clearTimeout(timeoutHandler);\n\n if (!retry(retryOptions, null, response)) {\n // response.timeout should reflect the actual timeout\n response.timeout = retryOptions.socketTimeout;\n return resolve(response);\n }\n\n console.error(`Retrying in ${retryOptions.retryInitialDelay} milliseconds, attempt ${attempt - 1} failed (status ${response.status}): ${response.statusText}`);\n } catch (error) {\n clearTimeout(timeoutHandler);\n\n if (!retry(retryOptions, error, null)) {\n if (error.name === 'AbortError') {\n return reject(new FetchError(`network timeout at ${url}`, 'request-timeout'));\n }\n\n return reject(error);\n }\n\n console.error(`Retrying in ${retryOptions.retryInitialDelay} milliseconds, attempt ${attempt - 1} error: ${error.message}`);\n }\n\n retryOptions.retryInitialDelay *= retryOptions.retryBackoff; // update retry interval\n const waitTime = retryDelay(retryOptions);\n setTimeout(() => { wrappedFetch(); }, waitTime);\n };\n wrappedFetch();\n });\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar tslib = require('tslib');\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar listenersMap = new WeakMap();\nvar abortedMap = new WeakMap();\n/**\n * An aborter instance implements AbortSignal interface, can abort HTTP requests.\n *\n * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.\n * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation\n * cannot or will not ever be cancelled.\n *\n * @example\n * Abort without timeout\n * ```ts\n * await doAsyncWork(AbortSignal.none);\n * ```\n */\nvar AbortSignal = /** @class */ (function () {\n function AbortSignal() {\n /**\n * onabort event listener.\n */\n this.onabort = null;\n listenersMap.set(this, []);\n abortedMap.set(this, false);\n }\n Object.defineProperty(AbortSignal.prototype, \"aborted\", {\n /**\n * Status of whether aborted or not.\n *\n * @readonly\n */\n get: function () {\n if (!abortedMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n return abortedMap.get(this);\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(AbortSignal, \"none\", {\n /**\n * Creates a new AbortSignal instance that will never be aborted.\n *\n * @readonly\n */\n get: function () {\n return new AbortSignal();\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Added new \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be added\n */\n AbortSignal.prototype.addEventListener = function (\n // 
tslint:disable-next-line:variable-name\n _type, listener) {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n var listeners = listenersMap.get(this);\n listeners.push(listener);\n };\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be removed\n */\n AbortSignal.prototype.removeEventListener = function (\n // tslint:disable-next-line:variable-name\n _type, listener) {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n var listeners = listenersMap.get(this);\n var index = listeners.indexOf(listener);\n if (index > -1) {\n listeners.splice(index, 1);\n }\n };\n /**\n * Dispatches a synthetic event to the AbortSignal.\n */\n AbortSignal.prototype.dispatchEvent = function (_event) {\n throw new Error(\"This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.\");\n };\n return AbortSignal;\n}());\n/**\n * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.\n * Will try to trigger abort event for all linked AbortSignal nodes.\n *\n * - If there is a timeout, the timer will be cancelled.\n * - If aborted is true, nothing will happen.\n *\n * @internal\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters\nfunction abortSignal(signal) {\n if (signal.aborted) {\n return;\n }\n if (signal.onabort) {\n signal.onabort.call(signal);\n }\n var listeners = listenersMap.get(signal);\n if (listeners) {\n listeners.forEach(function (listener) {\n listener.call(signal, { type: \"abort\" });\n });\n }\n abortedMap.set(signal, true);\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nvar AbortError = /** @class */ (function (_super) {\n tslib.__extends(AbortError, _super);\n function AbortError(message) {\n var _this = _super.call(this, message) || this;\n _this.name = \"AbortError\";\n return _this;\n }\n return AbortError;\n}(Error));\n/**\n * An AbortController provides an AbortSignal and the associated controls to signal\n * that an asynchronous operation should be aborted.\n *\n * @example\n * Abort an operation when another event fires\n * ```ts\n * const controller = new AbortController();\n * const signal = controller.signal;\n * doAsyncWork(signal);\n * button.addEventListener('click', () => controller.abort());\n * ```\n *\n * @example\n * Share aborter cross multiple operations in 30s\n * ```ts\n * // Upload the same data to 2 different data centers at the same time,\n * // abort another when any of them is finished\n * const controller = AbortController.withTimeout(30 * 1000);\n * doAsyncWork(controller.signal).then(controller.abort);\n * doAsyncWork(controller.signal).then(controller.abort);\n *```\n *\n * @example\n * Cascaded aborting\n * ```ts\n * // All operations can't take more than 30 seconds\n * const aborter = Aborter.timeout(30 * 1000);\n *\n * // Following 2 operations can't take 
more than 25 seconds\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * ```\n */\nvar AbortController = /** @class */ (function () {\n // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types\n function AbortController(parentSignals) {\n var _this = this;\n this._signal = new AbortSignal();\n if (!parentSignals) {\n return;\n }\n // coerce parentSignals into an array\n if (!Array.isArray(parentSignals)) {\n // eslint-disable-next-line prefer-rest-params\n parentSignals = arguments;\n }\n for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) {\n var parentSignal = parentSignals_1[_i];\n // if the parent signal has already had abort() called,\n // then call abort on this signal as well.\n if (parentSignal.aborted) {\n this.abort();\n }\n else {\n // when the parent signal aborts, this signal should as well.\n parentSignal.addEventListener(\"abort\", function () {\n _this.abort();\n });\n }\n }\n }\n Object.defineProperty(AbortController.prototype, \"signal\", {\n /**\n * The AbortSignal associated with this controller that will signal aborted\n * when the abort method is called on this controller.\n *\n * @readonly\n */\n get: function () {\n return this._signal;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Signal that any operations passed this controller's associated abort signal\n * to cancel any remaining work and throw an `AbortError`.\n */\n AbortController.prototype.abort = function () {\n abortSignal(this._signal);\n };\n /**\n * Creates a new AbortSignal instance that will abort after the provided ms.\n * @param ms - Elapsed time in milliseconds to trigger an abort.\n */\n AbortController.timeout = function (ms) {\n var signal = new AbortSignal();\n var timer = setTimeout(abortSignal, ms, signal);\n // Prevent the active Timer from keeping the Node.js event loop active.\n if (typeof timer.unref === \"function\") {\n timer.unref();\n }\n return signal;\n };\n return AbortController;\n}());\n\nexports.AbortController = AbortController;\nexports.AbortError = AbortError;\nexports.AbortSignal = AbortSignal;\n//# sourceMappingURL=index.js.map\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","\"use strict\";\nif (typeof Symbol === undefined || !Symbol.asyncIterator) {\n Symbol.asyncIterator = Symbol.for(\"Symbol.asyncIterator\");\n}\n//# sourceMappingURL=index.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nvar AzureKeyCredential = /** @class */ (function () {\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n function AzureKeyCredential(key) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n this._key = key;\n }\n Object.defineProperty(AzureKeyCredential.prototype, \"key\", {\n /**\n * The value of the key to be used in authentication\n */\n get: function () {\n return this._key;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n AzureKeyCredential.prototype.update = function (newKey) {\n this._key = newKey;\n };\n return AzureKeyCredential;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nvar AzureSASCredential = /** @class */ (function () {\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n function AzureSASCredential(signature) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n this._signature = signature;\n }\n Object.defineProperty(AzureSASCredential.prototype, \"signature\", {\n /**\n * The value of the shared access signature to be used in authentication\n */\n get: function () {\n return this._signature;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n AzureSASCredential.prototype.update = function (newSignature) 
{\n if (!newSignature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n this._signature = newSignature;\n };\n return AzureSASCredential;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nfunction isTokenCredential(credential) {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n var castCredential = credential;\n return (castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0));\n}\n\nexports.AzureKeyCredential = AzureKeyCredential;\nexports.AzureSASCredential = AzureSASCredential;\nexports.isTokenCredential = isTokenCredential;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar tslib = require('tslib');\nvar uuid = require('uuid');\nvar tough = require('tough-cookie');\nvar http = require('http');\nvar https = require('https');\nvar node_fetch = _interopDefault(require('node-fetch'));\nvar abortController = require('@azure/abort-controller');\nvar FormData = _interopDefault(require('form-data'));\nvar util = require('util');\nvar url = require('url');\nvar stream = require('stream');\nvar logger$1 = require('@azure/logger');\nvar tunnel = require('tunnel');\nvar coreAuth = require('@azure/core-auth');\nvar xml2js = require('xml2js');\nvar os = require('os');\nvar coreTracing = require('@azure/core-tracing');\nvar api = require('@opentelemetry/api');\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName) {\n return headerName.toLowerCase();\n}\nfunction isHttpHeadersLike(object) {\n if (object && typeof object === \"object\") {\n var castObject = object;\n if (typeof castObject.rawHeaders === \"function\" &&\n typeof castObject.clone === \"function\" &&\n typeof castObject.get === \"function\" &&\n typeof castObject.set === \"function\" &&\n typeof castObject.contains === \"function\" &&\n typeof castObject.remove === \"function\" &&\n typeof castObject.headersArray === \"function\" &&\n typeof castObject.headerValues === \"function\" &&\n typeof castObject.headerNames === \"function\" &&\n typeof castObject.toJson === \"function\") {\n return true;\n }\n }\n return false;\n}\n/**\n * A collection of HTTP header key/value pairs.\n */\nvar HttpHeaders = /** @class */ (function () {\n function HttpHeaders(rawHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (var headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. 
This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n HttpHeaders.prototype.set = function (headerName, headerValue) {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString()\n };\n };\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n HttpHeaders.prototype.get = function (headerName) {\n var header = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n };\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n HttpHeaders.prototype.contains = function (headerName) {\n return !!this._headersMap[getHeaderKey(headerName)];\n };\n /**\n * Remove the header with the provided headerName. Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n HttpHeaders.prototype.remove = function (headerName) {\n var result = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n };\n /**\n * Get the headers that are contained this collection as an object.\n */\n HttpHeaders.prototype.rawHeaders = function () {\n var result = {};\n for (var headerKey in this._headersMap) {\n var header = this._headersMap[headerKey];\n result[header.name.toLowerCase()] = header.value;\n }\n return result;\n };\n /**\n * Get the headers that are contained in this collection as an array.\n */\n HttpHeaders.prototype.headersArray = function () {\n var headers = [];\n for (var headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n };\n /**\n * Get the header names that are contained in this collection.\n */\n HttpHeaders.prototype.headerNames = function () {\n var headerNames = [];\n var headers = this.headersArray();\n for (var i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n };\n /**\n * Get the header values that are contained in this collection.\n */\n HttpHeaders.prototype.headerValues = function () {\n var headerValues = [];\n var headers = this.headersArray();\n for (var i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n };\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n HttpHeaders.prototype.toJson = function () {\n return this.rawHeaders();\n };\n /**\n * Get the string representation of this HTTP header collection.\n */\n HttpHeaders.prototype.toString = function () {\n return JSON.stringify(this.toJson());\n };\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n HttpHeaders.prototype.clone = function () {\n return new HttpHeaders(this.rawHeaders());\n };\n return HttpHeaders;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nfunction encodeString(value) {\n return Buffer.from(value).toString(\"base64\");\n}\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nfunction encodeByteArray(value) {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n var bufferValue = value instanceof Buffer ? 
value : Buffer.from(value.buffer);\n return bufferValue.toString(\"base64\");\n}\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nfunction decodeString(value) {\n return Buffer.from(value, \"base64\");\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar Constants = {\n /**\n * The core-http version\n */\n coreHttpVersion: \"1.2.3\",\n /**\n * Specifies HTTP.\n */\n HTTP: \"http:\",\n /**\n * Specifies HTTPS.\n */\n HTTPS: \"https:\",\n /**\n * Specifies HTTP Proxy.\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n /**\n * Specifies HTTPS Proxy.\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n HttpConstants: {\n /**\n * Http Verbs\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\"\n },\n StatusCodes: {\n TooManyRequests: 429\n }\n },\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n */\n RETRY_AFTER: \"Retry-After\",\n /**\n * The UserAgent header.\n */\n USER_AGENT: \"User-Agent\"\n }\n};\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Default key used to access the XML attributes.\n */\nvar XML_ATTRKEY = \"$\";\n/**\n * Default key used to access the XML value content.\n */\nvar XML_CHARKEY = \"_\";\n\n// Copyright (c) Microsoft Corporation.\nvar validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nvar isNode = typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n/**\n * Encodes an URI.\n *\n * @param uri - The URI to be encoded.\n * @returns The encoded URI.\n */\nfunction encodeUri(uri) {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param response - The Http Response\n * @returns The stripped version of Http Response.\n */\nfunction stripResponse(response) {\n var strippedResponse = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param request - The Http Request object\n * @returns The stripped version of Http Request.\n */\nfunction stripRequest(request) {\n var strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n/**\n * Validates the given uuid as a string\n *\n * @param uuid - The uuid as a string that needs to be validated\n * @returns True if the uuid is valid; false otherwise.\n */\nfunction isValidUuid(uuid) {\n 
return validUuidRegex.test(uuid);\n}\n/**\n * Generated UUID\n *\n * @returns RFC4122 v4 UUID.\n */\nfunction generateUuid() {\n return uuid.v4();\n}\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises!\n *\n * @param promiseFactories - An array of promise factories(A function that return a promise)\n * @param kickstart - Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n * @returns A chain of resolved or rejected promises\n */\nfunction executePromisesSequentially(promiseFactories, kickstart) {\n var result = Promise.resolve(kickstart);\n promiseFactories.forEach(function (promiseFactory) {\n result = result.then(promiseFactory);\n });\n return result;\n}\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param t - The number of milliseconds to be delayed.\n * @param value - The value to be resolved with after a timeout of t milliseconds.\n * @returns Resolved promise\n */\nfunction delay(t, value) {\n return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); });\n}\n/**\n * Converts a Promise to a callback.\n * @param promise - The Promise to be converted to a callback\n * @returns A function that takes the callback `(cb: Function) => void`\n * @deprecated generated code should instead depend on responseToBody\n */\n// eslint-disable-next-line @typescript-eslint/ban-types\nfunction promiseToCallback(promise) {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n // eslint-disable-next-line @typescript-eslint/ban-types\n return function (cb) {\n promise\n .then(function (data) {\n // eslint-disable-next-line promise/no-callback-in-promise\n return cb(undefined, data);\n })\n .catch(function (err) {\n // eslint-disable-next-line promise/no-callback-in-promise\n cb(err);\n });\n };\n}\n/**\n * Converts a Promise to a service callback.\n * @param promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns A function that takes the service callback (cb: ServiceCallback): void\n */\nfunction promiseToServiceCallback(promise) {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return function (cb) {\n promise\n .then(function (data) {\n return process.nextTick(cb, undefined, data.parsedBody, data.request, data);\n })\n .catch(function (err) {\n process.nextTick(cb, err);\n });\n };\n}\nfunction prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) {\n var _a, _b, _c;\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n if (!xmlNamespaceKey || !xmlNamespace) {\n return _a = {}, _a[elementName] = obj, _a;\n }\n var result = (_b = {}, _b[elementName] = obj, _b);\n result[XML_ATTRKEY] = (_c = {}, _c[xmlNamespaceKey] = xmlNamespace, _c);\n return result;\n}\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param targetCtor - The target object on which the properties need to be applied.\n * @param sourceCtors - An array of source objects from which the properties need to be taken.\n */\nfunction applyMixins(targetCtorParam, sourceCtors) {\n var castTargetCtorParam = targetCtorParam;\n sourceCtors.forEach(function (sourceCtor) {\n 
Object.getOwnPropertyNames(sourceCtor.prototype).forEach(function (name) {\n castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];\n });\n });\n}\nvar validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param value - The value to be validated for ISO 8601 duration format.\n * @returns `true` if valid, `false` otherwise.\n */\nfunction isDuration(value) {\n return validateISODuration.test(value);\n}\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param value - The value to search and replace in.\n * @param searchValue - The value to search for in the value argument.\n * @param replaceValue - The value to replace searchValue with in the value argument.\n * @returns The value where each instance of searchValue was replaced with replacedValue.\n */\nfunction replaceAll(value, searchValue, replaceValue) {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n/**\n * Determines whether the given entity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value - Any entity\n * @returns true is it is primitive type, false otherwise.\n */\nfunction isPrimitiveType(value) {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\nfunction getEnvironmentValue(name) {\n if (process.env[name]) {\n return process.env[name];\n }\n else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar Serializer = /** @class */ (function () {\n function Serializer(modelMappers, isXML) {\n if (modelMappers === void 0) { modelMappers = {}; }\n this.modelMappers = modelMappers;\n this.isXML = isXML;\n }\n Serializer.prototype.validateConstraints = function (mapper, value, objectName) {\n var failValidation = function (constraintName, constraintValue) {\n throw new Error(\"\\\"\" + objectName + \"\\\" with value \\\"\" + value + \"\\\" should satisfy the constraint \\\"\" + constraintName + \"\\\": \" + constraintValue + \".\");\n };\n if (mapper.constraints && value != undefined) {\n var valueAsNumber = value;\n var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems;\n if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n var valueAsArray = value;\n if (MaxItems != undefined && valueAsArray.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && valueAsArray.length > MaxLength) {\n 
failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && valueAsArray.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && valueAsArray.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n var pattern = typeof Pattern === \"string\" ? new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (UniqueItems &&\n valueAsArray.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n };\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param mapper - The mapper which defines the metadata of the serializable object\n * @param object - A valid Javascript object to be serialized\n * @param objectName - Name of the serialized object\n * @param options - additional options to deserialization\n * @returns A valid serialized Javascript object\n */\n Serializer.prototype.serialize = function (mapper, object, objectName, options) {\n var _a, _b, _c;\n if (options === void 0) { options = {}; }\n var updatedOptions = {\n rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : \"\",\n includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false,\n xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY\n };\n var payload = {};\n var mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName;\n }\n if (mapperType.match(/^Sequence$/i) !== null) {\n payload = [];\n }\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n var required = mapper.required, nullable = mapper.nullable;\n if (required && nullable && object === undefined) {\n throw new Error(objectName + \" cannot be undefined.\");\n }\n if (required && !nullable && object == undefined) {\n throw new Error(objectName + \" cannot be null or undefined.\");\n }\n if (!required && nullable === false && object === null) {\n throw new Error(objectName + \" cannot be null.\");\n }\n if (object == undefined) {\n payload = object;\n }\n else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/i) !== null) {\n payload = object;\n }\n else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n }\n else if (mapperType.match(/^Enum$/i) !== null) {\n var enumMapper = mapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n }\n else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) {\n payload = serializeDateTypes(mapperType, object, objectName);\n }\n else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = serializeByteArrayType(objectName, object);\n }\n else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n }\n 
else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);\n }\n else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);\n }\n else if (mapperType.match(/^Composite$/i) !== null) {\n payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);\n }\n }\n return payload;\n };\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param mapper - The mapper which defines the metadata of the serializable object\n * @param responseBody - A valid Javascript entity to be deserialized\n * @param objectName - Name of the deserialized object\n * @param options - Controls behavior of XML parser and builder.\n * @returns A valid deserialized Javascript object\n */\n Serializer.prototype.deserialize = function (mapper, responseBody, objectName, options) {\n var _a, _b, _c;\n if (options === void 0) { options = {}; }\n var updatedOptions = {\n rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : \"\",\n includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false,\n xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY\n };\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n var payload;\n var mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName;\n }\n if (mapperType.match(/^Composite$/i) !== null) {\n payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions);\n }\n else {\n if (this.isXML) {\n var xmlCharKey = updatedOptions.xmlCharKey;\n var castResponseBody = responseBody;\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\" i.e., XML_ATTRKEY) and body (\"#\" i.e., XML_CHARKEY) properties,\n * then just reduce the responseBody value to the body (\"#\" i.e., XML_CHARKEY) property.\n */\n if (castResponseBody[XML_ATTRKEY] != undefined &&\n castResponseBody[xmlCharKey] != undefined) {\n responseBody = castResponseBody[xmlCharKey];\n }\n }\n if (mapperType.match(/^Number$/i) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n }\n else if (mapperType.match(/^Boolean$/i) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n }\n else if (responseBody === \"false\") {\n payload = false;\n }\n else {\n payload = responseBody;\n }\n }\n else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) {\n payload = responseBody;\n }\n else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) {\n payload = new Date(responseBody);\n }\n else if (mapperType.match(/^UnixTime$/i) !== null) {\n payload = unixTimeToDate(responseBody);\n }\n else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = decodeString(responseBody);\n }\n else 
if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = base64UrlToByteArray(responseBody);\n }\n else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions);\n }\n else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions);\n }\n }\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n return payload;\n };\n return Serializer;\n}());\nfunction trimEnd(str, ch) {\n var len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\nfunction bufferToBase64Url(buffer) {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(\"Please provide an input of type Uint8Array for converting to Base64Url.\");\n }\n // Uint8Array to Base64.\n var str = encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\")\n .replace(/\\+/g, \"-\")\n .replace(/\\//g, \"_\");\n}\nfunction base64UrlToByteArray(str) {\n if (!str) {\n return undefined;\n }\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/-/g, \"+\").replace(/_/g, \"/\");\n // Base64 to Uint8Array.\n return decodeString(str);\n}\nfunction splitSerializeName(prop) {\n var classes = [];\n var partialclass = \"\";\n if (prop) {\n var subwords = prop.split(\".\");\n for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) {\n var item = subwords_1[_i];\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n }\n else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n return classes;\n}\nfunction dateToUnixTime(d) {\n if (!d) {\n return undefined;\n }\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d);\n }\n return Math.floor(d.getTime() / 1000);\n}\nfunction unixTimeToDate(n) {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\nfunction serializeBasicTypes(typeName, objectName, value) {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/i) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(objectName + \" with value \" + value + \" must be of type number.\");\n }\n }\n else if (typeName.match(/^String$/i) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(objectName + \" with value \\\"\" + value + \"\\\" must be of type string.\");\n }\n }\n else if (typeName.match(/^Uuid$/i) !== null) {\n if (!(typeof value.valueOf() === \"string\" && isValidUuid(value))) {\n throw new Error(objectName + \" with value \\\"\" + value + \"\\\" must be of type string and a valid uuid.\");\n }\n }\n else if (typeName.match(/^Boolean$/i) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(objectName + \" with value \" + value + \" must be of type boolean.\");\n }\n }\n else if (typeName.match(/^Stream$/i) !== null) {\n var objectType = typeof value;\n if (objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !((typeof Blob === \"function\" || typeof Blob === \"object\") && value instanceof Blob)) {\n throw new Error(objectName + \" must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.\");\n 
}\n }\n }\n return value;\n}\nfunction serializeEnumType(objectName, allowedValues, value) {\n if (!allowedValues) {\n throw new Error(\"Please provide a set of allowedValues to validate \" + objectName + \" as an Enum Type.\");\n }\n var isPresent = allowedValues.some(function (item) {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(value + \" is not a valid value for \" + objectName + \". The valid values are: \" + JSON.stringify(allowedValues) + \".\");\n }\n return value;\n}\nfunction serializeByteArrayType(objectName, value) {\n var returnValue = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(objectName + \" must be of type Uint8Array.\");\n }\n returnValue = encodeByteArray(value);\n }\n return returnValue;\n}\nfunction serializeBase64UrlType(objectName, value) {\n var returnValue = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(objectName + \" must be of type Uint8Array.\");\n }\n returnValue = bufferToBase64Url(value) || \"\";\n }\n return returnValue;\n}\nfunction serializeDateTypes(typeName, value, objectName) {\n if (value != undefined) {\n if (typeName.match(/^Date$/i) !== null) {\n if (!(value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value))))) {\n throw new Error(objectName + \" must be an instanceof Date or a string in ISO8601 format.\");\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n }\n else if (typeName.match(/^DateTime$/i) !== null) {\n if (!(value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value))))) {\n throw new Error(objectName + \" must be an instanceof Date or a string in ISO8601 format.\");\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n }\n else if (typeName.match(/^DateTimeRfc1123$/i) !== null) {\n if (!(value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value))))) {\n throw new Error(objectName + \" must be an instanceof Date or a string in RFC-1123 format.\");\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n }\n else if (typeName.match(/^UnixTime$/i) !== null) {\n if (!(value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value))))) {\n throw new Error(objectName + \" must be an instanceof Date or a string in RFC-1123/ISO8601 format \" +\n \"for it to be serialized in UnixTime/Epoch format.\");\n }\n value = dateToUnixTime(value);\n }\n else if (typeName.match(/^TimeSpan$/i) !== null) {\n if (!isDuration(value)) {\n throw new Error(objectName + \" must be a string in ISO 8601 format. 
Instead was \\\"\" + value + \"\\\".\");\n }\n }\n }\n return value;\n}\nfunction serializeSequenceType(serializer, mapper, object, objectName, isXml, options) {\n var _a, _b;\n if (!Array.isArray(object)) {\n throw new Error(objectName + \" must be of type Array.\");\n }\n var elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\"element\\\" metadata for an Array must be defined in the \" +\n (\"mapper and it must of type \\\"object\\\" in \" + objectName + \".\"));\n }\n var tempArray = [];\n for (var i = 0; i < object.length; i++) {\n var serializedValue = serializer.serialize(elementType, object[i], objectName, options);\n if (isXml && elementType.xmlNamespace) {\n var xmlnsKey = elementType.xmlNamespacePrefix\n ? \"xmlns:\" + elementType.xmlNamespacePrefix\n : \"xmlns\";\n if (elementType.type.name === \"Composite\") {\n tempArray[i] = tslib.__assign({}, serializedValue);\n tempArray[i][XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = elementType.xmlNamespace, _a);\n }\n else {\n tempArray[i] = {};\n tempArray[i][options.xmlCharKey] = serializedValue;\n tempArray[i][XML_ATTRKEY] = (_b = {}, _b[xmlnsKey] = elementType.xmlNamespace, _b);\n }\n }\n else {\n tempArray[i] = serializedValue;\n }\n }\n return tempArray;\n}\nfunction serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) {\n var _a;\n if (typeof object !== \"object\") {\n throw new Error(objectName + \" must be of type object.\");\n }\n var valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\"\\\"value\\\" metadata for a Dictionary must be defined in the \" +\n (\"mapper and it must of type \\\"object\\\" in \" + objectName + \".\"));\n }\n var tempDictionary = {};\n for (var _i = 0, _b = Object.keys(object); _i < _b.length; _i++) {\n var key = _b[_i];\n var serializedValue = serializer.serialize(valueType, object[key], objectName, options);\n // If the element needs an XML namespace we need to add it within the $ property\n tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options);\n }\n // Add the namespace to the root element if needed\n if (isXml && mapper.xmlNamespace) {\n var xmlnsKey = mapper.xmlNamespacePrefix ? \"xmlns:\" + mapper.xmlNamespacePrefix : \"xmlns\";\n var result = tempDictionary;\n result[XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a);\n return result;\n }\n return tempDictionary;\n}\n/**\n * Resolves the additionalProperties property from a referenced mapper\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveAdditionalProperties(serializer, mapper, objectName) {\n var additionalProperties = mapper.type.additionalProperties;\n if (!additionalProperties && mapper.type.className) {\n var modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n return modelMapper === null || modelMapper === void 0 ? 
void 0 : modelMapper.type.additionalProperties;\n }\n return additionalProperties;\n}\n/**\n * Finds the mapper referenced by className\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveReferencedMapper(serializer, mapper, objectName) {\n var className = mapper.type.className;\n if (!className) {\n throw new Error(\"Class name for model \\\"\" + objectName + \"\\\" is not provided in the mapper \\\"\" + JSON.stringify(mapper, undefined, 2) + \"\\\".\");\n }\n return serializer.modelMappers[className];\n}\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n */\nfunction resolveModelProperties(serializer, mapper, objectName) {\n var modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n var modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n if (!modelMapper) {\n throw new Error(\"mapper() cannot be null or undefined for model \\\"\" + mapper.type.className + \"\\\".\");\n }\n modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\"modelProperties cannot be null or undefined in the \" +\n (\"mapper \\\"\" + JSON.stringify(modelMapper) + \"\\\" of type \\\"\" + mapper.type.className + \"\\\" for object \\\"\" + objectName + \"\\\".\"));\n }\n }\n return modelProps;\n}\nfunction serializeCompositeType(serializer, mapper, object, objectName, isXml, options) {\n var _a, _b;\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n if (object != undefined) {\n var payload = {};\n var modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (var _i = 0, _c = Object.keys(modelProps); _i < _c.length; _i++) {\n var key = _c[_i];\n var propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n var propName = void 0;\n var parentObject = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n }\n else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n }\n else {\n var paths = splitSerializeName(propertyMapper.serializedName);\n propName = paths.pop();\n for (var _d = 0, paths_1 = paths; _d < paths_1.length; _d++) {\n var pathName = paths_1[_d];\n var childObject = parentObject[pathName];\n if (childObject == undefined &&\n (object[key] != undefined || propertyMapper.defaultValue !== undefined)) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n if (parentObject != undefined) {\n if (isXml && mapper.xmlNamespace) {\n var xmlnsKey = mapper.xmlNamespacePrefix\n ? \"xmlns:\" + mapper.xmlNamespacePrefix\n : \"xmlns\";\n parentObject[XML_ATTRKEY] = tslib.__assign(tslib.__assign({}, parentObject[XML_ATTRKEY]), (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a));\n }\n var propertyObjectName = propertyMapper.serializedName !== \"\"\n ? 
objectName + \".\" + propertyMapper.serializedName\n : objectName;\n var toSerialize = object[key];\n var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined) {\n toSerialize = mapper.serializedName;\n }\n var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options);\n if (serializedValue !== undefined && propName != undefined) {\n var value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options);\n if (isXml && propertyMapper.xmlIsAttribute) {\n // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {};\n parentObject[XML_ATTRKEY][propName] = serializedValue;\n }\n else if (isXml && propertyMapper.xmlIsWrapped) {\n parentObject[propName] = (_b = {}, _b[propertyMapper.xmlElementName] = value, _b);\n }\n else {\n parentObject[propName] = value;\n }\n }\n }\n }\n var additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName);\n if (additionalPropertiesMapper) {\n var propNames = Object.keys(modelProps);\n var _loop_1 = function (clientPropName) {\n var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; });\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '[\"' + clientPropName + '\"]', options);\n }\n };\n for (var clientPropName in object) {\n _loop_1(clientPropName);\n }\n }\n return payload;\n }\n return object;\n}\nfunction getXmlObjectValue(propertyMapper, serializedValue, isXml, options) {\n var _a;\n if (!isXml || !propertyMapper.xmlNamespace) {\n return serializedValue;\n }\n var xmlnsKey = propertyMapper.xmlNamespacePrefix\n ? 
\"xmlns:\" + propertyMapper.xmlNamespacePrefix\n : \"xmlns\";\n var xmlNamespace = (_a = {}, _a[xmlnsKey] = propertyMapper.xmlNamespace, _a);\n if ([\"Composite\"].includes(propertyMapper.type.name)) {\n if (serializedValue[XML_ATTRKEY]) {\n return serializedValue;\n }\n else {\n var result_1 = tslib.__assign({}, serializedValue);\n result_1[XML_ATTRKEY] = xmlNamespace;\n return result_1;\n }\n }\n var result = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n}\nfunction isSpecialXmlProperty(propertyName, options) {\n return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName);\n}\nfunction deserializeCompositeType(serializer, mapper, responseBody, objectName, options) {\n var _a;\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n var modelProps = resolveModelProperties(serializer, mapper, objectName);\n var instance = {};\n var handledPropertyNames = [];\n for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) {\n var key = _b[_i];\n var propertyMapper = modelProps[key];\n var paths = splitSerializeName(modelProps[key].serializedName);\n handledPropertyNames.push(paths[0]);\n var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName;\n var propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n var headerCollectionPrefix = propertyMapper.headerCollectionPrefix;\n if (headerCollectionPrefix) {\n var dictionary = {};\n for (var _c = 0, _d = Object.keys(responseBody); _c < _d.length; _c++) {\n var headerKey = _d[_c];\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options);\n }\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n }\n else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) {\n instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options);\n }\n else {\n var propertyName = xmlElementName || xmlName || serializedName;\n if (propertyMapper.xmlIsWrapped) {\n /* a list of wrapped by \n For the xml example below\n \n ...\n ...\n \n the responseBody has\n {\n Cors: {\n CorsRule: [{...}, {...}]\n }\n }\n xmlName is \"Cors\" and xmlElementName is\"CorsRule\".\n */\n var wrapped = responseBody[xmlName];\n var elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? 
_a : [];\n instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options);\n }\n else {\n var property = responseBody[propertyName];\n instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options);\n }\n }\n }\n else {\n // deserialize the property if it is present in the provided responseBody instance\n var propertyInstance = void 0;\n var res = responseBody;\n // traversing the object step by step.\n for (var _e = 0, paths_2 = paths; _e < paths_2.length; _e++) {\n var item = paths_2[_e];\n if (!res)\n break;\n res = res[item];\n }\n propertyInstance = res;\n var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined) {\n propertyInstance = mapper.serializedName;\n }\n var serializedValue = void 0;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n instance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options);\n }\n else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options);\n instance[key] = serializedValue;\n }\n }\n }\n var additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n var isAdditionalProperty = function (responsePropName) {\n for (var clientPropName in modelProps) {\n var paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n for (var responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '[\"' + responsePropName + '\"]', options);\n }\n }\n }\n else if (responseBody) {\n for (var _f = 0, _g = Object.keys(responseBody); _f < _g.length; _f++) {\n var key = _g[_f];\n if (instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key, options)) {\n instance[key] = responseBody[key];\n }\n }\n }\n return instance;\n}\nfunction deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) {\n var value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\"\\\"value\\\" metadata for a Dictionary must be defined in the \" +\n (\"mapper and it must of type \\\"object\\\" in \" + objectName));\n }\n if (responseBody) {\n var tempDictionary = {};\n for (var _i = 0, _a = 
Object.keys(responseBody); _i < _a.length; _i++) {\n var key = _a[_i];\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options);\n }\n return tempDictionary;\n }\n return responseBody;\n}\nfunction deserializeSequenceType(serializer, mapper, responseBody, objectName, options) {\n var element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\"element\\\" metadata for an Array must be defined in the \" +\n (\"mapper and it must of type \\\"object\\\" in \" + objectName));\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n var tempArray = [];\n for (var i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + \"[\" + i + \"]\", options);\n }\n return tempArray;\n }\n return responseBody;\n}\nfunction getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) {\n var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n var discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n var typeName = mapper.type.uberParent || mapper.type.className;\n var indexDiscriminator = discriminatorValue === typeName\n ? discriminatorValue\n : typeName + \".\" + discriminatorValue;\n var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\nfunction getPolymorphicDiscriminatorRecursively(serializer, mapper) {\n return (mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className));\n}\nfunction getPolymorphicDiscriminatorSafely(serializer, typeName) {\n return (typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator);\n}\n// TODO: why is this here?\nfunction serializeObject(toSerialize) {\n var castToSerialize = toSerialize;\n if (toSerialize == undefined)\n return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = encodeByteArray(toSerialize);\n return toSerialize;\n }\n else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n }\n else if (Array.isArray(toSerialize)) {\n var array = [];\n for (var i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n }\n else if (typeof toSerialize === \"object\") {\n var dictionary = {};\n for (var property in toSerialize) {\n dictionary[property] = serializeObject(castToSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o) {\n var result = {};\n for (var _i = 0, o_1 = o; _i < o_1.length; _i++) {\n var key = o_1[_i];\n result[key] = key;\n }\n return result;\n}\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nvar MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n 
\"Stream\",\n \"TimeSpan\",\n \"UnixTime\"\n]);\n\n// Copyright (c) Microsoft Corporation.\nfunction isWebResourceLike(object) {\n if (object && typeof object === \"object\") {\n var castObject = object;\n if (typeof castObject.url === \"string\" &&\n typeof castObject.method === \"string\" &&\n typeof castObject.headers === \"object\" &&\n isHttpHeadersLike(castObject.headers) &&\n typeof castObject.validateRequestProperties === \"function\" &&\n typeof castObject.prepare === \"function\" &&\n typeof castObject.clone === \"function\") {\n return true;\n }\n }\n return false;\n}\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n */\nvar WebResource = /** @class */ (function () {\n function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) {\n this.streamResponseBody = streamResponseBody;\n this.streamResponseStatusCodes = streamResponseStatusCodes;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.decompressResponse = decompressResponse;\n this.requestId = this.headers.get(\"x-ms-client-request-id\") || generateUuid();\n }\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n WebResource.prototype.validateRequestProperties = function () {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n };\n /**\n * Prepares the request.\n * @param options - Options to provide for preparing the request.\n * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n WebResource.prototype.prepare = function (options) {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n if (options.method === undefined ||\n options.method === null ||\n typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n if (options.url && options.pathTemplate) {\n throw new Error(\"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\");\n }\n if ((options.pathTemplate === undefined ||\n options.pathTemplate === null ||\n typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url === undefined ||\n options.url === null ||\n typeof options.url.valueOf() !== \"string\")) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n // set the method\n if (options.method) {\n var validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error('The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods));\n }\n }\n this.method = options.method.toUpperCase();\n // construct the url if path template is provided\n if (options.pathTemplate) {\n var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters;\n if (typeof pathTemplate_1 !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n var baseUrl = options.baseUrl;\n var url_1 = baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate_1.startsWith(\"/\") ? pathTemplate_1.slice(1) : pathTemplate_1);\n var segments = url_1.match(/({[\\w-]*\\s*[\\w-]*})/gi);\n if (segments && segments.length) {\n if (!pathParameters_1) {\n throw new Error(\"pathTemplate: \" + pathTemplate_1 + \" has been provided. Hence, options.pathParameters must also be provided.\");\n }\n segments.forEach(function (item) {\n var pathParamName = item.slice(1, -1);\n var pathParam = pathParameters_1[pathParamName];\n if (pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")) {\n var stringifiedPathParameters = JSON.stringify(pathParameters_1, undefined, 2);\n throw new Error(\"pathTemplate: \" + pathTemplate_1 + \" contains the path parameter \" + pathParamName +\n (\" however, it is not present in parameters: \" + stringifiedPathParameters + \".\") +\n (\"The value of the path parameter can either be a \\\"string\\\" of the form { \" + pathParamName + \": \\\"some sample value\\\" } or \") +\n (\"it can be an \\\"object\\\" of the form { \\\"\" + pathParamName + \"\\\": { value: \\\"some sample value\\\", skipUrlEncoding: true } }.\"));\n }\n if (typeof pathParam.valueOf() === \"string\") {\n url_1 = url_1.replace(item, encodeURIComponent(pathParam));\n }\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\"options.pathParameters[\" + pathParamName + \"] is of type \\\"object\\\" but it does not contain a \\\"value\\\" property.\");\n }\n if (pathParam.skipUrlEncoding) {\n url_1 = url_1.replace(item, pathParam.value);\n }\n else {\n url_1 = url_1.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url_1;\n }\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n var queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\"options.queryParameters must be of type object. 
It should be a JSON object \" +\n \"of \\\"query-parameter-name\\\" as the key and the \\\"query-parameter-value\\\" as the value. \" +\n \"The \\\"query-parameter-value\\\" may be fo type \\\"string\\\" or an \\\"object\\\" of the form { value: \\\"query-parameter-value\\\", skipUrlEncoding: true }.\");\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n var queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (var queryParamName in queryParameters) {\n var queryParam = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n }\n else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\"options.queryParameters[\" + queryParamName + \"] is of type \\\"object\\\" but it does not contain a \\\"value\\\" property.\");\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n }\n else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n // add headers to the request if they are provided\n if (options.headers) {\n var headers = options.headers;\n for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) {\n var headerName = _a[_i];\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", this.requestId);\n }\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly\n this.body = options.body;\n if (options.body !== undefined && options.body !== null) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n }\n else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, \"requestBody\");\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n if (options.spanOptions) {\n this.spanOptions = options.spanOptions;\n }\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n return this;\n };\n /**\n * Clone this WebResource HTTP request object.\n * @returns The clone of this WebResource HTTP request object.\n */\n WebResource.prototype.clone = function () {\n var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes);\n if (this.formData) {\n result.formData = this.formData;\n }\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n return result;\n };\n return WebResource;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar custom = util.inspect.custom;\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nvar URLQuery = /** @class */ (function () {\n function URLQuery() {\n this._rawQuery = {};\n }\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n URLQuery.prototype.any = function () {\n return Object.keys(this._rawQuery).length > 0;\n };\n /**\n * Get the keys of the query string.\n */\n URLQuery.prototype.keys = function () {\n return Object.keys(this._rawQuery);\n };\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n URLQuery.prototype.set = function (parameterName, parameterValue) {\n var caseParameterValue = parameterValue;\n if (parameterName) {\n if (caseParameterValue !== undefined && caseParameterValue !== null) {\n var newValue = Array.isArray(caseParameterValue)\n ? caseParameterValue\n : caseParameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n }\n else {\n delete this._rawQuery[parameterName];\n }\n }\n };\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n URLQuery.prototype.get = function (parameterName) {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n };\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n URLQuery.prototype.toString = function () {\n var result = \"\";\n for (var parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n var parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n var parameterStrings = [];\n for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) {\n var parameterValueElement = parameterValue_1[_i];\n parameterStrings.push(parameterName + \"=\" + parameterValueElement);\n }\n result += parameterStrings.join(\"&\");\n }\n else {\n result += parameterName + \"=\" + parameterValue;\n }\n }\n return result;\n };\n /**\n * Parse a URLQuery from the provided text.\n */\n URLQuery.parse = function (text) {\n var result = new URLQuery();\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n var currentState = \"ParameterName\";\n var parameterName = \"\";\n var parameterValue = \"\";\n for (var i = 0; i < text.length; ++i) {\n var currentCharacter = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n return result;\n };\n return URLQuery;\n}());\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nvar URLBuilder = /** @class */ (function () {\n function URLBuilder() {\n }\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n URLBuilder.prototype.setScheme = function (scheme) {\n if (!scheme) {\n this._scheme = undefined;\n }\n else {\n this.set(scheme, \"SCHEME\");\n }\n };\n /**\n * Get the scheme that has been set in this URL.\n */\n URLBuilder.prototype.getScheme = function () {\n return this._scheme;\n };\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n URLBuilder.prototype.setHost = function (host) {\n if (!host) {\n this._host = undefined;\n }\n else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n };\n /**\n * Get the host that has been set in this URL.\n */\n URLBuilder.prototype.getHost = function () {\n return this._host;\n };\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n URLBuilder.prototype.setPort = function (port) {\n if (port === undefined || port === null || port === \"\") {\n this._port = undefined;\n }\n else {\n this.set(port.toString(), \"PORT\");\n }\n };\n /**\n * Get the port that has been set in this URL.\n */\n URLBuilder.prototype.getPort = function () {\n return this._port;\n };\n /**\n * Set the path for this URL. 
If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n URLBuilder.prototype.setPath = function (path) {\n if (!path) {\n this._path = undefined;\n }\n else {\n var schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n var schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n }\n else {\n this.set(path, \"PATH\");\n }\n }\n };\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n URLBuilder.prototype.appendPath = function (path) {\n if (path) {\n var currentPath = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n };\n /**\n * Get the path that has been set in this URL.\n */\n URLBuilder.prototype.getPath = function () {\n return this._path;\n };\n /**\n * Set the query in this URL.\n */\n URLBuilder.prototype.setQuery = function (query) {\n if (!query) {\n this._query = undefined;\n }\n else {\n this._query = URLQuery.parse(query);\n }\n };\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n };\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) {\n return this._query ? this._query.get(queryParameterName) : undefined;\n };\n /**\n * Get the query in this URL.\n */\n URLBuilder.prototype.getQuery = function () {\n return this._query ? 
this._query.toString() : undefined;\n };\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n URLBuilder.prototype.set = function (text, startState) {\n var tokenizer = new URLTokenizer(text, startState);\n while (tokenizer.next()) {\n var token = tokenizer.current();\n var tokenPath = void 0;\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n case \"PATH\":\n tokenPath = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n default:\n throw new Error(\"Unrecognized URLTokenType: \" + token.type);\n }\n }\n }\n };\n URLBuilder.prototype.toString = function () {\n var result = \"\";\n if (this._scheme) {\n result += this._scheme + \"://\";\n }\n if (this._host) {\n result += this._host;\n }\n if (this._port) {\n result += \":\" + this._port;\n }\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n if (this._query && this._query.any()) {\n result += \"?\" + this._query.toString();\n }\n return result;\n };\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n };\n URLBuilder.parse = function (text) {\n var result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n };\n return URLBuilder;\n}());\nvar URLToken = /** @class */ (function () {\n function URLToken(text, type) {\n this.text = text;\n this.type = type;\n }\n URLToken.scheme = function (text) {\n return new URLToken(text, \"SCHEME\");\n };\n URLToken.host = function (text) {\n return new URLToken(text, \"HOST\");\n };\n URLToken.port = function (text) {\n return new URLToken(text, \"PORT\");\n };\n URLToken.path = function (text) {\n return new URLToken(text, \"PATH\");\n };\n URLToken.query = function (text) {\n return new URLToken(text, \"QUERY\");\n };\n return URLToken;\n}());\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nfunction isAlphaNumericCharacter(character) {\n var characterCode = character.charCodeAt(0);\n return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */);\n}\n/**\n * A class that tokenizes URL strings.\n */\nvar URLTokenizer = /** @class */ (function () {\n function URLTokenizer(_text, state) {\n this._text = _text;\n this._textLength = _text ? _text.length : 0;\n this._currentState = state !== undefined && state !== null ? 
state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n URLTokenizer.prototype.current = function () {\n return this._currentToken;\n };\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n URLTokenizer.prototype.next = function () {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n }\n else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n case \"HOST\":\n nextHost(this);\n break;\n case \"PORT\":\n nextPort(this);\n break;\n case \"PATH\":\n nextPath(this);\n break;\n case \"QUERY\":\n nextQuery(this);\n break;\n default:\n throw new Error(\"Unrecognized URLTokenizerState: \" + this._currentState);\n }\n }\n return !!this._currentToken;\n };\n return URLTokenizer;\n}());\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer) {\n var result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer) {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer) {\n return tokenizer._text[tokenizer._currentIndex];\n}\n/**\n * Advance to the character in text that is \"step\" characters ahead. If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer, step) {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer, charactersToPeek) {\n var endIndex = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer, condition) {\n var result = \"\";\n while (hasCurrentCharacter(tokenizer)) {\n var currentCharacter = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n }\n else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n return result;\n}\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer) {\n return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); });\n}\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer) {\n var terminatingCharacters = [];\n for (var _i = 1; _i < arguments.length; _i++) {\n terminatingCharacters[_i - 1] = arguments[_i];\n }\n return readWhile(tokenizer, function (character) { 
return terminatingCharacters.indexOf(character) === -1; });\n}\nfunction nextScheme(tokenizer) {\n var scheme = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n }\n else {\n tokenizer._currentState = \"HOST\";\n }\n}\nfunction nextSchemeOrHost(tokenizer) {\n var schemeOrHost = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n }\n else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n }\n else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n }\n else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n }\n else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\nfunction nextHost(tokenizer) {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n var host = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n }\n else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n }\n else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n }\n else {\n tokenizer._currentState = \"QUERY\";\n }\n}\nfunction nextPort(tokenizer) {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n var port = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n }\n else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n }\n else {\n tokenizer._currentState = \"QUERY\";\n }\n}\nfunction nextPath(tokenizer) {\n var path = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n }\n else {\n tokenizer._currentState = \"QUERY\";\n }\n}\nfunction nextQuery(tokenizer) {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n var query = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n\n// Copyright (c) Microsoft Corporation.\nvar RedactedString = \"REDACTED\";\nvar defaultAllowedHeaderNames = [\n \"x-ms-client-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-useragent\",\n \"x-ms-correlation-request-id\",\n \"x-ms-request-id\",\n \"client-request-id\",\n \"ms-cv\",\n \"return-client-request-id\",\n \"traceparent\",\n \"Access-Control-Allow-Credentials\",\n \"Access-Control-Allow-Headers\",\n \"Access-Control-Allow-Methods\",\n \"Access-Control-Allow-Origin\",\n \"Access-Control-Expose-Headers\",\n \"Access-Control-Max-Age\",\n \"Access-Control-Request-Headers\",\n \"Access-Control-Request-Method\",\n \"Origin\",\n \"Accept\",\n \"Cache-Control\",\n \"Connection\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"ETag\",\n \"Expires\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n 
\"Last-Modified\",\n \"Pragma\",\n \"Request-Id\",\n \"Retry-After\",\n \"Server\",\n \"Transfer-Encoding\",\n \"User-Agent\"\n];\nvar defaultAllowedQueryParameters = [\"api-version\"];\nvar Sanitizer = /** @class */ (function () {\n function Sanitizer(_a) {\n var _b = _a === void 0 ? {} : _a, _c = _b.allowedHeaderNames, allowedHeaderNames = _c === void 0 ? [] : _c, _d = _b.allowedQueryParameters, allowedQueryParameters = _d === void 0 ? [] : _d;\n allowedHeaderNames = Array.isArray(allowedHeaderNames)\n ? defaultAllowedHeaderNames.concat(allowedHeaderNames)\n : defaultAllowedHeaderNames;\n allowedQueryParameters = Array.isArray(allowedQueryParameters)\n ? defaultAllowedQueryParameters.concat(allowedQueryParameters)\n : defaultAllowedQueryParameters;\n this.allowedHeaderNames = new Set(allowedHeaderNames.map(function (n) { return n.toLowerCase(); }));\n this.allowedQueryParameters = new Set(allowedQueryParameters.map(function (p) { return p.toLowerCase(); }));\n }\n Sanitizer.prototype.sanitize = function (obj) {\n return JSON.stringify(obj, this.replacer.bind(this), 2);\n };\n Sanitizer.prototype.replacer = function (key, value) {\n // Ensure Errors include their interesting non-enumerable members\n if (value instanceof Error) {\n return tslib.__assign(tslib.__assign({}, value), { name: value.name, message: value.message });\n }\n if (key === \"_headersMap\") {\n return this.sanitizeHeaders(key, value);\n }\n else if (key === \"url\") {\n return this.sanitizeUrl(value);\n }\n else if (key === \"query\") {\n return this.sanitizeQuery(value);\n }\n else if (key === \"body\") {\n // Don't log the request body\n return undefined;\n }\n else if (key === \"response\") {\n // Don't log response again\n return undefined;\n }\n else if (key === \"operationSpec\") {\n // When using sendOperationRequest, the request carries a massive\n // field with the autorest spec. 
No need to log it.\n return undefined;\n }\n return value;\n };\n Sanitizer.prototype.sanitizeHeaders = function (_, value) {\n return this.sanitizeObject(value, this.allowedHeaderNames, function (v, k) { return v[k].value; });\n };\n Sanitizer.prototype.sanitizeQuery = function (value) {\n return this.sanitizeObject(value, this.allowedQueryParameters, function (v, k) { return v[k]; });\n };\n Sanitizer.prototype.sanitizeObject = function (value, allowedKeys, accessor) {\n if (typeof value !== \"object\" || value === null) {\n return value;\n }\n var sanitized = {};\n for (var _i = 0, _a = Object.keys(value); _i < _a.length; _i++) {\n var k = _a[_i];\n if (allowedKeys.has(k.toLowerCase())) {\n sanitized[k] = accessor(value, k);\n }\n else {\n sanitized[k] = RedactedString;\n }\n }\n return sanitized;\n };\n Sanitizer.prototype.sanitizeUrl = function (value) {\n if (typeof value !== \"string\" || value === null) {\n return value;\n }\n var urlBuilder = URLBuilder.parse(value);\n var queryString = urlBuilder.getQuery();\n if (!queryString) {\n return value;\n }\n var query = URLQuery.parse(queryString);\n for (var _i = 0, _a = query.keys(); _i < _a.length; _i++) {\n var k = _a[_i];\n if (!this.allowedQueryParameters.has(k.toLowerCase())) {\n query.set(k, RedactedString);\n }\n }\n urlBuilder.setQuery(query.toString());\n return urlBuilder.toString();\n };\n return Sanitizer;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar errorSanitizer = new Sanitizer();\nvar RestError = /** @class */ (function (_super) {\n tslib.__extends(RestError, _super);\n function RestError(message, code, statusCode, request, response) {\n var _this = _super.call(this, message) || this;\n _this.name = \"RestError\";\n _this.code = code;\n _this.statusCode = statusCode;\n _this.request = request;\n _this.response = response;\n Object.setPrototypeOf(_this, RestError.prototype);\n return _this;\n }\n /**\n * Logging method for util.inspect in Node\n */\n RestError.prototype[custom] = function () {\n return \"RestError: \" + this.message + \" \\n \" + errorSanitizer.sanitize(this);\n };\n RestError.REQUEST_SEND_ERROR = \"REQUEST_SEND_ERROR\";\n RestError.PARSE_ERROR = \"PARSE_ERROR\";\n return RestError;\n}(Error));\n\n// Copyright (c) Microsoft Corporation.\nvar logger = logger$1.createClientLogger(\"core-http\");\n\n// Copyright (c) Microsoft Corporation.\nvar ReportTransform = /** @class */ (function (_super) {\n tslib.__extends(ReportTransform, _super);\n function ReportTransform(progressCallback) {\n var _this = _super.call(this) || this;\n _this.progressCallback = progressCallback;\n _this.loadedBytes = 0;\n return _this;\n }\n ReportTransform.prototype._transform = function (chunk, _encoding, callback) {\n this.push(chunk);\n this.loadedBytes += chunk.length;\n this.progressCallback({ loadedBytes: this.loadedBytes });\n callback(undefined);\n };\n return ReportTransform;\n}(stream.Transform));\nvar FetchHttpClient = /** @class */ (function () {\n function FetchHttpClient() {\n }\n FetchHttpClient.prototype.sendRequest = function (httpRequest) {\n var _a;\n return tslib.__awaiter(this, void 0, void 0, function () {\n var abortController$1, abortListener, formData, requestForm_1, appendFormValue, _i, _b, formKey, formValue, j, contentType, body, onUploadProgress, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, streaming, _c, onDownloadProgress, responseBody, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone;\n var _d;\n 
return tslib.__generator(this, function (_e) {\n switch (_e.label) {\n case 0:\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\"'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object.\");\n }\n abortController$1 = new abortController.AbortController();\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new abortController.AbortError(\"The operation was aborted.\");\n }\n abortListener = function (event) {\n if (event.type === \"abort\") {\n abortController$1.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n if (httpRequest.timeout) {\n setTimeout(function () {\n abortController$1.abort();\n }, httpRequest.timeout);\n }\n if (httpRequest.formData) {\n formData = httpRequest.formData;\n requestForm_1 = new FormData();\n appendFormValue = function (key, value) {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")) {\n requestForm_1.append(key, value.value, value.options);\n }\n else {\n requestForm_1.append(key, value);\n }\n };\n for (_i = 0, _b = Object.keys(formData); _i < _b.length; _i++) {\n formKey = _b[_i];\n formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n }\n else {\n appendFormValue(formKey, formValue);\n }\n }\n httpRequest.body = requestForm_1;\n httpRequest.formData = undefined;\n contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm_1.getBoundary === \"function\") {\n httpRequest.headers.set(\"Content-Type\", \"multipart/form-data; boundary=\" + requestForm_1.getBoundary());\n }\n else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n onUploadProgress = httpRequest.onUploadProgress;\n uploadReportStream = new ReportTransform(onUploadProgress);\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n }\n else {\n uploadReportStream.end(body);\n }\n body = uploadReportStream;\n }\n return [4 /*yield*/, this.prepareRequest(httpRequest)];\n case 1:\n platformSpecificRequestInit = _e.sent();\n requestInit = tslib.__assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController$1.signal, redirect: \"manual\" }, platformSpecificRequestInit);\n _e.label = 2;\n case 2:\n _e.trys.push([2, 8, 9, 10]);\n return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)];\n case 3:\n response = _e.sent();\n headers = parseHeaders(response.headers);\n streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) ||\n httpRequest.streamResponseBody;\n _d = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: streaming\n ? 
response.body\n : undefined\n };\n if (!!streaming) return [3 /*break*/, 5];\n return [4 /*yield*/, response.text()];\n case 4:\n _c = _e.sent();\n return [3 /*break*/, 6];\n case 5:\n _c = undefined;\n _e.label = 6;\n case 6:\n operationResponse = (_d.bodyAsText = _c,\n _d);\n onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n responseBody = response.body || undefined;\n if (isReadableStream(responseBody)) {\n downloadReportStream = new ReportTransform(onDownloadProgress);\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n }\n else {\n length_1 = parseInt(headers.get(\"Content-Length\")) || undefined;\n if (length_1) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length_1 });\n }\n }\n }\n return [4 /*yield*/, this.processRequest(operationResponse)];\n case 7:\n _e.sent();\n return [2 /*return*/, operationResponse];\n case 8:\n error_1 = _e.sent();\n fetchError = error_1;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest);\n }\n else if (fetchError.type === \"aborted\") {\n throw new abortController.AbortError(\"The operation was aborted.\");\n }\n throw fetchError;\n case 9:\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);\n }\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(function () {\n var _a;\n (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener(\"abort\", abortListener);\n return;\n })\n .catch(function (e) {\n logger.warning(\"Error when cleaning up abortListener on httpRequest\", e);\n });\n }\n return [7 /*endfinally*/];\n case 10: return [2 /*return*/];\n }\n });\n });\n };\n return FetchHttpClient;\n}());\nfunction isReadableStream(body) {\n return body && typeof body.pipe === \"function\";\n}\nfunction isStreamComplete(stream) {\n return new Promise(function (resolve) {\n stream.on(\"close\", resolve);\n stream.on(\"end\", resolve);\n stream.on(\"error\", resolve);\n });\n}\nfunction parseHeaders(headers) {\n var httpHeaders = new HttpHeaders();\n headers.forEach(function (value, key) {\n httpHeaders.set(key, value);\n });\n return httpHeaders;\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction createProxyAgent(requestUrl, proxySettings, headers) {\n var host = URLBuilder.parse(proxySettings.host).getHost();\n if (!host) {\n throw new Error(\"Expecting a non-empty host in proxy settings.\");\n }\n if (!isValidPort(proxySettings.port)) {\n throw new Error(\"Expecting a valid port number in the range of [0, 65535] in proxy settings.\");\n }\n var tunnelOptions = {\n proxy: {\n host: host,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {}\n }\n };\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy.proxyAuth = proxySettings.username + \":\" + proxySettings.password;\n }\n var isRequestHttps = isUrlHttps(requestUrl);\n var isProxyHttps = isUrlHttps(proxySettings.host);\n var proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions)\n };\n return proxyAgent;\n}\nfunction isUrlHttps(url) {\n var urlScheme = URLBuilder.parse(url).getScheme() || \"\";\n return urlScheme.toLowerCase() === \"https\";\n}\nfunction createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n }\n else if (isRequestHttps && !isProxyHttps) {\n return tunnel.httpsOverHttp(tunnelOptions);\n }\n else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n }\n else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\nfunction isValidPort(port) {\n // any port in 0-65535 range is valid (RFC 793) even though almost all implementations\n // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports\n return 0 <= port && port <= 65535;\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction getCachedAgent(isHttps, agentCache) {\n return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent;\n}\nvar NodeFetchHttpClient = /** @class */ (function (_super) {\n tslib.__extends(NodeFetchHttpClient, _super);\n function NodeFetchHttpClient() {\n var _this = _super !== null && _super.apply(this, arguments) || this;\n _this.proxyAgents = {};\n _this.keepAliveAgents = {};\n _this.cookieJar = new tough.CookieJar(undefined, { looseMode: true });\n return _this;\n }\n NodeFetchHttpClient.prototype.getOrCreateAgent = function (httpRequest) {\n var isHttps = isUrlHttps(httpRequest.url);\n // At the moment, proxy settings and keepAlive are mutually\n // exclusive because the 'tunnel' library currently lacks the\n // ability to create a proxy with keepAlive turned on.\n if (httpRequest.proxySettings) {\n var agent = getCachedAgent(isHttps, this.proxyAgents);\n if (agent) {\n return agent;\n }\n var tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers);\n agent = tunnel.agent;\n if (tunnel.isHttps) {\n this.proxyAgents.httpsAgent = tunnel.agent;\n }\n else {\n this.proxyAgents.httpAgent = tunnel.agent;\n }\n return agent;\n }\n else if (httpRequest.keepAlive) {\n var agent = getCachedAgent(isHttps, this.keepAliveAgents);\n if (agent) {\n return agent;\n }\n var agentOptions = {\n keepAlive: httpRequest.keepAlive\n };\n if (isHttps) {\n agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions);\n }\n else {\n agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions);\n }\n return agent;\n }\n else {\n return isHttps ? https.globalAgent : http.globalAgent;\n }\n };\n // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs\n NodeFetchHttpClient.prototype.fetch = function (input, init) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, node_fetch(input, init)];\n });\n });\n };\n NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var requestInit, cookieString;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n requestInit = {};\n if (!(this.cookieJar && !httpRequest.headers.get(\"Cookie\"))) return [3 /*break*/, 2];\n return [4 /*yield*/, new Promise(function (resolve, reject) {\n _this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) {\n if (err) {\n reject(err);\n }\n else {\n resolve(cookie);\n }\n });\n })];\n case 1:\n cookieString = _a.sent();\n httpRequest.headers.set(\"Cookie\", cookieString);\n _a.label = 2;\n case 2:\n // Set the http(s) agent\n requestInit.agent = this.getOrCreateAgent(httpRequest);\n requestInit.compress = httpRequest.decompressResponse;\n return [2 /*return*/, requestInit];\n }\n });\n });\n };\n NodeFetchHttpClient.prototype.processRequest = function (operationResponse) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var setCookieHeader_1;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!this.cookieJar) return [3 /*break*/, 2];\n setCookieHeader_1 = operationResponse.headers.get(\"Set-Cookie\");\n if (!(setCookieHeader_1 !== undefined)) return [3 /*break*/, 2];\n return [4 /*yield*/, new Promise(function (resolve, reject) {\n _this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) {\n if (err) {\n reject(err);\n }\n else {\n resolve();\n }\n });\n })];\n case 1:\n 
_a.sent();\n _a.label = 2;\n case 2: return [2 /*return*/];\n }\n });\n });\n };\n return NodeFetchHttpClient;\n}(FetchHttpClient));\n\n// Copyright (c) Microsoft Corporation.\n(function (HttpPipelineLogLevel) {\n /**\n * A log level that indicates that no logs will be logged.\n */\n HttpPipelineLogLevel[HttpPipelineLogLevel[\"OFF\"] = 0] = \"OFF\";\n /**\n * An error log.\n */\n HttpPipelineLogLevel[HttpPipelineLogLevel[\"ERROR\"] = 1] = \"ERROR\";\n /**\n * A warning log.\n */\n HttpPipelineLogLevel[HttpPipelineLogLevel[\"WARNING\"] = 2] = \"WARNING\";\n /**\n * An information log.\n */\n HttpPipelineLogLevel[HttpPipelineLogLevel[\"INFO\"] = 3] = \"INFO\";\n})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {}));\n\n/**\n * Converts an OperationOptions to a RequestOptionsBase\n *\n * @param opts - OperationOptions object to convert to RequestOptionsBase\n */\nfunction operationOptionsToRequestOptionsBase(opts) {\n var requestOptions = opts.requestOptions, tracingOptions = opts.tracingOptions, additionalOptions = tslib.__rest(opts, [\"requestOptions\", \"tracingOptions\"]);\n var result = additionalOptions;\n if (requestOptions) {\n result = tslib.__assign(tslib.__assign({}, result), requestOptions);\n }\n if (tracingOptions) {\n result.spanOptions = tracingOptions.spanOptions;\n }\n return result;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar BaseRequestPolicy = /** @class */ (function () {\n function BaseRequestPolicy(_nextPolicy, _options) {\n this._nextPolicy = _nextPolicy;\n this._options = _options;\n }\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n BaseRequestPolicy.prototype.shouldLog = function (logLevel) {\n return this._options.shouldLog(logLevel);\n };\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n BaseRequestPolicy.prototype.log = function (logLevel, message) {\n this._options.log(logLevel, message);\n };\n return BaseRequestPolicy;\n}());\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nvar RequestPolicyOptions = /** @class */ (function () {\n function RequestPolicyOptions(_logger) {\n this._logger = _logger;\n }\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n RequestPolicyOptions.prototype.shouldLog = function (logLevel) {\n return (!!this._logger &&\n logLevel !== exports.HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel);\n };\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n RequestPolicyOptions.prototype.log = function (logLevel, message) {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n };\n return RequestPolicyOptions;\n}());\n\n// Copyright (c) Microsoft Corporation.\nfunction logPolicy(loggingOptions) {\n if (loggingOptions === void 0) { loggingOptions = {}; }\n return {\n create: function (nextPolicy, options) {\n return new LogPolicy(nextPolicy, options, loggingOptions);\n }\n };\n}\nvar LogPolicy = /** @class */ (function (_super) {\n tslib.__extends(LogPolicy, _super);\n function LogPolicy(nextPolicy, options, _a) {\n var _b = _a === void 0 ? {} : _a, _c = _b.logger, logger$1 = _c === void 0 ? logger.info : _c, _d = _b.allowedHeaderNames, allowedHeaderNames = _d === void 0 ? [] : _d, _e = _b.allowedQueryParameters, allowedQueryParameters = _e === void 0 ? [] : _e;\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.logger = logger$1;\n _this.sanitizer = new Sanitizer({ allowedHeaderNames: allowedHeaderNames, allowedQueryParameters: allowedQueryParameters });\n return _this;\n }\n Object.defineProperty(LogPolicy.prototype, \"allowedHeaderNames\", {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n get: function () {\n return this.sanitizer.allowedHeaderNames;\n },\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n set: function (allowedHeaderNames) {\n this.sanitizer.allowedHeaderNames = allowedHeaderNames;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(LogPolicy.prototype, \"allowedQueryParameters\", {\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n get: function () {\n return this.sanitizer.allowedQueryParameters;\n },\n /**\n * Query string names whose values will be logged when logging is enabled. 
By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n set: function (allowedQueryParameters) {\n this.sanitizer.allowedQueryParameters = allowedQueryParameters;\n },\n enumerable: false,\n configurable: true\n });\n LogPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n if (!this.logger.enabled)\n return this._nextPolicy.sendRequest(request);\n this.logRequest(request);\n return this._nextPolicy.sendRequest(request).then(function (response) { return _this.logResponse(response); });\n };\n LogPolicy.prototype.logRequest = function (request) {\n this.logger(\"Request: \" + this.sanitizer.sanitize(request));\n };\n LogPolicy.prototype.logResponse = function (response) {\n this.logger(\"Response status code: \" + response.status);\n this.logger(\"Headers: \" + this.sanitizer.sanitize(response.headers));\n return response;\n };\n return LogPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter - The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nfunction getPathStringFromParameter(parameter) {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\nfunction getPathStringFromParameterPath(parameterPath, mapper) {\n var result;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n }\n else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n }\n else {\n result = mapper.serializedName;\n }\n return result;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Gets the list of status codes for streaming responses.\n * @internal @hidden\n */\nfunction getStreamResponseStatusCodes(operationSpec) {\n var result = new Set();\n for (var statusCode in operationSpec.responses) {\n var operationResponse = operationSpec.responses[statusCode];\n if (operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream) {\n result.add(Number(statusCode));\n }\n }\n return result;\n}\n\n// Copyright (c) Microsoft Corporation.\n// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed\n// by the xml2js library is mutable. 
See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536\n// By creating a new copy of the settings each time we instantiate the parser,\n// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.\nvar xml2jsDefaultOptionsV2 = {\n explicitCharkey: false,\n trim: false,\n normalize: false,\n normalizeTags: false,\n attrkey: XML_ATTRKEY,\n explicitArray: true,\n ignoreAttrs: false,\n mergeAttrs: false,\n explicitRoot: true,\n validator: undefined,\n xmlns: false,\n explicitChildren: false,\n preserveChildrenOrder: false,\n childkey: \"$$\",\n charsAsChildren: false,\n includeWhiteChars: false,\n async: false,\n strict: true,\n attrNameProcessors: undefined,\n attrValueProcessors: undefined,\n tagNameProcessors: undefined,\n valueProcessors: undefined,\n rootName: \"root\",\n xmldec: {\n version: \"1.0\",\n encoding: \"UTF-8\",\n standalone: true\n },\n doctype: undefined,\n renderOpts: {\n pretty: true,\n indent: \" \",\n newline: \"\\n\"\n },\n headless: false,\n chunkSize: 10000,\n emptyTag: \"\",\n cdata: false\n};\n// The xml2js settings for general XML parsing operations.\nvar xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsParserSettings.explicitArray = false;\n// The xml2js settings for general XML building operations.\nvar xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsBuilderSettings.explicitArray = false;\nxml2jsBuilderSettings.renderOpts = {\n pretty: false\n};\n/**\n * Converts given JSON object to XML string\n * @param obj - JSON object to be converted into XML string\n * @param opts - Options that govern the parsing of given JSON object\n */\nfunction stringifyXML(obj, opts) {\n var _a;\n if (opts === void 0) { opts = {}; }\n xml2jsBuilderSettings.rootName = opts.rootName;\n xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY;\n var builder = new xml2js.Builder(xml2jsBuilderSettings);\n return builder.buildObject(obj);\n}\n/**\n * Converts given XML string into JSON\n * @param str - String containing the XML content to be parsed into JSON\n * @param opts - Options that govern the parsing of given xml string\n */\nfunction parseXML(str, opts) {\n var _a;\n if (opts === void 0) { opts = {}; }\n xml2jsParserSettings.explicitRoot = !!opts.includeRoot;\n xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY;\n var xmlParser = new xml2js.Parser(xml2jsParserSettings);\n return new Promise(function (resolve, reject) {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n }\n else {\n xmlParser.parseString(str, function (err, res) {\n if (err) {\n reject(err);\n }\n else {\n resolve(res);\n }\n });\n }\n });\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nfunction deserializationPolicy(deserializationContentTypes, parsingOptions) {\n return {\n create: function (nextPolicy, options) {\n return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions);\n }\n };\n}\nvar defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nvar defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\nvar DefaultDeserializationOptions = {\n expectedContentTypes: {\n json: defaultJsonContentTypes,\n xml: defaultXmlContentTypes\n }\n};\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nvar DeserializationPolicy = /** @class */ (function (_super) {\n tslib.__extends(DeserializationPolicy, _super);\n function DeserializationPolicy(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions) {\n if (parsingOptions === void 0) { parsingOptions = {}; }\n var _a;\n var _this = _super.call(this, nextPolicy, requestPolicyOptions) || this;\n _this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n _this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n _this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY;\n return _this;\n }\n DeserializationPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, this._nextPolicy.sendRequest(request).then(function (response) {\n return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response, {\n xmlCharKey: _this.xmlCharKey\n });\n })];\n });\n });\n };\n return DeserializationPolicy;\n}(BaseRequestPolicy));\nfunction getOperationResponse(parsedResponse) {\n var result;\n var request = parsedResponse.request;\n var operationSpec = request.operationSpec;\n if (operationSpec) {\n var operationResponseGetter = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n }\n else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\nfunction shouldDeserializeResponse(parsedResponse) {\n var shouldDeserialize = parsedResponse.request.shouldDeserialize;\n var result;\n if (shouldDeserialize === undefined) {\n result = true;\n }\n else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n }\n else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\nfunction deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options) {\n var _a, _b, _c;\n if (options === void 0) { options = {}; }\n var updatedOptions = {\n rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : \"\",\n includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? 
_b : false,\n xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY\n };\n return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(function (parsedResponse) {\n if (!shouldDeserializeResponse(parsedResponse)) {\n return parsedResponse;\n }\n var operationSpec = parsedResponse.request.operationSpec;\n if (!operationSpec || !operationSpec.responses) {\n return parsedResponse;\n }\n var responseSpec = getOperationResponse(parsedResponse);\n var _a = handleErrorResponse(parsedResponse, operationSpec, responseSpec), error = _a.error, shouldReturnResponse = _a.shouldReturnResponse;\n if (error) {\n throw error;\n }\n else if (shouldReturnResponse) {\n return parsedResponse;\n }\n // An operation response spec does exist for current status code, so\n // use it to deserialize the response.\n if (responseSpec) {\n if (responseSpec.bodyMapper) {\n var valueToDeserialize = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, \"operationRes.parsedBody\", options);\n }\n catch (innerError) {\n var restError = new RestError(\"Error \" + innerError + \" occurred in deserializing the responseBody - \" + parsedResponse.bodyAsText, undefined, parsedResponse.status, parsedResponse.request, parsedResponse);\n throw restError;\n }\n }\n else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), \"operationRes.parsedHeaders\", options);\n }\n }\n return parsedResponse;\n });\n}\nfunction isOperationSpecEmpty(operationSpec) {\n var expectedStatusCodes = Object.keys(operationSpec.responses);\n return (expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\"));\n}\nfunction handleErrorResponse(parsedResponse, operationSpec, responseSpec) {\n var _a;\n var isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;\n var isExpectedStatusCode = isOperationSpecEmpty(operationSpec)\n ? isSuccessByStatus\n : !!responseSpec;\n if (isExpectedStatusCode) {\n if (responseSpec) {\n if (!responseSpec.isError) {\n return { error: null, shouldReturnResponse: false };\n }\n }\n else {\n return { error: null, shouldReturnResponse: false };\n }\n }\n var errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default;\n var streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) ||\n parsedResponse.request.streamResponseBody;\n var initialErrorMessage = streaming\n ? 
\"Unexpected status code: \" + parsedResponse.status\n : parsedResponse.bodyAsText;\n var error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse);\n // If the item failed but there's no error spec or default spec to deserialize the error,\n // we should fail so we just throw the parsed response\n if (!errorResponseSpec) {\n throw error;\n }\n var defaultBodyMapper = errorResponseSpec.bodyMapper;\n var defaultHeadersMapper = errorResponseSpec.headersMapper;\n try {\n // If error response has a body, try to deserialize it using default body mapper.\n // Then try to extract error code & message from it\n if (parsedResponse.parsedBody) {\n var parsedBody = parsedResponse.parsedBody;\n var parsedError = void 0;\n if (defaultBodyMapper) {\n var valueToDeserialize = parsedBody;\n if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof parsedBody === \"object\" ? parsedBody[defaultBodyMapper.xmlElementName] : [];\n }\n parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, \"error.response.parsedBody\");\n }\n var internalError = parsedBody.error || parsedError || parsedBody;\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n if (defaultBodyMapper) {\n error.response.parsedBody = parsedError;\n }\n }\n // If error response has headers, try to deserialize it using default header mapper\n if (parsedResponse.headers && defaultHeadersMapper) {\n error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.rawHeaders(), \"operationRes.parsedHeaders\");\n }\n }\n catch (defaultError) {\n error.message = \"Error \\\"\" + defaultError.message + \"\\\" occurred in deserializing the responseBody - \\\"\" + parsedResponse.bodyAsText + \"\\\" for the default response.\";\n }\n return { error: error, shouldReturnResponse: false };\n}\nfunction parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) {\n var _a;\n var errorHandler = function (err) {\n var msg = \"Error \\\"\" + err + \"\\\" occurred while parsing the response body - \" + operationResponse.bodyAsText + \".\";\n var errCode = err.code || RestError.PARSE_ERROR;\n var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse);\n return Promise.reject(e);\n };\n var streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) ||\n operationResponse.request.streamResponseBody;\n if (!streaming && operationResponse.bodyAsText) {\n var text_1 = operationResponse.bodyAsText;\n var contentType = operationResponse.headers.get(\"Content-Type\") || \"\";\n var contentComponents = !contentType\n ? 
[]\n : contentType.split(\";\").map(function (component) { return component.toLowerCase(); });\n if (contentComponents.length === 0 ||\n contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) {\n return new Promise(function (resolve) {\n operationResponse.parsedBody = JSON.parse(text_1);\n resolve(operationResponse);\n }).catch(errorHandler);\n }\n else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) {\n return parseXML(text_1, opts)\n .then(function (body) {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n return Promise.resolve(operationResponse);\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar DEFAULT_CLIENT_RETRY_COUNT = 3;\n// intervals are in ms\nvar DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nvar DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nvar DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\nfunction isNumber(n) {\n return typeof n === \"number\";\n}\n/**\n * @internal\n * Determines if the operation should be retried.\n *\n * @param retryLimit - Specifies the max number of retries.\n * @param predicate - Initial check on whether to retry based on given responses or errors\n * @param retryData - The retry data.\n * @returns True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(retryLimit, predicate, retryData, response, error) {\n if (!predicate(response, error)) {\n return false;\n }\n return retryData.retryCount < retryLimit;\n}\n/**\n * @internal\n * Updates the retry data for the next attempt.\n *\n * @param retryOptions - specifies retry interval, and its lower bound and upper bound.\n * @param retryData - The retry data.\n * @param err - The operation's error, if any.\n */\nfunction updateRetryData(retryOptions, retryData, err) {\n if (retryData === void 0) { retryData = { retryCount: 0, retryInterval: 0 }; }\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n retryData.error = err;\n }\n // Adjust retry count\n retryData.retryCount++;\n // Adjust retry interval\n var incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;\n var boundedRandDelta = retryOptions.retryInterval * 0.8 +\n Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval);\n return retryData;\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) {\n return {\n create: function (nextPolicy, options) {\n return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval);\n }\n };\n}\n(function (RetryMode) {\n RetryMode[RetryMode[\"Exponential\"] = 0] = \"Exponential\";\n})(exports.RetryMode || (exports.RetryMode = {}));\nvar DefaultRetryOptions = {\n maxRetries: DEFAULT_CLIENT_RETRY_COUNT,\n retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL,\n maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL\n};\n/**\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nvar ExponentialRetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(ExponentialRetryPolicy, _super);\n /**\n * @param nextPolicy - The next RequestPolicy in the pipeline chain.\n * @param options - The options for this RequestPolicy.\n * @param retryCount - The client retry count.\n * @param retryInterval - 
The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\n function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n _this.maxRetryInterval = isNumber(maxRetryInterval)\n ? maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n return _this;\n }\n ExponentialRetryPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this._nextPolicy\n .sendRequest(request.clone())\n .then(function (response) { return retry(_this, request, response); })\n .catch(function (error) { return retry(_this, request, error.response, undefined, error); });\n };\n return ExponentialRetryPolicy;\n}(BaseRequestPolicy));\nfunction retry(policy, request, response, retryData, requestError) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n function shouldPolicyRetry(responseParam) {\n var statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status;\n if (statusCode === undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505) {\n return false;\n }\n return true;\n }\n var isAborted, res, err_1, err;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n retryData = updateRetryData({\n retryInterval: policy.retryInterval,\n minRetryInterval: 0,\n maxRetryInterval: policy.maxRetryInterval\n }, retryData, requestError);\n isAborted = request.abortSignal && request.abortSignal.aborted;\n if (!(!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response))) return [3 /*break*/, 6];\n logger.info(\"Retrying request in \" + retryData.retryInterval);\n _a.label = 1;\n case 1:\n _a.trys.push([1, 4, , 5]);\n return [4 /*yield*/, delay(retryData.retryInterval)];\n case 2:\n _a.sent();\n return [4 /*yield*/, policy._nextPolicy.sendRequest(request.clone())];\n case 3:\n res = _a.sent();\n return [2 /*return*/, retry(policy, request, res, retryData)];\n case 4:\n err_1 = _a.sent();\n return [2 /*return*/, retry(policy, request, response, retryData, err_1)];\n case 5: return [3 /*break*/, 7];\n case 6:\n if (isAborted || requestError || !response) {\n err = retryData.error ||\n new RestError(\"Failed to send the request.\", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response);\n throw err;\n }\n else {\n return [2 /*return*/, response];\n }\n case 7: return [2 /*return*/];\n }\n });\n });\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction generateClientRequestIdPolicy(requestIdHeaderName) {\n if (requestIdHeaderName === void 0) { requestIdHeaderName = \"x-ms-client-request-id\"; }\n return {\n create: function (nextPolicy, options) {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n }\n };\n}\nvar GenerateClientRequestIdPolicy = /** @class */ (function (_super) {\n tslib.__extends(GenerateClientRequestIdPolicy, _super);\n function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this._requestIdHeaderName = _requestIdHeaderName;\n return _this;\n }\n 
GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, request.requestId);\n }\n return this._nextPolicy.sendRequest(request);\n };\n return GenerateClientRequestIdPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nfunction getDefaultUserAgentKey() {\n return Constants.HeaderConstants.USER_AGENT;\n}\nfunction getPlatformSpecificData() {\n var runtimeInfo = {\n key: \"Node\",\n value: process.version\n };\n var osInfo = {\n key: \"OS\",\n value: \"(\" + os.arch() + \"-\" + os.type() + \"-\" + os.release() + \")\"\n };\n return [runtimeInfo, osInfo];\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction getRuntimeInfo() {\n var msRestRuntime = {\n key: \"core-http\",\n value: Constants.coreHttpVersion\n };\n return [msRestRuntime];\n}\nfunction getUserAgentString(telemetryInfo, keySeparator, valueSeparator) {\n if (keySeparator === void 0) { keySeparator = \" \"; }\n if (valueSeparator === void 0) { valueSeparator = \"/\"; }\n return telemetryInfo\n .map(function (info) {\n var value = info.value ? \"\" + valueSeparator + info.value : \"\";\n return \"\" + info.key + value;\n })\n .join(keySeparator);\n}\nvar getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\nfunction getDefaultUserAgentValue() {\n var runtimeInfo = getRuntimeInfo();\n var platformSpecificData = getPlatformSpecificData();\n var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\nfunction userAgentPolicy(userAgentData) {\n var key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null\n ? getDefaultUserAgentKey()\n : userAgentData.key;\n var value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null\n ? 
getDefaultUserAgentValue()\n : userAgentData.value;\n return {\n create: function (nextPolicy, options) {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n }\n };\n}\nvar UserAgentPolicy = /** @class */ (function (_super) {\n tslib.__extends(UserAgentPolicy, _super);\n function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) {\n var _this = _super.call(this, _nextPolicy, _options) || this;\n _this._nextPolicy = _nextPolicy;\n _this._options = _options;\n _this.headerKey = headerKey;\n _this.headerValue = headerValue;\n return _this;\n }\n UserAgentPolicy.prototype.sendRequest = function (request) {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n };\n UserAgentPolicy.prototype.addUserAgentHeader = function (request) {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n };\n return UserAgentPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Methods that are allowed to follow redirects 301 and 302\n */\nvar allowedRedirect = [\"GET\", \"HEAD\"];\nvar DefaultRedirectOptions = {\n handleRedirects: true,\n maxRetries: 20\n};\nfunction redirectPolicy(maximumRetries) {\n if (maximumRetries === void 0) { maximumRetries = 20; }\n return {\n create: function (nextPolicy, options) {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n }\n };\n}\nvar RedirectPolicy = /** @class */ (function (_super) {\n tslib.__extends(RedirectPolicy, _super);\n function RedirectPolicy(nextPolicy, options, maxRetries) {\n if (maxRetries === void 0) { maxRetries = 20; }\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.maxRetries = maxRetries;\n return _this;\n }\n RedirectPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this._nextPolicy\n .sendRequest(request)\n .then(function (response) { return handleRedirect(_this, response, 0); });\n };\n return RedirectPolicy;\n}(BaseRequestPolicy));\nfunction handleRedirect(policy, response, currentRetries) {\n var request = response.request, status = response.status;\n var locationHeader = response.headers.get(\"location\");\n if (locationHeader &&\n (status === 300 ||\n (status === 301 && allowedRedirect.includes(request.method)) ||\n (status === 302 && allowedRedirect.includes(request.method)) ||\n (status === 303 && request.method === \"POST\") ||\n status === 307) &&\n (!policy.maxRetries || currentRetries < policy.maxRetries)) {\n var builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n // POST request with Status code 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n if (status === 303) {\n request.method = \"GET\";\n delete request.body;\n }\n return policy._nextPolicy\n .sendRequest(request)\n .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); });\n }\n return Promise.resolve(response);\n}\n\n// Copyright (c) Microsoft Corporation.\nfunction rpRegistrationPolicy(retryTimeout) {\n if (retryTimeout === void 0) { retryTimeout = 30; }\n return {\n create: function (nextPolicy, options) {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n }\n };\n}\nvar RPRegistrationPolicy = /** @class */ (function (_super) {\n tslib.__extends(RPRegistrationPolicy, _super);\n function 
RPRegistrationPolicy(nextPolicy, options, _retryTimeout) {\n if (_retryTimeout === void 0) { _retryTimeout = 30; }\n var _this = _super.call(this, nextPolicy, options) || this;\n _this._retryTimeout = _retryTimeout;\n return _this;\n }\n RPRegistrationPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this._nextPolicy\n .sendRequest(request.clone())\n .then(function (response) { return registerIfNeeded(_this, request, response); });\n };\n return RPRegistrationPolicy;\n}(BaseRequestPolicy));\nfunction registerIfNeeded(policy, request, response) {\n if (response.status === 409) {\n var rpName = checkRPNotRegisteredError(response.bodyAsText);\n if (rpName) {\n var urlPrefix = extractSubscriptionUrl(request.url);\n return (registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(function () { return false; })\n .then(function (registrationStatus) {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n }));\n }\n }\n return Promise.resolve(response);\n}\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param originalRequest - The original request\n * @param reuseUrlToo - Should the url from the original request be reused as well. Default false.\n * @returns A new request object with desired headers.\n */\nfunction getRequestEssentials(originalRequest, reuseUrlToo) {\n if (reuseUrlToo === void 0) { reuseUrlToo = false; }\n var reqOptions = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", generateUuid());\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n return reqOptions;\n}\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param body - The response body received after making the original request.\n * @returns The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body) {\n var result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n }\n catch (err) {\n // do nothing;\n }\n if (responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\") {\n var matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param url - The original request url\n * @returns The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url) {\n var result;\n var matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n }\n else {\n throw new Error(\"Unable to extract subscriptionId from the given url - \" + url + \".\");\n }\n return result;\n}\n/**\n * Registers the given provider.\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param provider - The provider name to be registered.\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param callback - The callback that handles the RP registration\n */\nfunction registerRP(policy, urlPrefix, provider, originalRequest) {\n var postUrl = urlPrefix + \"providers/\" + provider + \"/register?api-version=2016-02-01\";\n var getUrl = urlPrefix + \"providers/\" + provider + \"?api-version=2016-02-01\";\n var reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n return policy._nextPolicy.sendRequest(reqOptions).then(function (response) {\n if (response.status !== 200) {\n throw new Error(\"Autoregistration of \" + provider + \" failed. Please try registering manually.\");\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param url - The request url for polling\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns True if RP Registration is successful.\n */\nfunction getRegistrationStatus(policy, url, originalRequest) {\n var reqOptions = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n return policy._nextPolicy.sendRequest(reqOptions).then(function (res) {\n var obj = res.parsedBody;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n }\n else {\n return delay(policy._retryTimeout * 1000)\n .then(function () { return getRegistrationStatus(policy, url, originalRequest); });\n }\n });\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Defines the default token refresh buffer duration.\n */\nvar TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes\n/**\n * Provides an {@link AccessTokenCache} implementation which clears\n * the cached {@link AccessToken}'s after the expiresOnTimestamp has\n * passed.\n */\nvar ExpiringAccessTokenCache = /** @class */ (function () {\n /**\n * Constructs an instance of {@link ExpiringAccessTokenCache} with\n * an optional expiration buffer time.\n */\n function ExpiringAccessTokenCache(tokenRefreshBufferMs) {\n if (tokenRefreshBufferMs === void 0) { tokenRefreshBufferMs = TokenRefreshBufferMs; }\n this.cachedToken = undefined;\n this.tokenRefreshBufferMs = tokenRefreshBufferMs;\n }\n ExpiringAccessTokenCache.prototype.setCachedToken = function (accessToken) {\n this.cachedToken = accessToken;\n };\n ExpiringAccessTokenCache.prototype.getCachedToken = function () {\n if (this.cachedToken &&\n Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) {\n this.cachedToken = undefined;\n }\n return this.cachedToken;\n };\n return ExpiringAccessTokenCache;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token.\n */\nvar AccessTokenRefresher = /** @class */ (function () {\n function AccessTokenRefresher(credential, scopes, requiredMillisecondsBeforeNewRefresh) {\n if (requiredMillisecondsBeforeNewRefresh === void 0) { requiredMillisecondsBeforeNewRefresh = 30000; }\n this.credential = credential;\n this.scopes = scopes;\n this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh;\n this.lastCalled = 0;\n }\n /**\n * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying\n * that we are ready for a new refresh.\n */\n AccessTokenRefresher.prototype.isReady = function () {\n // We're only ready for a new refresh if the required milliseconds have passed.\n return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh);\n };\n /**\n * Stores the time in which it is called,\n * then requests a new token,\n * then sets this.promise to undefined,\n * then returns the token.\n */\n AccessTokenRefresher.prototype.getToken = function (options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var token;\n return tslib.__generator(this, function (_a) {\n 
switch (_a.label) {\n case 0:\n this.lastCalled = Date.now();\n return [4 /*yield*/, this.credential.getToken(this.scopes, options)];\n case 1:\n token = _a.sent();\n this.promise = undefined;\n return [2 /*return*/, token || undefined];\n }\n });\n });\n };\n /**\n * Requests a new token if we're not currently waiting for a new token.\n * Returns null if the required time between each call hasn't been reached.\n */\n AccessTokenRefresher.prototype.refresh = function (options) {\n if (!this.promise) {\n this.promise = this.getToken(options);\n }\n return this.promise;\n };\n return AccessTokenRefresher;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * The automated token refresh will only start to happen at the\n * expiration date minus the value of timeBetweenRefreshAttemptsInMs,\n * which is by default 30 seconds.\n */\nvar timeBetweenRefreshAttemptsInMs = 30000;\n/**\n * Creates a new BearerTokenAuthenticationPolicy factory.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\nfunction bearerTokenAuthenticationPolicy(credential, scopes) {\n var tokenCache = new ExpiringAccessTokenCache();\n var tokenRefresher = new AccessTokenRefresher(credential, scopes, timeBetweenRefreshAttemptsInMs);\n return {\n create: function (nextPolicy, options) {\n return new BearerTokenAuthenticationPolicy(nextPolicy, options, tokenCache, tokenRefresher);\n }\n };\n}\n/**\n *\n * Provides a RequestPolicy that can request a token from a TokenCredential\n * implementation and then apply it to the Authorization header of a request\n * as a Bearer token.\n *\n */\nvar BearerTokenAuthenticationPolicy = /** @class */ (function (_super) {\n tslib.__extends(BearerTokenAuthenticationPolicy, _super);\n /**\n * Creates a new BearerTokenAuthenticationPolicy object.\n *\n * @param nextPolicy - The next RequestPolicy in the request pipeline.\n * @param options - Options for this RequestPolicy.\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n * @param tokenCache - The cache for the most recent AccessToken returned from the TokenCredential.\n */\n function BearerTokenAuthenticationPolicy(nextPolicy, options, tokenCache, tokenRefresher) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.tokenCache = tokenCache;\n _this.tokenRefresher = tokenRefresher;\n return _this;\n }\n /**\n * Applies the Bearer token to the request through the Authorization header.\n */\n BearerTokenAuthenticationPolicy.prototype.sendRequest = function (webResource) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var token;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!webResource.headers)\n webResource.headers = new HttpHeaders();\n return [4 /*yield*/, this.getToken({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n spanOptions: webResource.spanOptions\n }\n })];\n case 1:\n token = _a.sent();\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, \"Bearer \" + token);\n return [2 /*return*/, this._nextPolicy.sendRequest(webResource)];\n }\n });\n });\n };\n /**\n * Attempts a token update if any other time related conditionals have been reached based on the tokenRefresher class.\n */\n BearerTokenAuthenticationPolicy.prototype.updateTokenIfNeeded = function (options) {\n return tslib.__awaiter(this, void 0, void 
0, function () {\n var accessToken;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!this.tokenRefresher.isReady()) return [3 /*break*/, 2];\n return [4 /*yield*/, this.tokenRefresher.refresh(options)];\n case 1:\n accessToken = _a.sent();\n this.tokenCache.setCachedToken(accessToken);\n _a.label = 2;\n case 2: return [2 /*return*/];\n }\n });\n });\n };\n BearerTokenAuthenticationPolicy.prototype.getToken = function (options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var accessToken;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n accessToken = this.tokenCache.getCachedToken();\n if (!(accessToken === undefined)) return [3 /*break*/, 2];\n return [4 /*yield*/, this.tokenRefresher.refresh(options)];\n case 1:\n // Waiting for the next refresh only if the cache is unable to retrieve the access token,\n // which means that it has expired, or it has never been set.\n accessToken = _a.sent();\n this.tokenCache.setCachedToken(accessToken);\n return [3 /*break*/, 3];\n case 2:\n // If we still have a cached access token,\n // And any other time related conditionals have been reached based on the tokenRefresher class,\n // then attempt to refresh without waiting.\n this.updateTokenIfNeeded(options);\n _a.label = 3;\n case 3: return [2 /*return*/, accessToken ? accessToken.token : undefined];\n }\n });\n });\n };\n return BearerTokenAuthenticationPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nfunction systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) {\n return {\n create: function (nextPolicy, options) {\n return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval);\n }\n };\n}\n/**\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\nvar SystemErrorRetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(SystemErrorRetryPolicy, _super);\n function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n _this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n _this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n return _this;\n }\n SystemErrorRetryPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch(function (error) { return retry$1(_this, request, error.response, error); });\n };\n return SystemErrorRetryPolicy;\n}(BaseRequestPolicy));\nfunction retry$1(policy, request, operationResponse, err, retryData) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n function shouldPolicyRetry(_response, error) {\n if (error &&\n error.code &&\n (error.code === \"ETIMEDOUT\" ||\n error.code === \"ESOCKETTIMEDOUT\" ||\n error.code === \"ECONNREFUSED\" ||\n error.code === \"ECONNRESET\" ||\n error.code === \"ENOENT\")) {\n return true;\n }\n return false;\n }\n var nestedErr_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n retryData = updateRetryData(policy, retryData, err);\n if (!shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) return [3 /*break*/, 5];\n _a.label = 1;\n case 1:\n _a.trys.push([1, 3, , 4]);\n return [4 /*yield*/, delay(retryData.retryInterval)];\n case 2:\n _a.sent();\n return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())];\n case 3:\n nestedErr_1 = _a.sent();\n return [2 /*return*/, retry$1(policy, request, operationResponse, nestedErr_1, retryData)];\n case 4: return [3 /*break*/, 6];\n case 5:\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return [2 /*return*/, Promise.reject(retryData.error)];\n }\n return [2 /*return*/, operationResponse];\n case 6: return [2 /*return*/];\n }\n });\n });\n}\n\n// Copyright (c) Microsoft Corporation.\n(function (QueryCollectionFormat) {\n QueryCollectionFormat[\"Csv\"] = \",\";\n QueryCollectionFormat[\"Ssv\"] = \" \";\n QueryCollectionFormat[\"Tsv\"] = \"\\t\";\n QueryCollectionFormat[\"Pipes\"] = \"|\";\n QueryCollectionFormat[\"Multi\"] = \"Multi\";\n})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {}));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * @internal\n */\nvar noProxyList = loadNoProxy();\nvar byPassedList = new Map();\nfunction loadEnvironmentProxyValue() {\n if (!process) {\n return undefined;\n }\n var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n var allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n return httpsProxy || allProxy || httpProxy;\n}\n// Check whether the host of a given `uri` is in the noProxyList.\n// If there's a match, any request sent to the same host won't have the proxy settings set.\n// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\nfunction isBypassed(uri) {\n if (noProxyList.length === 0) {\n return false;\n }\n var host = URLBuilder.parse(uri).getHost();\n if (byPassedList.has(host)) {\n return byPassedList.get(host);\n }\n var isBypassedFlag = false;\n for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) {\n var pattern = noProxyList_1[_i];\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n }\n else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n 
isBypassedFlag = true;\n }\n }\n }\n else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n byPassedList.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n/**\n * @internal\n */\nfunction loadNoProxy() {\n var noProxy = getEnvironmentValue(Constants.NO_PROXY);\n if (noProxy) {\n return noProxy\n .split(\",\")\n .map(function (item) { return item.trim(); })\n .filter(function (item) { return item.length; });\n }\n return [];\n}\nfunction getDefaultProxySettings(proxyUrl) {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth;\n var parsedUrl = URLBuilder.parse(urlWithoutAuth);\n var schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username: username,\n password: password\n };\n}\nfunction proxyPolicy(proxySettings) {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n return {\n create: function (nextPolicy, options) {\n return new ProxyPolicy(nextPolicy, options, proxySettings);\n }\n };\n}\nfunction extractAuthFromUrl(url) {\n var atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n var schemeIndex = url.indexOf(\"://\");\n var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n var auth = url.substring(authStart, atIndex);\n var colonIndex = auth.indexOf(\":\");\n var hasPassword = colonIndex !== -1;\n var username = hasPassword ? auth.substring(0, colonIndex) : auth;\n var password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username: username,\n password: password,\n urlWithoutAuth: urlWithoutAuth\n };\n}\nvar ProxyPolicy = /** @class */ (function (_super) {\n tslib.__extends(ProxyPolicy, _super);\n function ProxyPolicy(nextPolicy, options, proxySettings) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.proxySettings = proxySettings;\n return _this;\n }\n ProxyPolicy.prototype.sendRequest = function (request) {\n if (!request.proxySettings && !isBypassed(request.url)) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n };\n return ProxyPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nvar StatusCodes = Constants.HttpConstants.StatusCodes;\nfunction throttlingRetryPolicy() {\n return {\n create: function (nextPolicy, options) {\n return new ThrottlingRetryPolicy(nextPolicy, options);\n }\n };\n}\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nvar ThrottlingRetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(ThrottlingRetryPolicy, _super);\n function ThrottlingRetryPolicy(nextPolicy, options, _handleResponse) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this._handleResponse = _handleResponse || _this._defaultResponseHandler;\n return _this;\n }\n ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) {\n return tslib.__awaiter(this, void 0, void 0, function 
() {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) {\n if (response.status !== StatusCodes.TooManyRequests) {\n return response;\n }\n else {\n return _this._handleResponse(httpRequest, response);\n }\n })];\n });\n });\n };\n ThrottlingRetryPolicy.prototype._defaultResponseHandler = function (httpRequest, httpResponse) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var retryAfterHeader, delayInMs;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER);\n if (retryAfterHeader) {\n delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);\n if (delayInMs) {\n return [2 /*return*/, delay(delayInMs).then(function (_) { return _this._nextPolicy.sendRequest(httpRequest); })];\n }\n }\n return [2 /*return*/, httpResponse];\n });\n });\n };\n ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) {\n var retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n }\n else {\n return retryAfterInSeconds * 1000;\n }\n };\n ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) {\n try {\n var now = Date.now();\n var date = Date.parse(headerValue);\n var diff = date - now;\n return Number.isNaN(diff) ? undefined : diff;\n }\n catch (error) {\n return undefined;\n }\n };\n return ThrottlingRetryPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nfunction signingPolicy(authenticationProvider) {\n return {\n create: function (nextPolicy, options) {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n }\n };\n}\nvar SigningPolicy = /** @class */ (function (_super) {\n tslib.__extends(SigningPolicy, _super);\n function SigningPolicy(nextPolicy, options, authenticationProvider) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.authenticationProvider = authenticationProvider;\n return _this;\n }\n SigningPolicy.prototype.signRequest = function (request) {\n return this.authenticationProvider.signRequest(request);\n };\n SigningPolicy.prototype.sendRequest = function (request) {\n var _this = this;\n return this.signRequest(request).then(function (nextRequest) {\n return _this._nextPolicy.sendRequest(nextRequest);\n });\n };\n return SigningPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nvar DefaultKeepAliveOptions = {\n enable: true\n};\nfunction keepAlivePolicy(keepAliveOptions) {\n return {\n create: function (nextPolicy, options) {\n return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);\n }\n };\n}\n/**\n * KeepAlivePolicy is a policy used to control keep alive settings for every request.\n */\nvar KeepAlivePolicy = /** @class */ (function (_super) {\n tslib.__extends(KeepAlivePolicy, _super);\n /**\n * Creates an instance of KeepAlivePolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param keepAliveOptions -\n */\n function KeepAlivePolicy(nextPolicy, options, keepAliveOptions) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.keepAliveOptions = keepAliveOptions;\n return _this;\n }\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n KeepAlivePolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, 
void 0, function () {\n return tslib.__generator(this, function (_a) {\n request.keepAlive = this.keepAliveOptions.enable;\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return KeepAlivePolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nfunction tracingPolicy(tracingOptions) {\n if (tracingOptions === void 0) { tracingOptions = {}; }\n return {\n create: function (nextPolicy, options) {\n return new TracingPolicy(nextPolicy, options, tracingOptions);\n }\n };\n}\nvar TracingPolicy = /** @class */ (function (_super) {\n tslib.__extends(TracingPolicy, _super);\n function TracingPolicy(nextPolicy, options, tracingOptions) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.userAgent = tracingOptions.userAgent;\n return _this;\n }\n TracingPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var tracer, spanOptions, path, span, spanContext, traceParentHeader, traceState, response, serviceRequestId, err_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!request.spanOptions || !request.spanOptions.parent) {\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n }\n tracer = coreTracing.getTracer();\n spanOptions = tslib.__assign(tslib.__assign({}, request.spanOptions), { kind: api.SpanKind.CLIENT });\n path = URLBuilder.parse(request.url).getPath() || \"/\";\n span = tracer.startSpan(path, spanOptions);\n span.setAttributes({\n \"http.method\": request.method,\n \"http.url\": request.url,\n requestId: request.requestId\n });\n if (this.userAgent) {\n span.setAttribute(\"http.user_agent\", this.userAgent);\n }\n _a.label = 1;\n case 1:\n _a.trys.push([1, 3, , 4]);\n spanContext = span.context();\n traceParentHeader = coreTracing.getTraceParentHeader(spanContext);\n if (traceParentHeader) {\n request.headers.set(\"traceparent\", traceParentHeader);\n traceState = spanContext.traceState && spanContext.traceState.serialize();\n // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent\n if (traceState) {\n request.headers.set(\"tracestate\", traceState);\n }\n }\n return [4 /*yield*/, this._nextPolicy.sendRequest(request)];\n case 2:\n response = _a.sent();\n span.setAttribute(\"http.status_code\", response.status);\n serviceRequestId = response.headers.get(\"x-ms-request-id\");\n if (serviceRequestId) {\n span.setAttribute(\"serviceRequestId\", serviceRequestId);\n }\n span.end();\n return [2 /*return*/, response];\n case 3:\n err_1 = _a.sent();\n span.end();\n throw err_1;\n case 4: return [2 /*return*/];\n }\n });\n });\n };\n return TracingPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Returns a request policy factory that can be used to create an instance of\n * {@link DisableResponseDecompressionPolicy}.\n */\nfunction disableResponseDecompressionPolicy() {\n return {\n create: function (nextPolicy, options) {\n return new DisableResponseDecompressionPolicy(nextPolicy, options);\n }\n };\n}\n/**\n * A policy to disable response decompression according to Accept-Encoding header\n * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding\n */\nvar DisableResponseDecompressionPolicy = /** @class */ (function (_super) {\n tslib.__extends(DisableResponseDecompressionPolicy, _super);\n /**\n * Creates an instance of DisableResponseDecompressionPolicy.\n *\n * @param nextPolicy -\n * @param options -\n */\n // The parent 
constructor is protected.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */\n function DisableResponseDecompressionPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n DisableResponseDecompressionPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n request.decompressResponse = false;\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return DisableResponseDecompressionPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nfunction ndJsonPolicy() {\n return {\n create: function (nextPolicy, options) {\n return new NdJsonPolicy(nextPolicy, options);\n }\n };\n}\n/**\n * NdJsonPolicy that formats a JSON array as newline-delimited JSON\n */\nvar NdJsonPolicy = /** @class */ (function (_super) {\n tslib.__extends(NdJsonPolicy, _super);\n /**\n * Creates an instance of KeepAlivePolicy.\n */\n function NdJsonPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n /**\n * Sends a request.\n */\n NdJsonPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var body;\n return tslib.__generator(this, function (_a) {\n // There currently isn't a good way to bypass the serializer\n if (typeof request.body === \"string\" && request.body.startsWith(\"[\")) {\n body = JSON.parse(request.body);\n if (Array.isArray(body)) {\n request.body = body.map(function (item) { return JSON.stringify(item) + \"\\n\"; }).join(\"\");\n }\n }\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return NdJsonPolicy;\n}(BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation.\nvar cachedHttpClient;\nfunction getCachedDefaultHttpClient() {\n if (!cachedHttpClient) {\n cachedHttpClient = new NodeFetchHttpClient();\n }\n return cachedHttpClient;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * ServiceClient sends service requests and receives responses.\n */\nvar ServiceClient = /** @class */ (function () {\n /**\n * The ServiceClient constructor\n * @param credentials - The credentials used for authentication with the service.\n * @param options - The service client options that govern the behavior of the client.\n */\n function ServiceClient(credentials, \n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */\n options) {\n var _this = this;\n if (!options) {\n options = {};\n }\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || getCachedDefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n var requestPolicyFactories;\n if (Array.isArray(options.requestPolicyFactories)) {\n logger.info(\"ServiceClient: using custom request policies\");\n requestPolicyFactories = options.requestPolicyFactories;\n }\n else {\n var authPolicyFactory = undefined;\n if (coreAuth.isTokenCredential(credentials)) {\n logger.info(\"ServiceClient: creating bearer token authentication policy from provided credentials\");\n // Create a wrapped RequestPolicyFactory here so that we can provide the\n // correct scope to the BearerTokenAuthenticationPolicy at the first time\n // one is requested. 
This is needed because generated ServiceClient\n // implementations do not set baseUri until after ServiceClient's constructor\n // is finished, leaving baseUri empty at the time when it is needed to\n // build the correct scope name.\n var wrappedPolicyFactory = function () {\n var bearerTokenPolicyFactory = undefined;\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n var serviceClient = _this;\n var serviceClientOptions = options;\n return {\n create: function (nextPolicy, createOptions) {\n var credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri);\n if (!credentialScopes) {\n throw new Error(\"When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy\");\n }\n if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) {\n bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes);\n }\n return bearerTokenPolicyFactory.create(nextPolicy, createOptions);\n }\n };\n };\n authPolicyFactory = wrappedPolicyFactory();\n }\n else if (credentials && typeof credentials.signRequest === \"function\") {\n logger.info(\"ServiceClient: creating signing policy from provided credentials\");\n authPolicyFactory = signingPolicy(credentials);\n }\n else if (credentials !== undefined && credentials !== null) {\n throw new Error(\"The credentials argument must implement the TokenCredential interface\");\n }\n logger.info(\"ServiceClient: using default request policies\");\n requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options);\n if (options.requestPolicyFactories) {\n // options.requestPolicyFactories can also be a function that manipulates\n // the default requestPolicyFactories array\n var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n /**\n * Send the provided httpRequest.\n */\n ServiceClient.prototype.sendRequest = function (options) {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n var httpRequest;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n }\n else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n }\n catch (error) {\n return Promise.reject(error);\n }\n var httpPipeline = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions);\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n };\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.\n * @param operationSpec - The OperationSpec to use to populate the httpRequest.\n * @param callback - The callback to call when the response is received.\n */\n ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) {\n var _a;\n return tslib.__awaiter(this, void 0, void 0, function () {\n var 
serializerOptions, httpRequest, result, baseUri, requestUrl, _i, _b, urlParameter, urlParameterValue, _c, _d, queryParameter, queryParameterValue, index, item, index, contentType, _e, _f, headerParameter, headerValue, headerCollectionPrefix, _g, _h, key, options, customHeaderName, rawResponse, sendRequestError, error_1, error_2, cb;\n return tslib.__generator(this, function (_j) {\n switch (_j.label) {\n case 0:\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions;\n httpRequest = new WebResource();\n _j.label = 1;\n case 1:\n _j.trys.push([1, 6, , 7]);\n baseUri = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\");\n }\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n requestUrl = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (_i = 0, _b = operationSpec.urlParameters; _i < _b.length; _i++) {\n urlParameter = _b[_i];\n urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer);\n urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions);\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\"{\" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + \"}\", urlParameterValue);\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (_c = 0, _d = operationSpec.queryParameters; _c < _d.length; _c++) {\n queryParameter = _d[_c];\n queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer);\n if (queryParameterValue !== undefined && queryParameterValue !== null) {\n queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions);\n if (queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null) {\n if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) {\n if (queryParameterValue.length === 0) {\n // The collection is empty, no need to try serializing the current queryParam\n continue;\n }\n else {\n for (index in queryParameterValue) {\n item = queryParameterValue[index];\n queryParameterValue[index] =\n item === undefined || item === null ? 
\"\" : item.toString();\n }\n }\n }\n else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (index in queryParameterValue) {\n if (queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n }\n else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null &&\n queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue);\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n contentType = operationSpec.contentType || this.requestContentType;\n if (contentType && operationSpec.requestBody) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n if (operationSpec.headerParameters) {\n for (_e = 0, _f = operationSpec.headerParameters; _e < _f.length; _e++) {\n headerParameter = _f[_e];\n headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer);\n if (headerValue !== undefined && headerValue !== null) {\n headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions);\n headerCollectionPrefix = headerParameter.mapper\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (_g = 0, _h = Object.keys(headerValue); _g < _h.length; _g++) {\n key = _h[_g];\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n }\n else {\n httpRequest.headers.set(headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter), headerValue);\n }\n }\n }\n }\n options = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n if (options.spanOptions) {\n httpRequest.spanOptions = options.spanOptions;\n }\n if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) {\n httpRequest.shouldDeserialize = options.shouldDeserialize;\n }\n }\n httpRequest.withCredentials = this._withCredentials;\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n if (httpRequest.streamResponseStatusCodes === undefined) {\n httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec);\n }\n rawResponse = void 0;\n sendRequestError = void 0;\n _j.label = 2;\n 
case 2:\n _j.trys.push([2, 4, , 5]);\n return [4 /*yield*/, this.sendRequest(httpRequest)];\n case 3:\n rawResponse = _j.sent();\n return [3 /*break*/, 5];\n case 4:\n error_1 = _j.sent();\n sendRequestError = error_1;\n return [3 /*break*/, 5];\n case 5:\n if (sendRequestError) {\n if (sendRequestError.response) {\n sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] ||\n operationSpec.responses[\"default\"]);\n }\n result = Promise.reject(sendRequestError);\n }\n else {\n result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status]));\n }\n return [3 /*break*/, 7];\n case 6:\n error_2 = _j.sent();\n result = Promise.reject(error_2);\n return [3 /*break*/, 7];\n case 7:\n cb = callback;\n if (cb) {\n result\n .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); })\n .catch(function (err) { return cb(err); });\n }\n return [2 /*return*/, result];\n }\n });\n });\n };\n return ServiceClient;\n}());\nfunction serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) {\n var _a, _b, _c, _d, _e, _f;\n var serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {};\n var updatedOptions = {\n rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : \"\",\n includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false,\n xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY\n };\n var xmlCharKey = serializerOptions.xmlCharKey;\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer);\n var bodyMapper = operationSpec.requestBody.mapper;\n var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName, xmlNamespace = bodyMapper.xmlNamespace, xmlNamespacePrefix = bodyMapper.xmlNamespacePrefix;\n var typeName = bodyMapper.type.name;\n try {\n if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) {\n var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody);\n httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions);\n var isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n var xmlnsKey = xmlNamespacePrefix ? \"xmlns:\" + xmlNamespacePrefix : \"xmlns\";\n var value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions);\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), {\n rootName: xmlName || serializedName,\n xmlCharKey: xmlCharKey\n });\n }\n else if (!isStream) {\n httpRequest.body = stringifyXML(value, {\n rootName: xmlName || serializedName,\n xmlCharKey: xmlCharKey\n });\n }\n }\n else if (typeName === MapperType.String &&\n (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match(\"text/plain\")) || operationSpec.mediaType === \"text\")) {\n // the String serializer has validated that request body is a string\n // so just send the string.\n return;\n }\n else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n }\n catch (error) {\n throw new Error(\"Error \\\"\" + error.message + \"\\\" occurred in serializing the payload - \" + JSON.stringify(serializedName, undefined, \" \") + \".\");\n }\n }\n else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (var _i = 0, _g = operationSpec.formDataParameters; _i < _g.length; _i++) {\n var formDataParameter = _g[_i];\n var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer);\n if (formDataParameterValue !== undefined && formDataParameterValue !== null) {\n var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions);\n }\n }\n }\n}\n/**\n * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself\n */\nfunction getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) {\n var _a;\n // Composite and Sequence schemas already got their root namespace set during serialization\n // We just need to add xmlns to the other schema types\n if (xmlNamespace && ![\"Composite\", \"Sequence\", \"Dictionary\"].includes(typeName)) {\n var result = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = xmlNamespace, _a);\n return result;\n }\n return serializedValue;\n}\nfunction getValueOrFunctionResult(value, defaultValueCreator) {\n var result;\n if (typeof value === \"string\") {\n result = value;\n }\n else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\nfunction createDefaultRequestPolicyFactories(authPolicyFactory, options) {\n var factories = [];\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n if (authPolicyFactory) {\n factories.push(authPolicyFactory);\n }\n var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName);\n var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue);\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n factories.push(redirectPolicy());\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n {\n factories.push(proxyPolicy(options.proxySettings));\n }\n factories.push(logPolicy({ logger: logger.info }));\n return factories;\n}\nfunction createPipelineFromOptions(pipelineOptions, authPolicyFactory) {\n var requestPolicyFactories = [];\n if (pipelineOptions.sendStreamingJson) {\n 
requestPolicyFactories.push(ndJsonPolicy());\n }\n var userAgentValue = undefined;\n if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) {\n var userAgentInfo = [];\n userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix);\n // Add the default user agent value if it isn't already specified\n // by the userAgentPrefix option.\n var defaultUserAgentInfo = getDefaultUserAgentValue();\n if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) {\n userAgentInfo.push(defaultUserAgentInfo);\n }\n userAgentValue = userAgentInfo.join(\" \");\n }\n var keepAliveOptions = tslib.__assign(tslib.__assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);\n var retryOptions = tslib.__assign(tslib.__assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);\n var redirectOptions = tslib.__assign(tslib.__assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);\n {\n requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));\n }\n var deserializationOptions = tslib.__assign(tslib.__assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);\n var loggingOptions = tslib.__assign({}, pipelineOptions.loggingOptions);\n requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs));\n if (redirectOptions.handleRedirects) {\n requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n if (authPolicyFactory) {\n requestPolicyFactories.push(authPolicyFactory);\n }\n requestPolicyFactories.push(logPolicy(loggingOptions));\n if (isNode && pipelineOptions.decompressResponse === false) {\n requestPolicyFactories.push(disableResponseDecompressionPolicy());\n }\n return {\n httpClient: pipelineOptions.httpClient,\n requestPolicyFactories: requestPolicyFactories\n };\n}\nfunction getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) {\n return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer);\n}\nfunction getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) {\n var _a;\n var value;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n var serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions;\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n }\n else {\n var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath);\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n var useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n // Serialize just for validation purposes.\n var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper);\n serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions);\n }\n }\n else {\n if (parameterMapper.required) {\n value = {};\n }\n for (var propertyName in parameterPath) {\n var propertyMapper = parameterMapper.type.modelProperties[propertyName];\n var propertyPath = parameterPath[propertyName];\n var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer);\n // Serialize just for validation purposes.\n var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper);\n serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions);\n if (propertyValue !== undefined && propertyValue !== null) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\nfunction getPropertyFromParameterPath(parent, parameterPath) {\n var result = { propertyFound: false };\n var i = 0;\n for (; i < parameterPath.length; ++i) {\n var parameterPathPart = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent !== undefined && parent !== null && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n }\n else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\nfunction flattenResponse(_response, responseSpec) {\n var parsedHeaders = _response.parsedHeaders;\n var bodyMapper = responseSpec && responseSpec.bodyMapper;\n var addOperationResponse = function (obj) {\n return Object.defineProperty(obj, \"_response\", {\n value: _response\n });\n };\n if (bodyMapper) {\n var typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody }));\n }\n var modelProperties_1 = (typeName === \"Composite\" && bodyMapper.type.modelProperties) || {};\n var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === \"\"; });\n if (typeName === \"Sequence\" || isPageableResponse) {\n var arrayResponse = tslib.__spreadArrays((_response.parsedBody || []));\n for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) {\n var key = _a[_i];\n if (modelProperties_1[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n if (parsedHeaders) {\n for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) {\n var key = _c[_b];\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody));\n }\n }\n if (bodyMapper ||\n _response.request.method === \"HEAD\" ||\n isPrimitiveType(_response.parsedBody)) {\n // primitive body types and HEAD booleans\n return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { body: _response.parsedBody }));\n }\n return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody));\n}\nfunction 
getCredentialScopes(options, baseUri) {\n if (options === null || options === void 0 ? void 0 : options.credentialScopes) {\n var scopes = options.credentialScopes;\n return Array.isArray(scopes)\n ? scopes.map(function (scope) { return new url.URL(scope).toString(); })\n : new url.URL(scopes).toString();\n }\n if (baseUri) {\n return baseUri + \"/.default\";\n }\n return undefined;\n}\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Creates a function called createSpan to create spans using the global tracer.\n * @hidden\n * @param spanConfig - The name of the operation being performed.\n * @param tracingOptions - The options for the underlying http request.\n */\nfunction createSpanFunction(_a) {\n var packagePrefix = _a.packagePrefix, namespace = _a.namespace;\n return function (operationName, operationOptions) {\n var tracer = coreTracing.getTracer();\n var tracingOptions = operationOptions.tracingOptions || {};\n var spanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { kind: api.SpanKind.INTERNAL });\n var span = tracer.startSpan(packagePrefix + \".\" + operationName, spanOptions);\n span.setAttribute(\"az.namespace\", namespace);\n var newSpanOptions = tracingOptions.spanOptions || {};\n if (span.isRecording()) {\n newSpanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { parent: span.context(), attributes: tslib.__assign(tslib.__assign({}, spanOptions.attributes), { \"az.namespace\": namespace }) });\n }\n var newTracingOptions = tslib.__assign(tslib.__assign({}, tracingOptions), { spanOptions: newSpanOptions });\n var newOperationOptions = tslib.__assign(tslib.__assign({}, operationOptions), { tracingOptions: newTracingOptions });\n return {\n span: span,\n updatedOptions: newOperationOptions\n };\n };\n}\n\n// Copyright (c) Microsoft Corporation.\nvar HeaderConstants = Constants.HeaderConstants;\nvar DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\nvar BasicAuthenticationCredentials = /** @class */ (function () {\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @param userName - User name.\n * @param password - Password.\n * @param authorizationScheme - The authorization scheme.\n */\n function BasicAuthenticationCredentials(userName, password, authorizationScheme) {\n if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; }\n this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME;\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n BasicAuthenticationCredentials.prototype.signRequest = function (webResource) {\n var credentials = this.userName + \":\" + this.password;\n var encodedCredentials = this.authorizationScheme + \" \" + encodeString(credentials);\n if (!webResource.headers)\n webResource.headers = new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n };\n return 
BasicAuthenticationCredentials;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * Authenticates to a service using an API key.\n */\nvar ApiKeyCredentials = /** @class */ (function () {\n /**\n * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n function ApiKeyCredentials(options) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\"options cannot be null or undefined. Either \\\"inHeader\\\" or \\\"inQuery\\\" property of the options object needs to be provided.\");\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n ApiKeyCredentials.prototype.signRequest = function (webResource) {\n if (!webResource) {\n return Promise.reject(new Error(\"webResource cannot be null or undefined and must be of type \\\"object\\\".\"));\n }\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (var headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(\"url cannot be null in the request object.\"));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (var key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += key + \"=\" + this.inQuery[key];\n }\n }\n return Promise.resolve(webResource);\n };\n return ApiKeyCredentials;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar TopicCredentials = /** @class */ (function (_super) {\n tslib.__extends(TopicCredentials, _super);\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @param topicKey - The EventGrid topic key\n */\n function TopicCredentials(topicKey) {\n var _this = this;\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n var options = {\n inHeader: {\n \"aeg-sas-key\": topicKey\n }\n };\n _this = _super.call(this, options) || this;\n return _this;\n }\n return TopicCredentials;\n}(ApiKeyCredentials));\n\nObject.defineProperty(exports, 'isTokenCredential', {\n enumerable: true,\n get: function () {\n return coreAuth.isTokenCredential;\n }\n});\nexports.AccessTokenRefresher = AccessTokenRefresher;\nexports.ApiKeyCredentials = ApiKeyCredentials;\nexports.BaseRequestPolicy = BaseRequestPolicy;\nexports.BasicAuthenticationCredentials = BasicAuthenticationCredentials;\nexports.Constants = Constants;\nexports.DefaultHttpClient = NodeFetchHttpClient;\nexports.ExpiringAccessTokenCache = ExpiringAccessTokenCache;\nexports.HttpHeaders = HttpHeaders;\nexports.MapperType = MapperType;\nexports.RequestPolicyOptions = RequestPolicyOptions;\nexports.RestError = RestError;\nexports.Serializer = Serializer;\nexports.ServiceClient = ServiceClient;\nexports.TopicCredentials = TopicCredentials;\nexports.URLBuilder = URLBuilder;\nexports.URLQuery = URLQuery;\nexports.WebResource = WebResource;\nexports.XML_ATTRKEY = XML_ATTRKEY;\nexports.XML_CHARKEY = XML_CHARKEY;\nexports.applyMixins = applyMixins;\nexports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy;\nexports.createPipelineFromOptions = 
createPipelineFromOptions;\nexports.createSpanFunction = createSpanFunction;\nexports.delay = delay;\nexports.deserializationPolicy = deserializationPolicy;\nexports.deserializeResponseBody = deserializeResponseBody;\nexports.disableResponseDecompressionPolicy = disableResponseDecompressionPolicy;\nexports.encodeUri = encodeUri;\nexports.executePromisesSequentially = executePromisesSequentially;\nexports.exponentialRetryPolicy = exponentialRetryPolicy;\nexports.flattenResponse = flattenResponse;\nexports.generateClientRequestIdPolicy = generateClientRequestIdPolicy;\nexports.generateUuid = generateUuid;\nexports.getDefaultProxySettings = getDefaultProxySettings;\nexports.getDefaultUserAgentValue = getDefaultUserAgentValue;\nexports.isDuration = isDuration;\nexports.isNode = isNode;\nexports.isValidUuid = isValidUuid;\nexports.keepAlivePolicy = keepAlivePolicy;\nexports.logPolicy = logPolicy;\nexports.operationOptionsToRequestOptionsBase = operationOptionsToRequestOptionsBase;\nexports.parseXML = parseXML;\nexports.promiseToCallback = promiseToCallback;\nexports.promiseToServiceCallback = promiseToServiceCallback;\nexports.proxyPolicy = proxyPolicy;\nexports.redirectPolicy = redirectPolicy;\nexports.serializeObject = serializeObject;\nexports.signingPolicy = signingPolicy;\nexports.stringifyXML = stringifyXML;\nexports.stripRequest = stripRequest;\nexports.stripResponse = stripResponse;\nexports.systemErrorRetryPolicy = systemErrorRetryPolicy;\nexports.throttlingRetryPolicy = throttlingRetryPolicy;\nexports.tracingPolicy = tracingPolicy;\nexports.userAgentPolicy = userAgentPolicy;\n//# sourceMappingURL=index.js.map\n","var CombinedStream = require('combined-stream');\nvar util = require('util');\nvar path = require('path');\nvar http = require('http');\nvar https = require('https');\nvar parseUrl = require('url').parse;\nvar fs = require('fs');\nvar mime = require('mime-types');\nvar asynckit = require('asynckit');\nvar populate = require('./populate.js');\n\n// Public API\nmodule.exports = FormData;\n\n// make it a Stream\nutil.inherits(FormData, CombinedStream);\n\n/**\n * Create readable \"multipart/form-data\" streams.\n * Can be used to submit forms\n * and file uploads to other web applications.\n *\n * @constructor\n * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream\n */\nfunction FormData(options) {\n if (!(this instanceof FormData)) {\n return new FormData(options);\n }\n\n this._overheadLength = 0;\n this._valueLength = 0;\n this._valuesToMeasure = [];\n\n CombinedStream.call(this);\n\n options = options || {};\n for (var option in options) {\n this[option] = options[option];\n }\n}\n\nFormData.LINE_BREAK = '\\r\\n';\nFormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream';\n\nFormData.prototype.append = function(field, value, options) {\n\n options = options || {};\n\n // allow filename as single option\n if (typeof options == 'string') {\n options = {filename: options};\n }\n\n var append = CombinedStream.prototype.append.bind(this);\n\n // all that streamy business can't handle numbers\n if (typeof value == 'number') {\n value = '' + value;\n }\n\n // https://github.com/felixge/node-form-data/issues/38\n if (util.isArray(value)) {\n // Please convert your array into string\n // the way web server expects it\n this._error(new Error('Arrays are not supported.'));\n return;\n }\n\n var header = this._multiPartHeader(field, value, options);\n var footer = this._multiPartFooter();\n\n append(header);\n append(value);\n 
append(footer);\n\n // pass along options.knownLength\n this._trackLength(header, value, options);\n};\n\nFormData.prototype._trackLength = function(header, value, options) {\n var valueLength = 0;\n\n // used w/ getLengthSync(), when length is known.\n // e.g. for streaming directly from a remote server,\n // w/ a known file a size, and not wanting to wait for\n // incoming file to finish to get its size.\n if (options.knownLength != null) {\n valueLength += +options.knownLength;\n } else if (Buffer.isBuffer(value)) {\n valueLength = value.length;\n } else if (typeof value === 'string') {\n valueLength = Buffer.byteLength(value);\n }\n\n this._valueLength += valueLength;\n\n // @check why add CRLF? does this account for custom/multiple CRLFs?\n this._overheadLength +=\n Buffer.byteLength(header) +\n FormData.LINE_BREAK.length;\n\n // empty or either doesn't have path or not an http response\n if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) {\n return;\n }\n\n // no need to bother with the length\n if (!options.knownLength) {\n this._valuesToMeasure.push(value);\n }\n};\n\nFormData.prototype._lengthRetriever = function(value, callback) {\n\n if (value.hasOwnProperty('fd')) {\n\n // take read range into a account\n // `end` = Infinity –> read file till the end\n //\n // TODO: Looks like there is bug in Node fs.createReadStream\n // it doesn't respect `end` options without `start` options\n // Fix it when node fixes it.\n // https://github.com/joyent/node/issues/7819\n if (value.end != undefined && value.end != Infinity && value.start != undefined) {\n\n // when end specified\n // no need to calculate range\n // inclusive, starts with 0\n callback(null, value.end + 1 - (value.start ? value.start : 0));\n\n // not that fast snoopy\n } else {\n // still need to fetch file size from fs\n fs.stat(value.path, function(err, stat) {\n\n var fileSize;\n\n if (err) {\n callback(err);\n return;\n }\n\n // update final size based on the range options\n fileSize = stat.size - (value.start ? value.start : 0);\n callback(null, fileSize);\n });\n }\n\n // or http response\n } else if (value.hasOwnProperty('httpVersion')) {\n callback(null, +value.headers['content-length']);\n\n // or request stream http://github.com/mikeal/request\n } else if (value.hasOwnProperty('httpModule')) {\n // wait till response come back\n value.on('response', function(response) {\n value.pause();\n callback(null, +response.headers['content-length']);\n });\n value.resume();\n\n // something else\n } else {\n callback('Unknown stream');\n }\n};\n\nFormData.prototype._multiPartHeader = function(field, value, options) {\n // custom header specified (as string)?\n // it becomes responsible for boundary\n // (e.g. to handle extra CRLFs on .NET servers)\n if (typeof options.header == 'string') {\n return options.header;\n }\n\n var contentDisposition = this._getContentDisposition(value, options);\n var contentType = this._getContentType(value, options);\n\n var contents = '';\n var headers = {\n // add custom disposition as third element or keep it two elements if not\n 'Content-Disposition': ['form-data', 'name=\"' + field + '\"'].concat(contentDisposition || []),\n // if no content type. 
allow it to be empty array\n 'Content-Type': [].concat(contentType || [])\n };\n\n // allow custom headers.\n if (typeof options.header == 'object') {\n populate(headers, options.header);\n }\n\n var header;\n for (var prop in headers) {\n if (!headers.hasOwnProperty(prop)) continue;\n header = headers[prop];\n\n // skip nullish headers.\n if (header == null) {\n continue;\n }\n\n // convert all headers to arrays.\n if (!Array.isArray(header)) {\n header = [header];\n }\n\n // add non-empty headers.\n if (header.length) {\n contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK;\n }\n }\n\n return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK;\n};\n\nFormData.prototype._getContentDisposition = function(value, options) {\n\n var filename\n , contentDisposition\n ;\n\n if (typeof options.filepath === 'string') {\n // custom filepath for relative paths\n filename = path.normalize(options.filepath).replace(/\\\\/g, '/');\n } else if (options.filename || value.name || value.path) {\n // custom filename take precedence\n // formidable and the browser add a name property\n // fs- and request- streams have path property\n filename = path.basename(options.filename || value.name || value.path);\n } else if (value.readable && value.hasOwnProperty('httpVersion')) {\n // or try http response\n filename = path.basename(value.client._httpMessage.path || '');\n }\n\n if (filename) {\n contentDisposition = 'filename=\"' + filename + '\"';\n }\n\n return contentDisposition;\n};\n\nFormData.prototype._getContentType = function(value, options) {\n\n // use custom content-type above all\n var contentType = options.contentType;\n\n // or try `name` from formidable, browser\n if (!contentType && value.name) {\n contentType = mime.lookup(value.name);\n }\n\n // or try `path` from fs-, request- streams\n if (!contentType && value.path) {\n contentType = mime.lookup(value.path);\n }\n\n // or if it's http-reponse\n if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) {\n contentType = value.headers['content-type'];\n }\n\n // or guess it from the filepath or filename\n if (!contentType && (options.filepath || options.filename)) {\n contentType = mime.lookup(options.filepath || options.filename);\n }\n\n // fallback to the default content type if `value` is not simple value\n if (!contentType && typeof value == 'object') {\n contentType = FormData.DEFAULT_CONTENT_TYPE;\n }\n\n return contentType;\n};\n\nFormData.prototype._multiPartFooter = function() {\n return function(next) {\n var footer = FormData.LINE_BREAK;\n\n var lastPart = (this._streams.length === 0);\n if (lastPart) {\n footer += this._lastBoundary();\n }\n\n next(footer);\n }.bind(this);\n};\n\nFormData.prototype._lastBoundary = function() {\n return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK;\n};\n\nFormData.prototype.getHeaders = function(userHeaders) {\n var header;\n var formHeaders = {\n 'content-type': 'multipart/form-data; boundary=' + this.getBoundary()\n };\n\n for (header in userHeaders) {\n if (userHeaders.hasOwnProperty(header)) {\n formHeaders[header.toLowerCase()] = userHeaders[header];\n }\n }\n\n return formHeaders;\n};\n\nFormData.prototype.setBoundary = function(boundary) {\n this._boundary = boundary;\n};\n\nFormData.prototype.getBoundary = function() {\n if (!this._boundary) {\n this._generateBoundary();\n }\n\n return this._boundary;\n};\n\nFormData.prototype.getBuffer = function() {\n var dataBuffer = new Buffer.alloc( 0 );\n var boundary = 
this.getBoundary();\n\n // Create the form content. Add Line breaks to the end of data.\n for (var i = 0, len = this._streams.length; i < len; i++) {\n if (typeof this._streams[i] !== 'function') {\n\n // Add content to the buffer.\n if(Buffer.isBuffer(this._streams[i])) {\n dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]);\n }else {\n dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]);\n }\n\n // Add break after content.\n if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) {\n dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] );\n }\n }\n }\n\n // Add the footer and return the Buffer object.\n return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] );\n};\n\nFormData.prototype._generateBoundary = function() {\n // This generates a 50 character boundary similar to those used by Firefox.\n // They are optimized for boyer-moore parsing.\n var boundary = '--------------------------';\n for (var i = 0; i < 24; i++) {\n boundary += Math.floor(Math.random() * 10).toString(16);\n }\n\n this._boundary = boundary;\n};\n\n// Note: getLengthSync DOESN'T calculate streams length\n// As workaround one can calculate file size manually\n// and add it as knownLength option\nFormData.prototype.getLengthSync = function() {\n var knownLength = this._overheadLength + this._valueLength;\n\n // Don't get confused, there are 3 \"internal\" streams for each keyval pair\n // so it basically checks if there is any value added to the form\n if (this._streams.length) {\n knownLength += this._lastBoundary().length;\n }\n\n // https://github.com/form-data/form-data/issues/40\n if (!this.hasKnownLength()) {\n // Some async length retrievers are present\n // therefore synchronous length calculation is false.\n // Please use getLength(callback) to get proper length\n this._error(new Error('Cannot calculate proper length in synchronous way.'));\n }\n\n return knownLength;\n};\n\n// Public API to check if length of added values is known\n// https://github.com/form-data/form-data/issues/196\n// https://github.com/form-data/form-data/issues/262\nFormData.prototype.hasKnownLength = function() {\n var hasKnownLength = true;\n\n if (this._valuesToMeasure.length) {\n hasKnownLength = false;\n }\n\n return hasKnownLength;\n};\n\nFormData.prototype.getLength = function(cb) {\n var knownLength = this._overheadLength + this._valueLength;\n\n if (this._streams.length) {\n knownLength += this._lastBoundary().length;\n }\n\n if (!this._valuesToMeasure.length) {\n process.nextTick(cb.bind(this, null, knownLength));\n return;\n }\n\n asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) {\n if (err) {\n cb(err);\n return;\n }\n\n values.forEach(function(length) {\n knownLength += length;\n });\n\n cb(null, knownLength);\n });\n};\n\nFormData.prototype.submit = function(params, cb) {\n var request\n , options\n , defaults = {method: 'post'}\n ;\n\n // parse provided url if it's string\n // or treat it as options object\n if (typeof params == 'string') {\n\n params = parseUrl(params);\n options = populate({\n port: params.port,\n path: params.pathname,\n host: params.hostname,\n protocol: params.protocol\n }, defaults);\n\n // use custom params\n } else {\n\n options = populate(params, defaults);\n // if no port provided use default one\n if (!options.port) {\n options.port = options.protocol == 'https:' ? 
443 : 80;\n }\n }\n\n // put that good code in getHeaders to some use\n options.headers = this.getHeaders(params.headers);\n\n // https if specified, fallback to http in any other case\n if (options.protocol == 'https:') {\n request = https.request(options);\n } else {\n request = http.request(options);\n }\n\n // get content length and fire away\n this.getLength(function(err, length) {\n if (err) {\n this._error(err);\n return;\n }\n\n // add content length\n request.setHeader('Content-Length', length);\n\n this.pipe(request);\n if (cb) {\n var onResponse;\n\n var callback = function (error, responce) {\n request.removeListener('error', callback);\n request.removeListener('response', onResponse);\n\n return cb.call(this, error, responce);\n };\n\n onResponse = callback.bind(this, null);\n\n request.on('error', callback);\n request.on('response', onResponse);\n }\n }.bind(this));\n\n return request;\n};\n\nFormData.prototype._error = function(err) {\n if (!this.error) {\n this.error = err;\n this.pause();\n this.emit('error', err);\n }\n};\n\nFormData.prototype.toString = function () {\n return '[object FormData]';\n};\n","// populates missing values\nmodule.exports = function(dst, src) {\n\n Object.keys(src).forEach(function(prop)\n {\n dst[prop] = dst[prop] || src[prop];\n });\n\n return dst;\n};\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\nconst punycode = require(\"punycode\");\nconst urlParse = require(\"url\").parse;\nconst util = require(\"util\");\nconst pubsuffix = require(\"./pubsuffix-psl\");\nconst Store = require(\"./store\").Store;\nconst MemoryCookieStore = require(\"./memstore\").MemoryCookieStore;\nconst pathMatch = require(\"./pathMatch\").pathMatch;\nconst VERSION = require(\"./version\");\nconst { fromCallback } = require(\"universalify\");\n\n// From RFC6265 S4.1.1\n// note that it excludes \\x3B \";\"\nconst COOKIE_OCTETS = /^[\\x21\\x23-\\x2B\\x2D-\\x3A\\x3C-\\x5B\\x5D-\\x7E]+$/;\n\nconst CONTROL_CHARS = /[\\x00-\\x1F]/;\n\n// From Chromium // '\\r', '\\n' and '\\0' should be treated as a terminator in\n// the \"relaxed\" mode, see:\n// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60\nconst TERMINATORS = [\"\\n\", \"\\r\", \"\\0\"];\n\n// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or \";\"'\n// Note ';' is \\x3B\nconst PATH_VALUE = /[\\x20-\\x3A\\x3C-\\x7E]+/;\n\n// date-time parsing constants (RFC6265 S5.1.1)\n\nconst DATE_DELIM = /[\\x09\\x20-\\x2F\\x3B-\\x40\\x5B-\\x60\\x7B-\\x7E]/;\n\nconst MONTH_TO_NUM = {\n jan: 0,\n feb: 1,\n mar: 2,\n apr: 3,\n may: 4,\n jun: 5,\n jul: 6,\n aug: 7,\n sep: 8,\n oct: 9,\n nov: 10,\n dec: 11\n};\n\nconst MAX_TIME = 2147483647000; // 31-bit max\nconst MIN_TIME = 0; // 31-bit min\nconst SAME_SITE_CONTEXT_VAL_ERR =\n 'Invalid sameSiteContext option for getCookies(); expected one of \"strict\", \"lax\", or \"none\"';\n\nfunction checkSameSiteContext(value) {\n const context = String(value).toLowerCase();\n if (context === \"none\" || context === \"lax\" || context === \"strict\") {\n return context;\n } else {\n return null;\n }\n}\n\nconst PrefixSecurityEnum = Object.freeze({\n SILENT: \"silent\",\n STRICT: \"strict\",\n DISABLED: \"unsafe-disabled\"\n});\n\n// Dumped from ip-regex@4.0.0, with the following changes:\n// * all capturing groups converted to non-capturing -- \"(?:)\"\n// * support for IPv6 Scoped Literal (\"%eth1\") removed\n// * lowercase hexadecimal only\nvar IP_REGEX_LOWERCASE 
=/(?:^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$)|(?:^(?:(?:[a-f\\d]{1,4}:){7}(?:[a-f\\d]{1,4}|:)|(?:[a-f\\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|:[a-f\\d]{1,4}|:)|(?:[a-f\\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,2}|:)|(?:[a-f\\d]{1,4}:){4}(?:(?::[a-f\\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,3}|:)|(?:[a-f\\d]{1,4}:){3}(?:(?::[a-f\\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,4}|:)|(?:[a-f\\d]{1,4}:){2}(?:(?::[a-f\\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,5}|:)|(?:[a-f\\d]{1,4}:){1}(?:(?::[a-f\\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}|(?::[a-f\\d]{1,4}){1,7}|:)))$)/;\n\n/*\n * Parses a Natural number (i.e., non-negative integer) with either the\n * *DIGIT ( non-digit *OCTET )\n * or\n * *DIGIT\n * grammar (RFC6265 S5.1.1).\n *\n * The \"trailingOK\" boolean controls if the grammar accepts a\n * \"( non-digit *OCTET )\" trailer.\n */\nfunction parseDigits(token, minDigits, maxDigits, trailingOK) {\n let count = 0;\n while (count < token.length) {\n const c = token.charCodeAt(count);\n // \"non-digit = %x00-2F / %x3A-FF\"\n if (c <= 0x2f || c >= 0x3a) {\n break;\n }\n count++;\n }\n\n // constrain to a minimum and maximum number of digits.\n if (count < minDigits || count > maxDigits) {\n return null;\n }\n\n if (!trailingOK && count != token.length) {\n return null;\n }\n\n return parseInt(token.substr(0, count), 10);\n}\n\nfunction parseTime(token) {\n const parts = token.split(\":\");\n const result = [0, 0, 0];\n\n /* RF6256 S5.1.1:\n * time = hms-time ( non-digit *OCTET )\n * hms-time = time-field \":\" time-field \":\" time-field\n * time-field = 1*2DIGIT\n */\n\n if (parts.length !== 3) {\n return null;\n }\n\n for (let i = 0; i < 3; i++) {\n // \"time-field\" must be strictly \"1*2DIGIT\", HOWEVER, \"hms-time\" can be\n // followed by \"( non-digit *OCTET )\" so therefore the last time-field can\n // have a trailer\n const trailingOK = i == 2;\n const num = parseDigits(parts[i], 1, 2, trailingOK);\n if (num === null) {\n return null;\n }\n result[i] = num;\n }\n\n return result;\n}\n\nfunction parseMonth(token) {\n token = String(token)\n .substr(0, 3)\n .toLowerCase();\n const num = MONTH_TO_NUM[token];\n return num >= 0 ? num : null;\n}\n\n/*\n * RFC6265 S5.1.1 date parser (see RFC for full grammar)\n */\nfunction parseDate(str) {\n if (!str) {\n return;\n }\n\n /* RFC6265 S5.1.1:\n * 2. Process each date-token sequentially in the order the date-tokens\n * appear in the cookie-date\n */\n const tokens = str.split(DATE_DELIM);\n if (!tokens) {\n return;\n }\n\n let hour = null;\n let minute = null;\n let second = null;\n let dayOfMonth = null;\n let month = null;\n let year = null;\n\n for (let i = 0; i < tokens.length; i++) {\n const token = tokens[i].trim();\n if (!token.length) {\n continue;\n }\n\n let result;\n\n /* 2.1. 
If the found-time flag is not set and the token matches the time\n * production, set the found-time flag and set the hour- value,\n * minute-value, and second-value to the numbers denoted by the digits in\n * the date-token, respectively. Skip the remaining sub-steps and continue\n * to the next date-token.\n */\n if (second === null) {\n result = parseTime(token);\n if (result) {\n hour = result[0];\n minute = result[1];\n second = result[2];\n continue;\n }\n }\n\n /* 2.2. If the found-day-of-month flag is not set and the date-token matches\n * the day-of-month production, set the found-day-of- month flag and set\n * the day-of-month-value to the number denoted by the date-token. Skip\n * the remaining sub-steps and continue to the next date-token.\n */\n if (dayOfMonth === null) {\n // \"day-of-month = 1*2DIGIT ( non-digit *OCTET )\"\n result = parseDigits(token, 1, 2, true);\n if (result !== null) {\n dayOfMonth = result;\n continue;\n }\n }\n\n /* 2.3. If the found-month flag is not set and the date-token matches the\n * month production, set the found-month flag and set the month-value to\n * the month denoted by the date-token. Skip the remaining sub-steps and\n * continue to the next date-token.\n */\n if (month === null) {\n result = parseMonth(token);\n if (result !== null) {\n month = result;\n continue;\n }\n }\n\n /* 2.4. If the found-year flag is not set and the date-token matches the\n * year production, set the found-year flag and set the year-value to the\n * number denoted by the date-token. Skip the remaining sub-steps and\n * continue to the next date-token.\n */\n if (year === null) {\n // \"year = 2*4DIGIT ( non-digit *OCTET )\"\n result = parseDigits(token, 2, 4, true);\n if (result !== null) {\n year = result;\n /* From S5.1.1:\n * 3. If the year-value is greater than or equal to 70 and less\n * than or equal to 99, increment the year-value by 1900.\n * 4. If the year-value is greater than or equal to 0 and less\n * than or equal to 69, increment the year-value by 2000.\n */\n if (year >= 70 && year <= 99) {\n year += 1900;\n } else if (year >= 0 && year <= 69) {\n year += 2000;\n }\n }\n }\n }\n\n /* RFC 6265 S5.1.1\n * \"5. 
Abort these steps and fail to parse the cookie-date if:\n * * at least one of the found-day-of-month, found-month, found-\n * year, or found-time flags is not set,\n * * the day-of-month-value is less than 1 or greater than 31,\n * * the year-value is less than 1601,\n * * the hour-value is greater than 23,\n * * the minute-value is greater than 59, or\n * * the second-value is greater than 59.\n * (Note that leap seconds cannot be represented in this syntax.)\"\n *\n * So, in order as above:\n */\n if (\n dayOfMonth === null ||\n month === null ||\n year === null ||\n second === null ||\n dayOfMonth < 1 ||\n dayOfMonth > 31 ||\n year < 1601 ||\n hour > 23 ||\n minute > 59 ||\n second > 59\n ) {\n return;\n }\n\n return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second));\n}\n\nfunction formatDate(date) {\n return date.toUTCString();\n}\n\n// S5.1.2 Canonicalized Host Names\nfunction canonicalDomain(str) {\n if (str == null) {\n return null;\n }\n str = str.trim().replace(/^\\./, \"\"); // S4.1.2.3 & S5.2.3: ignore leading .\n\n // convert to IDN if any non-ASCII characters\n if (punycode && /[^\\u0001-\\u007f]/.test(str)) {\n str = punycode.toASCII(str);\n }\n\n return str.toLowerCase();\n}\n\n// S5.1.3 Domain Matching\nfunction domainMatch(str, domStr, canonicalize) {\n if (str == null || domStr == null) {\n return null;\n }\n if (canonicalize !== false) {\n str = canonicalDomain(str);\n domStr = canonicalDomain(domStr);\n }\n\n /*\n * S5.1.3:\n * \"A string domain-matches a given domain string if at least one of the\n * following conditions hold:\"\n *\n * \" o The domain string and the string are identical. (Note that both the\n * domain string and the string will have been canonicalized to lower case at\n * this point)\"\n */\n if (str == domStr) {\n return true;\n }\n\n /* \" o All of the following [three] conditions hold:\" */\n\n /* \"* The domain string is a suffix of the string\" */\n const idx = str.indexOf(domStr);\n if (idx <= 0) {\n return false; // it's a non-match (-1) or prefix (0)\n }\n\n // next, check it's a proper suffix\n // e.g., \"a.b.c\".indexOf(\"b.c\") === 2\n // 5 === 3+2\n if (str.length !== domStr.length + idx) {\n return false; // it's not a suffix\n }\n\n /* \" * The last character of the string that is not included in the\n * domain string is a %x2E (\".\") character.\" */\n if (str.substr(idx-1,1) !== '.') {\n return false; // doesn't align on \".\"\n }\n\n /* \" * The string is a host name (i.e., not an IP address).\" */\n if (IP_REGEX_LOWERCASE.test(str)) {\n return false; // it's an IP address\n }\n\n return true;\n}\n\n// RFC6265 S5.1.4 Paths and Path-Match\n\n/*\n * \"The user agent MUST use an algorithm equivalent to the following algorithm\n * to compute the default-path of a cookie:\"\n *\n * Assumption: the path (and not query part or absolute uri) is passed in.\n */\nfunction defaultPath(path) {\n // \"2. If the uri-path is empty or if the first character of the uri-path is not\n // a %x2F (\"/\") character, output %x2F (\"/\") and skip the remaining steps.\n if (!path || path.substr(0, 1) !== \"/\") {\n return \"/\";\n }\n\n // \"3. If the uri-path contains no more than one %x2F (\"/\") character, output\n // %x2F (\"/\") and skip the remaining step.\"\n if (path === \"/\") {\n return path;\n }\n\n const rightSlash = path.lastIndexOf(\"/\");\n if (rightSlash === 0) {\n return \"/\";\n }\n\n // \"4. 
Output the characters of the uri-path from the first character up to,\n // but not including, the right-most %x2F (\"/\").\"\n return path.slice(0, rightSlash);\n}\n\nfunction trimTerminator(str) {\n for (let t = 0; t < TERMINATORS.length; t++) {\n const terminatorIdx = str.indexOf(TERMINATORS[t]);\n if (terminatorIdx !== -1) {\n str = str.substr(0, terminatorIdx);\n }\n }\n\n return str;\n}\n\nfunction parseCookiePair(cookiePair, looseMode) {\n cookiePair = trimTerminator(cookiePair);\n\n let firstEq = cookiePair.indexOf(\"=\");\n if (looseMode) {\n if (firstEq === 0) {\n // '=' is immediately at start\n cookiePair = cookiePair.substr(1);\n firstEq = cookiePair.indexOf(\"=\"); // might still need to split on '='\n }\n } else {\n // non-loose mode\n if (firstEq <= 0) {\n // no '=' or is at start\n return; // needs to have non-empty \"cookie-name\"\n }\n }\n\n let cookieName, cookieValue;\n if (firstEq <= 0) {\n cookieName = \"\";\n cookieValue = cookiePair.trim();\n } else {\n cookieName = cookiePair.substr(0, firstEq).trim();\n cookieValue = cookiePair.substr(firstEq + 1).trim();\n }\n\n if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) {\n return;\n }\n\n const c = new Cookie();\n c.key = cookieName;\n c.value = cookieValue;\n return c;\n}\n\nfunction parse(str, options) {\n if (!options || typeof options !== \"object\") {\n options = {};\n }\n str = str.trim();\n\n // We use a regex to parse the \"name-value-pair\" part of S5.2\n const firstSemi = str.indexOf(\";\"); // S5.2 step 1\n const cookiePair = firstSemi === -1 ? str : str.substr(0, firstSemi);\n const c = parseCookiePair(cookiePair, !!options.loose);\n if (!c) {\n return;\n }\n\n if (firstSemi === -1) {\n return c;\n }\n\n // S5.2.3 \"unparsed-attributes consist of the remainder of the set-cookie-string\n // (including the %x3B (\";\") in question).\" plus later on in the same section\n // \"discard the first \";\" and trim\".\n const unparsed = str.slice(firstSemi + 1).trim();\n\n // \"If the unparsed-attributes string is empty, skip the rest of these\n // steps.\"\n if (unparsed.length === 0) {\n return c;\n }\n\n /*\n * S5.2 says that when looping over the items \"[p]rocess the attribute-name\n * and attribute-value according to the requirements in the following\n * subsections\" for every item. Plus, for many of the individual attributes\n * in S5.3 it says to use the \"attribute-value of the last attribute in the\n * cookie-attribute-list\". 
Therefore, in this implementation, we overwrite\n * the previous value.\n */\n const cookie_avs = unparsed.split(\";\");\n while (cookie_avs.length) {\n const av = cookie_avs.shift().trim();\n if (av.length === 0) {\n // happens if \";;\" appears\n continue;\n }\n const av_sep = av.indexOf(\"=\");\n let av_key, av_value;\n\n if (av_sep === -1) {\n av_key = av;\n av_value = null;\n } else {\n av_key = av.substr(0, av_sep);\n av_value = av.substr(av_sep + 1);\n }\n\n av_key = av_key.trim().toLowerCase();\n\n if (av_value) {\n av_value = av_value.trim();\n }\n\n switch (av_key) {\n case \"expires\": // S5.2.1\n if (av_value) {\n const exp = parseDate(av_value);\n // \"If the attribute-value failed to parse as a cookie date, ignore the\n // cookie-av.\"\n if (exp) {\n // over and underflow not realistically a concern: V8's getTime() seems to\n // store something larger than a 32-bit time_t (even with 32-bit node)\n c.expires = exp;\n }\n }\n break;\n\n case \"max-age\": // S5.2.2\n if (av_value) {\n // \"If the first character of the attribute-value is not a DIGIT or a \"-\"\n // character ...[or]... If the remainder of attribute-value contains a\n // non-DIGIT character, ignore the cookie-av.\"\n if (/^-?[0-9]+$/.test(av_value)) {\n const delta = parseInt(av_value, 10);\n // \"If delta-seconds is less than or equal to zero (0), let expiry-time\n // be the earliest representable date and time.\"\n c.setMaxAge(delta);\n }\n }\n break;\n\n case \"domain\": // S5.2.3\n // \"If the attribute-value is empty, the behavior is undefined. However,\n // the user agent SHOULD ignore the cookie-av entirely.\"\n if (av_value) {\n // S5.2.3 \"Let cookie-domain be the attribute-value without the leading %x2E\n // (\".\") character.\"\n const domain = av_value.trim().replace(/^\\./, \"\");\n if (domain) {\n // \"Convert the cookie-domain to lower case.\"\n c.domain = domain.toLowerCase();\n }\n }\n break;\n\n case \"path\": // S5.2.4\n /*\n * \"If the attribute-value is empty or if the first character of the\n * attribute-value is not %x2F (\"/\"):\n * Let cookie-path be the default-path.\n * Otherwise:\n * Let cookie-path be the attribute-value.\"\n *\n * We'll represent the default-path as null since it depends on the\n * context of the parsing.\n */\n c.path = av_value && av_value[0] === \"/\" ? av_value : null;\n break;\n\n case \"secure\": // S5.2.5\n /*\n * \"If the attribute-name case-insensitively matches the string \"Secure\",\n * the user agent MUST append an attribute to the cookie-attribute-list\n * with an attribute-name of Secure and an empty attribute-value.\"\n */\n c.secure = true;\n break;\n\n case \"httponly\": // S5.2.6 -- effectively the same as 'secure'\n c.httpOnly = true;\n break;\n\n case \"samesite\": // RFC6265bis-02 S5.3.7\n const enforcement = av_value ? 
av_value.toLowerCase() : \"\";\n switch (enforcement) {\n case \"strict\":\n c.sameSite = \"strict\";\n break;\n case \"lax\":\n c.sameSite = \"lax\";\n break;\n default:\n // RFC6265bis-02 S5.3.7 step 1:\n // \"If cookie-av's attribute-value is not a case-insensitive match\n // for \"Strict\" or \"Lax\", ignore the \"cookie-av\".\"\n // This effectively sets it to 'none' from the prototype.\n break;\n }\n break;\n\n default:\n c.extensions = c.extensions || [];\n c.extensions.push(av);\n break;\n }\n }\n\n return c;\n}\n\n/**\n * If the cookie-name begins with a case-sensitive match for the\n * string \"__Secure-\", abort these steps and ignore the cookie\n * entirely unless the cookie's secure-only-flag is true.\n * @param cookie\n * @returns boolean\n */\nfunction isSecurePrefixConditionMet(cookie) {\n return !cookie.key.startsWith(\"__Secure-\") || cookie.secure;\n}\n\n/**\n * If the cookie-name begins with a case-sensitive match for the\n * string \"__Host-\", abort these steps and ignore the cookie\n * entirely unless the cookie meets all the following criteria:\n * 1. The cookie's secure-only-flag is true.\n * 2. The cookie's host-only-flag is true.\n * 3. The cookie-attribute-list contains an attribute with an\n * attribute-name of \"Path\", and the cookie's path is \"/\".\n * @param cookie\n * @returns boolean\n */\nfunction isHostPrefixConditionMet(cookie) {\n return (\n !cookie.key.startsWith(\"__Host-\") ||\n (cookie.secure &&\n cookie.hostOnly &&\n cookie.path != null &&\n cookie.path === \"/\")\n );\n}\n\n// avoid the V8 deoptimization monster!\nfunction jsonParse(str) {\n let obj;\n try {\n obj = JSON.parse(str);\n } catch (e) {\n return e;\n }\n return obj;\n}\n\nfunction fromJSON(str) {\n if (!str) {\n return null;\n }\n\n let obj;\n if (typeof str === \"string\") {\n obj = jsonParse(str);\n if (obj instanceof Error) {\n return null;\n }\n } else {\n // assume it's an Object\n obj = str;\n }\n\n const c = new Cookie();\n for (let i = 0; i < Cookie.serializableProperties.length; i++) {\n const prop = Cookie.serializableProperties[i];\n if (obj[prop] === undefined || obj[prop] === cookieDefaults[prop]) {\n continue; // leave as prototype default\n }\n\n if (prop === \"expires\" || prop === \"creation\" || prop === \"lastAccessed\") {\n if (obj[prop] === null) {\n c[prop] = null;\n } else {\n c[prop] = obj[prop] == \"Infinity\" ? \"Infinity\" : new Date(obj[prop]);\n }\n } else {\n c[prop] = obj[prop];\n }\n }\n\n return c;\n}\n\n/* Section 5.4 part 2:\n * \"* Cookies with longer paths are listed before cookies with\n * shorter paths.\n *\n * * Among cookies that have equal-length path fields, cookies with\n * earlier creation-times are listed before cookies with later\n * creation-times.\"\n */\n\nfunction cookieCompare(a, b) {\n let cmp = 0;\n\n // descending for length: b CMP a\n const aPathLen = a.path ? a.path.length : 0;\n const bPathLen = b.path ? b.path.length : 0;\n cmp = bPathLen - aPathLen;\n if (cmp !== 0) {\n return cmp;\n }\n\n // ascending for time: a CMP b\n const aTime = a.creation ? a.creation.getTime() : MAX_TIME;\n const bTime = b.creation ? b.creation.getTime() : MAX_TIME;\n cmp = aTime - bTime;\n if (cmp !== 0) {\n return cmp;\n }\n\n // break ties for the same millisecond (precision of JavaScript's clock)\n cmp = a.creationIndex - b.creationIndex;\n\n return cmp;\n}\n\n// Gives the permutation of all possible pathMatch()es of a given path. The\n// array is in longest-to-shortest order. 
Handy for indexing.\nfunction permutePath(path) {\n if (path === \"/\") {\n return [\"/\"];\n }\n const permutations = [path];\n while (path.length > 1) {\n const lindex = path.lastIndexOf(\"/\");\n if (lindex === 0) {\n break;\n }\n path = path.substr(0, lindex);\n permutations.push(path);\n }\n permutations.push(\"/\");\n return permutations;\n}\n\nfunction getCookieContext(url) {\n if (url instanceof Object) {\n return url;\n }\n // NOTE: decodeURI will throw on malformed URIs (see GH-32).\n // Therefore, we will just skip decoding for such URIs.\n try {\n url = decodeURI(url);\n } catch (err) {\n // Silently swallow error\n }\n\n return urlParse(url);\n}\n\nconst cookieDefaults = {\n // the order in which the RFC has them:\n key: \"\",\n value: \"\",\n expires: \"Infinity\",\n maxAge: null,\n domain: null,\n path: null,\n secure: false,\n httpOnly: false,\n extensions: null,\n // set by the CookieJar:\n hostOnly: null,\n pathIsDefault: null,\n creation: null,\n lastAccessed: null,\n sameSite: \"none\"\n};\n\nclass Cookie {\n constructor(options = {}) {\n if (util.inspect.custom) {\n this[util.inspect.custom] = this.inspect;\n }\n\n Object.assign(this, cookieDefaults, options);\n this.creation = this.creation || new Date();\n\n // used to break creation ties in cookieCompare():\n Object.defineProperty(this, \"creationIndex\", {\n configurable: false,\n enumerable: false, // important for assert.deepEqual checks\n writable: true,\n value: ++Cookie.cookiesCreated\n });\n }\n\n inspect() {\n const now = Date.now();\n const hostOnly = this.hostOnly != null ? this.hostOnly : \"?\";\n const createAge = this.creation\n ? `${now - this.creation.getTime()}ms`\n : \"?\";\n const accessAge = this.lastAccessed\n ? `${now - this.lastAccessed.getTime()}ms`\n : \"?\";\n return `Cookie=\"${this.toString()}; hostOnly=${hostOnly}; aAge=${accessAge}; cAge=${createAge}\"`;\n }\n\n toJSON() {\n const obj = {};\n\n for (const prop of Cookie.serializableProperties) {\n if (this[prop] === cookieDefaults[prop]) {\n continue; // leave as prototype default\n }\n\n if (\n prop === \"expires\" ||\n prop === \"creation\" ||\n prop === \"lastAccessed\"\n ) {\n if (this[prop] === null) {\n obj[prop] = null;\n } else {\n obj[prop] =\n this[prop] == \"Infinity\" // intentionally not ===\n ? \"Infinity\"\n : this[prop].toISOString();\n }\n } else if (prop === \"maxAge\") {\n if (this[prop] !== null) {\n // again, intentionally not ===\n obj[prop] =\n this[prop] == Infinity || this[prop] == -Infinity\n ? this[prop].toString()\n : this[prop];\n }\n } else {\n if (this[prop] !== cookieDefaults[prop]) {\n obj[prop] = this[prop];\n }\n }\n }\n\n return obj;\n }\n\n clone() {\n return fromJSON(this.toJSON());\n }\n\n validate() {\n if (!COOKIE_OCTETS.test(this.value)) {\n return false;\n }\n if (\n this.expires != Infinity &&\n !(this.expires instanceof Date) &&\n !parseDate(this.expires)\n ) {\n return false;\n }\n if (this.maxAge != null && this.maxAge <= 0) {\n return false; // \"Max-Age=\" non-zero-digit *DIGIT\n }\n if (this.path != null && !PATH_VALUE.test(this.path)) {\n return false;\n }\n\n const cdomain = this.cdomain();\n if (cdomain) {\n if (cdomain.match(/\\.$/)) {\n return false; // S4.1.2.3 suggests that this is bad. 
domainMatch() tests confirm this\n }\n const suffix = pubsuffix.getPublicSuffix(cdomain);\n if (suffix == null) {\n // it's a public suffix\n return false;\n }\n }\n return true;\n }\n\n setExpires(exp) {\n if (exp instanceof Date) {\n this.expires = exp;\n } else {\n this.expires = parseDate(exp) || \"Infinity\";\n }\n }\n\n setMaxAge(age) {\n if (age === Infinity || age === -Infinity) {\n this.maxAge = age.toString(); // so JSON.stringify() works\n } else {\n this.maxAge = age;\n }\n }\n\n cookieString() {\n let val = this.value;\n if (val == null) {\n val = \"\";\n }\n if (this.key === \"\") {\n return val;\n }\n return `${this.key}=${val}`;\n }\n\n // gives Set-Cookie header format\n toString() {\n let str = this.cookieString();\n\n if (this.expires != Infinity) {\n if (this.expires instanceof Date) {\n str += `; Expires=${formatDate(this.expires)}`;\n } else {\n str += `; Expires=${this.expires}`;\n }\n }\n\n if (this.maxAge != null && this.maxAge != Infinity) {\n str += `; Max-Age=${this.maxAge}`;\n }\n\n if (this.domain && !this.hostOnly) {\n str += `; Domain=${this.domain}`;\n }\n if (this.path) {\n str += `; Path=${this.path}`;\n }\n\n if (this.secure) {\n str += \"; Secure\";\n }\n if (this.httpOnly) {\n str += \"; HttpOnly\";\n }\n if (this.sameSite && this.sameSite !== \"none\") {\n const ssCanon = Cookie.sameSiteCanonical[this.sameSite.toLowerCase()];\n str += `; SameSite=${ssCanon ? ssCanon : this.sameSite}`;\n }\n if (this.extensions) {\n this.extensions.forEach(ext => {\n str += `; ${ext}`;\n });\n }\n\n return str;\n }\n\n // TTL() partially replaces the \"expiry-time\" parts of S5.3 step 3 (setCookie()\n // elsewhere)\n // S5.3 says to give the \"latest representable date\" for which we use Infinity\n // For \"expired\" we use 0\n TTL(now) {\n /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires\n * attribute, the Max-Age attribute has precedence and controls the\n * expiration date of the cookie.\n * (Concurs with S5.3 step 3)\n */\n if (this.maxAge != null) {\n return this.maxAge <= 0 ? 0 : this.maxAge * 1000;\n }\n\n let expires = this.expires;\n if (expires != Infinity) {\n if (!(expires instanceof Date)) {\n expires = parseDate(expires) || Infinity;\n }\n\n if (expires == Infinity) {\n return Infinity;\n }\n\n return expires.getTime() - (now || Date.now());\n }\n\n return Infinity;\n }\n\n // expiryTime() replaces the \"expiry-time\" parts of S5.3 step 3 (setCookie()\n // elsewhere)\n expiryTime(now) {\n if (this.maxAge != null) {\n const relativeTo = now || this.creation || new Date();\n const age = this.maxAge <= 0 ? 
-Infinity : this.maxAge * 1000;\n return relativeTo.getTime() + age;\n }\n\n if (this.expires == Infinity) {\n return Infinity;\n }\n return this.expires.getTime();\n }\n\n // expiryDate() replaces the \"expiry-time\" parts of S5.3 step 3 (setCookie()\n // elsewhere), except it returns a Date\n expiryDate(now) {\n const millisec = this.expiryTime(now);\n if (millisec == Infinity) {\n return new Date(MAX_TIME);\n } else if (millisec == -Infinity) {\n return new Date(MIN_TIME);\n } else {\n return new Date(millisec);\n }\n }\n\n // This replaces the \"persistent-flag\" parts of S5.3 step 3\n isPersistent() {\n return this.maxAge != null || this.expires != Infinity;\n }\n\n // Mostly S5.1.2 and S5.2.3:\n canonicalizedDomain() {\n if (this.domain == null) {\n return null;\n }\n return canonicalDomain(this.domain);\n }\n\n cdomain() {\n return this.canonicalizedDomain();\n }\n}\n\nCookie.cookiesCreated = 0;\nCookie.parse = parse;\nCookie.fromJSON = fromJSON;\nCookie.serializableProperties = Object.keys(cookieDefaults);\nCookie.sameSiteLevel = {\n strict: 3,\n lax: 2,\n none: 1\n};\n\nCookie.sameSiteCanonical = {\n strict: \"Strict\",\n lax: \"Lax\"\n};\n\nfunction getNormalizedPrefixSecurity(prefixSecurity) {\n if (prefixSecurity != null) {\n const normalizedPrefixSecurity = prefixSecurity.toLowerCase();\n /* The three supported options */\n switch (normalizedPrefixSecurity) {\n case PrefixSecurityEnum.STRICT:\n case PrefixSecurityEnum.SILENT:\n case PrefixSecurityEnum.DISABLED:\n return normalizedPrefixSecurity;\n }\n }\n /* Default is SILENT */\n return PrefixSecurityEnum.SILENT;\n}\n\nclass CookieJar {\n constructor(store, options = { rejectPublicSuffixes: true }) {\n if (typeof options === \"boolean\") {\n options = { rejectPublicSuffixes: options };\n }\n this.rejectPublicSuffixes = options.rejectPublicSuffixes;\n this.enableLooseMode = !!options.looseMode;\n this.allowSpecialUseDomain = !!options.allowSpecialUseDomain;\n this.store = store || new MemoryCookieStore();\n this.prefixSecurity = getNormalizedPrefixSecurity(options.prefixSecurity);\n this._cloneSync = syncWrap(\"clone\");\n this._importCookiesSync = syncWrap(\"_importCookies\");\n this.getCookiesSync = syncWrap(\"getCookies\");\n this.getCookieStringSync = syncWrap(\"getCookieString\");\n this.getSetCookieStringsSync = syncWrap(\"getSetCookieStrings\");\n this.removeAllCookiesSync = syncWrap(\"removeAllCookies\");\n this.setCookieSync = syncWrap(\"setCookie\");\n this.serializeSync = syncWrap(\"serialize\");\n }\n\n setCookie(cookie, url, options, cb) {\n let err;\n const context = getCookieContext(url);\n if (typeof options === \"function\") {\n cb = options;\n options = {};\n }\n\n const host = canonicalDomain(context.hostname);\n const loose = options.loose || this.enableLooseMode;\n\n let sameSiteContext = null;\n if (options.sameSiteContext) {\n sameSiteContext = checkSameSiteContext(options.sameSiteContext);\n if (!sameSiteContext) {\n return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR));\n }\n }\n\n // S5.3 step 1\n if (typeof cookie === \"string\" || cookie instanceof String) {\n cookie = Cookie.parse(cookie, { loose: loose });\n if (!cookie) {\n err = new Error(\"Cookie failed to parse\");\n return cb(options.ignoreError ? 
null : err);\n }\n } else if (!(cookie instanceof Cookie)) {\n // If you're seeing this error, and are passing in a Cookie object,\n // it *might* be a Cookie object from another loaded version of tough-cookie.\n err = new Error(\n \"First argument to setCookie must be a Cookie object or string\"\n );\n return cb(options.ignoreError ? null : err);\n }\n\n // S5.3 step 2\n const now = options.now || new Date(); // will assign later to save effort in the face of errors\n\n // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie()\n\n // S5.3 step 4: NOOP; domain is null by default\n\n // S5.3 step 5: public suffixes\n if (this.rejectPublicSuffixes && cookie.domain) {\n const suffix = pubsuffix.getPublicSuffix(cookie.cdomain());\n if (suffix == null) {\n // e.g. \"com\"\n err = new Error(\"Cookie has domain set to a public suffix\");\n return cb(options.ignoreError ? null : err);\n }\n }\n\n // S5.3 step 6:\n if (cookie.domain) {\n if (!domainMatch(host, cookie.cdomain(), false)) {\n err = new Error(\n `Cookie not in this host's domain. Cookie:${cookie.cdomain()} Request:${host}`\n );\n return cb(options.ignoreError ? null : err);\n }\n\n if (cookie.hostOnly == null) {\n // don't reset if already set\n cookie.hostOnly = false;\n }\n } else {\n cookie.hostOnly = true;\n cookie.domain = host;\n }\n\n //S5.2.4 If the attribute-value is empty or if the first character of the\n //attribute-value is not %x2F (\"/\"):\n //Let cookie-path be the default-path.\n if (!cookie.path || cookie.path[0] !== \"/\") {\n cookie.path = defaultPath(context.pathname);\n cookie.pathIsDefault = true;\n }\n\n // S5.3 step 8: NOOP; secure attribute\n // S5.3 step 9: NOOP; httpOnly attribute\n\n // S5.3 step 10\n if (options.http === false && cookie.httpOnly) {\n err = new Error(\"Cookie is HttpOnly and this isn't an HTTP API\");\n return cb(options.ignoreError ? null : err);\n }\n\n // 6252bis-02 S5.4 Step 13 & 14:\n if (cookie.sameSite !== \"none\" && sameSiteContext) {\n // \"If the cookie's \"same-site-flag\" is not \"None\", and the cookie\n // is being set from a context whose \"site for cookies\" is not an\n // exact match for request-uri's host's registered domain, then\n // abort these steps and ignore the newly created cookie entirely.\"\n if (sameSiteContext === \"none\") {\n err = new Error(\n \"Cookie is SameSite but this is a cross-origin request\"\n );\n return cb(options.ignoreError ? null : err);\n }\n }\n\n /* 6265bis-02 S5.4 Steps 15 & 16 */\n const ignoreErrorForPrefixSecurity =\n this.prefixSecurity === PrefixSecurityEnum.SILENT;\n const prefixSecurityDisabled =\n this.prefixSecurity === PrefixSecurityEnum.DISABLED;\n /* If prefix checking is not disabled ...*/\n if (!prefixSecurityDisabled) {\n let errorFound = false;\n let errorMsg;\n /* Check secure prefix condition */\n if (!isSecurePrefixConditionMet(cookie)) {\n errorFound = true;\n errorMsg = \"Cookie has __Secure prefix but Secure attribute is not set\";\n } else if (!isHostPrefixConditionMet(cookie)) {\n /* Check host prefix condition */\n errorFound = true;\n errorMsg =\n \"Cookie has __Host prefix but either Secure or HostOnly attribute is not set or Path is not '/'\";\n }\n if (errorFound) {\n return cb(\n options.ignoreError || ignoreErrorForPrefixSecurity\n ? 
null\n : new Error(errorMsg)\n );\n }\n }\n\n const store = this.store;\n\n if (!store.updateCookie) {\n store.updateCookie = function(oldCookie, newCookie, cb) {\n this.putCookie(newCookie, cb);\n };\n }\n\n function withCookie(err, oldCookie) {\n if (err) {\n return cb(err);\n }\n\n const next = function(err) {\n if (err) {\n return cb(err);\n } else {\n cb(null, cookie);\n }\n };\n\n if (oldCookie) {\n // S5.3 step 11 - \"If the cookie store contains a cookie with the same name,\n // domain, and path as the newly created cookie:\"\n if (options.http === false && oldCookie.httpOnly) {\n // step 11.2\n err = new Error(\"old Cookie is HttpOnly and this isn't an HTTP API\");\n return cb(options.ignoreError ? null : err);\n }\n cookie.creation = oldCookie.creation; // step 11.3\n cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker\n cookie.lastAccessed = now;\n // Step 11.4 (delete cookie) is implied by just setting the new one:\n store.updateCookie(oldCookie, cookie, next); // step 12\n } else {\n cookie.creation = cookie.lastAccessed = now;\n store.putCookie(cookie, next); // step 12\n }\n }\n\n store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie);\n }\n\n // RFC6365 S5.4\n getCookies(url, options, cb) {\n const context = getCookieContext(url);\n if (typeof options === \"function\") {\n cb = options;\n options = {};\n }\n\n const host = canonicalDomain(context.hostname);\n const path = context.pathname || \"/\";\n\n let secure = options.secure;\n if (\n secure == null &&\n context.protocol &&\n (context.protocol == \"https:\" || context.protocol == \"wss:\")\n ) {\n secure = true;\n }\n\n let sameSiteLevel = 0;\n if (options.sameSiteContext) {\n const sameSiteContext = checkSameSiteContext(options.sameSiteContext);\n sameSiteLevel = Cookie.sameSiteLevel[sameSiteContext];\n if (!sameSiteLevel) {\n return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR));\n }\n }\n\n let http = options.http;\n if (http == null) {\n http = true;\n }\n\n const now = options.now || Date.now();\n const expireCheck = options.expire !== false;\n const allPaths = !!options.allPaths;\n const store = this.store;\n\n function matchingCookie(c) {\n // \"Either:\n // The cookie's host-only-flag is true and the canonicalized\n // request-host is identical to the cookie's domain.\n // Or:\n // The cookie's host-only-flag is false and the canonicalized\n // request-host domain-matches the cookie's domain.\"\n if (c.hostOnly) {\n if (c.domain != host) {\n return false;\n }\n } else {\n if (!domainMatch(host, c.domain, false)) {\n return false;\n }\n }\n\n // \"The request-uri's path path-matches the cookie's path.\"\n if (!allPaths && !pathMatch(path, c.path)) {\n return false;\n }\n\n // \"If the cookie's secure-only-flag is true, then the request-uri's\n // scheme must denote a \"secure\" protocol\"\n if (c.secure && !secure) {\n return false;\n }\n\n // \"If the cookie's http-only-flag is true, then exclude the cookie if the\n // cookie-string is being generated for a \"non-HTTP\" API\"\n if (c.httpOnly && !http) {\n return false;\n }\n\n // RFC6265bis-02 S5.3.7\n if (sameSiteLevel) {\n const cookieLevel = Cookie.sameSiteLevel[c.sameSite || \"none\"];\n if (cookieLevel > sameSiteLevel) {\n // only allow cookies at or below the request level\n return false;\n }\n }\n\n // deferred from S5.3\n // non-RFC: allow retention of expired cookies by choice\n if (expireCheck && c.expiryTime() <= now) {\n store.removeCookie(c.domain, c.path, c.key, () => {}); // result ignored\n return false;\n }\n\n 
return true;\n }\n\n store.findCookies(\n host,\n allPaths ? null : path,\n this.allowSpecialUseDomain,\n (err, cookies) => {\n if (err) {\n return cb(err);\n }\n\n cookies = cookies.filter(matchingCookie);\n\n // sorting of S5.4 part 2\n if (options.sort !== false) {\n cookies = cookies.sort(cookieCompare);\n }\n\n // S5.4 part 3\n const now = new Date();\n for (const cookie of cookies) {\n cookie.lastAccessed = now;\n }\n // TODO persist lastAccessed\n\n cb(null, cookies);\n }\n );\n }\n\n getCookieString(...args) {\n const cb = args.pop();\n const next = function(err, cookies) {\n if (err) {\n cb(err);\n } else {\n cb(\n null,\n cookies\n .sort(cookieCompare)\n .map(c => c.cookieString())\n .join(\"; \")\n );\n }\n };\n args.push(next);\n this.getCookies.apply(this, args);\n }\n\n getSetCookieStrings(...args) {\n const cb = args.pop();\n const next = function(err, cookies) {\n if (err) {\n cb(err);\n } else {\n cb(\n null,\n cookies.map(c => {\n return c.toString();\n })\n );\n }\n };\n args.push(next);\n this.getCookies.apply(this, args);\n }\n\n serialize(cb) {\n let type = this.store.constructor.name;\n if (type === \"Object\") {\n type = null;\n }\n\n // update README.md \"Serialization Format\" if you change this, please!\n const serialized = {\n // The version of tough-cookie that serialized this jar. Generally a good\n // practice since future versions can make data import decisions based on\n // known past behavior. When/if this matters, use `semver`.\n version: `tough-cookie@${VERSION}`,\n\n // add the store type, to make humans happy:\n storeType: type,\n\n // CookieJar configuration:\n rejectPublicSuffixes: !!this.rejectPublicSuffixes,\n\n // this gets filled from getAllCookies:\n cookies: []\n };\n\n if (\n !(\n this.store.getAllCookies &&\n typeof this.store.getAllCookies === \"function\"\n )\n ) {\n return cb(\n new Error(\n \"store does not support getAllCookies and cannot be serialized\"\n )\n );\n }\n\n this.store.getAllCookies((err, cookies) => {\n if (err) {\n return cb(err);\n }\n\n serialized.cookies = cookies.map(cookie => {\n // convert to serialized 'raw' cookies\n cookie = cookie instanceof Cookie ? 
cookie.toJSON() : cookie;\n\n // Remove the index so new ones get assigned during deserialization\n delete cookie.creationIndex;\n\n return cookie;\n });\n\n return cb(null, serialized);\n });\n }\n\n toJSON() {\n return this.serializeSync();\n }\n\n // use the class method CookieJar.deserialize instead of calling this directly\n _importCookies(serialized, cb) {\n let cookies = serialized.cookies;\n if (!cookies || !Array.isArray(cookies)) {\n return cb(new Error(\"serialized jar has no cookies array\"));\n }\n cookies = cookies.slice(); // do not modify the original\n\n const putNext = err => {\n if (err) {\n return cb(err);\n }\n\n if (!cookies.length) {\n return cb(err, this);\n }\n\n let cookie;\n try {\n cookie = fromJSON(cookies.shift());\n } catch (e) {\n return cb(e);\n }\n\n if (cookie === null) {\n return putNext(null); // skip this cookie\n }\n\n this.store.putCookie(cookie, putNext);\n };\n\n putNext();\n }\n\n clone(newStore, cb) {\n if (arguments.length === 1) {\n cb = newStore;\n newStore = null;\n }\n\n this.serialize((err, serialized) => {\n if (err) {\n return cb(err);\n }\n CookieJar.deserialize(serialized, newStore, cb);\n });\n }\n\n cloneSync(newStore) {\n if (arguments.length === 0) {\n return this._cloneSync();\n }\n if (!newStore.synchronous) {\n throw new Error(\n \"CookieJar clone destination store is not synchronous; use async API instead.\"\n );\n }\n return this._cloneSync(newStore);\n }\n\n removeAllCookies(cb) {\n const store = this.store;\n\n // Check that the store implements its own removeAllCookies(). The default\n // implementation in Store will immediately call the callback with a \"not\n // implemented\" Error.\n if (\n typeof store.removeAllCookies === \"function\" &&\n store.removeAllCookies !== Store.prototype.removeAllCookies\n ) {\n return store.removeAllCookies(cb);\n }\n\n store.getAllCookies((err, cookies) => {\n if (err) {\n return cb(err);\n }\n\n if (cookies.length === 0) {\n return cb(null);\n }\n\n let completedCount = 0;\n const removeErrors = [];\n\n function removeCookieCb(removeErr) {\n if (removeErr) {\n removeErrors.push(removeErr);\n }\n\n completedCount++;\n\n if (completedCount === cookies.length) {\n return cb(removeErrors.length ? removeErrors[0] : null);\n }\n }\n\n cookies.forEach(cookie => {\n store.removeCookie(\n cookie.domain,\n cookie.path,\n cookie.key,\n removeCookieCb\n );\n });\n });\n }\n\n static deserialize(strOrObj, store, cb) {\n if (arguments.length !== 3) {\n // store is optional\n cb = store;\n store = null;\n }\n\n let serialized;\n if (typeof strOrObj === \"string\") {\n serialized = jsonParse(strOrObj);\n if (serialized instanceof Error) {\n return cb(serialized);\n }\n } else {\n serialized = strOrObj;\n }\n\n const jar = new CookieJar(store, serialized.rejectPublicSuffixes);\n jar._importCookies(serialized, err => {\n if (err) {\n return cb(err);\n }\n cb(null, jar);\n });\n }\n\n static deserializeSync(strOrObj, store) {\n const serialized =\n typeof strOrObj === \"string\" ? 
JSON.parse(strOrObj) : strOrObj;\n const jar = new CookieJar(store, serialized.rejectPublicSuffixes);\n\n // catch this mistake early:\n if (!jar.store.synchronous) {\n throw new Error(\n \"CookieJar store is not synchronous; use async API instead.\"\n );\n }\n\n jar._importCookiesSync(serialized);\n return jar;\n }\n}\nCookieJar.fromJSON = CookieJar.deserializeSync;\n\n[\n \"_importCookies\",\n \"clone\",\n \"getCookies\",\n \"getCookieString\",\n \"getSetCookieStrings\",\n \"removeAllCookies\",\n \"serialize\",\n \"setCookie\"\n].forEach(name => {\n CookieJar.prototype[name] = fromCallback(CookieJar.prototype[name]);\n});\nCookieJar.deserialize = fromCallback(CookieJar.deserialize);\n\n// Use a closure to provide a true imperative API for synchronous stores.\nfunction syncWrap(method) {\n return function(...args) {\n if (!this.store.synchronous) {\n throw new Error(\n \"CookieJar store is not synchronous; use async API instead.\"\n );\n }\n\n let syncErr, syncResult;\n this[method](...args, (err, result) => {\n syncErr = err;\n syncResult = result;\n });\n\n if (syncErr) {\n throw syncErr;\n }\n return syncResult;\n };\n}\n\nexports.version = VERSION;\nexports.CookieJar = CookieJar;\nexports.Cookie = Cookie;\nexports.Store = Store;\nexports.MemoryCookieStore = MemoryCookieStore;\nexports.parseDate = parseDate;\nexports.formatDate = formatDate;\nexports.parse = parse;\nexports.fromJSON = fromJSON;\nexports.domainMatch = domainMatch;\nexports.defaultPath = defaultPath;\nexports.pathMatch = pathMatch;\nexports.getPublicSuffix = pubsuffix.getPublicSuffix;\nexports.cookieCompare = cookieCompare;\nexports.permuteDomain = require(\"./permuteDomain\").permuteDomain;\nexports.permutePath = permutePath;\nexports.canonicalDomain = canonicalDomain;\nexports.PrefixSecurityEnum = PrefixSecurityEnum;\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\nconst { fromCallback } = require(\"universalify\");\nconst Store = require(\"./store\").Store;\nconst permuteDomain = require(\"./permuteDomain\").permuteDomain;\nconst pathMatch = require(\"./pathMatch\").pathMatch;\nconst util = require(\"util\");\n\nclass MemoryCookieStore extends Store {\n constructor() {\n super();\n this.synchronous = true;\n this.idx = {};\n if (util.inspect.custom) {\n this[util.inspect.custom] = this.inspect;\n }\n }\n\n inspect() {\n return `{ idx: ${util.inspect(this.idx, false, 2)} }`;\n }\n\n findCookie(domain, path, key, cb) {\n if (!this.idx[domain]) {\n return cb(null, undefined);\n }\n if (!this.idx[domain][path]) {\n return cb(null, undefined);\n }\n return cb(null, this.idx[domain][path][key] || null);\n }\n findCookies(domain, path, allowSpecialUseDomain, cb) {\n const results = [];\n if (typeof allowSpecialUseDomain === \"function\") {\n cb = allowSpecialUseDomain;\n allowSpecialUseDomain = false;\n }\n if (!domain) {\n return cb(null, []);\n }\n\n let pathMatcher;\n if (!path) {\n // null means \"all paths\"\n pathMatcher = function matchAll(domainIndex) {\n for (const curPath in domainIndex) {\n const pathIndex = domainIndex[curPath];\n for (const key in pathIndex) {\n results.push(pathIndex[key]);\n }\n }\n };\n } else {\n pathMatcher = function matchRFC(domainIndex) {\n //NOTE: we should use path-match algorithm from S5.1.4 here\n //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299)\n Object.keys(domainIndex).forEach(cookiePath => {\n if (pathMatch(path, cookiePath)) {\n const pathIndex = domainIndex[cookiePath];\n for (const key in pathIndex) {\n results.push(pathIndex[key]);\n }\n }\n });\n };\n }\n\n const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain];\n const idx = this.idx;\n domains.forEach(curDomain => {\n const domainIndex = idx[curDomain];\n if (!domainIndex) {\n return;\n }\n pathMatcher(domainIndex);\n });\n\n cb(null, results);\n }\n\n putCookie(cookie, cb) {\n if (!this.idx[cookie.domain]) {\n this.idx[cookie.domain] = {};\n }\n if (!this.idx[cookie.domain][cookie.path]) {\n this.idx[cookie.domain][cookie.path] = {};\n }\n this.idx[cookie.domain][cookie.path][cookie.key] = cookie;\n cb(null);\n }\n updateCookie(oldCookie, newCookie, cb) {\n // updateCookie() may avoid updating cookies that are identical. 
For example,\n // lastAccessed may not be important to some stores and an equality\n // comparison could exclude that field.\n this.putCookie(newCookie, cb);\n }\n removeCookie(domain, path, key, cb) {\n if (\n this.idx[domain] &&\n this.idx[domain][path] &&\n this.idx[domain][path][key]\n ) {\n delete this.idx[domain][path][key];\n }\n cb(null);\n }\n removeCookies(domain, path, cb) {\n if (this.idx[domain]) {\n if (path) {\n delete this.idx[domain][path];\n } else {\n delete this.idx[domain];\n }\n }\n return cb(null);\n }\n removeAllCookies(cb) {\n this.idx = {};\n return cb(null);\n }\n getAllCookies(cb) {\n const cookies = [];\n const idx = this.idx;\n\n const domains = Object.keys(idx);\n domains.forEach(domain => {\n const paths = Object.keys(idx[domain]);\n paths.forEach(path => {\n const keys = Object.keys(idx[domain][path]);\n keys.forEach(key => {\n if (key !== null) {\n cookies.push(idx[domain][path][key]);\n }\n });\n });\n });\n\n // Sort by creationIndex so deserializing retains the creation order.\n // When implementing your own store, this SHOULD retain the order too\n cookies.sort((a, b) => {\n return (a.creationIndex || 0) - (b.creationIndex || 0);\n });\n\n cb(null, cookies);\n }\n}\n\n[\n \"findCookie\",\n \"findCookies\",\n \"putCookie\",\n \"updateCookie\",\n \"removeCookie\",\n \"removeCookies\",\n \"removeAllCookies\",\n \"getAllCookies\"\n].forEach(name => {\n MemoryCookieStore[name] = fromCallback(MemoryCookieStore.prototype[name]);\n});\n\nexports.MemoryCookieStore = MemoryCookieStore;\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\n/*\n * \"A request-path path-matches a given cookie-path if at least one of the\n * following conditions holds:\"\n */\nfunction pathMatch(reqPath, cookiePath) {\n // \"o The cookie-path and the request-path are identical.\"\n if (cookiePath === reqPath) {\n return true;\n }\n\n const idx = reqPath.indexOf(cookiePath);\n if (idx === 0) {\n // \"o The cookie-path is a prefix of the request-path, and the last\n // character of the cookie-path is %x2F (\"/\").\"\n if (cookiePath.substr(-1) === \"/\") {\n return true;\n }\n\n // \" o The cookie-path is a prefix of the request-path, and the first\n // character of the request-path that is not included in the cookie- path\n // is a %x2F (\"/\") character.\"\n if (reqPath.substr(cookiePath.length, 1) === \"/\") {\n return true;\n }\n }\n\n return false;\n}\n\nexports.pathMatch = pathMatch;\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\nconst pubsuffix = require(\"./pubsuffix-psl\");\n\n// Gives the permutation of all possible domainMatch()es of a given domain. The\n// array is in shortest-to-longest order. 
Handy for indexing.\nconst SPECIAL_USE_DOMAINS = [\"local\"]; // RFC 6761\nfunction permuteDomain(domain, allowSpecialUseDomain) {\n let pubSuf = null;\n if (allowSpecialUseDomain) {\n const domainParts = domain.split(\".\");\n if (SPECIAL_USE_DOMAINS.includes(domainParts[domainParts.length - 1])) {\n pubSuf = `${domainParts[domainParts.length - 2]}.${\n domainParts[domainParts.length - 1]\n }`;\n } else {\n pubSuf = pubsuffix.getPublicSuffix(domain);\n }\n } else {\n pubSuf = pubsuffix.getPublicSuffix(domain);\n }\n\n if (!pubSuf) {\n return null;\n }\n if (pubSuf == domain) {\n return [domain];\n }\n\n const prefix = domain.slice(0, -(pubSuf.length + 1)); // \".example.com\"\n const parts = prefix.split(\".\").reverse();\n let cur = pubSuf;\n const permutations = [cur];\n while (parts.length) {\n cur = `${parts.shift()}.${cur}`;\n permutations.push(cur);\n }\n return permutations;\n}\n\nexports.permuteDomain = permuteDomain;\n","/*!\n * Copyright (c) 2018, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\nconst psl = require(\"psl\");\n\nfunction getPublicSuffix(domain) {\n return psl.get(domain);\n}\n\nexports.getPublicSuffix = getPublicSuffix;\n","/*!\n * Copyright (c) 2015, Salesforce.com, Inc.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * 3. 
Neither the name of Salesforce.com nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n */\n\"use strict\";\n/*jshint unused:false */\n\nclass Store {\n constructor() {\n this.synchronous = false;\n }\n\n findCookie(domain, path, key, cb) {\n throw new Error(\"findCookie is not implemented\");\n }\n\n findCookies(domain, path, allowSpecialUseDomain, cb) {\n throw new Error(\"findCookies is not implemented\");\n }\n\n putCookie(cookie, cb) {\n throw new Error(\"putCookie is not implemented\");\n }\n\n updateCookie(oldCookie, newCookie, cb) {\n // recommended default implementation:\n // return this.putCookie(newCookie, cb);\n throw new Error(\"updateCookie is not implemented\");\n }\n\n removeCookie(domain, path, key, cb) {\n throw new Error(\"removeCookie is not implemented\");\n }\n\n removeCookies(domain, path, cb) {\n throw new Error(\"removeCookies is not implemented\");\n }\n\n removeAllCookies(cb) {\n throw new Error(\"removeAllCookies is not implemented\");\n }\n\n getAllCookies(cb) {\n throw new Error(\n \"getAllCookies is not implemented (therefore jar cannot be serialized)\"\n );\n }\n}\n\nexports.Store = Store;\n","// generated by genversion\nmodule.exports = '4.0.0'\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","import crypto from 'crypto';\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\nexport default function rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n crypto.randomFillSync(rnds8Pool);\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;","import REGEX from './regex.js';\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && REGEX.test(uuid);\n}\n\nexport default validate;","import validate from './validate.js';\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\n\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. 
If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!validate(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nexport default stringify;","import rng from './rng.js';\nimport stringify from './stringify.js'; // **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\n\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || rng)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || stringify(b);\n}\n\nexport default v1;","import validate from './validate.js';\n\nfunction parse(uuid) {\n if (!validate(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nexport default parse;","import stringify from './stringify.js';\nimport parse from './parse.js';\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nexport const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexport const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexport default function (name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = parse(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. 
`bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return stringify(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","import crypto from 'crypto';\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return crypto.createHash('md5').update(bytes).digest();\n}\n\nexport default md5;","import v35 from './v35.js';\nimport md5 from './md5.js';\nconst v3 = v35('v3', 0x30, md5);\nexport default v3;","import rng from './rng.js';\nimport stringify from './stringify.js';\n\nfunction v4(options, buf, offset) {\n options = options || {};\n const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return stringify(rnds);\n}\n\nexport default v4;","import crypto from 'crypto';\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return crypto.createHash('sha1').update(bytes).digest();\n}\n\nexport default sha1;","import v35 from './v35.js';\nimport sha1 from './sha1.js';\nconst v5 = v35('v5', 0x50, sha1);\nexport default v5;","export default '00000000-0000-0000-0000-000000000000';","import validate from './validate.js';\n\nfunction version(uuid) {\n if (!validate(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nexport default version;","export { default as v1 } from './v1.js';\nexport { default as v3 } from './v3.js';\nexport { default as v4 } from './v4.js';\nexport { default as v5 } from './v5.js';\nexport { default as NIL } from './nil.js';\nexport { default as version } from './version.js';\nexport { default as validate } from './validate.js';\nexport { default as stringify } from './stringify.js';\nexport { default as parse } from './parse.js';","/*!\n * Copyright (c) Microsoft and contributors. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for\n * license information.\n * \n * Azure Core LRO SDK for JavaScript - 1.0.3\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar tslib = require('tslib');\n\n// Copyright (c) Microsoft Corporation.\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nvar PollerStoppedError = /** @class */ (function (_super) {\n tslib.__extends(PollerStoppedError, _super);\n function PollerStoppedError(message) {\n var _this = _super.call(this, message) || this;\n _this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(_this, PollerStoppedError.prototype);\n return _this;\n }\n return PollerStoppedError;\n}(Error));\n/**\n * When a poller is cancelled through the `cancelOperation` method,\n * the poller will be rejected with an instance of the PollerCancelledError.\n */\nvar PollerCancelledError = /** @class */ (function (_super) {\n tslib.__extends(PollerCancelledError, _super);\n function PollerCancelledError(message) {\n var _this = _super.call(this, message) || this;\n _this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(_this, PollerCancelledError.prototype);\n return _this;\n }\n return PollerCancelledError;\n}(Error));\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * 
const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nvar Poller = /** @class */ (function () {\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. 
For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n function Poller(operation) {\n var _this = this;\n this.stopped = true;\n this.pollProgressCallbacks = [];\n this.operation = operation;\n this.promise = new Promise(function (resolve, reject) {\n _this.resolve = resolve;\n _this.reject = reject;\n });\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(function () {\n /* intentionally blank */\n });\n }\n /**\n * @internal\n * @hidden\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n Poller.prototype.startPolling = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (this.stopped) {\n this.stopped = false;\n }\n _a.label = 1;\n case 1:\n if (!(!this.isStopped() && !this.isDone())) return [3 /*break*/, 4];\n return [4 /*yield*/, this.poll()];\n case 2:\n _a.sent();\n return [4 /*yield*/, this.delay()];\n case 3:\n _a.sent();\n return [3 /*break*/, 1];\n case 4: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * @internal\n * @hidden\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n Poller.prototype.pollOnce = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var state, _a, e_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n state = this.operation.state;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 4, , 5]);\n if (!!this.isDone()) return [3 /*break*/, 3];\n _a = this;\n return [4 /*yield*/, this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this)\n })];\n case 2:\n _a.operation = _b.sent();\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. 
To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(state.result);\n }\n _b.label = 3;\n case 3: return [3 /*break*/, 5];\n case 4:\n e_1 = _b.sent();\n state.error = e_1;\n if (this.reject) {\n this.reject(e_1);\n }\n throw e_1;\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * @internal\n * @hidden\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n Poller.prototype.fireProgress = function (state) {\n for (var _i = 0, _a = this.pollProgressCallbacks; _i < _a.length; _i++) {\n var callback = _a[_i];\n callback(state);\n }\n };\n /**\n * @internal\n * @hidden\n * Invokes the underlying operation's cancel method, and rejects the\n * pollUntilDone promise.\n */\n Poller.prototype.cancelOnce = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = this;\n return [4 /*yield*/, this.operation.cancel(options)];\n case 1:\n _a.operation = _b.sent();\n if (this.reject) {\n this.reject(new PollerCancelledError(\"Poller cancelled\"));\n }\n return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n Poller.prototype.poll = function (options) {\n var _this = this;\n if (options === void 0) { options = {}; }\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n var clearPollOncePromise = function () {\n _this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n };\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n Poller.prototype.pollUntilDone = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n if (this.stopped) {\n this.startPolling().catch(this.reject);\n }\n return [2 /*return*/, this.promise];\n });\n });\n };\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n Poller.prototype.onProgress = function (callback) {\n var _this = this;\n this.pollProgressCallbacks.push(callback);\n return function () {\n _this.pollProgressCallbacks = _this.pollProgressCallbacks.filter(function (c) { return c !== callback; });\n };\n };\n /**\n * Returns true if the poller has finished polling.\n */\n Poller.prototype.isDone = function () {\n var state = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n };\n /**\n * Stops the poller from continuing to poll.\n */\n Poller.prototype.stopPolling = function () {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new 
PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n };\n /**\n * Returns true if the poller is stopped.\n */\n Poller.prototype.isStopped = function () {\n return this.stopped;\n };\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n Poller.prototype.cancelOperation = function (options) {\n if (options === void 0) { options = {}; }\n if (!this.stopped) {\n this.stopped = true;\n }\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n }\n else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n };\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n Poller.prototype.getOperationState = function () {\n return this.operation.state;\n };\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n Poller.prototype.getResult = function () {\n var state = this.operation.state;\n return state.result;\n };\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n Poller.prototype.toString = function () {\n return this.operation.toString();\n };\n return Poller;\n}());\n\nexports.Poller = Poller;\nexports.PollerCancelledError = PollerCancelledError;\nexports.PollerStoppedError = PollerStoppedError;\n//# sourceMappingURL=index.js.map\n","/*! 
*****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","\"use strict\";\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nrequire(\"@azure/core-asynciterator-polyfill\");\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar api = require('@opentelemetry/api');\nvar tslib = require('tslib');\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A no-op implementation of Span that can safely be used without side-effects.\n */\nvar NoOpSpan = /** @class */ (function () {\n function NoOpSpan() {\n }\n /**\n * Returns the SpanContext associated with this Span.\n */\n NoOpSpan.prototype.context = function () {\n return {\n spanId: \"\",\n traceId: \"\",\n traceFlags: api.TraceFlags.NONE\n };\n };\n /**\n * Marks the end of Span execution.\n * @param _endTime The time to use as the Span's end time. Defaults to\n * the current time.\n */\n NoOpSpan.prototype.end = function (_endTime) {\n /* Noop */\n };\n /**\n * Sets an attribute on the Span\n * @param _key the attribute key\n * @param _value the attribute value\n */\n NoOpSpan.prototype.setAttribute = function (_key, _value) {\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param _attributes the attributes to add\n */\n NoOpSpan.prototype.setAttributes = function (_attributes) {\n return this;\n };\n /**\n * Adds an event to the Span\n * @param _name The name of the event\n * @param _attributes The associated attributes to add for this event\n */\n NoOpSpan.prototype.addEvent = function (_name, _attributes) {\n return this;\n };\n /**\n * Sets a status on the span. 
Overrides the default of CanonicalCode.OK.\n * @param _status The status to set.\n */\n NoOpSpan.prototype.setStatus = function (_status) {\n return this;\n };\n /**\n * Updates the name of the Span\n * @param _name the new Span name\n */\n NoOpSpan.prototype.updateName = function (_name) {\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n NoOpSpan.prototype.isRecording = function () {\n return false;\n };\n return NoOpSpan;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A no-op implementation of Tracer that can be used when tracing\n * is disabled.\n */\nvar NoOpTracer = /** @class */ (function () {\n function NoOpTracer() {\n }\n /**\n * Starts a new Span.\n * @param _name The name of the span.\n * @param _options The SpanOptions used during Span creation.\n */\n NoOpTracer.prototype.startSpan = function (_name, _options) {\n return new NoOpSpan();\n };\n /**\n * Returns the current Span from the current context, if available.\n */\n NoOpTracer.prototype.getCurrentSpan = function () {\n return new NoOpSpan();\n };\n /**\n * Executes the given function within the context provided by a Span.\n * @param _span The span that provides the context.\n * @param fn The function to be executed.\n */\n NoOpTracer.prototype.withSpan = function (_span, fn) {\n return fn();\n };\n /**\n * Bind a Span as the target's scope\n * @param target An object to bind the scope.\n * @param _span A specific Span to use. Otherwise, use the current one.\n */\n NoOpTracer.prototype.bind = function (target, _span) {\n return target;\n };\n return NoOpTracer;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nfunction getGlobalObject() {\n return global;\n}\n\n// Copyright (c) Microsoft Corporation.\n// V1 = OpenTelemetry 0.1\n// V2 = OpenTelemetry 0.2\n// V3 = OpenTelemetry 0.6.1\nvar GLOBAL_TRACER_VERSION = 3;\n// preview5 shipped with @azure/core-tracing.tracerCache\n// and didn't have smart detection for collisions\nvar GLOBAL_TRACER_SYMBOL = Symbol.for(\"@azure/core-tracing.tracerCache2\");\nvar cache;\nfunction loadTracerCache() {\n var globalObj = getGlobalObject();\n var existingCache = globalObj[GLOBAL_TRACER_SYMBOL];\n var setGlobalCache = true;\n if (existingCache) {\n if (existingCache.version === GLOBAL_TRACER_VERSION) {\n cache = existingCache;\n }\n else {\n setGlobalCache = false;\n if (existingCache.tracer) {\n throw new Error(\"Two incompatible versions of @azure/core-tracing have been loaded.\\n This library is \" + GLOBAL_TRACER_VERSION + \", existing is \" + existingCache.version + \".\");\n }\n }\n }\n if (!cache) {\n cache = {\n tracer: undefined,\n version: GLOBAL_TRACER_VERSION\n };\n }\n if (setGlobalCache) {\n globalObj[GLOBAL_TRACER_SYMBOL] = cache;\n }\n}\nfunction getCache() {\n if (!cache) {\n loadTracerCache();\n }\n return cache;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar defaultTracer;\nfunction getDefaultTracer() {\n if (!defaultTracer) {\n defaultTracer = new NoOpTracer();\n }\n return defaultTracer;\n}\n/**\n * Sets the global tracer, enabling tracing for the Azure SDK.\n * @param tracer An OpenTelemetry Tracer instance.\n */\nfunction setTracer(tracer) {\n var cache = getCache();\n cache.tracer = tracer;\n}\n/**\n * Retrieves the active tracer, or returns a\n * no-op implementation if one is not set.\n */\nfunction getTracer() {\n var cache = getCache();\n if (!cache.tracer) {\n return getDefaultTracer();\n }\n return cache.tracer;\n}\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under 
the MIT license.\n/**\n * @ignore\n * @internal\n */\nvar OpenCensusTraceStateWrapper = /** @class */ (function () {\n function OpenCensusTraceStateWrapper(state) {\n this._state = state;\n }\n OpenCensusTraceStateWrapper.prototype.get = function (_key) {\n throw new Error(\"Method not implemented.\");\n };\n OpenCensusTraceStateWrapper.prototype.set = function (_key, _value) {\n throw new Error(\"Method not implemented.\");\n };\n OpenCensusTraceStateWrapper.prototype.unset = function (_key) {\n throw new Error(\"Method not implemented\");\n };\n OpenCensusTraceStateWrapper.prototype.serialize = function () {\n return this._state || \"\";\n };\n return OpenCensusTraceStateWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\nfunction isWrappedSpan(span) {\n return !!span && span.getWrappedSpan !== undefined;\n}\nfunction isTracer(tracerOrSpan) {\n return tracerOrSpan.getWrappedTracer !== undefined;\n}\n/**\n * An implementation of OpenTelemetry Span that wraps an OpenCensus Span.\n */\nvar OpenCensusSpanWrapper = /** @class */ (function () {\n function OpenCensusSpanWrapper(tracerOrSpan, name, options) {\n if (name === void 0) { name = \"\"; }\n if (options === void 0) { options = {}; }\n if (isTracer(tracerOrSpan)) {\n var parent = isWrappedSpan(options.parent) ? options.parent.getWrappedSpan() : undefined;\n this._span = tracerOrSpan.getWrappedTracer().startChildSpan({\n name: name,\n childOf: parent\n });\n this._span.start();\n if (options.links) {\n for (var _i = 0, _a = options.links; _i < _a.length; _i++) {\n var link = _a[_i];\n // Since there is no way to set the link relationship, leave it as Unspecified.\n this._span.addLink(link.context.traceId, link.context.spanId, 0 /* LinkType.UNSPECIFIED */, link.attributes);\n }\n }\n }\n else {\n this._span = tracerOrSpan;\n }\n }\n /**\n * The underlying OpenCensus Span\n */\n OpenCensusSpanWrapper.prototype.getWrappedSpan = function () {\n return this._span;\n };\n /**\n * Marks the end of Span execution.\n * @param endTime The time to use as the Span's end time. Defaults to\n * the current time.\n */\n OpenCensusSpanWrapper.prototype.end = function (_endTime) {\n this._span.end();\n };\n /**\n * Returns the SpanContext associated with this Span.\n */\n OpenCensusSpanWrapper.prototype.context = function () {\n var openCensusSpanContext = this._span.spanContext;\n return {\n spanId: openCensusSpanContext.spanId,\n traceId: openCensusSpanContext.traceId,\n traceFlags: openCensusSpanContext.options,\n traceState: new OpenCensusTraceStateWrapper(openCensusSpanContext.traceState)\n };\n };\n /**\n * Sets an attribute on the Span\n * @param key the attribute key\n * @param value the attribute value\n */\n OpenCensusSpanWrapper.prototype.setAttribute = function (key, value) {\n this._span.addAttribute(key, value);\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param attributes the attributes to add\n */\n OpenCensusSpanWrapper.prototype.setAttributes = function (attributes) {\n this._span.attributes = attributes;\n return this;\n };\n /**\n * Adds an event to the Span\n * @param name The name of the event\n * @param attributes The associated attributes to add for this event\n */\n OpenCensusSpanWrapper.prototype.addEvent = function (_name, _attributes) {\n throw new Error(\"Method not implemented.\");\n };\n /**\n * Sets a status on the span. 
Overrides the default of CanonicalCode.OK.\n * @param status The status to set.\n */\n OpenCensusSpanWrapper.prototype.setStatus = function (status) {\n this._span.setStatus(status.code, status.message);\n return this;\n };\n /**\n * Updates the name of the Span\n * @param name the new Span name\n */\n OpenCensusSpanWrapper.prototype.updateName = function (name) {\n this._span.name = name;\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n OpenCensusSpanWrapper.prototype.isRecording = function () {\n // NoRecordSpans have an empty traceId\n return !!this._span.traceId;\n };\n return OpenCensusSpanWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * An implementation of OpenTelemetry Tracer that wraps an OpenCensus Tracer.\n */\nvar OpenCensusTracerWrapper = /** @class */ (function () {\n /**\n * Create a new wrapper around a given OpenCensus Tracer.\n * @param tracer The OpenCensus Tracer to wrap.\n */\n function OpenCensusTracerWrapper(tracer) {\n this._tracer = tracer;\n }\n /**\n * The wrapped OpenCensus Tracer\n */\n OpenCensusTracerWrapper.prototype.getWrappedTracer = function () {\n return this._tracer;\n };\n /**\n * Starts a new Span.\n * @param name The name of the span.\n * @param options The SpanOptions used during Span creation.\n */\n OpenCensusTracerWrapper.prototype.startSpan = function (name, options) {\n return new OpenCensusSpanWrapper(this, name, options);\n };\n /**\n * Returns the current Span from the current context, if available.\n */\n OpenCensusTracerWrapper.prototype.getCurrentSpan = function () {\n return undefined;\n };\n /**\n * Executes the given function within the context provided by a Span.\n * @param _span The span that provides the context.\n * @param _fn The function to be executed.\n */\n OpenCensusTracerWrapper.prototype.withSpan = function (_span, _fn) {\n throw new Error(\"Method not implemented.\");\n };\n /**\n * Bind a Span as the target's scope\n * @param target An object to bind the scope.\n * @param _span A specific Span to use. 
Otherwise, use the current one.\n */\n OpenCensusTracerWrapper.prototype.bind = function (_target, _span) {\n throw new Error(\"Method not implemented.\");\n };\n return OpenCensusTracerWrapper;\n}());\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A mock span useful for testing.\n */\nvar TestSpan = /** @class */ (function (_super) {\n tslib.__extends(TestSpan, _super);\n /**\n * Starts a new Span.\n * @param parentTracer The tracer that created this Span\n * @param name The name of the span.\n * @param context The SpanContext this span belongs to\n * @param kind The SpanKind of this Span\n * @param parentSpanId The identifier of the parent Span\n * @param startTime The startTime of the event (defaults to now)\n */\n function TestSpan(parentTracer, name, context, kind, parentSpanId, startTime) {\n if (startTime === void 0) { startTime = Date.now(); }\n var _this = _super.call(this) || this;\n _this._tracer = parentTracer;\n _this.name = name;\n _this.kind = kind;\n _this.startTime = startTime;\n _this.parentSpanId = parentSpanId;\n _this.status = {\n code: api.CanonicalCode.OK\n };\n _this.endCalled = false;\n _this._context = context;\n _this.attributes = {};\n return _this;\n }\n /**\n * Returns the Tracer that created this Span\n */\n TestSpan.prototype.tracer = function () {\n return this._tracer;\n };\n /**\n * Returns the SpanContext associated with this Span.\n */\n TestSpan.prototype.context = function () {\n return this._context;\n };\n /**\n * Marks the end of Span execution.\n * @param _endTime The time to use as the Span's end time. Defaults to\n * the current time.\n */\n TestSpan.prototype.end = function (_endTime) {\n this.endCalled = true;\n };\n /**\n * Sets a status on the span. Overrides the default of CanonicalCode.OK.\n * @param status The status to set.\n */\n TestSpan.prototype.setStatus = function (status) {\n this.status = status;\n return this;\n };\n /**\n * Returns whether this span will be recorded\n */\n TestSpan.prototype.isRecording = function () {\n return true;\n };\n /**\n * Sets an attribute on the Span\n * @param key the attribute key\n * @param value the attribute value\n */\n TestSpan.prototype.setAttribute = function (key, value) {\n this.attributes[key] = value;\n return this;\n };\n /**\n * Sets attributes on the Span\n * @param attributes the attributes to add\n */\n TestSpan.prototype.setAttributes = function (attributes) {\n for (var _i = 0, _a = Object.keys(attributes); _i < _a.length; _i++) {\n var key = _a[_i];\n this.attributes[key] = attributes[key];\n }\n return this;\n };\n return TestSpan;\n}(NoOpSpan));\n\n// Copyright (c) Microsoft Corporation.\n/**\n * A mock tracer useful for testing\n */\nvar TestTracer = /** @class */ (function (_super) {\n tslib.__extends(TestTracer, _super);\n function TestTracer() {\n var _this = _super !== null && _super.apply(this, arguments) || this;\n _this.traceIdCounter = 0;\n _this.spanIdCounter = 0;\n _this.rootSpans = [];\n _this.knownSpans = [];\n return _this;\n }\n TestTracer.prototype.getNextTraceId = function () {\n this.traceIdCounter++;\n return String(this.traceIdCounter);\n };\n TestTracer.prototype.getNextSpanId = function () {\n this.spanIdCounter++;\n return String(this.spanIdCounter);\n };\n /**\n * Returns all Spans that were created without a parent\n */\n TestTracer.prototype.getRootSpans = function () {\n return this.rootSpans;\n };\n /**\n * Returns all Spans this Tracer knows about\n */\n TestTracer.prototype.getKnownSpans = function () {\n return this.knownSpans;\n };\n /**\n * 
Returns all Spans where end() has not been called\n */\n TestTracer.prototype.getActiveSpans = function () {\n return this.knownSpans.filter(function (span) {\n return !span.endCalled;\n });\n };\n /**\n * Return all Spans for a particular trace, grouped by their\n * parent Span in a tree-like structure\n * @param traceId The traceId to return the graph for\n */\n TestTracer.prototype.getSpanGraph = function (traceId) {\n var traceSpans = this.knownSpans.filter(function (span) {\n return span.context().traceId === traceId;\n });\n var roots = [];\n var nodeMap = new Map();\n for (var _i = 0, traceSpans_1 = traceSpans; _i < traceSpans_1.length; _i++) {\n var span = traceSpans_1[_i];\n var spanId = span.context().spanId;\n var node = {\n name: span.name,\n children: []\n };\n nodeMap.set(spanId, node);\n if (span.parentSpanId) {\n var parent = nodeMap.get(span.parentSpanId);\n if (!parent) {\n throw new Error(\"Span with name \" + node.name + \" has an unknown parentSpan with id \" + span.parentSpanId);\n }\n parent.children.push(node);\n }\n else {\n roots.push(node);\n }\n }\n return {\n roots: roots\n };\n };\n /**\n * Starts a new Span.\n * @param name The name of the span.\n * @param options The SpanOptions used during Span creation.\n */\n TestTracer.prototype.startSpan = function (name, options) {\n if (options === void 0) { options = {}; }\n var parentContext = this._getParentContext(options);\n var traceId;\n var isRootSpan = false;\n if (parentContext && parentContext.traceId) {\n traceId = parentContext.traceId;\n }\n else {\n traceId = this.getNextTraceId();\n isRootSpan = true;\n }\n var context = {\n traceId: traceId,\n spanId: this.getNextSpanId(),\n traceFlags: api.TraceFlags.NONE\n };\n var span = new TestSpan(this, name, context, options.kind || api.SpanKind.INTERNAL, parentContext ? 
parentContext.spanId : undefined, options.startTime);\n this.knownSpans.push(span);\n if (isRootSpan) {\n this.rootSpans.push(span);\n }\n return span;\n };\n TestTracer.prototype._getParentContext = function (options) {\n var parent = options.parent;\n var result;\n if (parent) {\n if (\"traceId\" in parent) {\n result = parent;\n }\n else {\n result = parent.context();\n }\n }\n return result;\n };\n return TestTracer;\n}(NoOpTracer));\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar VERSION = \"00\";\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nfunction extractSpanContextFromTraceParentHeader(traceParentHeader) {\n var parts = traceParentHeader.split(\"-\");\n if (parts.length !== 4) {\n return;\n }\n var version = parts[0], traceId = parts[1], spanId = parts[2], traceOptions = parts[3];\n if (version !== VERSION) {\n return;\n }\n var traceFlags = parseInt(traceOptions, 16);\n var spanContext = {\n spanId: spanId,\n traceId: traceId,\n traceFlags: traceFlags\n };\n return spanContext;\n}\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nfunction getTraceParentHeader(spanContext) {\n var missingFields = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n if (missingFields.length) {\n return;\n }\n var flags = spanContext.traceFlags || 0 /* NONE */;\n var hexFlags = flags.toString(16);\n var traceFlags = hexFlags.length === 1 ? \"0\" + hexFlags : hexFlags;\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return VERSION + \"-\" + spanContext.traceId + \"-\" + spanContext.spanId + \"-\" + traceFlags;\n}\n\nexports.NoOpSpan = NoOpSpan;\nexports.NoOpTracer = NoOpTracer;\nexports.OpenCensusSpanWrapper = OpenCensusSpanWrapper;\nexports.OpenCensusTracerWrapper = OpenCensusTracerWrapper;\nexports.TestSpan = TestSpan;\nexports.TestTracer = TestTracer;\nexports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader;\nexports.getTraceParentHeader = getTraceParentHeader;\nexports.getTracer = getTracer;\nexports.setTracer = setTracer;\n//# sourceMappingURL=index.js.map\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
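The core-tracing helpers bundled above (extractSpanContextFromTraceParentHeader / getTraceParentHeader) implement the W3C traceparent format `<version>-<trace-id>-<parent-id>-<trace-flags>`. A small round-trip sketch, illustrative only; the sample IDs are made-up values and the import path is an assumption:

```ts
// Illustrative sketch only -- not part of dist/index.js.
import {
  extractSpanContextFromTraceParentHeader,
  getTraceParentHeader
} from "@azure/core-tracing";

// Version "00", then trace id, parent span id and flags, all lower-case hex.
const header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01";

const spanContext = extractSpanContextFromTraceParentHeader(header);
if (spanContext) {
  console.log(spanContext.traceId);    // "0af7651916cd43dd8448eb211c80319c"
  console.log(spanContext.spanId);     // "b7ad6b7169203331"
  console.log(spanContext.traceFlags); // 1 (parseInt("01", 16), i.e. sampled)

  // Serializing back pads the flags to two hex digits and restores the header.
  console.log(getTraceParentHeader(spanContext));
  // -> "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"
}
```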
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar tslib = require('tslib');\nvar util = _interopDefault(require('util'));\nvar os = require('os');\n\n// Copyright (c) Microsoft Corporation.\nfunction log(message) {\n var args = [];\n for (var _i = 1; _i < arguments.length; _i++) {\n args[_i - 1] = arguments[_i];\n }\n process.stderr.write(\"\" + util.format.apply(util, tslib.__spread([message], args)) + os.EOL);\n}\n\n// Copyright (c) Microsoft Corporation.\nvar debugEnvVariable = (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\nvar enabledString;\nvar enabledNamespaces = [];\nvar skippedNamespaces = [];\nvar debuggers = [];\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\nvar debugObj = Object.assign(function (namespace) {\n return createDebugger(namespace);\n}, {\n enable: enable,\n enabled: enabled,\n disable: disable,\n log: log\n});\nfunction enable(namespaces) {\n var e_1, _a, e_2, _b;\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n var wildcard = /\\*/g;\n var namespaceList = namespaces.split(\",\").map(function (ns) { return ns.trim().replace(wildcard, \".*?\"); });\n try {\n for (var namespaceList_1 = tslib.__values(namespaceList), namespaceList_1_1 = namespaceList_1.next(); !namespaceList_1_1.done; namespaceList_1_1 = namespaceList_1.next()) {\n var ns = namespaceList_1_1.value;\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(\"^\" + ns.substr(1) + \"$\"));\n }\n else {\n enabledNamespaces.push(new RegExp(\"^\" + ns + \"$\"));\n }\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (namespaceList_1_1 && !namespaceList_1_1.done && (_a = namespaceList_1.return)) _a.call(namespaceList_1);\n }\n finally { if (e_1) throw e_1.error; }\n }\n try {\n for (var debuggers_1 = tslib.__values(debuggers), debuggers_1_1 = debuggers_1.next(); !debuggers_1_1.done; debuggers_1_1 = debuggers_1.next()) {\n var instance = debuggers_1_1.value;\n instance.enabled = enabled(instance.namespace);\n }\n }\n catch (e_2_1) { e_2 = { error: e_2_1 }; }\n finally {\n try {\n if (debuggers_1_1 && !debuggers_1_1.done && (_b = debuggers_1.return)) _b.call(debuggers_1);\n }\n finally { if (e_2) throw e_2.error; }\n }\n}\nfunction enabled(namespace) {\n var e_3, _a, e_4, _b;\n if 
(namespace.endsWith(\"*\")) {\n return true;\n }\n try {\n for (var skippedNamespaces_1 = tslib.__values(skippedNamespaces), skippedNamespaces_1_1 = skippedNamespaces_1.next(); !skippedNamespaces_1_1.done; skippedNamespaces_1_1 = skippedNamespaces_1.next()) {\n var skipped = skippedNamespaces_1_1.value;\n if (skipped.test(namespace)) {\n return false;\n }\n }\n }\n catch (e_3_1) { e_3 = { error: e_3_1 }; }\n finally {\n try {\n if (skippedNamespaces_1_1 && !skippedNamespaces_1_1.done && (_a = skippedNamespaces_1.return)) _a.call(skippedNamespaces_1);\n }\n finally { if (e_3) throw e_3.error; }\n }\n try {\n for (var enabledNamespaces_1 = tslib.__values(enabledNamespaces), enabledNamespaces_1_1 = enabledNamespaces_1.next(); !enabledNamespaces_1_1.done; enabledNamespaces_1_1 = enabledNamespaces_1.next()) {\n var enabledNamespace = enabledNamespaces_1_1.value;\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n }\n catch (e_4_1) { e_4 = { error: e_4_1 }; }\n finally {\n try {\n if (enabledNamespaces_1_1 && !enabledNamespaces_1_1.done && (_b = enabledNamespaces_1.return)) _b.call(enabledNamespaces_1);\n }\n finally { if (e_4) throw e_4.error; }\n }\n return false;\n}\nfunction disable() {\n var result = enabledString || \"\";\n enable(\"\");\n return result;\n}\nfunction createDebugger(namespace) {\n var newDebugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy: destroy,\n log: debugObj.log,\n namespace: namespace,\n extend: extend\n });\n function debug() {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = namespace + \" \" + args[0];\n }\n newDebugger.log.apply(newDebugger, tslib.__spread(args));\n }\n debuggers.push(newDebugger);\n return newDebugger;\n}\nfunction destroy() {\n var index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\nfunction extend(namespace) {\n var newDebugger = createDebugger(this.namespace + \":\" + namespace);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar registeredLoggers = new Set();\nvar logLevelFromEnv = (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\nvar azureLogLevel;\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nvar AzureLogger = debugObj(\"azure\");\nAzureLogger.log = function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n debugObj.log.apply(debugObj, tslib.__spread(args));\n};\nvar AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n }\n else {\n console.error(\"AZURE_LOG_LEVEL set to unknown log level '\" + logLevelFromEnv + \"'; logging is not enabled. 
Acceptable values: \" + AZURE_LOG_LEVELS.join(\", \") + \".\");\n }\n}\n/**\n * Immediately enables logging at the specified log level.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nfunction setLogLevel(level) {\n var e_1, _a;\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\"Unknown log level '\" + level + \"'. Acceptable values: \" + AZURE_LOG_LEVELS.join(\",\"));\n }\n azureLogLevel = level;\n var enabledNamespaces = [];\n try {\n for (var registeredLoggers_1 = tslib.__values(registeredLoggers), registeredLoggers_1_1 = registeredLoggers_1.next(); !registeredLoggers_1_1.done; registeredLoggers_1_1 = registeredLoggers_1.next()) {\n var logger = registeredLoggers_1_1.value;\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (registeredLoggers_1_1 && !registeredLoggers_1_1.done && (_a = registeredLoggers_1.return)) _a.call(registeredLoggers_1);\n }\n finally { if (e_1) throw e_1.error; }\n }\n debugObj.enable(enabledNamespaces.join(\",\"));\n}\n/**\n * Retrieves the currently specified log level.\n */\nfunction getLogLevel() {\n return azureLogLevel;\n}\nvar levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100\n};\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nfunction createClientLogger(namespace) {\n var clientRootLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\")\n };\n}\nfunction patchLogMethod(parent, child) {\n child.log = function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n parent.log.apply(parent, tslib.__spread(args));\n };\n}\nfunction createLogger(parent, level) {\n var logger = Object.assign(parent.extend(level), {\n level: level\n });\n patchLogMethod(parent, logger);\n if (shouldEnable(logger)) {\n var enabledNamespaces = debugObj.disable();\n debugObj.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n registeredLoggers.add(logger);\n return logger;\n}\nfunction shouldEnable(logger) {\n if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) {\n return true;\n }\n else {\n return false;\n }\n}\nfunction isAzureLogLevel(logLevel) {\n return AZURE_LOG_LEVELS.includes(logLevel);\n}\n\nexports.AzureLogger = AzureLogger;\nexports.createClientLogger = createClientLogger;\nexports.getLogLevel = getLogLevel;\nexports.setLogLevel = setLogLevel;\n//# sourceMappingURL=index.js.map\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar coreHttp = require('@azure/core-http');\nvar tslib = require('tslib');\nvar api = require('@opentelemetry/api');\nvar logger$1 = require('@azure/logger');\nvar abortController = require('@azure/abort-controller');\nvar os = require('os');\nvar stream = require('stream');\nrequire('@azure/core-paging');\nvar crypto = require('crypto');\nvar coreLro = require('@azure/core-lro');\nvar events = require('events');\nvar coreTracing = require('@azure/core-tracing');\nvar fs = require('fs');\nvar util = require('util');\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\nvar KeyInfo = {\n serializedName: \"KeyInfo\",\n type: {\n name: \"Composite\",\n className: \"KeyInfo\",\n modelProperties: {\n startsOn: {\n xmlName: \"Start\",\n required: true,\n serializedName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n xmlName: \"Expiry\",\n required: true,\n serializedName: \"Expiry\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar UserDelegationKey = {\n serializedName: \"UserDelegationKey\",\n type: {\n name: \"Composite\",\n className: \"UserDelegationKey\",\n modelProperties: {\n signedObjectId: {\n xmlName: \"SignedOid\",\n required: true,\n serializedName: \"SignedOid\",\n type: {\n name: \"String\"\n }\n },\n signedTenantId: {\n xmlName: \"SignedTid\",\n required: true,\n serializedName: \"SignedTid\",\n type: {\n name: \"String\"\n }\n },\n signedStartsOn: {\n xmlName: \"SignedStart\",\n required: true,\n serializedName: \"SignedStart\",\n type: {\n name: \"String\"\n }\n },\n signedExpiresOn: {\n xmlName: \"SignedExpiry\",\n required: true,\n serializedName: \"SignedExpiry\",\n type: {\n name: \"String\"\n }\n },\n signedService: {\n xmlName: \"SignedService\",\n required: true,\n serializedName: \"SignedService\",\n type: {\n name: \"String\"\n }\n },\n signedVersion: {\n xmlName: \"SignedVersion\",\n required: true,\n serializedName: \"SignedVersion\",\n type: {\n name: \"String\"\n }\n },\n value: {\n xmlName: \"Value\",\n required: true,\n serializedName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar StorageError = {\n serializedName: 
\"StorageError\",\n type: {\n name: \"Composite\",\n className: \"StorageError\",\n modelProperties: {\n message: {\n xmlName: \"Message\",\n serializedName: \"Message\",\n type: {\n name: \"String\"\n }\n },\n code: {\n xmlName: \"Code\",\n serializedName: \"Code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar DataLakeStorageErrorError = {\n serializedName: \"DataLakeStorageError_error\",\n type: {\n name: \"Composite\",\n className: \"DataLakeStorageErrorError\",\n modelProperties: {\n code: {\n xmlName: \"Code\",\n serializedName: \"Code\",\n type: {\n name: \"String\"\n }\n },\n message: {\n xmlName: \"Message\",\n serializedName: \"Message\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar DataLakeStorageError = {\n serializedName: \"DataLakeStorageError\",\n type: {\n name: \"Composite\",\n className: \"DataLakeStorageError\",\n modelProperties: {\n dataLakeStorageErrorDetails: {\n xmlName: \"error\",\n serializedName: \"error\",\n type: {\n name: \"Composite\",\n className: \"DataLakeStorageErrorError\"\n }\n }\n }\n }\n};\nvar AccessPolicy = {\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\",\n modelProperties: {\n startsOn: {\n xmlName: \"Start\",\n serializedName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n xmlName: \"Expiry\",\n serializedName: \"Expiry\",\n type: {\n name: \"String\"\n }\n },\n permissions: {\n xmlName: \"Permission\",\n serializedName: \"Permission\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobPropertiesInternal = {\n xmlName: \"Properties\",\n serializedName: \"BlobPropertiesInternal\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\",\n modelProperties: {\n createdOn: {\n xmlName: \"Creation-Time\",\n serializedName: \"Creation-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n lastModified: {\n xmlName: \"Last-Modified\",\n required: true,\n serializedName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n xmlName: \"Etag\",\n required: true,\n serializedName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n xmlName: \"Content-Length\",\n serializedName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n xmlName: \"Content-Type\",\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n },\n contentEncoding: {\n xmlName: \"Content-Encoding\",\n serializedName: \"Content-Encoding\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n xmlName: \"Content-Language\",\n serializedName: \"Content-Language\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n xmlName: \"Content-MD5\",\n serializedName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentDisposition: {\n xmlName: \"Content-Disposition\",\n serializedName: \"Content-Disposition\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n xmlName: \"Cache-Control\",\n serializedName: \"Cache-Control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n xmlName: \"x-ms-blob-sequence-number\",\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n xmlName: \"BlobType\",\n serializedName: \"BlobType\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"BlockBlob\",\n \"PageBlob\",\n \"AppendBlob\"\n ]\n }\n },\n leaseStatus: {\n xmlName: \"LeaseStatus\",\n serializedName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n leaseState: {\n 
xmlName: \"LeaseState\",\n serializedName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n xmlName: \"LeaseDuration\",\n serializedName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n copyId: {\n xmlName: \"CopyId\",\n serializedName: \"CopyId\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n xmlName: \"CopyStatus\",\n serializedName: \"CopyStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n copySource: {\n xmlName: \"CopySource\",\n serializedName: \"CopySource\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n xmlName: \"CopyProgress\",\n serializedName: \"CopyProgress\",\n type: {\n name: \"String\"\n }\n },\n copyCompletedOn: {\n xmlName: \"CopyCompletionTime\",\n serializedName: \"CopyCompletionTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n xmlName: \"CopyStatusDescription\",\n serializedName: \"CopyStatusDescription\",\n type: {\n name: \"String\"\n }\n },\n serverEncrypted: {\n xmlName: \"ServerEncrypted\",\n serializedName: \"ServerEncrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n incrementalCopy: {\n xmlName: \"IncrementalCopy\",\n serializedName: \"IncrementalCopy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n xmlName: \"DestinationSnapshot\",\n serializedName: \"DestinationSnapshot\",\n type: {\n name: \"String\"\n }\n },\n deletedOn: {\n xmlName: \"DeletedTime\",\n serializedName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n xmlName: \"RemainingRetentionDays\",\n serializedName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n accessTier: {\n xmlName: \"AccessTier\",\n serializedName: \"AccessTier\",\n type: {\n name: \"String\"\n }\n },\n accessTierInferred: {\n xmlName: \"AccessTierInferred\",\n serializedName: \"AccessTierInferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n xmlName: \"ArchiveStatus\",\n serializedName: \"ArchiveStatus\",\n type: {\n name: \"String\"\n }\n },\n customerProvidedKeySha256: {\n xmlName: \"CustomerProvidedKeySha256\",\n serializedName: \"CustomerProvidedKeySha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n xmlName: \"EncryptionScope\",\n serializedName: \"EncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n xmlName: \"AccessTierChangeTime\",\n serializedName: \"AccessTierChangeTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n tagCount: {\n xmlName: \"TagCount\",\n serializedName: \"TagCount\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n xmlName: \"Expiry-Time\",\n serializedName: \"Expiry-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n xmlName: \"Sealed\",\n serializedName: \"Sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n xmlName: \"RehydratePriority\",\n serializedName: \"RehydratePriority\",\n type: {\n name: \"String\"\n }\n },\n lastAccessedOn: {\n xmlName: \"LastAccessTime\",\n serializedName: \"LastAccessTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\nvar BlobTag = {\n xmlName: \"Tag\",\n serializedName: \"BlobTag\",\n type: {\n name: \"Composite\",\n className: \"BlobTag\",\n modelProperties: {\n key: {\n xmlName: \"Key\",\n required: true,\n serializedName: \"Key\",\n type: {\n 
name: \"String\"\n }\n },\n value: {\n xmlName: \"Value\",\n required: true,\n serializedName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobTags = {\n xmlName: \"Tags\",\n serializedName: \"BlobTags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\",\n modelProperties: {\n blobTagSet: {\n xmlIsWrapped: true,\n xmlName: \"TagSet\",\n xmlElementName: \"Tag\",\n required: true,\n serializedName: \"BlobTagSet\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobTag\"\n }\n }\n }\n }\n }\n }\n};\nvar BlobItemInternal = {\n xmlName: \"Blob\",\n serializedName: \"BlobItemInternal\",\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n xmlName: \"Deleted\",\n required: true,\n serializedName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n snapshot: {\n xmlName: \"Snapshot\",\n required: true,\n serializedName: \"Snapshot\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n xmlName: \"VersionId\",\n serializedName: \"VersionId\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n xmlName: \"IsCurrentVersion\",\n serializedName: \"IsCurrentVersion\",\n type: {\n name: \"Boolean\"\n }\n },\n properties: {\n xmlName: \"Properties\",\n required: true,\n serializedName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\"\n }\n },\n metadata: {\n xmlName: \"Metadata\",\n serializedName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n blobTags: {\n xmlName: \"Tags\",\n serializedName: \"BlobTags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n },\n objectReplicationMetadata: {\n xmlName: \"OrMetadata\",\n serializedName: \"ObjectReplicationMetadata\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\nvar BlobFlatListSegment = {\n xmlName: \"Blobs\",\n serializedName: \"BlobFlatListSegment\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\",\n modelProperties: {\n blobItems: {\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n required: true,\n serializedName: \"BlobItems\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\nvar ListBlobsFlatSegmentResponse = {\n xmlName: \"EnumerationResults\",\n serializedName: \"ListBlobsFlatSegmentResponse\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsFlatSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n xmlIsAttribute: true,\n xmlName: \"ServiceEndpoint\",\n required: true,\n serializedName: \"ServiceEndpoint\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n xmlIsAttribute: true,\n xmlName: \"ContainerName\",\n required: true,\n serializedName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n prefix: {\n xmlName: \"Prefix\",\n serializedName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n xmlName: \"Marker\",\n serializedName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n xmlName: \"MaxResults\",\n serializedName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n segment: {\n xmlName: \"Blobs\",\n required: true,\n serializedName: \"Segment\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\"\n }\n },\n 
continuationToken: {\n xmlName: \"NextMarker\",\n serializedName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobPrefix = {\n serializedName: \"BlobPrefix\",\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobHierarchyListSegment = {\n xmlName: \"Blobs\",\n serializedName: \"BlobHierarchyListSegment\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\",\n modelProperties: {\n blobPrefixes: {\n xmlName: \"BlobPrefixes\",\n xmlElementName: \"BlobPrefix\",\n serializedName: \"BlobPrefixes\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\"\n }\n }\n }\n },\n blobItems: {\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n required: true,\n serializedName: \"BlobItems\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\nvar ListBlobsHierarchySegmentResponse = {\n xmlName: \"EnumerationResults\",\n serializedName: \"ListBlobsHierarchySegmentResponse\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsHierarchySegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n xmlIsAttribute: true,\n xmlName: \"ServiceEndpoint\",\n required: true,\n serializedName: \"ServiceEndpoint\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n xmlIsAttribute: true,\n xmlName: \"ContainerName\",\n required: true,\n serializedName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n prefix: {\n xmlName: \"Prefix\",\n serializedName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n xmlName: \"Marker\",\n serializedName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n xmlName: \"MaxResults\",\n serializedName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n delimiter: {\n xmlName: \"Delimiter\",\n serializedName: \"Delimiter\",\n type: {\n name: \"String\"\n }\n },\n segment: {\n xmlName: \"Blobs\",\n required: true,\n serializedName: \"Segment\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\"\n }\n },\n continuationToken: {\n xmlName: \"NextMarker\",\n serializedName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar Block = {\n serializedName: \"Block\",\n type: {\n name: \"Composite\",\n className: \"Block\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n size: {\n xmlName: \"Size\",\n required: true,\n serializedName: \"Size\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar BlockList = {\n serializedName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockList\",\n modelProperties: {\n committedBlocks: {\n xmlIsWrapped: true,\n xmlName: \"CommittedBlocks\",\n xmlElementName: \"Block\",\n serializedName: \"CommittedBlocks\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n },\n uncommittedBlocks: {\n xmlIsWrapped: true,\n xmlName: \"UncommittedBlocks\",\n xmlElementName: \"Block\",\n serializedName: \"UncommittedBlocks\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n }\n }\n }\n};\nvar BlockLookupList = {\n xmlName: \"BlockList\",\n serializedName: \"BlockLookupList\",\n type: {\n name: 
\"Composite\",\n className: \"BlockLookupList\",\n modelProperties: {\n committed: {\n xmlName: \"Committed\",\n xmlElementName: \"Committed\",\n serializedName: \"Committed\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n uncommitted: {\n xmlName: \"Uncommitted\",\n xmlElementName: \"Uncommitted\",\n serializedName: \"Uncommitted\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n latest: {\n xmlName: \"Latest\",\n xmlElementName: \"Latest\",\n serializedName: \"Latest\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\nvar ContainerProperties = {\n serializedName: \"ContainerProperties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\",\n modelProperties: {\n lastModified: {\n xmlName: \"Last-Modified\",\n required: true,\n serializedName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n xmlName: \"Etag\",\n required: true,\n serializedName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n leaseStatus: {\n xmlName: \"LeaseStatus\",\n serializedName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n leaseState: {\n xmlName: \"LeaseState\",\n serializedName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n xmlName: \"LeaseDuration\",\n serializedName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n publicAccess: {\n xmlName: \"PublicAccess\",\n serializedName: \"PublicAccess\",\n type: {\n name: \"String\"\n }\n },\n hasImmutabilityPolicy: {\n xmlName: \"HasImmutabilityPolicy\",\n serializedName: \"HasImmutabilityPolicy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n xmlName: \"HasLegalHold\",\n serializedName: \"HasLegalHold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n xmlName: \"DefaultEncryptionScope\",\n serializedName: \"DefaultEncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n preventEncryptionScopeOverride: {\n xmlName: \"DenyEncryptionScopeOverride\",\n serializedName: \"DenyEncryptionScopeOverride\",\n type: {\n name: \"Boolean\"\n }\n },\n deletedOn: {\n xmlName: \"DeletedTime\",\n serializedName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n xmlName: \"RemainingRetentionDays\",\n serializedName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar ContainerItem = {\n xmlName: \"Container\",\n serializedName: \"ContainerItem\",\n type: {\n name: \"Composite\",\n className: \"ContainerItem\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n xmlName: \"Deleted\",\n serializedName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n version: {\n xmlName: \"Version\",\n serializedName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n properties: {\n xmlName: \"Properties\",\n required: true,\n serializedName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\"\n }\n },\n metadata: {\n xmlName: \"Metadata\",\n serializedName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\nvar DelimitedTextConfiguration = {\n 
serializedName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\",\n modelProperties: {\n columnSeparator: {\n xmlName: \"ColumnSeparator\",\n required: true,\n serializedName: \"ColumnSeparator\",\n type: {\n name: \"String\"\n }\n },\n fieldQuote: {\n xmlName: \"FieldQuote\",\n required: true,\n serializedName: \"FieldQuote\",\n type: {\n name: \"String\"\n }\n },\n recordSeparator: {\n xmlName: \"RecordSeparator\",\n required: true,\n serializedName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n },\n escapeChar: {\n xmlName: \"EscapeChar\",\n required: true,\n serializedName: \"EscapeChar\",\n type: {\n name: \"String\"\n }\n },\n headersPresent: {\n xmlName: \"HasHeaders\",\n required: true,\n serializedName: \"HeadersPresent\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\nvar JsonTextConfiguration = {\n serializedName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\",\n modelProperties: {\n recordSeparator: {\n xmlName: \"RecordSeparator\",\n required: true,\n serializedName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ArrowField = {\n xmlName: \"Field\",\n serializedName: \"ArrowField\",\n type: {\n name: \"Composite\",\n className: \"ArrowField\",\n modelProperties: {\n type: {\n xmlName: \"Type\",\n required: true,\n serializedName: \"Type\",\n type: {\n name: \"String\"\n }\n },\n name: {\n xmlName: \"Name\",\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n precision: {\n xmlName: \"Precision\",\n serializedName: \"Precision\",\n type: {\n name: \"Number\"\n }\n },\n scale: {\n xmlName: \"Scale\",\n serializedName: \"Scale\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar ArrowConfiguration = {\n serializedName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\",\n modelProperties: {\n schema: {\n xmlIsWrapped: true,\n xmlName: \"Schema\",\n xmlElementName: \"Field\",\n required: true,\n serializedName: \"Schema\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ArrowField\"\n }\n }\n }\n }\n }\n }\n};\nvar ListContainersSegmentResponse = {\n xmlName: \"EnumerationResults\",\n serializedName: \"ListContainersSegmentResponse\",\n type: {\n name: \"Composite\",\n className: \"ListContainersSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n xmlIsAttribute: true,\n xmlName: \"ServiceEndpoint\",\n required: true,\n serializedName: \"ServiceEndpoint\",\n type: {\n name: \"String\"\n }\n },\n prefix: {\n xmlName: \"Prefix\",\n serializedName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n xmlName: \"Marker\",\n serializedName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n xmlName: \"MaxResults\",\n serializedName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n containerItems: {\n xmlIsWrapped: true,\n xmlName: \"Containers\",\n xmlElementName: \"Container\",\n required: true,\n serializedName: \"ContainerItems\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ContainerItem\"\n }\n }\n }\n },\n continuationToken: {\n xmlName: \"NextMarker\",\n serializedName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar CorsRule = {\n serializedName: \"CorsRule\",\n type: {\n name: \"Composite\",\n className: \"CorsRule\",\n modelProperties: {\n allowedOrigins: {\n xmlName: \"AllowedOrigins\",\n required: 
true,\n serializedName: \"AllowedOrigins\",\n type: {\n name: \"String\"\n }\n },\n allowedMethods: {\n xmlName: \"AllowedMethods\",\n required: true,\n serializedName: \"AllowedMethods\",\n type: {\n name: \"String\"\n }\n },\n allowedHeaders: {\n xmlName: \"AllowedHeaders\",\n required: true,\n serializedName: \"AllowedHeaders\",\n type: {\n name: \"String\"\n }\n },\n exposedHeaders: {\n xmlName: \"ExposedHeaders\",\n required: true,\n serializedName: \"ExposedHeaders\",\n type: {\n name: \"String\"\n }\n },\n maxAgeInSeconds: {\n xmlName: \"MaxAgeInSeconds\",\n required: true,\n serializedName: \"MaxAgeInSeconds\",\n constraints: {\n InclusiveMinimum: 0\n },\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar FilterBlobItem = {\n xmlName: \"Blob\",\n serializedName: \"FilterBlobItem\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\",\n modelProperties: {\n name: {\n xmlName: \"Name\",\n required: true,\n serializedName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n xmlName: \"ContainerName\",\n required: true,\n serializedName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n tags: {\n xmlName: \"Tags\",\n serializedName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n }\n }\n }\n};\nvar FilterBlobSegment = {\n xmlName: \"EnumerationResults\",\n serializedName: \"FilterBlobSegment\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobSegment\",\n modelProperties: {\n serviceEndpoint: {\n xmlIsAttribute: true,\n xmlName: \"ServiceEndpoint\",\n required: true,\n serializedName: \"ServiceEndpoint\",\n type: {\n name: \"String\"\n }\n },\n where: {\n xmlName: \"Where\",\n required: true,\n serializedName: \"Where\",\n type: {\n name: \"String\"\n }\n },\n blobs: {\n xmlIsWrapped: true,\n xmlName: \"Blobs\",\n xmlElementName: \"Blob\",\n required: true,\n serializedName: \"Blobs\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\"\n }\n }\n }\n },\n continuationToken: {\n xmlName: \"NextMarker\",\n serializedName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar GeoReplication = {\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\",\n modelProperties: {\n status: {\n xmlName: \"Status\",\n required: true,\n serializedName: \"Status\",\n type: {\n name: \"String\"\n }\n },\n lastSyncOn: {\n xmlName: \"LastSyncTime\",\n required: true,\n serializedName: \"LastSyncTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\nvar RetentionPolicy = {\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\",\n modelProperties: {\n enabled: {\n xmlName: \"Enabled\",\n required: true,\n serializedName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n days: {\n xmlName: \"Days\",\n serializedName: \"Days\",\n constraints: {\n InclusiveMinimum: 1\n },\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar Logging = {\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\",\n modelProperties: {\n version: {\n xmlName: \"Version\",\n required: true,\n serializedName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n deleteProperty: {\n xmlName: \"Delete\",\n required: true,\n serializedName: \"Delete\",\n type: {\n name: \"Boolean\"\n }\n },\n read: {\n xmlName: \"Read\",\n required: true,\n serializedName: \"Read\",\n type: {\n name: \"Boolean\"\n }\n },\n write: {\n xmlName: \"Write\",\n 
required: true,\n serializedName: \"Write\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n xmlName: \"RetentionPolicy\",\n required: true,\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\nvar Metrics = {\n serializedName: \"Metrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\",\n modelProperties: {\n version: {\n xmlName: \"Version\",\n serializedName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n enabled: {\n xmlName: \"Enabled\",\n required: true,\n serializedName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n includeAPIs: {\n xmlName: \"IncludeAPIs\",\n serializedName: \"IncludeAPIs\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n xmlName: \"RetentionPolicy\",\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\nvar PageRange = {\n serializedName: \"PageRange\",\n type: {\n name: \"Composite\",\n className: \"PageRange\",\n modelProperties: {\n start: {\n xmlName: \"Start\",\n required: true,\n serializedName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n xmlName: \"End\",\n required: true,\n serializedName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar ClearRange = {\n serializedName: \"ClearRange\",\n type: {\n name: \"Composite\",\n className: \"ClearRange\",\n modelProperties: {\n start: {\n xmlName: \"Start\",\n required: true,\n serializedName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n xmlName: \"End\",\n required: true,\n serializedName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\nvar PageList = {\n serializedName: \"PageList\",\n type: {\n name: \"Composite\",\n className: \"PageList\",\n modelProperties: {\n pageRange: {\n xmlName: \"PageRange\",\n xmlElementName: \"PageRange\",\n serializedName: \"PageRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"PageRange\"\n }\n }\n }\n },\n clearRange: {\n xmlName: \"ClearRange\",\n xmlElementName: \"ClearRange\",\n serializedName: \"ClearRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ClearRange\"\n }\n }\n }\n }\n }\n }\n};\nvar QueryFormat = {\n serializedName: \"QueryFormat\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\",\n modelProperties: {\n type: {\n xmlName: \"Type\",\n serializedName: \"Type\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"delimited\",\n \"json\",\n \"arrow\"\n ]\n }\n },\n delimitedTextConfiguration: {\n xmlName: \"DelimitedTextConfiguration\",\n serializedName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\"\n }\n },\n jsonTextConfiguration: {\n xmlName: \"JsonTextConfiguration\",\n serializedName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\"\n }\n },\n arrowConfiguration: {\n xmlName: \"ArrowConfiguration\",\n serializedName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\"\n }\n }\n }\n }\n};\nvar QuerySerialization = {\n serializedName: \"QuerySerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\",\n modelProperties: {\n format: {\n xmlName: \"Format\",\n required: true,\n serializedName: \"Format\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\"\n }\n }\n }\n }\n};\nvar QueryRequest = {\n serializedName: 
\"QueryRequest\",\n type: {\n name: \"Composite\",\n className: \"QueryRequest\",\n modelProperties: {\n queryType: {\n xmlName: \"QueryType\",\n required: true,\n isConstant: true,\n serializedName: \"QueryType\",\n defaultValue: 'SQL',\n type: {\n name: \"String\"\n }\n },\n expression: {\n xmlName: \"Expression\",\n required: true,\n serializedName: \"Expression\",\n type: {\n name: \"String\"\n }\n },\n inputSerialization: {\n xmlName: \"InputSerialization\",\n serializedName: \"InputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n },\n outputSerialization: {\n xmlName: \"OutputSerialization\",\n serializedName: \"OutputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n }\n }\n }\n};\nvar SignedIdentifier = {\n serializedName: \"SignedIdentifier\",\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\",\n modelProperties: {\n id: {\n xmlName: \"Id\",\n required: true,\n serializedName: \"Id\",\n type: {\n name: \"String\"\n }\n },\n accessPolicy: {\n xmlName: \"AccessPolicy\",\n required: true,\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\"\n }\n }\n }\n }\n};\nvar StaticWebsite = {\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\",\n modelProperties: {\n enabled: {\n xmlName: \"Enabled\",\n required: true,\n serializedName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n indexDocument: {\n xmlName: \"IndexDocument\",\n serializedName: \"IndexDocument\",\n type: {\n name: \"String\"\n }\n },\n errorDocument404Path: {\n xmlName: \"ErrorDocument404Path\",\n serializedName: \"ErrorDocument404Path\",\n type: {\n name: \"String\"\n }\n },\n defaultIndexDocumentPath: {\n xmlName: \"DefaultIndexDocumentPath\",\n serializedName: \"DefaultIndexDocumentPath\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobServiceProperties = {\n xmlName: \"StorageServiceProperties\",\n serializedName: \"BlobServiceProperties\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceProperties\",\n modelProperties: {\n blobAnalyticsLogging: {\n xmlName: \"Logging\",\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\"\n }\n },\n hourMetrics: {\n xmlName: \"HourMetrics\",\n serializedName: \"HourMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n minuteMetrics: {\n xmlName: \"MinuteMetrics\",\n serializedName: \"MinuteMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n cors: {\n xmlIsWrapped: true,\n xmlName: \"Cors\",\n xmlElementName: \"CorsRule\",\n serializedName: \"Cors\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"CorsRule\"\n }\n }\n }\n },\n defaultServiceVersion: {\n xmlName: \"DefaultServiceVersion\",\n serializedName: \"DefaultServiceVersion\",\n type: {\n name: \"String\"\n }\n },\n deleteRetentionPolicy: {\n xmlName: \"DeleteRetentionPolicy\",\n serializedName: \"DeleteRetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n },\n staticWebsite: {\n xmlName: \"StaticWebsite\",\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\"\n }\n }\n }\n }\n};\nvar BlobServiceStatistics = {\n xmlName: \"StorageServiceStats\",\n serializedName: \"BlobServiceStatistics\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceStatistics\",\n modelProperties: {\n 
geoReplication: {\n xmlName: \"GeoReplication\",\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\"\n }\n }\n }\n }\n};\nvar ServiceSetPropertiesHeaders = {\n serializedName: \"service-setproperties-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceGetPropertiesHeaders = {\n serializedName: \"service-getproperties-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceGetStatisticsHeaders = {\n serializedName: \"service-getstatistics-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceListContainersSegmentHeaders = {\n serializedName: \"service-listcontainerssegment-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceGetUserDelegationKeyHeaders = {\n serializedName: \"service-getuserdelegationkey-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceGetAccountInfoHeaders = {\n serializedName: \"service-getaccountinfo-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: 
\"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n isHierarchicalNamespaceEnabled: {\n serializedName: \"x-ms-is-hns-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceSubmitBatchHeaders = {\n serializedName: \"service-submitbatch-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ServiceFilterBlobsHeaders = {\n serializedName: \"service-filterblobs-headers\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerCreateHeaders = {\n serializedName: \"container-create-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerGetPropertiesHeaders = {\n serializedName: \"container-getproperties-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesHeaders\",\n modelProperties: {\n metadata: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n 
serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n type: {\n name: \"String\"\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"x-ms-has-immutability-policy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"x-ms-has-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n denyEncryptionScopeOverride: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerDeleteHeaders = {\n serializedName: \"container-delete-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerSetMetadataHeaders = {\n serializedName: \"container-setmetadata-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerGetAccessPolicyHeaders = {\n serializedName: \"container-getaccesspolicy-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyHeaders\",\n modelProperties: {\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: 
{\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerSetAccessPolicyHeaders = {\n serializedName: \"container-setaccesspolicy-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerRestoreHeaders = {\n serializedName: \"container-restore-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerAcquireLeaseHeaders = {\n serializedName: \"container-acquirelease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerReleaseLeaseHeaders = {\n serializedName: \"container-releaselease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: 
\"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerRenewLeaseHeaders = {\n serializedName: \"container-renewlease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerBreakLeaseHeaders = {\n serializedName: \"container-breaklease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerChangeLeaseHeaders = {\n serializedName: \"container-changelease-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerListBlobFlatSegmentHeaders = {\n serializedName: \"container-listblobflatsegment-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n 
serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerListBlobHierarchySegmentHeaders = {\n serializedName: \"container-listblobhierarchysegment-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar ContainerGetAccountInfoHeaders = {\n serializedName: \"container-getaccountinfo-headers\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobDownloadHeaders = {\n serializedName: \"blob-download-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: 
{\n serializedName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"BlockBlob\",\n \"PageBlob\",\n \"AppendBlob\"\n ]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobGetPropertiesHeaders = {\n serializedName: \"blob-getproperties-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: 
\"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"BlockBlob\",\n \"PageBlob\",\n \"AppendBlob\"\n ]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n isIncrementalCopy: {\n serializedName: \"x-ms-incremental-copy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"x-ms-copy-destination-snapshot\",\n type: {\n name: \"String\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n 
},\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n accessTier: {\n serializedName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n },\n accessTierInferred: {\n serializedName: \"x-ms-access-tier-inferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"x-ms-archive-status\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"x-ms-access-tier-change-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"x-ms-expiry-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"String\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobDeleteHeaders = {\n serializedName: \"blob-delete-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetAccessControlHeaders = {\n serializedName: \"blob-setaccesscontrol-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetAccessControlHeaders\",\n modelProperties: {\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobGetAccessControlHeaders = {\n serializedName: \"blob-getaccesscontrol-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccessControlHeaders\",\n modelProperties: {\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsOwner: {\n serializedName: \"x-ms-owner\",\n type: {\n name: \"String\"\n }\n },\n xMsGroup: {\n serializedName: \"x-ms-group\",\n type: {\n name: 
\"String\"\n }\n },\n xMsPermissions: {\n serializedName: \"x-ms-permissions\",\n type: {\n name: \"String\"\n }\n },\n xMsAcl: {\n serializedName: \"x-ms-acl\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobRenameHeaders = {\n serializedName: \"blob-rename-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobRenameHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\nvar PageBlobCreateHeaders = {\n serializedName: \"pageblob-create-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar AppendBlobCreateHeaders = {\n serializedName: \"appendblob-create-headers\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n 
},\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobUploadHeaders = {\n serializedName: \"blockblob-upload-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobPutBlobFromUrlHeaders = {\n serializedName: \"blockblob-putblobfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobUndeleteHeaders = {\n serializedName: \"blob-undelete-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: 
\"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetExpiryHeaders = {\n serializedName: \"blob-setexpiry-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetHTTPHeadersHeaders = {\n serializedName: \"blob-sethttpheaders-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHTTPHeadersHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetMetadataHeaders = {\n serializedName: \"blob-setmetadata-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobAcquireLeaseHeaders = {\n serializedName: \"blob-acquirelease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: 
\"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobReleaseLeaseHeaders = {\n serializedName: \"blob-releaselease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobRenewLeaseHeaders = {\n serializedName: \"blob-renewlease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobChangeLeaseHeaders = {\n serializedName: \"blob-changelease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobBreakLeaseHeaders = {\n serializedName: \"blob-breaklease-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n 
serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobCreateSnapshotHeaders = {\n serializedName: \"blob-createsnapshot-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotHeaders\",\n modelProperties: {\n snapshot: {\n serializedName: \"x-ms-snapshot\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobStartCopyFromURLHeaders = {\n serializedName: \"blob-startcopyfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobCopyFromURLHeaders = {\n serializedName: \"blob-copyfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: 
\"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"success\"\n ]\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobAbortCopyFromURLHeaders = {\n serializedName: \"blob-abortcopyfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetTierHeaders = {\n serializedName: \"blob-settier-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobGetAccountInfoHeaders = {\n serializedName: \"blob-getaccountinfo-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobStageBlockHeaders = {\n serializedName: \"blockblob-stageblock-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: 
\"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobStageBlockFromURLHeaders = {\n serializedName: \"blockblob-stageblockfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobCommitBlockListHeaders = {\n serializedName: \"blockblob-commitblocklist-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlockBlobGetBlockListHeaders = {\n serializedName: \"blockblob-getblocklist-headers\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListHeaders\",\n modelProperties: {\n 
lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobUploadPagesHeaders = {\n serializedName: \"pageblob-uploadpages-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobClearPagesHeaders = {\n serializedName: \"pageblob-clearpages-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobUploadPagesFromURLHeaders = {\n serializedName: 
\"pageblob-uploadpagesfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobGetPageRangesHeaders = {\n serializedName: \"pageblob-getpageranges-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobGetPageRangesDiffHeaders = {\n serializedName: \"pageblob-getpagerangesdiff-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobResizeHeaders = {\n serializedName: \"pageblob-resize-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n 
type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobUpdateSequenceNumberHeaders = {\n serializedName: \"pageblob-updatesequencenumber-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar PageBlobCopyIncrementalHeaders = {\n serializedName: \"pageblob-copyincremental-headers\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar AppendBlobAppendBlockHeaders = {\n serializedName: \"appendblob-appendblock-headers\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: 
\"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar AppendBlobAppendBlockFromUrlHeaders = {\n serializedName: \"appendblob-appendblockfromurl-headers\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar AppendBlobSealHeaders = {\n serializedName: \"appendblob-seal-headers\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobQueryHeaders = {\n serializedName: \"blob-query-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n 
type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"BlockBlob\",\n \"PageBlob\",\n \"AppendBlob\"\n ]\n }\n },\n copyCompletionTime: {\n serializedName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"pending\",\n \"success\",\n \"aborted\",\n \"failed\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"infinite\",\n \"fixed\"\n ]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"locked\",\n \"unlocked\"\n ]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n 
type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobGetTagsHeaders = {\n serializedName: \"blob-gettags-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\nvar BlobSetTagsHeaders = {\n serializedName: \"blob-settags-headers\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers = /*#__PURE__*/Object.freeze({\n __proto__: null,\n BlobServiceProperties: BlobServiceProperties,\n BlobServiceStatistics: BlobServiceStatistics,\n BlobTag: BlobTag,\n BlobTags: BlobTags,\n ContainerItem: ContainerItem,\n ContainerProperties: ContainerProperties,\n CorsRule: CorsRule,\n FilterBlobItem: FilterBlobItem,\n FilterBlobSegment: FilterBlobSegment,\n GeoReplication: GeoReplication,\n KeyInfo: KeyInfo,\n ListContainersSegmentResponse: ListContainersSegmentResponse,\n Logging: Logging,\n Metrics: Metrics,\n RetentionPolicy: RetentionPolicy,\n ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders,\n ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders,\n ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders,\n ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders,\n ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders,\n ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders,\n ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders,\n ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders,\n StaticWebsite: StaticWebsite,\n StorageError: StorageError,\n UserDelegationKey: UserDelegationKey\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\nvar access = {\n parameterPath: [\n \"options\",\n \"access\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-public-access\",\n type: {\n name: \"String\"\n }\n }\n};\nvar action0 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'acquire',\n type: {\n name: \"String\"\n }\n }\n};\nvar action1 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'release',\n type: {\n name: \"String\"\n }\n }\n};\nvar action2 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'renew',\n type: {\n name: \"String\"\n }\n }\n};\nvar action3 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'break',\n type: {\n name: \"String\"\n }\n }\n};\nvar action4 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n defaultValue: 'change',\n type: {\n name: \"String\"\n }\n }\n};\nvar action5 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"action\",\n defaultValue: 'setAccessControl',\n type: {\n name: \"String\"\n }\n }\n};\nvar action6 = {\n parameterPath: \"action\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"action\",\n defaultValue: 'getAccessControl',\n type: {\n name: \"String\"\n }\n }\n};\nvar appendPosition = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n \"appendPosition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-appendpos\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar blobCacheControl = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobCacheControl\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobContentDisposition = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentDisposition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobContentEncoding = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentEncoding\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobContentLanguage = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentLanguage\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobContentLength = {\n parameterPath: \"blobContentLength\",\n mapper: {\n required: true,\n serializedName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar blobContentMD5 = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentMD5\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar blobContentType = {\n parameterPath: [\n \"options\",\n \"blobHTTPHeaders\",\n \"blobContentType\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobDeleteType = {\n 
parameterPath: [\n \"options\",\n \"blobDeleteType\"\n ],\n mapper: {\n serializedName: \"deletetype\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Permanent\"\n ]\n }\n }\n};\nvar blobSequenceNumber = {\n parameterPath: [\n \"options\",\n \"blobSequenceNumber\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-sequence-number\",\n defaultValue: 0,\n type: {\n name: \"Number\"\n }\n }\n};\nvar blobTagsString = {\n parameterPath: [\n \"options\",\n \"blobTagsString\"\n ],\n mapper: {\n serializedName: \"x-ms-tags\",\n type: {\n name: \"String\"\n }\n }\n};\nvar blobType0 = {\n parameterPath: \"blobType\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n defaultValue: 'PageBlob',\n type: {\n name: \"String\"\n }\n }\n};\nvar blobType1 = {\n parameterPath: \"blobType\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n defaultValue: 'AppendBlob',\n type: {\n name: \"String\"\n }\n }\n};\nvar blobType2 = {\n parameterPath: \"blobType\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n defaultValue: 'BlockBlob',\n type: {\n name: \"String\"\n }\n }\n};\nvar blockId = {\n parameterPath: \"blockId\",\n mapper: {\n required: true,\n serializedName: \"blockid\",\n type: {\n name: \"String\"\n }\n }\n};\nvar breakPeriod = {\n parameterPath: [\n \"options\",\n \"breakPeriod\"\n ],\n mapper: {\n serializedName: \"x-ms-lease-break-period\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar cacheControl = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"cacheControl\"\n ],\n mapper: {\n serializedName: \"x-ms-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\nvar comp0 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'properties',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp1 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'stats',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp10 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'expiry',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp11 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'snapshot',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp12 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'copy',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp13 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'tier',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp14 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'query',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp15 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'tags',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp16 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'page',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp17 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 
'pagelist',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp18 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'incrementalcopy',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp19 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'appendblock',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp2 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'list',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp20 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'seal',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp21 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'block',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp22 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'blocklist',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp3 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'userdelegationkey',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp4 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'batch',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp5 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'blobs',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp6 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'metadata',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp7 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'acl',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp8 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'undelete',\n type: {\n name: \"String\"\n }\n }\n};\nvar comp9 = {\n parameterPath: \"comp\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"comp\",\n defaultValue: 'lease',\n type: {\n name: \"String\"\n }\n }\n};\nvar contentDisposition = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"contentDisposition\"\n ],\n mapper: {\n serializedName: \"x-ms-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\nvar contentEncoding = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"contentEncoding\"\n ],\n mapper: {\n serializedName: \"x-ms-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\nvar contentLanguage = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"contentLanguage\"\n ],\n mapper: {\n serializedName: \"x-ms-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\nvar contentLength = {\n parameterPath: \"contentLength\",\n mapper: {\n required: true,\n serializedName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar contentType = {\n parameterPath: [\n \"options\",\n \"directoryHttpHeaders\",\n \"contentType\"\n ],\n mapper: {\n serializedName: \"x-ms-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\nvar copyActionAbortConstant = 
{\n parameterPath: \"copyActionAbortConstant\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-copy-action\",\n defaultValue: 'abort',\n type: {\n name: \"String\"\n }\n }\n};\nvar copyId = {\n parameterPath: \"copyId\",\n mapper: {\n required: true,\n serializedName: \"copyid\",\n type: {\n name: \"String\"\n }\n }\n};\nvar copySource = {\n parameterPath: \"copySource\",\n mapper: {\n required: true,\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\nvar copySourceBlobProperties = {\n parameterPath: [\n \"options\",\n \"copySourceBlobProperties\"\n ],\n mapper: {\n serializedName: \"x-ms-copy-source-blob-properties\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar defaultEncryptionScope = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"defaultEncryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\nvar deletedContainerName = {\n parameterPath: [\n \"options\",\n \"deletedContainerName\"\n ],\n mapper: {\n serializedName: \"x-ms-deleted-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\nvar deletedContainerVersion = {\n parameterPath: [\n \"options\",\n \"deletedContainerVersion\"\n ],\n mapper: {\n serializedName: \"x-ms-deleted-container-version\",\n type: {\n name: \"String\"\n }\n }\n};\nvar deleteSnapshots = {\n parameterPath: [\n \"options\",\n \"deleteSnapshots\"\n ],\n mapper: {\n serializedName: \"x-ms-delete-snapshots\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"include\",\n \"only\"\n ]\n }\n }\n};\nvar delimiter = {\n parameterPath: \"delimiter\",\n mapper: {\n required: true,\n serializedName: \"delimiter\",\n type: {\n name: \"String\"\n }\n }\n};\nvar directoryProperties = {\n parameterPath: [\n \"options\",\n \"directoryProperties\"\n ],\n mapper: {\n serializedName: \"x-ms-properties\",\n type: {\n name: \"String\"\n }\n }\n};\nvar duration = {\n parameterPath: [\n \"options\",\n \"duration\"\n ],\n mapper: {\n serializedName: \"x-ms-lease-duration\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar encryptionAlgorithm = {\n parameterPath: [\n \"options\",\n \"cpkInfo\",\n \"encryptionAlgorithm\"\n ],\n mapper: {\n serializedName: \"x-ms-encryption-algorithm\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"AES256\"\n ]\n }\n }\n};\nvar encryptionKey = {\n parameterPath: [\n \"options\",\n \"cpkInfo\",\n \"encryptionKey\"\n ],\n mapper: {\n serializedName: \"x-ms-encryption-key\",\n type: {\n name: \"String\"\n }\n }\n};\nvar encryptionKeySha256 = {\n parameterPath: [\n \"options\",\n \"cpkInfo\",\n \"encryptionKeySha256\"\n ],\n mapper: {\n serializedName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n }\n};\nvar encryptionScope = {\n parameterPath: [\n \"options\",\n \"encryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\nvar expiresOn = {\n parameterPath: [\n \"options\",\n \"expiresOn\"\n ],\n mapper: {\n serializedName: \"x-ms-expiry-time\",\n type: {\n name: \"String\"\n }\n }\n};\nvar expiryOptions = {\n parameterPath: \"expiryOptions\",\n mapper: {\n required: true,\n serializedName: \"x-ms-expiry-option\",\n type: {\n name: \"String\"\n }\n }\n};\nvar group = {\n parameterPath: [\n \"options\",\n \"group\"\n ],\n mapper: {\n serializedName: \"x-ms-group\",\n type: {\n name: \"String\"\n }\n }\n};\nvar ifMatch = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifMatch\"\n ],\n mapper: 
{\n serializedName: \"If-Match\",\n type: {\n name: \"String\"\n }\n }\n};\nvar ifModifiedSince = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifModifiedSince\"\n ],\n mapper: {\n serializedName: \"If-Modified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\nvar ifNoneMatch = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifNoneMatch\"\n ],\n mapper: {\n serializedName: \"If-None-Match\",\n type: {\n name: \"String\"\n }\n }\n};\nvar ifSequenceNumberEqualTo = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-eq\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar ifSequenceNumberLessThan = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThan\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-lt\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar ifSequenceNumberLessThanOrEqualTo = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThanOrEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-le\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar ifTags = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifTags\"\n ],\n mapper: {\n serializedName: \"x-ms-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\nvar ifUnmodifiedSince = {\n parameterPath: [\n \"options\",\n \"modifiedAccessConditions\",\n \"ifUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"If-Unmodified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\nvar include0 = {\n parameterPath: [\n \"options\",\n \"include\"\n ],\n mapper: {\n serializedName: \"include\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"metadata\",\n \"deleted\"\n ]\n }\n }\n }\n },\n collectionFormat: coreHttp.QueryCollectionFormat.Csv\n};\nvar include1 = {\n parameterPath: [\n \"options\",\n \"include\"\n ],\n mapper: {\n serializedName: \"include\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"copy\",\n \"deleted\",\n \"metadata\",\n \"snapshots\",\n \"uncommittedblobs\",\n \"versions\",\n \"tags\"\n ]\n }\n }\n }\n },\n collectionFormat: coreHttp.QueryCollectionFormat.Csv\n};\nvar leaseId0 = {\n parameterPath: [\n \"options\",\n \"leaseAccessConditions\",\n \"leaseId\"\n ],\n mapper: {\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar leaseId1 = {\n parameterPath: \"leaseId\",\n mapper: {\n required: true,\n serializedName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar listType = {\n parameterPath: \"listType\",\n mapper: {\n required: true,\n serializedName: \"blocklisttype\",\n defaultValue: 'committed',\n type: {\n name: \"Enum\",\n allowedValues: [\n \"committed\",\n \"uncommitted\",\n \"all\"\n ]\n }\n }\n};\nvar marker0 = {\n parameterPath: [\n \"options\",\n \"marker\"\n ],\n mapper: {\n serializedName: \"marker\",\n type: {\n name: \"String\"\n }\n }\n};\nvar maxPageSize = {\n parameterPath: [\n \"options\",\n \"maxPageSize\"\n ],\n mapper: {\n serializedName: \"maxresults\",\n constraints: {\n InclusiveMinimum: 1\n },\n type: {\n name: \"Number\"\n }\n }\n};\nvar maxSize = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n \"maxSize\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-maxsize\",\n type: {\n name: \"Number\"\n }\n }\n};\nvar 
metadata = {\n parameterPath: [\n \"options\",\n \"metadata\"\n ],\n mapper: {\n serializedName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: {\n type: {\n name: \"String\"\n }\n }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n }\n};\nvar multipartContentType = {\n parameterPath: \"multipartContentType\",\n mapper: {\n required: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\nvar owner = {\n parameterPath: [\n \"options\",\n \"owner\"\n ],\n mapper: {\n serializedName: \"x-ms-owner\",\n type: {\n name: \"String\"\n }\n }\n};\nvar pageWrite0 = {\n parameterPath: \"pageWrite\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n defaultValue: 'update',\n type: {\n name: \"String\"\n }\n }\n};\nvar pageWrite1 = {\n parameterPath: \"pageWrite\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n defaultValue: 'clear',\n type: {\n name: \"String\"\n }\n }\n};\nvar pathRenameMode = {\n parameterPath: [\n \"options\",\n \"pathRenameMode\"\n ],\n mapper: {\n serializedName: \"mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"legacy\",\n \"posix\"\n ]\n }\n }\n};\nvar posixAcl = {\n parameterPath: [\n \"options\",\n \"posixAcl\"\n ],\n mapper: {\n serializedName: \"x-ms-acl\",\n type: {\n name: \"String\"\n }\n }\n};\nvar posixPermissions = {\n parameterPath: [\n \"options\",\n \"posixPermissions\"\n ],\n mapper: {\n serializedName: \"x-ms-permissions\",\n type: {\n name: \"String\"\n }\n }\n};\nvar posixUmask = {\n parameterPath: [\n \"options\",\n \"posixUmask\"\n ],\n mapper: {\n serializedName: \"x-ms-umask\",\n type: {\n name: \"String\"\n }\n }\n};\nvar prefix = {\n parameterPath: [\n \"options\",\n \"prefix\"\n ],\n mapper: {\n serializedName: \"prefix\",\n type: {\n name: \"String\"\n }\n }\n};\nvar preventEncryptionScopeOverride = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"preventEncryptionScopeOverride\"\n ],\n mapper: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar prevsnapshot = {\n parameterPath: [\n \"options\",\n \"prevsnapshot\"\n ],\n mapper: {\n serializedName: \"prevsnapshot\",\n type: {\n name: \"String\"\n }\n }\n};\nvar prevSnapshotUrl = {\n parameterPath: [\n \"options\",\n \"prevSnapshotUrl\"\n ],\n mapper: {\n serializedName: \"x-ms-previous-snapshot-url\",\n type: {\n name: \"String\"\n }\n }\n};\nvar proposedLeaseId0 = {\n parameterPath: [\n \"options\",\n \"proposedLeaseId\"\n ],\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar proposedLeaseId1 = {\n parameterPath: \"proposedLeaseId\",\n mapper: {\n required: true,\n serializedName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar range0 = {\n parameterPath: [\n \"options\",\n \"range\"\n ],\n mapper: {\n serializedName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\nvar range1 = {\n parameterPath: \"range\",\n mapper: {\n required: true,\n serializedName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\nvar rangeGetContentCRC64 = {\n parameterPath: [\n \"options\",\n \"rangeGetContentCRC64\"\n ],\n mapper: {\n serializedName: \"x-ms-range-get-content-crc64\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar rangeGetContentMD5 = {\n parameterPath: [\n \"options\",\n \"rangeGetContentMD5\"\n ],\n mapper: {\n serializedName: \"x-ms-range-get-content-md5\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar 
rehydratePriority = {\n parameterPath: [\n \"options\",\n \"rehydratePriority\"\n ],\n mapper: {\n serializedName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"String\"\n }\n }\n};\nvar renameSource = {\n parameterPath: \"renameSource\",\n mapper: {\n required: true,\n serializedName: \"x-ms-rename-source\",\n type: {\n name: \"String\"\n }\n }\n};\nvar requestId = {\n parameterPath: [\n \"options\",\n \"requestId\"\n ],\n mapper: {\n serializedName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar restype0 = {\n parameterPath: \"restype\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"restype\",\n defaultValue: 'service',\n type: {\n name: \"String\"\n }\n }\n};\nvar restype1 = {\n parameterPath: \"restype\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"restype\",\n defaultValue: 'account',\n type: {\n name: \"String\"\n }\n }\n};\nvar restype2 = {\n parameterPath: \"restype\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"restype\",\n defaultValue: 'container',\n type: {\n name: \"String\"\n }\n }\n};\nvar sealBlob = {\n parameterPath: [\n \"options\",\n \"sealBlob\"\n ],\n mapper: {\n serializedName: \"x-ms-seal-blob\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar sequenceNumberAction = {\n parameterPath: \"sequenceNumberAction\",\n mapper: {\n required: true,\n serializedName: \"x-ms-sequence-number-action\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"max\",\n \"update\",\n \"increment\"\n ]\n }\n }\n};\nvar snapshot = {\n parameterPath: [\n \"options\",\n \"snapshot\"\n ],\n mapper: {\n serializedName: \"snapshot\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceContentCrc64 = {\n parameterPath: [\n \"options\",\n \"sourceContentCrc64\"\n ],\n mapper: {\n serializedName: \"x-ms-source-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar sourceContentMD5 = {\n parameterPath: [\n \"options\",\n \"sourceContentMD5\"\n ],\n mapper: {\n serializedName: \"x-ms-source-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar sourceIfMatch = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-match\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceIfModifiedSince = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfModifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-modified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\nvar sourceIfNoneMatch = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfNoneMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-none-match\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceIfTags = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfTags\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceIfUnmodifiedSince = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-unmodified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\nvar sourceLeaseId = {\n parameterPath: [\n \"options\",\n \"sourceLeaseId\"\n ],\n mapper: {\n serializedName: \"x-ms-source-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceRange0 = {\n parameterPath: \"sourceRange\",\n mapper: {\n required: true,\n serializedName: 
\"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceRange1 = {\n parameterPath: [\n \"options\",\n \"sourceRange\"\n ],\n mapper: {\n serializedName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\nvar sourceUrl = {\n parameterPath: \"sourceUrl\",\n mapper: {\n required: true,\n serializedName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\nvar tier0 = {\n parameterPath: [\n \"options\",\n \"tier\"\n ],\n mapper: {\n serializedName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n }\n};\nvar tier1 = {\n parameterPath: \"tier\",\n mapper: {\n required: true,\n serializedName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n }\n};\nvar timeoutInSeconds = {\n parameterPath: [\n \"options\",\n \"timeoutInSeconds\"\n ],\n mapper: {\n serializedName: \"timeout\",\n constraints: {\n InclusiveMinimum: 0\n },\n type: {\n name: \"Number\"\n }\n }\n};\nvar transactionalContentCrc64 = {\n parameterPath: [\n \"options\",\n \"transactionalContentCrc64\"\n ],\n mapper: {\n serializedName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar transactionalContentMD5 = {\n parameterPath: [\n \"options\",\n \"transactionalContentMD5\"\n ],\n mapper: {\n serializedName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\nvar upn = {\n parameterPath: [\n \"options\",\n \"upn\"\n ],\n mapper: {\n serializedName: \"upn\",\n type: {\n name: \"Boolean\"\n }\n }\n};\nvar url = {\n parameterPath: \"url\",\n mapper: {\n required: true,\n serializedName: \"url\",\n defaultValue: '',\n type: {\n name: \"String\"\n }\n },\n skipEncoding: true\n};\nvar version = {\n parameterPath: \"version\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-version\",\n defaultValue: '2020-04-08',\n type: {\n name: \"String\"\n }\n }\n};\nvar versionId = {\n parameterPath: [\n \"options\",\n \"versionId\"\n ],\n mapper: {\n serializedName: \"versionid\",\n type: {\n name: \"String\"\n }\n }\n};\nvar where = {\n parameterPath: [\n \"options\",\n \"where\"\n ],\n mapper: {\n serializedName: \"where\",\n type: {\n name: \"String\"\n }\n }\n};\nvar xMsRequiresSync = {\n parameterPath: \"xMsRequiresSync\",\n mapper: {\n required: true,\n isConstant: true,\n serializedName: \"x-ms-requires-sync\",\n defaultValue: 'true',\n type: {\n name: \"String\"\n }\n }\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a Service. 
*/\nvar Service = /** @class */ (function () {\n /**\n * Create a Service.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function Service(client) {\n this.client = client;\n }\n Service.prototype.setProperties = function (blobServiceProperties, options, callback) {\n return this.client.sendOperationRequest({\n blobServiceProperties: blobServiceProperties,\n options: options\n }, setPropertiesOperationSpec, callback);\n };\n Service.prototype.getProperties = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPropertiesOperationSpec, callback);\n };\n Service.prototype.getStatistics = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getStatisticsOperationSpec, callback);\n };\n Service.prototype.listContainersSegment = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, listContainersSegmentOperationSpec, callback);\n };\n Service.prototype.getUserDelegationKey = function (keyInfo, options, callback) {\n return this.client.sendOperationRequest({\n keyInfo: keyInfo,\n options: options\n }, getUserDelegationKeyOperationSpec, callback);\n };\n Service.prototype.getAccountInfo = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccountInfoOperationSpec, callback);\n };\n Service.prototype.submitBatch = function (body, contentLength, multipartContentType, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n multipartContentType: multipartContentType,\n options: options\n }, submitBatchOperationSpec, callback);\n };\n Service.prototype.filterBlobs = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, filterBlobsOperationSpec, callback);\n };\n return Service;\n}());\n// Operation Specifications\nvar serializer = new coreHttp.Serializer(Mappers, true);\nvar setPropertiesOperationSpec = {\n httpMethod: \"PUT\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype0,\n comp0\n ],\n headerParameters: [\n version,\n requestId\n ],\n requestBody: {\n parameterPath: \"blobServiceProperties\",\n mapper: tslib.__assign(tslib.__assign({}, BlobServiceProperties), { required: true })\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 202: {\n headersMapper: ServiceSetPropertiesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceSetPropertiesHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar getPropertiesOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype0,\n comp0\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: BlobServiceProperties,\n headersMapper: ServiceGetPropertiesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceGetPropertiesHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar getStatisticsOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype0,\n comp1\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: BlobServiceStatistics,\n headersMapper: ServiceGetStatisticsHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceGetStatisticsHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar 
listContainersSegmentOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n prefix,\n marker0,\n maxPageSize,\n include0,\n timeoutInSeconds,\n comp2\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: ListContainersSegmentResponse,\n headersMapper: ServiceListContainersSegmentHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceListContainersSegmentHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar getUserDelegationKeyOperationSpec = {\n httpMethod: \"POST\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype0,\n comp3\n ],\n headerParameters: [\n version,\n requestId\n ],\n requestBody: {\n parameterPath: \"keyInfo\",\n mapper: tslib.__assign(tslib.__assign({}, KeyInfo), { required: true })\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 200: {\n bodyMapper: UserDelegationKey,\n headersMapper: ServiceGetUserDelegationKeyHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceGetUserDelegationKeyHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar getAccountInfoOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n restype1,\n comp0\n ],\n headerParameters: [\n version\n ],\n responses: {\n 200: {\n headersMapper: ServiceGetAccountInfoHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceGetAccountInfoHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar submitBatchOperationSpec = {\n httpMethod: \"POST\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp4\n ],\n headerParameters: [\n contentLength,\n multipartContentType,\n version,\n requestId\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 202: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: ServiceSubmitBatchHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceSubmitBatchHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\nvar filterBlobsOperationSpec = {\n httpMethod: \"GET\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n where,\n marker0,\n maxPageSize,\n comp5\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: FilterBlobSegment,\n headersMapper: ServiceFilterBlobsHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ServiceFilterBlobsHeaders\n }\n },\n isXML: true,\n serializer: serializer\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$1 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n AccessPolicy: AccessPolicy,\n BlobFlatListSegment: BlobFlatListSegment,\n BlobHierarchyListSegment: BlobHierarchyListSegment,\n BlobItemInternal: BlobItemInternal,\n BlobPrefix: BlobPrefix,\n BlobPropertiesInternal: BlobPropertiesInternal,\n BlobTag: BlobTag,\n BlobTags: BlobTags,\n ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders,\n ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders,\n ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders,\n ContainerCreateHeaders: ContainerCreateHeaders,\n ContainerDeleteHeaders: ContainerDeleteHeaders,\n ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders,\n ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders,\n ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders,\n ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders,\n ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders,\n ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders,\n ContainerRestoreHeaders: ContainerRestoreHeaders,\n ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders,\n ContainerSetMetadataHeaders: ContainerSetMetadataHeaders,\n ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse,\n ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse,\n SignedIdentifier: SignedIdentifier,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a Container. 
*/\nvar Container = /** @class */ (function () {\n /**\n * Create a Container.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function Container(client) {\n this.client = client;\n }\n Container.prototype.create = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, createOperationSpec, callback);\n };\n Container.prototype.getProperties = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPropertiesOperationSpec$1, callback);\n };\n Container.prototype.deleteMethod = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, deleteMethodOperationSpec, callback);\n };\n Container.prototype.setMetadata = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setMetadataOperationSpec, callback);\n };\n Container.prototype.getAccessPolicy = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccessPolicyOperationSpec, callback);\n };\n Container.prototype.setAccessPolicy = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setAccessPolicyOperationSpec, callback);\n };\n Container.prototype.restore = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, restoreOperationSpec, callback);\n };\n Container.prototype.acquireLease = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, acquireLeaseOperationSpec, callback);\n };\n Container.prototype.releaseLease = function (leaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n options: options\n }, releaseLeaseOperationSpec, callback);\n };\n Container.prototype.renewLease = function (leaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n options: options\n }, renewLeaseOperationSpec, callback);\n };\n Container.prototype.breakLease = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, breakLeaseOperationSpec, callback);\n };\n Container.prototype.changeLease = function (leaseId, proposedLeaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n proposedLeaseId: proposedLeaseId,\n options: options\n }, changeLeaseOperationSpec, callback);\n };\n Container.prototype.listBlobFlatSegment = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, listBlobFlatSegmentOperationSpec, callback);\n };\n Container.prototype.listBlobHierarchySegment = function (delimiter, options, callback) {\n return this.client.sendOperationRequest({\n delimiter: delimiter,\n options: options\n }, listBlobHierarchySegmentOperationSpec, callback);\n };\n Container.prototype.getAccountInfo = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccountInfoOperationSpec$1, callback);\n };\n return Container;\n}());\n// Operation Specifications\nvar serializer$1 = new coreHttp.Serializer(Mappers$1, true);\nvar createOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2\n ],\n headerParameters: [\n metadata,\n access,\n version,\n requestId,\n defaultEncryptionScope,\n preventEncryptionScopeOverride\n ],\n responses: {\n 201: {\n 
headersMapper: ContainerCreateHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerCreateHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar getPropertiesOperationSpec$1 = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0\n ],\n responses: {\n 200: {\n headersMapper: ContainerGetPropertiesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerGetPropertiesHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar deleteMethodOperationSpec = {\n httpMethod: \"DELETE\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 202: {\n headersMapper: ContainerDeleteHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerDeleteHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar setMetadataOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp6\n ],\n headerParameters: [\n metadata,\n version,\n requestId,\n leaseId0,\n ifModifiedSince\n ],\n responses: {\n 200: {\n headersMapper: ContainerSetMetadataHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerSetMetadataHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar getAccessPolicyOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp7\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0\n ],\n responses: {\n 200: {\n bodyMapper: {\n xmlElementName: \"SignedIdentifier\",\n serializedName: \"parsedResponse\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n },\n headersMapper: ContainerGetAccessPolicyHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerGetAccessPolicyHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar setAccessPolicyOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp7\n ],\n headerParameters: [\n access,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n requestBody: {\n parameterPath: [\n \"options\",\n \"containerAcl\"\n ],\n mapper: {\n xmlName: \"SignedIdentifiers\",\n xmlElementName: \"SignedIdentifier\",\n serializedName: \"containerAcl\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n }\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 200: {\n headersMapper: ContainerSetAccessPolicyHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerSetAccessPolicyHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar restoreOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n restype2,\n comp8\n ],\n headerParameters: [\n version,\n requestId,\n deletedContainerName,\n deletedContainerVersion\n ],\n responses: {\n 201: {\n headersMapper: ContainerRestoreHeaders\n 
},\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerRestoreHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar acquireLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9,\n restype2\n ],\n headerParameters: [\n duration,\n proposedLeaseId0,\n version,\n requestId,\n action0,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 201: {\n headersMapper: ContainerAcquireLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerAcquireLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar releaseLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9,\n restype2\n ],\n headerParameters: [\n leaseId1,\n version,\n requestId,\n action1,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: ContainerReleaseLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerReleaseLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar renewLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9,\n restype2\n ],\n headerParameters: [\n leaseId1,\n version,\n requestId,\n action2,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: ContainerRenewLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerRenewLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar breakLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9,\n restype2\n ],\n headerParameters: [\n breakPeriod,\n version,\n requestId,\n action3,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 202: {\n headersMapper: ContainerBreakLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerBreakLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar changeLeaseOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9,\n restype2\n ],\n headerParameters: [\n leaseId1,\n proposedLeaseId1,\n version,\n requestId,\n action4,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: ContainerChangeLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerChangeLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar listBlobFlatSegmentOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n prefix,\n marker0,\n maxPageSize,\n include1,\n timeoutInSeconds,\n restype2,\n comp2\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: ListBlobsFlatSegmentResponse,\n headersMapper: ContainerListBlobFlatSegmentHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerListBlobFlatSegmentHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar listBlobHierarchySegmentOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n prefix,\n delimiter,\n marker0,\n maxPageSize,\n include1,\n timeoutInSeconds,\n restype2,\n comp2\n ],\n 
headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n bodyMapper: ListBlobsHierarchySegmentResponse,\n headersMapper: ContainerListBlobHierarchySegmentHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerListBlobHierarchySegmentHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\nvar getAccountInfoOperationSpec$1 = {\n httpMethod: \"GET\",\n path: \"{containerName}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n restype1,\n comp0\n ],\n headerParameters: [\n version\n ],\n responses: {\n 200: {\n headersMapper: ContainerGetAccountInfoHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: ContainerGetAccountInfoHeaders\n }\n },\n isXML: true,\n serializer: serializer$1\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$2 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n ArrowConfiguration: ArrowConfiguration,\n ArrowField: ArrowField,\n BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders,\n BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders,\n BlobBreakLeaseHeaders: BlobBreakLeaseHeaders,\n BlobChangeLeaseHeaders: BlobChangeLeaseHeaders,\n BlobCopyFromURLHeaders: BlobCopyFromURLHeaders,\n BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders,\n BlobDeleteHeaders: BlobDeleteHeaders,\n BlobDownloadHeaders: BlobDownloadHeaders,\n BlobGetAccessControlHeaders: BlobGetAccessControlHeaders,\n BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders,\n BlobGetPropertiesHeaders: BlobGetPropertiesHeaders,\n BlobGetTagsHeaders: BlobGetTagsHeaders,\n BlobQueryHeaders: BlobQueryHeaders,\n BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders,\n BlobRenameHeaders: BlobRenameHeaders,\n BlobRenewLeaseHeaders: BlobRenewLeaseHeaders,\n BlobSetAccessControlHeaders: BlobSetAccessControlHeaders,\n BlobSetExpiryHeaders: BlobSetExpiryHeaders,\n BlobSetHTTPHeadersHeaders: BlobSetHTTPHeadersHeaders,\n BlobSetMetadataHeaders: BlobSetMetadataHeaders,\n BlobSetTagsHeaders: BlobSetTagsHeaders,\n BlobSetTierHeaders: BlobSetTierHeaders,\n BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders,\n BlobTag: BlobTag,\n BlobTags: BlobTags,\n BlobUndeleteHeaders: BlobUndeleteHeaders,\n DataLakeStorageError: DataLakeStorageError,\n DataLakeStorageErrorError: DataLakeStorageErrorError,\n DelimitedTextConfiguration: DelimitedTextConfiguration,\n JsonTextConfiguration: JsonTextConfiguration,\n QueryFormat: QueryFormat,\n QueryRequest: QueryRequest,\n QuerySerialization: QuerySerialization,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a Blob. 
*/\nvar Blob$1 = /** @class */ (function () {\n /**\n * Create a Blob.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function Blob(client) {\n this.client = client;\n }\n Blob.prototype.download = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, downloadOperationSpec, callback);\n };\n Blob.prototype.getProperties = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPropertiesOperationSpec$2, callback);\n };\n Blob.prototype.deleteMethod = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, deleteMethodOperationSpec$1, callback);\n };\n Blob.prototype.setAccessControl = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setAccessControlOperationSpec, callback);\n };\n Blob.prototype.getAccessControl = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccessControlOperationSpec, callback);\n };\n Blob.prototype.rename = function (renameSource, options, callback) {\n return this.client.sendOperationRequest({\n renameSource: renameSource,\n options: options\n }, renameOperationSpec, callback);\n };\n Blob.prototype.undelete = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, undeleteOperationSpec, callback);\n };\n Blob.prototype.setExpiry = function (expiryOptions, options, callback) {\n return this.client.sendOperationRequest({\n expiryOptions: expiryOptions,\n options: options\n }, setExpiryOperationSpec, callback);\n };\n Blob.prototype.setHTTPHeaders = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setHTTPHeadersOperationSpec, callback);\n };\n Blob.prototype.setMetadata = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setMetadataOperationSpec$1, callback);\n };\n Blob.prototype.acquireLease = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, acquireLeaseOperationSpec$1, callback);\n };\n Blob.prototype.releaseLease = function (leaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n options: options\n }, releaseLeaseOperationSpec$1, callback);\n };\n Blob.prototype.renewLease = function (leaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n options: options\n }, renewLeaseOperationSpec$1, callback);\n };\n Blob.prototype.changeLease = function (leaseId, proposedLeaseId, options, callback) {\n return this.client.sendOperationRequest({\n leaseId: leaseId,\n proposedLeaseId: proposedLeaseId,\n options: options\n }, changeLeaseOperationSpec$1, callback);\n };\n Blob.prototype.breakLease = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, breakLeaseOperationSpec$1, callback);\n };\n Blob.prototype.createSnapshot = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, createSnapshotOperationSpec, callback);\n };\n Blob.prototype.startCopyFromURL = function (copySource, options, callback) {\n return this.client.sendOperationRequest({\n copySource: copySource,\n options: options\n }, startCopyFromURLOperationSpec, callback);\n };\n Blob.prototype.copyFromURL = function (copySource, options, callback) {\n return this.client.sendOperationRequest({\n 
copySource: copySource,\n options: options\n }, copyFromURLOperationSpec, callback);\n };\n Blob.prototype.abortCopyFromURL = function (copyId, options, callback) {\n return this.client.sendOperationRequest({\n copyId: copyId,\n options: options\n }, abortCopyFromURLOperationSpec, callback);\n };\n Blob.prototype.setTier = function (tier, options, callback) {\n return this.client.sendOperationRequest({\n tier: tier,\n options: options\n }, setTierOperationSpec, callback);\n };\n Blob.prototype.getAccountInfo = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getAccountInfoOperationSpec$2, callback);\n };\n Blob.prototype.query = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, queryOperationSpec, callback);\n };\n Blob.prototype.getTags = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getTagsOperationSpec, callback);\n };\n Blob.prototype.setTags = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, setTagsOperationSpec, callback);\n };\n return Blob;\n}());\n// Operation Specifications\nvar serializer$2 = new coreHttp.Serializer(Mappers$2, true);\nvar downloadOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n versionId,\n timeoutInSeconds\n ],\n headerParameters: [\n range0,\n rangeGetContentMD5,\n rangeGetContentCRC64,\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: BlobDownloadHeaders\n },\n 206: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: BlobDownloadHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobDownloadHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar getPropertiesOperationSpec$2 = {\n httpMethod: \"HEAD\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n versionId,\n timeoutInSeconds\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobGetPropertiesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobGetPropertiesHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar deleteMethodOperationSpec$1 = {\n httpMethod: \"DELETE\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n versionId,\n timeoutInSeconds,\n blobDeleteType\n ],\n headerParameters: [\n deleteSnapshots,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 202: {\n headersMapper: BlobDeleteHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobDeleteHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setAccessControlOperationSpec = {\n httpMethod: \"PATCH\",\n path: \"{filesystem}/{path}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n action5\n ],\n headerParameters: [\n owner,\n group,\n 
posixPermissions,\n posixAcl,\n requestId,\n version,\n leaseId0,\n ifMatch,\n ifNoneMatch,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: BlobSetAccessControlHeaders\n },\n default: {\n bodyMapper: DataLakeStorageError,\n headersMapper: BlobSetAccessControlHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar getAccessControlOperationSpec = {\n httpMethod: \"HEAD\",\n path: \"{filesystem}/{path}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n upn,\n action6\n ],\n headerParameters: [\n requestId,\n version,\n leaseId0,\n ifMatch,\n ifNoneMatch,\n ifModifiedSince,\n ifUnmodifiedSince\n ],\n responses: {\n 200: {\n headersMapper: BlobGetAccessControlHeaders\n },\n default: {\n bodyMapper: DataLakeStorageError,\n headersMapper: BlobGetAccessControlHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar renameOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{filesystem}/{path}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n pathRenameMode\n ],\n headerParameters: [\n renameSource,\n directoryProperties,\n posixPermissions,\n posixUmask,\n sourceLeaseId,\n version,\n requestId,\n cacheControl,\n contentType,\n contentEncoding,\n contentLanguage,\n contentDisposition,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch\n ],\n responses: {\n 201: {\n headersMapper: BlobRenameHeaders\n },\n default: {\n bodyMapper: DataLakeStorageError,\n headersMapper: BlobRenameHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar undeleteOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp8\n ],\n headerParameters: [\n version,\n requestId\n ],\n responses: {\n 200: {\n headersMapper: BlobUndeleteHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobUndeleteHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setExpiryOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp10\n ],\n headerParameters: [\n version,\n requestId,\n expiryOptions,\n expiresOn\n ],\n responses: {\n 200: {\n headersMapper: BlobSetExpiryHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetExpiryHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setHTTPHeadersOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp0\n ],\n headerParameters: [\n version,\n requestId,\n blobCacheControl,\n blobContentType,\n blobContentMD5,\n blobContentEncoding,\n blobContentLanguage,\n blobContentDisposition,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobSetHTTPHeadersHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetHTTPHeadersHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setMetadataOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp6\n ],\n headerParameters: [\n metadata,\n encryptionScope,\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n 
ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobSetMetadataHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetMetadataHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar acquireLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9\n ],\n headerParameters: [\n duration,\n proposedLeaseId0,\n version,\n requestId,\n action0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 201: {\n headersMapper: BlobAcquireLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobAcquireLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar releaseLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9\n ],\n headerParameters: [\n leaseId1,\n version,\n requestId,\n action1,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobReleaseLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobReleaseLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar renewLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9\n ],\n headerParameters: [\n leaseId1,\n version,\n requestId,\n action2,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobRenewLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobRenewLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar changeLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9\n ],\n headerParameters: [\n leaseId1,\n proposedLeaseId1,\n version,\n requestId,\n action4,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobChangeLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobChangeLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar breakLeaseOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp9\n ],\n headerParameters: [\n breakPeriod,\n version,\n requestId,\n action3,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 202: {\n headersMapper: BlobBreakLeaseHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobBreakLeaseHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar createSnapshotOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp11\n ],\n headerParameters: [\n metadata,\n encryptionScope,\n version,\n requestId,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n leaseId0\n ],\n responses: {\n 201: {\n headersMapper: BlobCreateSnapshotHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: 
BlobCreateSnapshotHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar startCopyFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n metadata,\n tier0,\n rehydratePriority,\n copySource,\n version,\n requestId,\n blobTagsString,\n sealBlob,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch,\n sourceIfTags,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n leaseId0\n ],\n responses: {\n 202: {\n headersMapper: BlobStartCopyFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobStartCopyFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar copyFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n metadata,\n tier0,\n copySource,\n version,\n requestId,\n sourceContentMD5,\n blobTagsString,\n xMsRequiresSync,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n leaseId0\n ],\n responses: {\n 202: {\n headersMapper: BlobCopyFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobCopyFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar abortCopyFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n copyId,\n timeoutInSeconds,\n comp12\n ],\n headerParameters: [\n version,\n requestId,\n copyActionAbortConstant,\n leaseId0\n ],\n responses: {\n 204: {\n headersMapper: BlobAbortCopyFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobAbortCopyFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setTierOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n versionId,\n timeoutInSeconds,\n comp13\n ],\n headerParameters: [\n tier1,\n rehydratePriority,\n version,\n requestId,\n leaseId0,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: BlobSetTierHeaders\n },\n 202: {\n headersMapper: BlobSetTierHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetTierHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar getAccountInfoOperationSpec$2 = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n restype1,\n comp0\n ],\n headerParameters: [\n version\n ],\n responses: {\n 200: {\n headersMapper: BlobGetAccountInfoHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobGetAccountInfoHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar queryOperationSpec = {\n httpMethod: \"POST\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n timeoutInSeconds,\n comp14\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: [\n \"options\",\n \"queryRequest\"\n ],\n mapper: QueryRequest\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 200: {\n bodyMapper: {\n 
serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: BlobQueryHeaders\n },\n 206: {\n bodyMapper: {\n serializedName: \"parsedResponse\",\n type: {\n name: \"Stream\"\n }\n },\n headersMapper: BlobQueryHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobQueryHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar getTagsOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n snapshot,\n versionId,\n comp15\n ],\n headerParameters: [\n version,\n requestId,\n ifTags,\n leaseId0\n ],\n responses: {\n 200: {\n bodyMapper: BlobTags,\n headersMapper: BlobGetTagsHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobGetTagsHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\nvar setTagsOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n versionId,\n comp15\n ],\n headerParameters: [\n version,\n transactionalContentMD5,\n transactionalContentCrc64,\n requestId,\n ifTags,\n leaseId0\n ],\n requestBody: {\n parameterPath: [\n \"options\",\n \"tags\"\n ],\n mapper: BlobTags\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 204: {\n headersMapper: BlobSetTagsHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlobSetTagsHeaders\n }\n },\n isXML: true,\n serializer: serializer$2\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$3 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n ClearRange: ClearRange,\n PageBlobClearPagesHeaders: PageBlobClearPagesHeaders,\n PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders,\n PageBlobCreateHeaders: PageBlobCreateHeaders,\n PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders,\n PageBlobResizeHeaders: PageBlobResizeHeaders,\n PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders,\n PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders,\n PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders,\n PageList: PageList,\n PageRange: PageRange,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a PageBlob. 
*/\nvar PageBlob = /** @class */ (function () {\n /**\n * Create a PageBlob.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function PageBlob(client) {\n this.client = client;\n }\n PageBlob.prototype.create = function (contentLength, blobContentLength, options, callback) {\n return this.client.sendOperationRequest({\n contentLength: contentLength,\n blobContentLength: blobContentLength,\n options: options\n }, createOperationSpec$1, callback);\n };\n PageBlob.prototype.uploadPages = function (body, contentLength, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n options: options\n }, uploadPagesOperationSpec, callback);\n };\n PageBlob.prototype.clearPages = function (contentLength, options, callback) {\n return this.client.sendOperationRequest({\n contentLength: contentLength,\n options: options\n }, clearPagesOperationSpec, callback);\n };\n PageBlob.prototype.uploadPagesFromURL = function (sourceUrl, sourceRange, contentLength, range, options, callback) {\n return this.client.sendOperationRequest({\n sourceUrl: sourceUrl,\n sourceRange: sourceRange,\n contentLength: contentLength,\n range: range,\n options: options\n }, uploadPagesFromURLOperationSpec, callback);\n };\n PageBlob.prototype.getPageRanges = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPageRangesOperationSpec, callback);\n };\n PageBlob.prototype.getPageRangesDiff = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, getPageRangesDiffOperationSpec, callback);\n };\n PageBlob.prototype.resize = function (blobContentLength, options, callback) {\n return this.client.sendOperationRequest({\n blobContentLength: blobContentLength,\n options: options\n }, resizeOperationSpec, callback);\n };\n PageBlob.prototype.updateSequenceNumber = function (sequenceNumberAction, options, callback) {\n return this.client.sendOperationRequest({\n sequenceNumberAction: sequenceNumberAction,\n options: options\n }, updateSequenceNumberOperationSpec, callback);\n };\n PageBlob.prototype.copyIncremental = function (copySource, options, callback) {\n return this.client.sendOperationRequest({\n copySource: copySource,\n options: options\n }, copyIncrementalOperationSpec, callback);\n };\n return PageBlob;\n}());\n// Operation Specifications\nvar serializer$3 = new coreHttp.Serializer(Mappers$3, true);\nvar createOperationSpec$1 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n contentLength,\n tier0,\n metadata,\n encryptionScope,\n blobContentLength,\n blobSequenceNumber,\n version,\n requestId,\n blobTagsString,\n blobType0,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobCacheControl,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 201: {\n headersMapper: PageBlobCreateHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobCreateHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar uploadPagesOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp16\n ],\n headerParameters: [\n contentLength,\n 
transactionalContentMD5,\n transactionalContentCrc64,\n range0,\n encryptionScope,\n version,\n requestId,\n pageWrite0,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifSequenceNumberLessThanOrEqualTo,\n ifSequenceNumberLessThan,\n ifSequenceNumberEqualTo,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/octet-stream\",\n responses: {\n 201: {\n headersMapper: PageBlobUploadPagesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobUploadPagesHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar clearPagesOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp16\n ],\n headerParameters: [\n contentLength,\n range0,\n encryptionScope,\n version,\n requestId,\n pageWrite1,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifSequenceNumberLessThanOrEqualTo,\n ifSequenceNumberLessThan,\n ifSequenceNumberEqualTo,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 201: {\n headersMapper: PageBlobClearPagesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobClearPagesHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar uploadPagesFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp16\n ],\n headerParameters: [\n sourceUrl,\n sourceRange0,\n sourceContentMD5,\n sourceContentCrc64,\n contentLength,\n range1,\n encryptionScope,\n version,\n requestId,\n pageWrite0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n leaseId0,\n ifSequenceNumberLessThanOrEqualTo,\n ifSequenceNumberLessThan,\n ifSequenceNumberEqualTo,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch\n ],\n responses: {\n 201: {\n headersMapper: PageBlobUploadPagesFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobUploadPagesFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar getPageRangesOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n timeoutInSeconds,\n comp17\n ],\n headerParameters: [\n range0,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n bodyMapper: PageList,\n headersMapper: PageBlobGetPageRangesHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobGetPageRangesHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar getPageRangesDiffOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n timeoutInSeconds,\n prevsnapshot,\n comp17\n ],\n headerParameters: [\n prevSnapshotUrl,\n range0,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n bodyMapper: PageList,\n headersMapper: PageBlobGetPageRangesDiffHeaders\n },\n default: {\n bodyMapper: StorageError,\n 
headersMapper: PageBlobGetPageRangesDiffHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar resizeOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp0\n ],\n headerParameters: [\n encryptionScope,\n blobContentLength,\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: PageBlobResizeHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobResizeHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar updateSequenceNumberOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp0\n ],\n headerParameters: [\n sequenceNumberAction,\n blobSequenceNumber,\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 200: {\n headersMapper: PageBlobUpdateSequenceNumberHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobUpdateSequenceNumberHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\nvar copyIncrementalOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp18\n ],\n headerParameters: [\n copySource,\n version,\n requestId,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 202: {\n headersMapper: PageBlobCopyIncrementalHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: PageBlobCopyIncrementalHeaders\n }\n },\n isXML: true,\n serializer: serializer$3\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$4 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders,\n AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders,\n AppendBlobCreateHeaders: AppendBlobCreateHeaders,\n AppendBlobSealHeaders: AppendBlobSealHeaders,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a AppendBlob. 
*/\nvar AppendBlob = /** @class */ (function () {\n /**\n * Create a AppendBlob.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function AppendBlob(client) {\n this.client = client;\n }\n AppendBlob.prototype.create = function (contentLength, options, callback) {\n return this.client.sendOperationRequest({\n contentLength: contentLength,\n options: options\n }, createOperationSpec$2, callback);\n };\n AppendBlob.prototype.appendBlock = function (body, contentLength, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n options: options\n }, appendBlockOperationSpec, callback);\n };\n AppendBlob.prototype.appendBlockFromUrl = function (sourceUrl, contentLength, options, callback) {\n return this.client.sendOperationRequest({\n sourceUrl: sourceUrl,\n contentLength: contentLength,\n options: options\n }, appendBlockFromUrlOperationSpec, callback);\n };\n AppendBlob.prototype.seal = function (options, callback) {\n return this.client.sendOperationRequest({\n options: options\n }, sealOperationSpec, callback);\n };\n return AppendBlob;\n}());\n// Operation Specifications\nvar serializer$4 = new coreHttp.Serializer(Mappers$4, true);\nvar createOperationSpec$2 = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n contentLength,\n metadata,\n encryptionScope,\n version,\n requestId,\n blobTagsString,\n blobType1,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobCacheControl,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n responses: {\n 201: {\n headersMapper: AppendBlobCreateHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: AppendBlobCreateHeaders\n }\n },\n isXML: true,\n serializer: serializer$4\n};\nvar appendBlockOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp19\n ],\n headerParameters: [\n contentLength,\n transactionalContentMD5,\n transactionalContentCrc64,\n encryptionScope,\n version,\n requestId,\n leaseId0,\n maxSize,\n appendPosition,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/octet-stream\",\n responses: {\n 201: {\n headersMapper: AppendBlobAppendBlockHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: AppendBlobAppendBlockHeaders\n }\n },\n isXML: true,\n serializer: serializer$4\n};\nvar appendBlockFromUrlOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp19\n ],\n headerParameters: [\n sourceUrl,\n sourceRange1,\n sourceContentMD5,\n sourceContentCrc64,\n contentLength,\n transactionalContentMD5,\n encryptionScope,\n version,\n requestId,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n leaseId0,\n maxSize,\n appendPosition,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n 
sourceIfNoneMatch\n ],\n responses: {\n 201: {\n headersMapper: AppendBlobAppendBlockFromUrlHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: AppendBlobAppendBlockFromUrlHeaders\n }\n },\n isXML: true,\n serializer: serializer$4\n};\nvar sealOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp20\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n appendPosition\n ],\n responses: {\n 200: {\n headersMapper: AppendBlobSealHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: AppendBlobSealHeaders\n }\n },\n isXML: true,\n serializer: serializer$4\n};\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nvar Mappers$5 = /*#__PURE__*/Object.freeze({\n __proto__: null,\n Block: Block,\n BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders,\n BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders,\n BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders,\n BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders,\n BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders,\n BlockBlobUploadHeaders: BlockBlobUploadHeaders,\n BlockList: BlockList,\n BlockLookupList: BlockLookupList,\n StorageError: StorageError\n});\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\n/** Class representing a BlockBlob. 
*/\nvar BlockBlob = /** @class */ (function () {\n /**\n * Create a BlockBlob.\n * @param {StorageClientContext} client Reference to the service client.\n */\n function BlockBlob(client) {\n this.client = client;\n }\n BlockBlob.prototype.upload = function (body, contentLength, options, callback) {\n return this.client.sendOperationRequest({\n body: body,\n contentLength: contentLength,\n options: options\n }, uploadOperationSpec, callback);\n };\n BlockBlob.prototype.putBlobFromUrl = function (contentLength, copySource, options, callback) {\n return this.client.sendOperationRequest({\n contentLength: contentLength,\n copySource: copySource,\n options: options\n }, putBlobFromUrlOperationSpec, callback);\n };\n BlockBlob.prototype.stageBlock = function (blockId, contentLength, body, options, callback) {\n return this.client.sendOperationRequest({\n blockId: blockId,\n contentLength: contentLength,\n body: body,\n options: options\n }, stageBlockOperationSpec, callback);\n };\n BlockBlob.prototype.stageBlockFromURL = function (blockId, contentLength, sourceUrl, options, callback) {\n return this.client.sendOperationRequest({\n blockId: blockId,\n contentLength: contentLength,\n sourceUrl: sourceUrl,\n options: options\n }, stageBlockFromURLOperationSpec, callback);\n };\n BlockBlob.prototype.commitBlockList = function (blocks, options, callback) {\n return this.client.sendOperationRequest({\n blocks: blocks,\n options: options\n }, commitBlockListOperationSpec, callback);\n };\n BlockBlob.prototype.getBlockList = function (listType, options, callback) {\n return this.client.sendOperationRequest({\n listType: listType,\n options: options\n }, getBlockListOperationSpec, callback);\n };\n return BlockBlob;\n}());\n// Operation Specifications\nvar serializer$5 = new coreHttp.Serializer(Mappers$5, true);\nvar uploadOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n transactionalContentMD5,\n contentLength,\n metadata,\n encryptionScope,\n tier0,\n version,\n requestId,\n blobTagsString,\n blobType2,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobCacheControl,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/octet-stream\",\n responses: {\n 201: {\n headersMapper: BlockBlobUploadHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobUploadHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar putBlobFromUrlOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds\n ],\n headerParameters: [\n transactionalContentMD5,\n contentLength,\n metadata,\n encryptionScope,\n tier0,\n version,\n requestId,\n sourceContentMD5,\n blobTagsString,\n copySource,\n copySourceBlobProperties,\n blobType2,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobCacheControl,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags,\n sourceIfModifiedSince,\n 
sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch,\n sourceIfTags\n ],\n responses: {\n 201: {\n headersMapper: BlockBlobPutBlobFromUrlHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobPutBlobFromUrlHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar stageBlockOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n blockId,\n timeoutInSeconds,\n comp21\n ],\n headerParameters: [\n contentLength,\n transactionalContentMD5,\n transactionalContentCrc64,\n encryptionScope,\n version,\n requestId,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm\n ],\n requestBody: {\n parameterPath: \"body\",\n mapper: {\n required: true,\n serializedName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n },\n contentType: \"application/octet-stream\",\n responses: {\n 201: {\n headersMapper: BlockBlobStageBlockHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobStageBlockHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar stageBlockFromURLOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n blockId,\n timeoutInSeconds,\n comp21\n ],\n headerParameters: [\n contentLength,\n sourceUrl,\n sourceRange1,\n sourceContentMD5,\n sourceContentCrc64,\n encryptionScope,\n version,\n requestId,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n leaseId0,\n sourceIfModifiedSince,\n sourceIfUnmodifiedSince,\n sourceIfMatch,\n sourceIfNoneMatch\n ],\n responses: {\n 201: {\n headersMapper: BlockBlobStageBlockFromURLHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobStageBlockFromURLHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar commitBlockListOperationSpec = {\n httpMethod: \"PUT\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n timeoutInSeconds,\n comp22\n ],\n headerParameters: [\n transactionalContentMD5,\n transactionalContentCrc64,\n metadata,\n encryptionScope,\n tier0,\n version,\n requestId,\n blobTagsString,\n blobCacheControl,\n blobContentType,\n blobContentEncoding,\n blobContentLanguage,\n blobContentMD5,\n blobContentDisposition,\n leaseId0,\n encryptionKey,\n encryptionKeySha256,\n encryptionAlgorithm,\n ifModifiedSince,\n ifUnmodifiedSince,\n ifMatch,\n ifNoneMatch,\n ifTags\n ],\n requestBody: {\n parameterPath: \"blocks\",\n mapper: tslib.__assign(tslib.__assign({}, BlockLookupList), { required: true })\n },\n contentType: \"application/xml; charset=utf-8\",\n responses: {\n 201: {\n headersMapper: BlockBlobCommitBlockListHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobCommitBlockListHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\nvar getBlockListOperationSpec = {\n httpMethod: \"GET\",\n path: \"{containerName}/{blob}\",\n urlParameters: [\n url\n ],\n queryParameters: [\n snapshot,\n listType,\n timeoutInSeconds,\n comp22\n ],\n headerParameters: [\n version,\n requestId,\n leaseId0,\n ifTags\n ],\n responses: {\n 200: {\n bodyMapper: BlockList,\n headersMapper: BlockBlobGetBlockListHeaders\n },\n default: {\n bodyMapper: StorageError,\n headersMapper: BlockBlobGetBlockListHeaders\n }\n },\n isXML: true,\n serializer: serializer$5\n};\n\n// Copyright (c) Microsoft Corporation.\n/**\n * The @azure/logger configuration for this package.\n */\nvar logger = 
logger$1.createClientLogger(\"storage-blob\");\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\nvar SDK_VERSION = \"12.4.1\";\nvar SERVICE_VERSION = \"2020-04-08\";\nvar BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB\nvar BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB\nvar BLOCK_BLOB_MAX_BLOCKS = 50000;\nvar DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB\nvar DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB\nvar DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5;\n/**\n * The OAuth scope to use with Azure Storage.\n */\nvar StorageOAuthScopes = \"https://storage.azure.com/.default\";\nvar URLConstants = {\n Parameters: {\n FORCE_BROWSER_NO_CACHE: \"_\",\n SIGNATURE: \"sig\",\n SNAPSHOT: \"snapshot\",\n VERSIONID: \"versionid\",\n TIMEOUT: \"timeout\"\n }\n};\nvar HTTPURLConnection = {\n HTTP_ACCEPTED: 202,\n HTTP_CONFLICT: 409,\n HTTP_NOT_FOUND: 404,\n HTTP_PRECON_FAILED: 412,\n HTTP_RANGE_NOT_SATISFIABLE: 416\n};\nvar HeaderConstants = {\n AUTHORIZATION: \"Authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n CONTENT_ENCODING: \"Content-Encoding\",\n CONTENT_ID: \"Content-ID\",\n CONTENT_LANGUAGE: \"Content-Language\",\n CONTENT_LENGTH: \"Content-Length\",\n CONTENT_MD5: \"Content-Md5\",\n CONTENT_TRANSFER_ENCODING: \"Content-Transfer-Encoding\",\n CONTENT_TYPE: \"Content-Type\",\n COOKIE: \"Cookie\",\n DATE: \"date\",\n IF_MATCH: \"if-match\",\n IF_MODIFIED_SINCE: \"if-modified-since\",\n IF_NONE_MATCH: \"if-none-match\",\n IF_UNMODIFIED_SINCE: \"if-unmodified-since\",\n PREFIX_FOR_STORAGE: \"x-ms-\",\n RANGE: \"Range\",\n USER_AGENT: \"User-Agent\",\n X_MS_CLIENT_REQUEST_ID: \"x-ms-client-request-id\",\n X_MS_COPY_SOURCE: \"x-ms-copy-source\",\n X_MS_DATE: \"x-ms-date\",\n X_MS_ERROR_CODE: \"x-ms-error-code\",\n X_MS_VERSION: \"x-ms-version\"\n};\nvar ETagNone = \"\";\nvar ETagAny = \"*\";\nvar SIZE_1_MB = 1 * 1024 * 1024;\nvar BATCH_MAX_REQUEST = 256;\nvar BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;\nvar HTTP_LINE_ENDING = \"\\r\\n\";\nvar HTTP_VERSION_1_1 = \"HTTP/1.1\";\nvar EncryptionAlgorithmAES25 = \"AES256\";\nvar DevelopmentConnectionString = \"DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;\";\nvar StorageBlobLoggingAllowedHeaderNames = [\n \"Access-Control-Allow-Origin\",\n \"Cache-Control\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"Request-Id\",\n \"traceparent\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"x-ms-client-request-id\",\n \"x-ms-date\",\n \"x-ms-error-code\",\n \"x-ms-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-version\",\n \"Accept-Ranges\",\n \"Content-Disposition\",\n \"Content-Encoding\",\n \"Content-Language\",\n \"Content-MD5\",\n \"Content-Range\",\n \"ETag\",\n \"Last-Modified\",\n \"Server\",\n \"Vary\",\n \"x-ms-content-crc64\",\n \"x-ms-copy-action\",\n \"x-ms-copy-completion-time\",\n \"x-ms-copy-id\",\n \"x-ms-copy-progress\",\n \"x-ms-copy-status\",\n \"x-ms-has-immutability-policy\",\n \"x-ms-has-legal-hold\",\n \"x-ms-lease-state\",\n \"x-ms-lease-status\",\n \"x-ms-range\",\n \"x-ms-request-server-encrypted\",\n \"x-ms-server-encrypted\",\n \"x-ms-snapshot\",\n \"x-ms-source-range\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"x-ms-access-tier\",\n \"x-ms-access-tier-change-time\",\n 
\"x-ms-access-tier-inferred\",\n \"x-ms-account-kind\",\n \"x-ms-archive-status\",\n \"x-ms-blob-append-offset\",\n \"x-ms-blob-cache-control\",\n \"x-ms-blob-committed-block-count\",\n \"x-ms-blob-condition-appendpos\",\n \"x-ms-blob-condition-maxsize\",\n \"x-ms-blob-content-disposition\",\n \"x-ms-blob-content-encoding\",\n \"x-ms-blob-content-language\",\n \"x-ms-blob-content-length\",\n \"x-ms-blob-content-md5\",\n \"x-ms-blob-content-type\",\n \"x-ms-blob-public-access\",\n \"x-ms-blob-sequence-number\",\n \"x-ms-blob-type\",\n \"x-ms-copy-destination-snapshot\",\n \"x-ms-creation-time\",\n \"x-ms-default-encryption-scope\",\n \"x-ms-delete-snapshots\",\n \"x-ms-delete-type-permanent\",\n \"x-ms-deny-encryption-scope-override\",\n \"x-ms-encryption-algorithm\",\n \"x-ms-if-sequence-number-eq\",\n \"x-ms-if-sequence-number-le\",\n \"x-ms-if-sequence-number-lt\",\n \"x-ms-incremental-copy\",\n \"x-ms-lease-action\",\n \"x-ms-lease-break-period\",\n \"x-ms-lease-duration\",\n \"x-ms-lease-id\",\n \"x-ms-lease-time\",\n \"x-ms-page-write\",\n \"x-ms-proposed-lease-id\",\n \"x-ms-range-get-content-md5\",\n \"x-ms-rehydrate-priority\",\n \"x-ms-sequence-number-action\",\n \"x-ms-sku-name\",\n \"x-ms-source-content-md5\",\n \"x-ms-source-if-match\",\n \"x-ms-source-if-modified-since\",\n \"x-ms-source-if-none-match\",\n \"x-ms-source-if-unmodified-since\",\n \"x-ms-tag-count\",\n \"x-ms-encryption-key-sha256\",\n \"x-ms-if-tags\",\n \"x-ms-source-if-tags\"\n];\nvar StorageBlobLoggingAllowedQueryParameters = [\n \"comp\",\n \"maxresults\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"se\",\n \"si\",\n \"sip\",\n \"sp\",\n \"spr\",\n \"sr\",\n \"srt\",\n \"ss\",\n \"st\",\n \"sv\",\n \"include\",\n \"marker\",\n \"prefix\",\n \"copyid\",\n \"restype\",\n \"blockid\",\n \"blocklisttype\",\n \"delimiter\",\n \"prevsnapshot\",\n \"ske\",\n \"skoid\",\n \"sks\",\n \"skt\",\n \"sktid\",\n \"skv\",\n \"snapshot\"\n];\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * Reserved URL characters must be properly escaped for Storage services like Blob or File.\n *\n * ## URL encode and escape strategy for JS SDKs\n *\n * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.\n * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL\n * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.\n *\n * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.\n *\n * This is what legacy V2 SDK does, simple and works for most of the cases.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%253A\" and send to server. A blob named \"b%3A\" will be created.\n *\n * But this strategy will make it not possible to create a blob with \"?\" in it's name. 
Because when customer URL string is\n * \"http://account.blob.core.windows.net/con/blob?name\", the \"?name\" will be treated as URL paramter instead of blob name.\n * If customer URL string is \"http://account.blob.core.windows.net/con/blob%3Fname\", a blob named \"blob%3Fname\" will be created.\n * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.\n * We cannot accept a SDK cannot create a blob name with \"?\". So we implement strategy two:\n *\n * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.\n *\n * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will escape \":\" like \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%3A\" to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%253A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%253A\" to server. A blob named \"b%3A\" will be created.\n *\n * This strategy gives us flexibility to create with any special characters. But \"%\" will be treated as a special characters, if the URL string\n * is not encoded, there shouldn't a \"%\" in the URL string, otherwise the URL is not a valid URL.\n * If customer needs to create a blob with \"%\" in it's blob name, use \"%25\" instead of \"%\". Just like above 3rd sample.\n * And following URL strings are invalid:\n * - \"http://account.blob.core.windows.net/con/b%\"\n * - \"http://account.blob.core.windows.net/con/b%2\"\n * - \"http://account.blob.core.windows.net/con/b%G\"\n *\n * Another special character is \"?\", use \"%2F\" to represent a blob name with \"?\" in a URL string.\n *\n * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`\n *\n * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. 
Because what customers passes in is a plain name instead of a URL.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata\n *\n * @export\n * @param {string} url\n * @returns {string}\n */\nfunction escapeURLPath(url) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n var path = urlParsed.getPath();\n path = path || \"/\";\n path = escape(path);\n urlParsed.setPath(path);\n return urlParsed.toString();\n}\nfunction getProxyUriFromDevConnString(connectionString) {\n // Development Connection String\n // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key\n var proxyUri = \"\";\n if (connectionString.search(\"DevelopmentStorageProxyUri=\") !== -1) {\n // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri\n var matchCredentials = connectionString.split(\";\");\n for (var _i = 0, matchCredentials_1 = matchCredentials; _i < matchCredentials_1.length; _i++) {\n var element = matchCredentials_1[_i];\n if (element.trim().startsWith(\"DevelopmentStorageProxyUri=\")) {\n proxyUri = element.trim().match(\"DevelopmentStorageProxyUri=(.*)\")[1];\n }\n }\n }\n return proxyUri;\n}\nfunction getValueInConnString(connectionString, argument) {\n var elements = connectionString.split(\";\");\n for (var _i = 0, elements_1 = elements; _i < elements_1.length; _i++) {\n var element = elements_1[_i];\n if (element.trim().startsWith(argument)) {\n return element.trim().match(argument + \"=(.*)\")[1];\n }\n }\n return \"\";\n}\n/**\n * Extracts the parts of an Azure Storage account connection string.\n *\n * @export\n * @param {string} connectionString Connection string.\n * @returns {ConnectionString} String key value pairs of the storage account's url and credentials.\n */\nfunction extractConnectionStringParts(connectionString) {\n var proxyUri = \"\";\n if (connectionString.startsWith(\"UseDevelopmentStorage=true\")) {\n // Development connection string\n proxyUri = getProxyUriFromDevConnString(connectionString);\n connectionString = DevelopmentConnectionString;\n }\n // Matching BlobEndpoint in the Account connection string\n var blobEndpoint = getValueInConnString(connectionString, \"BlobEndpoint\");\n // Slicing off '/' at the end if exists\n // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)\n blobEndpoint = blobEndpoint.endsWith(\"/\") ? 
blobEndpoint.slice(0, -1) : blobEndpoint;\n if (connectionString.search(\"DefaultEndpointsProtocol=\") !== -1 &&\n connectionString.search(\"AccountKey=\") !== -1) {\n // Account connection string\n var defaultEndpointsProtocol = \"\";\n var accountName = \"\";\n var accountKey = Buffer.from(\"accountKey\", \"base64\");\n var endpointSuffix = \"\";\n // Get account name and key\n accountName = getValueInConnString(connectionString, \"AccountName\");\n accountKey = Buffer.from(getValueInConnString(connectionString, \"AccountKey\"), \"base64\");\n if (!blobEndpoint) {\n // BlobEndpoint is not present in the Account connection string\n // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`\n defaultEndpointsProtocol = getValueInConnString(connectionString, \"DefaultEndpointsProtocol\");\n var protocol = defaultEndpointsProtocol.toLowerCase();\n if (protocol !== \"https\" && protocol !== \"http\") {\n throw new Error(\"Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'\");\n }\n endpointSuffix = getValueInConnString(connectionString, \"EndpointSuffix\");\n if (!endpointSuffix) {\n throw new Error(\"Invalid EndpointSuffix in the provided Connection String\");\n }\n blobEndpoint = defaultEndpointsProtocol + \"://\" + accountName + \".blob.\" + endpointSuffix;\n }\n if (!accountName) {\n throw new Error(\"Invalid AccountName in the provided Connection String\");\n }\n else if (accountKey.length === 0) {\n throw new Error(\"Invalid AccountKey in the provided Connection String\");\n }\n return {\n kind: \"AccountConnString\",\n url: blobEndpoint,\n accountName: accountName,\n accountKey: accountKey,\n proxyUri: proxyUri\n };\n }\n else {\n // SAS connection string\n var accountSas = getValueInConnString(connectionString, \"SharedAccessSignature\");\n var accountName = getAccountNameFromUrl(blobEndpoint);\n if (!blobEndpoint) {\n throw new Error(\"Invalid BlobEndpoint in the provided SAS Connection String\");\n }\n else if (!accountSas) {\n throw new Error(\"Invalid SharedAccessSignature in the provided SAS Connection String\");\n }\n return { kind: \"SASConnString\", url: blobEndpoint, accountName: accountName, accountSas: accountSas };\n }\n}\n/**\n * Internal escape method implemented Strategy Two mentioned in escapeURL() description.\n *\n * @param {string} text\n * @returns {string}\n */\nfunction escape(text) {\n return encodeURIComponent(text)\n .replace(/%2F/g, \"/\") // Don't escape for \"/\"\n .replace(/'/g, \"%27\") // Escape for \"'\"\n .replace(/\\+/g, \"%20\")\n .replace(/%25/g, \"%\"); // Revert encoded \"%\"\n}\n/**\n * Append a string to URL path. Will remove duplicated \"/\" in front of the string\n * when URL path ends with a \"/\".\n *\n * @export\n * @param {string} url Source URL string\n * @param {string} name String to be appended to URL\n * @returns {string} An updated URL string\n */\nfunction appendToURLPath(url, name) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n var path = urlParsed.getPath();\n path = path ? (path.endsWith(\"/\") ? \"\" + path + name : path + \"/\" + name) : name;\n urlParsed.setPath(path);\n return urlParsed.toString();\n}\n/**\n * Set URL parameter name and value. If name exists in URL parameters, old value\n * will be replaced by name key. 
If not provide value, the parameter will be deleted.\n *\n * @export\n * @param {string} url Source URL string\n * @param {string} name Parameter name\n * @param {string} [value] Parameter value\n * @returns {string} An updated URL string\n */\nfunction setURLParameter(url, name, value) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n urlParsed.setQueryParameter(name, value);\n return urlParsed.toString();\n}\n/**\n * Get URL parameter by name.\n *\n * @export\n * @param {string} url\n * @param {string} name\n * @returns {(string | string[] | undefined)}\n */\nfunction getURLParameter(url, name) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n return urlParsed.getQueryParameterValue(name);\n}\n/**\n * Set URL host.\n *\n * @export\n * @param {string} url Source URL string\n * @param {string} host New host string\n * @returns An updated URL string\n */\nfunction setURLHost(url, host) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n urlParsed.setHost(host);\n return urlParsed.toString();\n}\n/**\n * Get URL path from an URL string.\n *\n * @export\n * @param {string} url Source URL string\n * @returns {(string | undefined)}\n */\nfunction getURLPath(url) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n return urlParsed.getPath();\n}\n/**\n * Get URL scheme from an URL string.\n *\n * @export\n * @param {string} url Source URL string\n * @returns {(string | undefined)}\n */\nfunction getURLScheme(url) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n return urlParsed.getScheme();\n}\n/**\n * Get URL path and query from an URL string.\n *\n * @export\n * @param {string} url Source URL string\n * @returns {(string | undefined)}\n */\nfunction getURLPathAndQuery(url) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n var pathString = urlParsed.getPath();\n if (!pathString) {\n throw new RangeError(\"Invalid url without valid path.\");\n }\n var queryString = urlParsed.getQuery() || \"\";\n queryString = queryString.trim();\n if (queryString != \"\") {\n queryString = queryString.startsWith(\"?\") ? queryString : \"?\" + queryString; // Ensure query string start with '?'\n }\n return \"\" + pathString + queryString;\n}\n/**\n * Get URL query key value pairs from an URL string.\n *\n * @export\n * @param {string} url\n * @returns {{[key: string]: string}}\n */\nfunction getURLQueries(url) {\n var queryString = coreHttp.URLBuilder.parse(url).getQuery();\n if (!queryString) {\n return {};\n }\n queryString = queryString.trim();\n queryString = queryString.startsWith(\"?\") ? 
queryString.substr(1) : queryString;\n var querySubStrings = queryString.split(\"&\");\n querySubStrings = querySubStrings.filter(function (value) {\n var indexOfEqual = value.indexOf(\"=\");\n var lastIndexOfEqual = value.lastIndexOf(\"=\");\n return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1);\n });\n var queries = {};\n for (var _i = 0, querySubStrings_1 = querySubStrings; _i < querySubStrings_1.length; _i++) {\n var querySubString = querySubStrings_1[_i];\n var splitResults = querySubString.split(\"=\");\n var key = splitResults[0];\n var value = splitResults[1];\n queries[key] = value;\n }\n return queries;\n}\n/**\n * Append a string to URL query.\n *\n * @export\n * @param {string} url Source URL string.\n * @param {string} queryParts String to be appended to the URL query.\n * @returns {string} An updated URL string.\n */\nfunction appendToURLQuery(url, queryParts) {\n var urlParsed = coreHttp.URLBuilder.parse(url);\n var query = urlParsed.getQuery();\n if (query) {\n query += \"&\" + queryParts;\n }\n else {\n query = queryParts;\n }\n urlParsed.setQuery(query);\n return urlParsed.toString();\n}\n/**\n * Rounds a date off to seconds.\n *\n * @export\n * @param {Date} date\n * @param {boolean} [withMilliseconds=true] If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;\n * If false, YYYY-MM-DDThh:mm:ssZ will be returned.\n * @returns {string} Date string in ISO8061 format, with or without 7 milliseconds component\n */\nfunction truncatedISO8061Date(date, withMilliseconds) {\n if (withMilliseconds === void 0) { withMilliseconds = true; }\n // Date.toISOString() will return like \"2018-10-29T06:34:36.139Z\"\n var dateString = date.toISOString();\n return withMilliseconds\n ? dateString.substring(0, dateString.length - 1) + \"0000\" + \"Z\"\n : dateString.substring(0, dateString.length - 5) + \"Z\";\n}\n/**\n * Base64 encode.\n *\n * @export\n * @param {string} content\n * @returns {string}\n */\nfunction base64encode(content) {\n return !coreHttp.isNode ? 
btoa(content) : Buffer.from(content).toString(\"base64\");\n}\n/**\n * Generate a 64 bytes base64 block ID string.\n *\n * @export\n * @param {number} blockIndex\n * @returns {string}\n */\nfunction generateBlockID(blockIDPrefix, blockIndex) {\n // To generate a 64 bytes base64 string, source string should be 48\n var maxSourceStringLength = 48;\n // A blob can have a maximum of 100,000 uncommitted blocks at any given time\n var maxBlockIndexLength = 6;\n var maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;\n if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {\n blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);\n }\n var res = blockIDPrefix +\n padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, \"0\");\n return base64encode(res);\n}\n/**\n * Delay specified time interval.\n *\n * @export\n * @param {number} timeInMs\n * @param {AbortSignalLike} [aborter]\n * @param {Error} [abortError]\n */\nfunction delay(timeInMs, aborter, abortError) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve, reject) {\n var timeout;\n var abortHandler = function () {\n if (timeout !== undefined) {\n clearTimeout(timeout);\n }\n reject(abortError);\n };\n var resolveHandler = function () {\n if (aborter !== undefined) {\n aborter.removeEventListener(\"abort\", abortHandler);\n }\n resolve();\n };\n timeout = setTimeout(resolveHandler, timeInMs);\n if (aborter !== undefined) {\n aborter.addEventListener(\"abort\", abortHandler);\n }\n })];\n });\n });\n}\n/**\n * String.prototype.padStart()\n *\n * @export\n * @param {string} currentString\n * @param {number} targetLength\n * @param {string} [padString=\" \"]\n * @returns {string}\n */\nfunction padStart(currentString, targetLength, padString) {\n if (padString === void 0) { padString = \" \"; }\n // TS doesn't know this code needs to run downlevel sometimes.\n // @ts-expect-error\n if (String.prototype.padStart) {\n return currentString.padStart(targetLength, padString);\n }\n padString = padString || \" \";\n if (currentString.length > targetLength) {\n return currentString;\n }\n else {\n targetLength = targetLength - currentString.length;\n if (targetLength > padString.length) {\n padString += padString.repeat(targetLength / padString.length);\n }\n return padString.slice(0, targetLength) + currentString;\n }\n}\n/**\n * If two strings are equal when compared case insensitive.\n *\n * @export\n * @param {string} str1\n * @param {string} str2\n * @returns {boolean}\n */\nfunction iEqual(str1, str2) {\n return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();\n}\n/**\n * Extracts account name from the url\n * @param {string} url url to extract the account name from\n * @returns {string} with the account name\n */\nfunction getAccountNameFromUrl(url) {\n var parsedUrl = coreHttp.URLBuilder.parse(url);\n var accountName;\n try {\n if (parsedUrl.getHost().split(\".\")[1] === \"blob\") {\n // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n accountName = parsedUrl.getHost().split(\".\")[0];\n }\n else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/\n // .getPath() -> /devstoreaccount1/\n accountName = parsedUrl.getPath().split(\"/\")[1];\n }\n else {\n // Custom domain case: \"https://customdomain.com/containername/blob\".\n accountName = \"\";\n }\n return accountName;\n }\n catch (error) {\n throw new Error(\"Unable to extract accountName with provided information.\");\n }\n}\nfunction isIpEndpointStyle(parsedUrl) {\n if (parsedUrl.getHost() == undefined) {\n return false;\n }\n var host = parsedUrl.getHost() + (parsedUrl.getPort() == undefined ? \"\" : \":\" + parsedUrl.getPort());\n // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.\n // Case 2: localhost(:port), use broad regex to match port part.\n // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.\n // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.\n return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])(\\.(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])){3}(:[0-9]+)?$/.test(host);\n}\n/**\n * Convert Tags to encoded string.\n *\n * @export\n * @param {Tags} tags\n * @returns {string | undefined}\n */\nfunction toBlobTagsString(tags) {\n if (tags === undefined) {\n return undefined;\n }\n var tagPairs = [];\n for (var key in tags) {\n if (tags.hasOwnProperty(key)) {\n var value = tags[key];\n tagPairs.push(encodeURIComponent(key) + \"=\" + encodeURIComponent(value));\n }\n }\n return tagPairs.join(\"&\");\n}\n/**\n * Convert Tags type to BlobTags.\n *\n * @export\n * @param {Tags} [tags]\n * @returns {(BlobTags | undefined)}\n */\nfunction toBlobTags(tags) {\n if (tags === undefined) {\n return undefined;\n }\n var res = {\n blobTagSet: []\n };\n for (var key in tags) {\n if (tags.hasOwnProperty(key)) {\n var value = tags[key];\n res.blobTagSet.push({\n key: key,\n value: value\n });\n }\n }\n return res;\n}\n/**\n * Covert BlobTags to Tags type.\n *\n * @export\n * @param {BlobTags} [tags]\n * @returns {(Tags | undefined)}\n */\nfunction toTags(tags) {\n if (tags === undefined) {\n return undefined;\n }\n var res = {};\n for (var _i = 0, _a = tags.blobTagSet; _i < _a.length; _i++) {\n var blobTag = _a[_i];\n res[blobTag.key] = blobTag.value;\n }\n return res;\n}\n/**\n * Convert BlobQueryTextConfiguration to QuerySerialization type.\n *\n * @export\n * @param {(BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryArrowConfiguration)} [textConfiguration]\n * @returns {(QuerySerialization | undefined)}\n */\nfunction toQuerySerialization(textConfiguration) {\n if (textConfiguration === undefined) {\n return undefined;\n }\n switch (textConfiguration.kind) {\n case \"csv\":\n return {\n format: {\n type: \"delimited\",\n delimitedTextConfiguration: {\n columnSeparator: textConfiguration.columnSeparator || \",\",\n fieldQuote: textConfiguration.fieldQuote || \"\",\n recordSeparator: textConfiguration.recordSeparator,\n escapeChar: textConfiguration.escapeCharacter || \"\",\n headersPresent: textConfiguration.hasHeaders || false\n }\n }\n };\n case \"json\":\n return {\n format: {\n type: \"json\",\n jsonTextConfiguration: {\n recordSeparator: textConfiguration.recordSeparator\n }\n }\n };\n case \"arrow\":\n return {\n format: {\n type: \"arrow\",\n arrowConfiguration: {\n schema: textConfiguration.schema\n }\n }\n };\n default:\n throw Error(\"Invalid BlobQueryTextConfiguration.\");\n }\n}\nfunction parseObjectReplicationRecord(objectReplicationRecord) {\n if (!objectReplicationRecord) {\n 
return undefined;\n }\n if (\"policy-id\" in objectReplicationRecord) {\n // If the dictionary contains a key with policy id, we are not required to do any parsing since\n // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.\n return undefined;\n }\n var orProperties = [];\n var _loop_1 = function (key) {\n var ids = key.split(\"_\");\n var policyPrefix = \"or-\";\n if (ids[0].startsWith(policyPrefix)) {\n ids[0] = ids[0].substring(policyPrefix.length);\n }\n var rule = {\n ruleId: ids[1],\n replicationStatus: objectReplicationRecord[key]\n };\n var policyIndex = orProperties.findIndex(function (policy) { return policy.policyId === ids[0]; });\n if (policyIndex > -1) {\n orProperties[policyIndex].rules.push(rule);\n }\n else {\n orProperties.push({\n policyId: ids[0],\n rules: [rule]\n });\n }\n };\n for (var key in objectReplicationRecord) {\n _loop_1(key);\n }\n return orProperties;\n}\n/**\n * Attach a TokenCredential to an object.\n *\n * @export\n * @param {T} thing\n * @param {TokenCredential} credential\n * @returns {T}\n */\nfunction attachCredential(thing, credential) {\n thing.credential = credential;\n return thing;\n}\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:\n *\n * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.\n * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL\n * thus avoid the browser cache.\n *\n * 2. Remove cookie header for security\n *\n * 3. Remove content-length header to avoid browsers warning\n *\n * @class StorageBrowserPolicy\n * @extends {BaseRequestPolicy}\n */\nvar StorageBrowserPolicy = /** @class */ (function (_super) {\n tslib.__extends(StorageBrowserPolicy, _super);\n /**\n * Creates an instance of StorageBrowserPolicy.\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @memberof StorageBrowserPolicy\n */\n function StorageBrowserPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n /**\n * Sends out request.\n *\n * @param {WebResource} request\n * @returns {Promise}\n * @memberof StorageBrowserPolicy\n */\n StorageBrowserPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n {\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n }\n });\n });\n };\n return StorageBrowserPolicy;\n}(coreHttp.BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n *\n * @export\n * @class StorageBrowserPolicyFactory\n * @implements {RequestPolicyFactory}\n */\nvar StorageBrowserPolicyFactory = /** @class */ (function () {\n function StorageBrowserPolicyFactory() {\n }\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @returns {StorageBrowserPolicy}\n * @memberof StorageBrowserPolicyFactory\n */\n StorageBrowserPolicyFactory.prototype.create = function (nextPolicy, options) {\n return new StorageBrowserPolicy(nextPolicy, options);\n };\n return StorageBrowserPolicyFactory;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n(function (StorageRetryPolicyType) {\n /**\n * Exponential retry. 
Retry time delay grows exponentially.\n */\n StorageRetryPolicyType[StorageRetryPolicyType[\"EXPONENTIAL\"] = 0] = \"EXPONENTIAL\";\n /**\n * Linear retry. Retry time delay grows linearly.\n */\n StorageRetryPolicyType[StorageRetryPolicyType[\"FIXED\"] = 1] = \"FIXED\";\n})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {}));\n// Default values of StorageRetryOptions\nvar DEFAULT_RETRY_OPTIONS = {\n maxRetryDelayInMs: 120 * 1000,\n maxTries: 4,\n retryDelayInMs: 4 * 1000,\n retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL,\n secondaryHost: \"\",\n tryTimeoutInMs: undefined // Use server side default timeout strategy\n};\nvar RETRY_ABORT_ERROR = new abortController.AbortError(\"The operation was aborted.\");\n/**\n * Retry policy with exponential retry and linear retry implemented.\n *\n * @class RetryPolicy\n * @extends {BaseRequestPolicy}\n */\nvar StorageRetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(StorageRetryPolicy, _super);\n /**\n * Creates an instance of RetryPolicy.\n *\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @param {StorageRetryOptions} [retryOptions=DEFAULT_RETRY_OPTIONS]\n * @memberof StorageRetryPolicy\n */\n function StorageRetryPolicy(nextPolicy, options, retryOptions) {\n if (retryOptions === void 0) { retryOptions = DEFAULT_RETRY_OPTIONS; }\n var _this = _super.call(this, nextPolicy, options) || this;\n // Initialize retry options\n _this.retryOptions = {\n retryPolicyType: retryOptions.retryPolicyType\n ? retryOptions.retryPolicyType\n : DEFAULT_RETRY_OPTIONS.retryPolicyType,\n maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1\n ? Math.floor(retryOptions.maxTries)\n : DEFAULT_RETRY_OPTIONS.maxTries,\n tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0\n ? retryOptions.tryTimeoutInMs\n : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,\n retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0\n ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs)\n : DEFAULT_RETRY_OPTIONS.retryDelayInMs,\n maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,\n secondaryHost: retryOptions.secondaryHost\n ? retryOptions.secondaryHost\n : DEFAULT_RETRY_OPTIONS.secondaryHost\n };\n return _this;\n }\n /**\n * Sends request.\n *\n * @param {WebResource} request\n * @returns {Promise}\n * @memberof StorageRetryPolicy\n */\n StorageRetryPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, this.attemptSendRequest(request, false, 1)];\n });\n });\n };\n /**\n * Decide and perform next retry. Won't mutate request parameter.\n *\n * @protected\n * @param {WebResource} request\n * @param {boolean} secondaryHas404 If attempt was against the secondary & it returned a StatusNotFound (404), then\n * the resource was not found. This may be due to replication delay. 
So, in this\n * case, we'll never try the secondary again for this operation.\n * @param {number} attempt How many retries has been attempted to performed, starting from 1, which includes\n * the attempt will be performed by this method call.\n * @returns {Promise}\n * @memberof StorageRetryPolicy\n */\n StorageRetryPolicy.prototype.attemptSendRequest = function (request, secondaryHas404, attempt) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var newRequest, isPrimaryRetry, response, err_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n newRequest = request.clone();\n isPrimaryRetry = secondaryHas404 ||\n !this.retryOptions.secondaryHost ||\n !(request.method === \"GET\" || request.method === \"HEAD\" || request.method === \"OPTIONS\") ||\n attempt % 2 === 1;\n if (!isPrimaryRetry) {\n newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost);\n }\n // Set the server-side timeout query parameter \"timeout=[seconds]\"\n if (this.retryOptions.tryTimeoutInMs) {\n newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString());\n }\n _a.label = 1;\n case 1:\n _a.trys.push([1, 3, , 4]);\n logger.info(\"RetryPolicy: =====> Try=\" + attempt + \" \" + (isPrimaryRetry ? \"Primary\" : \"Secondary\"));\n return [4 /*yield*/, this._nextPolicy.sendRequest(newRequest)];\n case 2:\n response = _a.sent();\n if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {\n return [2 /*return*/, response];\n }\n secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);\n return [3 /*break*/, 4];\n case 3:\n err_1 = _a.sent();\n logger.error(\"RetryPolicy: Caught error, message: \" + err_1.message + \", code: \" + err_1.code);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response, err_1)) {\n throw err_1;\n }\n return [3 /*break*/, 4];\n case 4: return [4 /*yield*/, this.delay(isPrimaryRetry, attempt, request.abortSignal)];\n case 5:\n _a.sent();\n return [4 /*yield*/, this.attemptSendRequest(request, secondaryHas404, ++attempt)];\n case 6: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n /**\n * Decide whether to retry according to last HTTP response and retry counters.\n *\n * @protected\n * @param {boolean} isPrimaryRetry\n * @param {number} attempt\n * @param {HttpOperationResponse} [response]\n * @param {RestError} [err]\n * @returns {boolean}\n * @memberof StorageRetryPolicy\n */\n StorageRetryPolicy.prototype.shouldRetry = function (isPrimaryRetry, attempt, response, err) {\n if (attempt >= this.retryOptions.maxTries) {\n logger.info(\"RetryPolicy: Attempt(s) \" + attempt + \" >= maxTries \" + this.retryOptions\n .maxTries + \", no further try.\");\n return false;\n }\n // Handle network failures, you may need to customize the list when you implement\n // your own http client\n var retriableErrors = [\n \"ETIMEDOUT\",\n \"ESOCKETTIMEDOUT\",\n \"ECONNREFUSED\",\n \"ECONNRESET\",\n \"ENOENT\",\n \"ENOTFOUND\",\n \"TIMEOUT\",\n \"EPIPE\",\n \"REQUEST_SEND_ERROR\" // For default xhr based http client provided in ms-rest-js\n ];\n if (err) {\n for (var _i = 0, retriableErrors_1 = retriableErrors; _i < retriableErrors_1.length; _i++) {\n var retriableError = retriableErrors_1[_i];\n if (err.name.toUpperCase().includes(retriableError) ||\n err.message.toUpperCase().includes(retriableError) ||\n (err.code && err.code.toString().toUpperCase() === retriableError)) {\n logger.info(\"RetryPolicy: Network error \" + 
retriableError + \" found, will retry.\");\n return true;\n }\n }\n }\n // If attempt was against the secondary & it returned a StatusNotFound (404), then\n // the resource was not found. This may be due to replication delay. So, in this\n // case, we'll never try the secondary again for this operation.\n if (response || err) {\n var statusCode = response ? response.status : err ? err.statusCode : 0;\n if (!isPrimaryRetry && statusCode === 404) {\n logger.info(\"RetryPolicy: Secondary access with 404, will retry.\");\n return true;\n }\n // Server internal error or server timeout\n if (statusCode === 503 || statusCode === 500) {\n logger.info(\"RetryPolicy: Will retry for status code \" + statusCode + \".\");\n return true;\n }\n }\n if ((err === null || err === void 0 ? void 0 : err.code) === \"PARSE_ERROR\" && (err === null || err === void 0 ? void 0 : err.message.startsWith(\"Error \\\"Error: Unclosed root tag\"))) {\n logger.info(\"RetryPolicy: Incomplete XML response likely due to service timeout, will retry.\");\n return true;\n }\n return false;\n };\n /**\n * Delay a calculated time between retries.\n *\n * @private\n * @param {boolean} isPrimaryRetry\n * @param {number} attempt\n * @param {AbortSignalLike} [abortSignal]\n * @memberof StorageRetryPolicy\n */\n StorageRetryPolicy.prototype.delay = function (isPrimaryRetry, attempt, abortSignal) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var delayTimeInMs;\n return tslib.__generator(this, function (_a) {\n delayTimeInMs = 0;\n if (isPrimaryRetry) {\n switch (this.retryOptions.retryPolicyType) {\n case exports.StorageRetryPolicyType.EXPONENTIAL:\n delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs);\n break;\n case exports.StorageRetryPolicyType.FIXED:\n delayTimeInMs = this.retryOptions.retryDelayInMs;\n break;\n }\n }\n else {\n delayTimeInMs = Math.random() * 1000;\n }\n logger.info(\"RetryPolicy: Delay for \" + delayTimeInMs + \"ms\");\n return [2 /*return*/, delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR)];\n });\n });\n };\n return StorageRetryPolicy;\n}(coreHttp.BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n *\n * @export\n * @class StorageRetryPolicyFactory\n * @implements {RequestPolicyFactory}\n */\nvar StorageRetryPolicyFactory = /** @class */ (function () {\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param {StorageRetryOptions} [retryOptions]\n * @memberof StorageRetryPolicyFactory\n */\n function StorageRetryPolicyFactory(retryOptions) {\n this.retryOptions = retryOptions;\n }\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @returns {StorageRetryPolicy}\n * @memberof StorageRetryPolicyFactory\n */\n StorageRetryPolicyFactory.prototype.create = function (nextPolicy, options) {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n };\n return StorageRetryPolicyFactory;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * Credential policy used to sign HTTP(S) requests before sending. 
This is an\n * abstract class.\n *\n * @export\n * @abstract\n * @class CredentialPolicy\n * @extends {BaseRequestPolicy}\n */\nvar CredentialPolicy = /** @class */ (function (_super) {\n tslib.__extends(CredentialPolicy, _super);\n function CredentialPolicy() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n /**\n * Sends out request.\n *\n * @param {WebResource} request\n * @returns {Promise}\n * @memberof CredentialPolicy\n */\n CredentialPolicy.prototype.sendRequest = function (request) {\n return this._nextPolicy.sendRequest(this.signRequest(request));\n };\n /**\n * Child classes must implement this method with request signing. This method\n * will be executed in {@link sendRequest}.\n *\n * @protected\n * @abstract\n * @param {WebResource} request\n * @returns {WebResource}\n * @memberof CredentialPolicy\n */\n CredentialPolicy.prototype.signRequest = function (request) {\n // Child classes must override this method with request signing. This method\n // will be executed in sendRequest().\n return request;\n };\n return CredentialPolicy;\n}(coreHttp.BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources\n * or for use with Shared Access Signatures (SAS).\n *\n * @export\n * @class AnonymousCredentialPolicy\n * @extends {CredentialPolicy}\n */\nvar AnonymousCredentialPolicy = /** @class */ (function (_super) {\n tslib.__extends(AnonymousCredentialPolicy, _super);\n /**\n * Creates an instance of AnonymousCredentialPolicy.\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @memberof AnonymousCredentialPolicy\n */\n function AnonymousCredentialPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n return AnonymousCredentialPolicy;\n}(CredentialPolicy));\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n *\n * @export\n * @abstract\n * @class Credential\n */\nvar Credential = /** @class */ (function () {\n function Credential() {\n }\n /**\n * Creates a RequestPolicy object.\n *\n * @param {RequestPolicy} _nextPolicy\n * @param {RequestPolicyOptions} _options\n * @returns {RequestPolicy}\n * @memberof Credential\n */\n Credential.prototype.create = function (\n // tslint:disable-next-line:variable-name\n _nextPolicy, \n // tslint:disable-next-line:variable-name\n _options) {\n throw new Error(\"Method should be implemented in children classes.\");\n };\n return Credential;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. 
AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n *\n * @export\n * @class AnonymousCredential\n * @extends {Credential}\n */\nvar AnonymousCredential = /** @class */ (function (_super) {\n tslib.__extends(AnonymousCredential, _super);\n function AnonymousCredential() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @returns {AnonymousCredentialPolicy}\n * @memberof AnonymousCredential\n */\n AnonymousCredential.prototype.create = function (nextPolicy, options) {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n };\n return AnonymousCredential;\n}(Credential));\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * TelemetryPolicy is a policy used to tag user-agent header for every requests.\n *\n * @class TelemetryPolicy\n * @extends {BaseRequestPolicy}\n */\nvar TelemetryPolicy = /** @class */ (function (_super) {\n tslib.__extends(TelemetryPolicy, _super);\n /**\n * Creates an instance of TelemetryPolicy.\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @param {string} telemetry\n * @memberof TelemetryPolicy\n */\n function TelemetryPolicy(nextPolicy, options, telemetry) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.telemetry = telemetry;\n return _this;\n }\n /**\n * Sends out request.\n *\n * @param {WebResource} request\n * @returns {Promise}\n * @memberof TelemetryPolicy\n */\n TelemetryPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n {\n if (!request.headers) {\n request.headers = new coreHttp.HttpHeaders();\n }\n if (!request.headers.get(HeaderConstants.USER_AGENT)) {\n request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);\n }\n }\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return TelemetryPolicy;\n}(coreHttp.BaseRequestPolicy));\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.\n *\n * @export\n * @class TelemetryPolicyFactory\n * @implements {RequestPolicyFactory}\n */\nvar TelemetryPolicyFactory = /** @class */ (function () {\n /**\n * Creates an instance of TelemetryPolicyFactory.\n * @param {UserAgentOptions} [telemetry]\n * @memberof TelemetryPolicyFactory\n */\n function TelemetryPolicyFactory(telemetry) {\n var userAgentInfo = [];\n {\n if (telemetry) {\n var telemetryString = telemetry.userAgentPrefix || \"\";\n if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {\n userAgentInfo.push(telemetryString);\n }\n }\n // e.g. azsdk-js-storageblob/10.0.0\n var libInfo = \"azsdk-js-storageblob/\" + SDK_VERSION;\n if (userAgentInfo.indexOf(libInfo) === -1) {\n userAgentInfo.push(libInfo);\n }\n // e.g. 
(NODE-VERSION 4.9.1; Windows_NT 10.0.16299)\n var runtimeInfo = \"(NODE-VERSION \" + process.version + \"; \" + os.type() + \" \" + os.release() + \")\";\n if (userAgentInfo.indexOf(runtimeInfo) === -1) {\n userAgentInfo.push(runtimeInfo);\n }\n }\n this.telemetryString = userAgentInfo.join(\" \");\n }\n /**\n * Creates a TelemetryPolicy object.\n *\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @returns {TelemetryPolicy}\n * @memberof TelemetryPolicyFactory\n */\n TelemetryPolicyFactory.prototype.create = function (nextPolicy, options) {\n return new TelemetryPolicy(nextPolicy, options, this.telemetryString);\n };\n return TelemetryPolicyFactory;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar _defaultHttpClient = new coreHttp.DefaultHttpClient();\nfunction getCachedDefaultHttpClient() {\n return _defaultHttpClient;\n}\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * A Pipeline class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n *\n * @export\n * @class Pipeline\n */\nvar Pipeline = /** @class */ (function () {\n /**\n * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface.\n *\n * @param {RequestPolicyFactory[]} factories\n * @param {PipelineOptions} [options={}]\n * @memberof Pipeline\n */\n function Pipeline(factories, options) {\n if (options === void 0) { options = {}; }\n this.factories = factories;\n // when options.httpClient is not specified, passing in a DefaultHttpClient instance to\n // avoid each client creating its own http client.\n this.options = tslib.__assign(tslib.__assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() });\n }\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns {ServiceClientOptions} The ServiceClientOptions object from this Pipeline.\n * @memberof Pipeline\n */\n Pipeline.prototype.toServiceClientOptions = function () {\n return {\n httpClient: this.options.httpClient,\n requestPolicyFactories: this.factories\n };\n };\n return Pipeline;\n}());\n/**\n * Creates a new Pipeline object with Credential provided.\n *\n * @export\n * @param {StorageSharedKeyCredential | AnonymousCredential | TokenCredential} credential Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the @azure/identity package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param {StoragePipelineOptions} [pipelineOptions] Optional. Options.\n * @returns {Pipeline} A new Pipeline object.\n */\nfunction newPipeline(credential, pipelineOptions) {\n if (pipelineOptions === void 0) { pipelineOptions = {}; }\n if (credential === undefined) {\n credential = new AnonymousCredential();\n }\n // Order is important. 
Closer to the API at the top & closer to the network at the bottom.\n // The credential's policy factory must appear close to the wire so it can sign any\n // changes made by other factories (like UniqueRequestIDPolicyFactory)\n var telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);\n var factories = [\n coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),\n coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions),\n telemetryPolicy,\n coreHttp.generateClientRequestIdPolicy(),\n new StorageBrowserPolicyFactory(),\n new StorageRetryPolicyFactory(pipelineOptions.retryOptions),\n // Default deserializationPolicy is provided by protocol layer\n // Use customized XML char key of \"#\" so we could deserialize metadata\n // with \"_\" key\n coreHttp.deserializationPolicy(undefined, { xmlCharKey: \"#\" }),\n coreHttp.logPolicy({\n logger: logger.info,\n allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,\n allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters\n })\n ];\n {\n // policies only available in Node.js runtime, not in browsers\n factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions));\n factories.push(coreHttp.disableResponseDecompressionPolicy());\n }\n factories.push(coreHttp.isTokenCredential(credential)\n ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential)\n : credential);\n return new Pipeline(factories, pipelineOptions);\n}\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.\n *\n * @class RetriableReadableStream\n * @extends {Readable}\n */\nvar RetriableReadableStream = /** @class */ (function (_super) {\n tslib.__extends(RetriableReadableStream, _super);\n /**\n * Creates an instance of RetriableReadableStream.\n *\n * @param {NodeJS.ReadableStream} source The current ReadableStream returned from getter\n * @param {ReadableStreamGetter} getter A method calling downloading request returning\n * a new ReadableStream from specified offset\n * @param {number} offset Offset position in original data source to read\n * @param {number} count How much data in original data source to read\n * @param {RetriableReadableStreamOptions} [options={}]\n * @memberof RetriableReadableStream\n */\n function RetriableReadableStream(source, getter, offset, count, options) {\n if (options === void 0) { options = {}; }\n var _this = _super.call(this, { highWaterMark: options.highWaterMark }) || this;\n _this.retries = 0;\n _this.sourceDataHandler = function (data) {\n if (_this.options.doInjectErrorOnce) {\n _this.options.doInjectErrorOnce = undefined;\n _this.source.pause();\n _this.source.removeAllListeners(\"data\");\n _this.source.emit(\"end\");\n return;\n }\n // console.log(\n // `Offset: ${this.offset}, Received ${data.length} from internal stream`\n // );\n _this.offset += data.length;\n if (_this.onProgress) {\n _this.onProgress({ loadedBytes: _this.offset - _this.start });\n }\n if (!_this.push(data)) {\n _this.source.pause();\n }\n };\n _this.sourceErrorOrEndHandler = function (err) {\n if (err && err.name === \"AbortError\") {\n _this.destroy(err);\n return;\n }\n // console.log(\n // `Source stream emits end or error, offset: ${\n // this.offset\n // }, dest end : ${this.end}`\n // );\n _this.removeSourceEventHandlers();\n if (_this.offset - 1 === _this.end) {\n _this.push(null);\n }\n else if 
(_this.offset <= _this.end) {\n // console.log(\n // `retries: ${this.retries}, max retries: ${this.maxRetries}`\n // );\n if (_this.retries < _this.maxRetryRequests) {\n _this.retries += 1;\n _this.getter(_this.offset)\n .then(function (newSource) {\n _this.source = newSource;\n _this.setSourceEventHandlers();\n })\n .catch(function (error) {\n _this.destroy(error);\n });\n }\n else {\n _this.destroy(new Error(\n // tslint:disable-next-line:max-line-length\n \"Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: \" + (_this\n .offset - 1) + \", data needed offset: \" + _this.end + \", retries: \" + _this.retries + \", max retries: \" + _this.maxRetryRequests));\n }\n }\n else {\n _this.destroy(new Error(\"Data corruption failure: Received more data than original request, data needed offset is \" + _this.end + \", received offset: \" + (_this.offset - 1)));\n }\n };\n _this.getter = getter;\n _this.source = source;\n _this.start = offset;\n _this.offset = offset;\n _this.end = offset + count - 1;\n _this.maxRetryRequests =\n options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0;\n _this.onProgress = options.onProgress;\n _this.options = options;\n _this.setSourceEventHandlers();\n return _this;\n }\n RetriableReadableStream.prototype._read = function () {\n this.source.resume();\n };\n RetriableReadableStream.prototype.setSourceEventHandlers = function () {\n this.source.on(\"data\", this.sourceDataHandler);\n this.source.on(\"end\", this.sourceErrorOrEndHandler);\n this.source.on(\"error\", this.sourceErrorOrEndHandler);\n };\n RetriableReadableStream.prototype.removeSourceEventHandlers = function () {\n this.source.removeListener(\"data\", this.sourceDataHandler);\n this.source.removeListener(\"end\", this.sourceErrorOrEndHandler);\n this.source.removeListener(\"error\", this.sourceErrorOrEndHandler);\n };\n RetriableReadableStream.prototype._destroy = function (error, callback) {\n // remove listener from source and release source\n this.removeSourceEventHandlers();\n this.source.destroy();\n callback(error === null ? undefined : error);\n };\n return RetriableReadableStream;\n}(stream.Readable));\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will\n * automatically retry when internal read stream unexpected ends. 
(This kind of unexpected ends cannot\n * trigger retries defined in pipeline retry policy.)\n *\n * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js\n * Readable stream.\n *\n * @export\n * @class BlobDownloadResponse\n * @implements {BlobDownloadResponseParsed}\n */\nvar BlobDownloadResponse = /** @class */ (function () {\n /**\n * Creates an instance of BlobDownloadResponse.\n *\n * @param {BlobDownloadResponseParsed} originalResponse\n * @param {ReadableStreamGetter} getter\n * @param {number} offset\n * @param {number} count\n * @param {RetriableReadableStreamOptions} [options={}]\n * @memberof BlobDownloadResponse\n */\n function BlobDownloadResponse(originalResponse, getter, offset, count, options) {\n if (options === void 0) { options = {}; }\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options);\n }\n Object.defineProperty(BlobDownloadResponse.prototype, \"acceptRanges\", {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.acceptRanges;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"cacheControl\", {\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.cacheControl;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentDisposition\", {\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.contentDisposition;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentEncoding\", {\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.contentEncoding;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentLanguage\", {\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.contentLanguage;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"blobSequenceNumber\", {\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n * @type {(number | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.blobSequenceNumber;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"blobType\", {\n /**\n * The blob's type. 
Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n * @type {(BlobType | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.blobType;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentLength\", {\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n * @type {(number | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.contentLength;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentMD5\", {\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n * @type {(Uint8Array | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.contentMD5;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentRange\", {\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.contentRange;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentType\", {\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.contentType;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyCompletedOn\", {\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n * @type {(Date | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.copyCompletedOn;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyId\", {\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.copyId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyProgress\", {\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. 
Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.copyProgress;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copySource\", {\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.copySource;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyStatus\", {\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n * @type {(CopyStatusType | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.copyStatus;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"copyStatusDescription\", {\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.copyStatusDescription;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"leaseDuration\", {\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n * @type {(LeaseDurationType | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.leaseDuration;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"leaseState\", {\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n * @type {(LeaseStateType | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.leaseState;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"leaseStatus\", {\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n * @type {(LeaseStatusType | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.leaseStatus;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"date\", {\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n * @type {(Date | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.date;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"blobCommittedBlockCount\", {\n /**\n * The number of committed blocks\n * present in the blob. 
This header is returned only for append blobs.\n *\n * @readonly\n * @type {(number | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.blobCommittedBlockCount;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"etag\", {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.etag;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"tagCount\", {\n /**\n * The number of tags associated with the blob\n *\n * @readonly\n * @type {(number | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.tagCount;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"errorCode\", {\n /**\n * The error code.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.errorCode;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"isServerEncrypted\", {\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n * @type {(boolean | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.isServerEncrypted;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"blobContentMD5\", {\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n * @type {(Uint8Array | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.blobContentMD5;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"lastModified\", {\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n * @type {(Date | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.lastModified;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"lastAccessed\", {\n /**\n * Returns the UTC date and time generated by the service that indicates the time at which the blob was\n * last read or written to.\n *\n * @readonly\n * @type {(Date | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.lastAccessed;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"metadata\", {\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n * @type {(Metadata | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.metadata;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"requestId\", {\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.requestId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"clientRequestId\", {\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.clientRequestId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"version\", {\n /**\n * Indicates the version of the Blob service used\n * to execute the request.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.version;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"versionId\", {\n /**\n * Indicates the versionId of the downloaded blob version.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.versionId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"encryptionKeySha256\", {\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.encryptionKeySha256;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentCrc64\", {\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. 
If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n *\n * @type {(Uint8Array | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.contentCrc64;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"objectReplicationDestinationPolicyId\", {\n /**\n * Object Replication Policy Id of the destination blob.\n *\n * @readonly\n * @type {(string| undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.objectReplicationDestinationPolicyId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"objectReplicationSourceProperties\", {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n *\n * @readonly\n * @type {(ObjectReplicationPolicy[] | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.objectReplicationSourceProperties;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"isSealed\", {\n /**\n * If this blob has been sealed.\n *\n * @readonly\n * @type {(boolean | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.isSealed;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"contentAsBlob\", {\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n * @type {(Promise | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse.blobBody;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"readableStreamBody\", {\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will automatically retry when internal read stream unexpected ends.\n *\n * @readonly\n * @type {(NodeJS.ReadableStream | undefined)}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return coreHttp.isNode ? 
this.blobDownloadStream : undefined;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobDownloadResponse.prototype, \"_response\", {\n /**\n * The HTTP response.\n *\n * @type {HttpResponse}\n * @memberof BlobDownloadResponse\n */\n get: function () {\n return this.originalResponse._response;\n },\n enumerable: false,\n configurable: true\n });\n return BlobDownloadResponse;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar AVRO_SYNC_MARKER_SIZE = 16;\nvar AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]);\nvar AVRO_CODEC_KEY = \"avro.codec\";\nvar AVRO_SCHEMA_KEY = \"avro.schema\";\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nfunction arraysEqual(a, b) {\n if (a === b)\n return true;\n if (a == null || b == null)\n return false;\n if (a.length != b.length)\n return false;\n for (var i = 0; i < a.length; ++i) {\n if (a[i] !== b[i])\n return false;\n }\n return true;\n}\n\n// Copyright (c) Microsoft Corporation.\nvar AvroParser = /** @class */ (function () {\n function AvroParser() {\n }\n /**\n * Reads a fixed number of bytes from the stream.\n *\n * @static\n * @param {AvroReadable} [stream]\n * @param {number} [length]\n * @param {AvroParserReadOptions} [options={}]\n * @returns {Promise}\n * @memberof AvroParser\n */\n AvroParser.readFixedBytes = function (stream, length, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var bytes;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, stream.read(length, { abortSignal: options.abortSignal })];\n case 1:\n bytes = _a.sent();\n if (bytes.length != length) {\n throw new Error(\"Hit stream end.\");\n }\n return [2 /*return*/, bytes];\n }\n });\n });\n };\n /**\n * Reads a single byte from the stream.\n *\n * @static\n * @param {AvroReadable} [stream]\n * @param {AvroParserReadOptions} [options={}]\n * @returns {Promise}\n * @memberof AvroParser\n */\n AvroParser.readByte = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var buf;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 1, options)];\n case 1:\n buf = _a.sent();\n return [2 /*return*/, buf[0]];\n }\n });\n });\n };\n // int and long are stored in variable-length zig-zag coding.\n // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt\n // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types\n AvroParser.readZigZagLong = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var zigZagEncoded, significanceInBit, byte, haveMoreByte, significanceInFloat, res;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n zigZagEncoded = 0;\n significanceInBit = 0;\n _a.label = 1;\n case 1: return [4 /*yield*/, AvroParser.readByte(stream, options)];\n case 2:\n byte = _a.sent();\n haveMoreByte = byte & 0x80;\n zigZagEncoded |= (byte & 0x7f) << significanceInBit;\n significanceInBit += 7;\n _a.label = 3;\n case 3:\n if (haveMoreByte && significanceInBit < 28) return [3 /*break*/, 1];\n _a.label = 4;\n case 4:\n if (!haveMoreByte) return [3 /*break*/, 9];\n // Switch to float arithmetic\n zigZagEncoded = zigZagEncoded;\n significanceInFloat = 
268435456; // 2 ** 28.\n _a.label = 5;\n case 5: return [4 /*yield*/, AvroParser.readByte(stream, options)];\n case 6:\n byte = _a.sent();\n zigZagEncoded += (byte & 0x7f) * significanceInFloat;\n significanceInFloat *= 128; // 2 ** 7\n _a.label = 7;\n case 7:\n if (byte & 0x80) return [3 /*break*/, 5];\n _a.label = 8;\n case 8:\n res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2;\n if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {\n throw new Error(\"Integer overflow.\");\n }\n return [2 /*return*/, res];\n case 9: return [2 /*return*/, (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1)];\n }\n });\n });\n };\n AvroParser.readLong = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, AvroParser.readZigZagLong(stream, options)];\n });\n });\n };\n AvroParser.readInt = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, AvroParser.readZigZagLong(stream, options)];\n });\n });\n };\n AvroParser.readNull = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, null];\n });\n });\n };\n AvroParser.readBoolean = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var b;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readByte(stream, options)];\n case 1:\n b = _a.sent();\n if (b == 1) {\n return [2 /*return*/, true];\n }\n else if (b == 0) {\n return [2 /*return*/, false];\n }\n else {\n throw new Error(\"Byte was not a boolean.\");\n }\n }\n });\n });\n };\n AvroParser.readFloat = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var u8arr, view;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 4, options)];\n case 1:\n u8arr = _a.sent();\n view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return [2 /*return*/, view.getFloat32(0, true)]; // littleEndian = true\n }\n });\n });\n };\n AvroParser.readDouble = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var u8arr, view;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 8, options)];\n case 1:\n u8arr = _a.sent();\n view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return [2 /*return*/, view.getFloat64(0, true)]; // littleEndian = true\n }\n });\n });\n };\n AvroParser.readBytes = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var size;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 1:\n size = _a.sent();\n if (size < 0) {\n throw new Error(\"Bytes size was negative.\");\n }\n return [4 /*yield*/, stream.read(size, { abortSignal: options.abortSignal })];\n case 2: return [2 /*return*/, _a.sent()];\n 
}\n });\n });\n };\n AvroParser.readString = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var u8arr, utf8decoder;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readBytes(stream, options)];\n case 1:\n u8arr = _a.sent();\n // polyfill TextDecoder to be backward compatible with older\n // nodejs that doesn't expose TextDecoder as a global variable\n if (typeof TextDecoder === \"undefined\" && typeof require !== \"undefined\") {\n global.TextDecoder = require(\"util\").TextDecoder;\n }\n utf8decoder = new TextDecoder();\n return [2 /*return*/, utf8decoder.decode(u8arr)];\n }\n });\n });\n };\n AvroParser.readMapPair = function (stream, readItemMethod, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var key, value;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readString(stream, options)];\n case 1:\n key = _a.sent();\n return [4 /*yield*/, readItemMethod(stream, options)];\n case 2:\n value = _a.sent();\n return [2 /*return*/, { key: key, value: value }];\n }\n });\n });\n };\n AvroParser.readMap = function (stream, readItemMethod, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var readPairMethod, pairs, dict, _i, pairs_1, pair;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n readPairMethod = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readMapPair(stream, readItemMethod, options)];\n case 1: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n return [4 /*yield*/, AvroParser.readArray(stream, readPairMethod, options)];\n case 1:\n pairs = _a.sent();\n dict = {};\n for (_i = 0, pairs_1 = pairs; _i < pairs_1.length; _i++) {\n pair = pairs_1[_i];\n dict[pair.key] = pair.value;\n }\n return [2 /*return*/, dict];\n }\n });\n });\n };\n AvroParser.readArray = function (stream, readItemMethod, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var items, count, item;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n items = [];\n return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 1:\n count = _a.sent();\n _a.label = 2;\n case 2:\n if (!(count != 0)) return [3 /*break*/, 8];\n if (!(count < 0)) return [3 /*break*/, 4];\n // Ignore block sizes\n return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 3:\n // Ignore block sizes\n _a.sent();\n count = -count;\n _a.label = 4;\n case 4:\n if (!count--) return [3 /*break*/, 6];\n return [4 /*yield*/, readItemMethod(stream, options)];\n case 5:\n item = _a.sent();\n items.push(item);\n return [3 /*break*/, 4];\n case 6: return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 7:\n count = _a.sent();\n return [3 /*break*/, 2];\n case 8: return [2 /*return*/, items];\n }\n });\n });\n };\n return AvroParser;\n}());\nvar AvroComplex;\n(function (AvroComplex) {\n AvroComplex[\"RECORD\"] = \"record\";\n AvroComplex[\"ENUM\"] = \"enum\";\n AvroComplex[\"ARRAY\"] = \"array\";\n AvroComplex[\"MAP\"] = \"map\";\n 
AvroComplex[\"UNION\"] = \"union\";\n AvroComplex[\"FIXED\"] = \"fixed\";\n})(AvroComplex || (AvroComplex = {}));\nvar AvroType = /** @class */ (function () {\n function AvroType() {\n }\n /**\n * Determines the AvroType from the Avro Schema.\n */\n AvroType.fromSchema = function (schema) {\n if (typeof schema == \"string\") {\n return AvroType.fromStringSchema(schema);\n }\n else if (Array.isArray(schema)) {\n return AvroType.fromArraySchema(schema);\n }\n else {\n return AvroType.fromObjectSchema(schema);\n }\n };\n AvroType.fromStringSchema = function (schema) {\n switch (schema) {\n case AvroPrimitive.NULL:\n case AvroPrimitive.BOOLEAN:\n case AvroPrimitive.INT:\n case AvroPrimitive.LONG:\n case AvroPrimitive.FLOAT:\n case AvroPrimitive.DOUBLE:\n case AvroPrimitive.BYTES:\n case AvroPrimitive.STRING:\n return new AvroPrimitiveType(schema);\n default:\n throw new Error(\"Unexpected Avro type \" + schema);\n }\n };\n AvroType.fromArraySchema = function (schema) {\n return new AvroUnionType(schema.map(AvroType.fromSchema));\n };\n AvroType.fromObjectSchema = function (schema) {\n var type = schema.type;\n // Primitives can be defined as strings or objects\n try {\n return AvroType.fromStringSchema(type);\n }\n catch (err) { }\n switch (type) {\n case AvroComplex.RECORD:\n if (schema.aliases) {\n throw new Error(\"aliases currently is not supported, schema: \" + schema);\n }\n if (!schema.name) {\n throw new Error(\"Required attribute 'name' doesn't exist on schema: \" + schema);\n }\n var fields = {};\n if (!schema.fields) {\n throw new Error(\"Required attribute 'fields' doesn't exist on schema: \" + schema);\n }\n for (var _i = 0, _a = schema.fields; _i < _a.length; _i++) {\n var field = _a[_i];\n fields[field.name] = AvroType.fromSchema(field.type);\n }\n return new AvroRecordType(fields, schema.name);\n case AvroComplex.ENUM:\n if (schema.aliases) {\n throw new Error(\"aliases currently is not supported, schema: \" + schema);\n }\n if (!schema.symbols) {\n throw new Error(\"Required attribute 'symbols' doesn't exist on schema: \" + schema);\n }\n return new AvroEnumType(schema.symbols);\n case AvroComplex.MAP:\n if (!schema.values) {\n throw new Error(\"Required attribute 'values' doesn't exist on schema: \" + schema);\n }\n return new AvroMapType(AvroType.fromSchema(schema.values));\n case AvroComplex.ARRAY: // Unused today\n case AvroComplex.FIXED: // Unused today\n default:\n throw new Error(\"Unexpected Avro type \" + type + \" in \" + schema);\n }\n };\n return AvroType;\n}());\nvar AvroPrimitive;\n(function (AvroPrimitive) {\n AvroPrimitive[\"NULL\"] = \"null\";\n AvroPrimitive[\"BOOLEAN\"] = \"boolean\";\n AvroPrimitive[\"INT\"] = \"int\";\n AvroPrimitive[\"LONG\"] = \"long\";\n AvroPrimitive[\"FLOAT\"] = \"float\";\n AvroPrimitive[\"DOUBLE\"] = \"double\";\n AvroPrimitive[\"BYTES\"] = \"bytes\";\n AvroPrimitive[\"STRING\"] = \"string\";\n})(AvroPrimitive || (AvroPrimitive = {}));\nvar AvroPrimitiveType = /** @class */ (function (_super) {\n tslib.__extends(AvroPrimitiveType, _super);\n function AvroPrimitiveType(primitive) {\n var _this = _super.call(this) || this;\n _this._primitive = primitive;\n return _this;\n }\n AvroPrimitiveType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = this._primitive;\n switch (_a) {\n case AvroPrimitive.NULL: return [3 /*break*/, 1];\n case 
AvroPrimitive.BOOLEAN: return [3 /*break*/, 3];\n case AvroPrimitive.INT: return [3 /*break*/, 5];\n case AvroPrimitive.LONG: return [3 /*break*/, 7];\n case AvroPrimitive.FLOAT: return [3 /*break*/, 9];\n case AvroPrimitive.DOUBLE: return [3 /*break*/, 11];\n case AvroPrimitive.BYTES: return [3 /*break*/, 13];\n case AvroPrimitive.STRING: return [3 /*break*/, 15];\n }\n return [3 /*break*/, 17];\n case 1: return [4 /*yield*/, AvroParser.readNull()];\n case 2: return [2 /*return*/, _b.sent()];\n case 3: return [4 /*yield*/, AvroParser.readBoolean(stream, options)];\n case 4: return [2 /*return*/, _b.sent()];\n case 5: return [4 /*yield*/, AvroParser.readInt(stream, options)];\n case 6: return [2 /*return*/, _b.sent()];\n case 7: return [4 /*yield*/, AvroParser.readLong(stream, options)];\n case 8: return [2 /*return*/, _b.sent()];\n case 9: return [4 /*yield*/, AvroParser.readFloat(stream, options)];\n case 10: return [2 /*return*/, _b.sent()];\n case 11: return [4 /*yield*/, AvroParser.readDouble(stream, options)];\n case 12: return [2 /*return*/, _b.sent()];\n case 13: return [4 /*yield*/, AvroParser.readBytes(stream, options)];\n case 14: return [2 /*return*/, _b.sent()];\n case 15: return [4 /*yield*/, AvroParser.readString(stream, options)];\n case 16: return [2 /*return*/, _b.sent()];\n case 17: throw new Error(\"Unknown Avro Primitive\");\n }\n });\n });\n };\n return AvroPrimitiveType;\n}(AvroType));\nvar AvroEnumType = /** @class */ (function (_super) {\n tslib.__extends(AvroEnumType, _super);\n function AvroEnumType(symbols) {\n var _this = _super.call(this) || this;\n _this._symbols = symbols;\n return _this;\n }\n AvroEnumType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var value;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)];\n case 1:\n value = _a.sent();\n return [2 /*return*/, this._symbols[value]];\n }\n });\n });\n };\n return AvroEnumType;\n}(AvroType));\nvar AvroUnionType = /** @class */ (function (_super) {\n tslib.__extends(AvroUnionType, _super);\n function AvroUnionType(types) {\n var _this = _super.call(this) || this;\n _this._types = types;\n return _this;\n }\n AvroUnionType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var typeIndex;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)];\n case 1:\n typeIndex = _a.sent();\n return [4 /*yield*/, this._types[typeIndex].read(stream, options)];\n case 2: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n return AvroUnionType;\n}(AvroType));\nvar AvroMapType = /** @class */ (function (_super) {\n tslib.__extends(AvroMapType, _super);\n function AvroMapType(itemType) {\n var _this = _super.call(this) || this;\n _this._itemType = itemType;\n return _this;\n }\n AvroMapType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var readItemMethod;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n readItemMethod = function (s, options) { return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: 
return [4 /*yield*/, this._itemType.read(s, options)];\n case 1: return [2 /*return*/, _a.sent()];\n }\n });\n }); };\n return [4 /*yield*/, AvroParser.readMap(stream, readItemMethod, options)];\n case 1: return [2 /*return*/, _a.sent()];\n }\n });\n });\n };\n return AvroMapType;\n}(AvroType));\nvar AvroRecordType = /** @class */ (function (_super) {\n tslib.__extends(AvroRecordType, _super);\n function AvroRecordType(fields, name) {\n var _this = _super.call(this) || this;\n _this._fields = fields;\n _this._name = name;\n return _this;\n }\n AvroRecordType.prototype.read = function (stream, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var record, _a, _b, _i, key, _c, _d;\n return tslib.__generator(this, function (_e) {\n switch (_e.label) {\n case 0:\n record = {};\n record[\"$schema\"] = this._name;\n _a = [];\n for (_b in this._fields)\n _a.push(_b);\n _i = 0;\n _e.label = 1;\n case 1:\n if (!(_i < _a.length)) return [3 /*break*/, 4];\n key = _a[_i];\n if (!this._fields.hasOwnProperty(key)) return [3 /*break*/, 3];\n _c = record;\n _d = key;\n return [4 /*yield*/, this._fields[key].read(stream, options)];\n case 2:\n _c[_d] = _e.sent();\n _e.label = 3;\n case 3:\n _i++;\n return [3 /*break*/, 1];\n case 4: return [2 /*return*/, record];\n }\n });\n });\n };\n return AvroRecordType;\n}(AvroType));\n\n// Copyright (c) Microsoft Corporation.\nvar AvroReader = /** @class */ (function () {\n function AvroReader(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n Object.defineProperty(AvroReader.prototype, \"blockOffset\", {\n get: function () {\n return this._blockOffset;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(AvroReader.prototype, \"objectIndex\", {\n get: function () {\n return this._objectIndex;\n },\n enumerable: false,\n configurable: true\n });\n AvroReader.prototype.initialize = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var header, _a, codec, _b, schema, _c, i;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0: return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal\n })];\n case 1:\n header = _d.sent();\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n _a = this;\n return [4 /*yield*/, AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal\n })];\n case 2:\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n _a._metadata = _d.sent();\n codec = this._metadata[AVRO_CODEC_KEY];\n if (!(codec == undefined || codec == \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n // The 16-byte, randomly-generated sync marker for this file.\n _b = this;\n return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal\n })];\n case 3:\n // The 16-byte, 
randomly-generated sync marker for this file.\n _b._syncMarker = _d.sent();\n schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n if (this._blockOffset == 0) {\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n }\n _c = this;\n return [4 /*yield*/, AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal\n })];\n case 4:\n _c._itemsRemainingInBlock = _d.sent();\n // skip block length\n return [4 /*yield*/, AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })];\n case 5:\n // skip block length\n _d.sent();\n this._initialized = true;\n if (!(this._objectIndex && this._objectIndex > 0)) return [3 /*break*/, 9];\n i = 0;\n _d.label = 6;\n case 6:\n if (!(i < this._objectIndex)) return [3 /*break*/, 9];\n return [4 /*yield*/, this._itemType.read(this._dataStream, { abortSignal: options.abortSignal })];\n case 7:\n _d.sent();\n this._itemsRemainingInBlock--;\n _d.label = 8;\n case 8:\n i++;\n return [3 /*break*/, 6];\n case 9: return [2 /*return*/];\n }\n });\n });\n };\n AvroReader.prototype.hasNext = function () {\n return !this._initialized || this._itemsRemainingInBlock > 0;\n };\n AvroReader.prototype.parseObjects = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function parseObjects_1() {\n var result, marker, _a, err_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!!this._initialized) return [3 /*break*/, 2];\n return [4 /*yield*/, tslib.__await(this.initialize(options))];\n case 1:\n _b.sent();\n _b.label = 2;\n case 2:\n if (!this.hasNext()) return [3 /*break*/, 13];\n return [4 /*yield*/, tslib.__await(this._itemType.read(this._dataStream, {\n abortSignal: options.abortSignal\n }))];\n case 3:\n result = _b.sent();\n this._itemsRemainingInBlock--;\n this._objectIndex++;\n if (!(this._itemsRemainingInBlock == 0)) return [3 /*break*/, 10];\n return [4 /*yield*/, tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal\n }))];\n case 4:\n marker = _b.sent();\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n if (!arraysEqual(this._syncMarker, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n _b.label = 5;\n case 5:\n _b.trys.push([5, 7, , 8]);\n _a = this;\n return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal\n }))];\n case 6:\n _a._itemsRemainingInBlock = _b.sent();\n return [3 /*break*/, 8];\n case 7:\n err_1 = _b.sent();\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n return [3 /*break*/, 8];\n case 8:\n if (!(this._itemsRemainingInBlock > 0)) return [3 /*break*/, 10];\n // Ignore block size\n return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }))];\n case 9:\n // Ignore block size\n _b.sent();\n _b.label = 10;\n case 10: return [4 /*yield*/, tslib.__await(result)];\n case 11: return [4 /*yield*/, _b.sent()];\n case 12:\n _b.sent();\n return [3 /*break*/, 2];\n case 13: return [2 /*return*/];\n }\n });\n });\n };\n return AvroReader;\n}());\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nvar AvroReadable = /** @class */ (function () {\n function AvroReadable() {\n }\n return AvroReadable;\n}());\n\n// Copyright (c) Microsoft Corporation.\nvar ABORT_ERROR = 
new abortController.AbortError(\"Reading from the avro stream was aborted.\");\nvar AvroReadableFromStream = /** @class */ (function (_super) {\n tslib.__extends(AvroReadableFromStream, _super);\n function AvroReadableFromStream(readable) {\n var _this = _super.call(this) || this;\n _this._readable = readable;\n _this._position = 0;\n return _this;\n }\n AvroReadableFromStream.prototype.toUint8Array = function (data) {\n if (typeof data === \"string\") {\n return Buffer.from(data);\n }\n return data;\n };\n Object.defineProperty(AvroReadableFromStream.prototype, \"position\", {\n get: function () {\n return this._position;\n },\n enumerable: false,\n configurable: true\n });\n AvroReadableFromStream.prototype.read = function (size, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var chunk;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) {\n throw ABORT_ERROR;\n }\n if (size < 0) {\n throw new Error(\"size parameter should be positive: \" + size);\n }\n if (size === 0) {\n return [2 /*return*/, new Uint8Array()];\n }\n if (!this._readable.readable) {\n throw new Error(\"Stream no longer readable.\");\n }\n chunk = this._readable.read(size);\n if (chunk) {\n this._position += chunk.length;\n // chunk.length maybe less than desired size if the stream ends.\n return [2 /*return*/, this.toUint8Array(chunk)];\n }\n else {\n // register callback to wait for enough data to read\n return [2 /*return*/, new Promise(function (resolve, reject) {\n var cleanUp = function () {\n _this._readable.removeListener(\"readable\", readableCallback);\n _this._readable.removeListener(\"error\", rejectCallback);\n _this._readable.removeListener(\"end\", rejectCallback);\n _this._readable.removeListener(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal.removeEventListener(\"abort\", abortHandler);\n }\n };\n var readableCallback = function () {\n var chunk = _this._readable.read(size);\n if (chunk) {\n _this._position += chunk.length;\n cleanUp();\n // chunk.length maybe less than desired size if the stream ends.\n resolve(_this.toUint8Array(chunk));\n }\n };\n var rejectCallback = function () {\n cleanUp();\n reject();\n };\n var abortHandler = function () {\n cleanUp();\n reject(ABORT_ERROR);\n };\n _this._readable.on(\"readable\", readableCallback);\n _this._readable.once(\"error\", rejectCallback);\n _this._readable.once(\"end\", rejectCallback);\n _this._readable.once(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", abortHandler);\n }\n })];\n }\n });\n });\n };\n return AvroReadableFromStream;\n}(AvroReadable));\n\n// Copyright (c) Microsoft Corporation. 
All rights reserved.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.\n *\n * @class BlobQuickQueryStream\n * @extends {Readable}\n */\nvar BlobQuickQueryStream = /** @class */ (function (_super) {\n tslib.__extends(BlobQuickQueryStream, _super);\n /**\n * Creates an instance of BlobQuickQueryStream.\n *\n * @param {NodeJS.ReadableStream} source The current ReadableStream returned from getter\n * @param {BlobQuickQueryStreamOptions} [options={}]\n * @memberof BlobQuickQueryStream\n */\n function BlobQuickQueryStream(source, options) {\n if (options === void 0) { options = {}; }\n var _this = _super.call(this) || this;\n _this.avroPaused = true;\n _this.source = source;\n _this.onProgress = options.onProgress;\n _this.onError = options.onError;\n _this.avroReader = new AvroReader(new AvroReadableFromStream(_this.source));\n _this.avroIter = _this.avroReader.parseObjects({ abortSignal: options.abortSignal });\n return _this;\n }\n BlobQuickQueryStream.prototype._read = function () {\n var _this = this;\n if (this.avroPaused) {\n this.readInternal().catch(function (err) {\n _this.emit(\"error\", err);\n });\n }\n };\n BlobQuickQueryStream.prototype.readInternal = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var avroNext, obj, schema, data, bytesScanned, totalBytes, fatal, name_1, description, position;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n this.avroPaused = false;\n _a.label = 1;\n case 1: return [4 /*yield*/, this.avroIter.next()];\n case 2:\n avroNext = _a.sent();\n if (avroNext.done) {\n return [3 /*break*/, 4];\n }\n obj = avroNext.value;\n schema = obj.$schema;\n if (typeof schema !== \"string\") {\n throw Error(\"Missing schema in avro record.\");\n }\n switch (schema) {\n case \"com.microsoft.azure.storage.queryBlobContents.resultData\":\n data = obj.data;\n if (data instanceof Uint8Array === false) {\n throw Error(\"Invalid data in avro result record.\");\n }\n if (!this.push(Buffer.from(data))) {\n this.avroPaused = true;\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.progress\":\n bytesScanned = obj.bytesScanned;\n if (typeof bytesScanned !== \"number\") {\n throw Error(\"Invalid bytesScanned in avro progress record.\");\n }\n if (this.onProgress) {\n this.onProgress({ loadedBytes: bytesScanned });\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.end\":\n if (this.onProgress) {\n totalBytes = obj.totalBytes;\n if (typeof totalBytes !== \"number\") {\n throw Error(\"Invalid totalBytes in avro end record.\");\n }\n this.onProgress({ loadedBytes: totalBytes });\n }\n this.push(null);\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.error\":\n if (this.onError) {\n fatal = obj.fatal;\n if (typeof fatal !== \"boolean\") {\n throw Error(\"Invalid fatal in avro error record.\");\n }\n name_1 = obj.name;\n if (typeof name_1 !== \"string\") {\n throw Error(\"Invalid name in avro error record.\");\n }\n description = obj.description;\n if (typeof description !== \"string\") {\n throw Error(\"Invalid description in avro error record.\");\n }\n position = obj.position;\n if (typeof position !== \"number\") {\n throw Error(\"Invalid position in avro error record.\");\n }\n this.onError({\n position: position,\n name: name_1,\n isFatal: fatal,\n description: description\n });\n }\n break;\n default:\n throw Error(\"Unknown schema \" + schema + \" in avro progress record.\");\n 
}\n _a.label = 3;\n case 3:\n if (!avroNext.done && !this.avroPaused) return [3 /*break*/, 1];\n _a.label = 4;\n case 4: return [2 /*return*/];\n }\n });\n });\n };\n return BlobQuickQueryStream;\n}(stream.Readable));\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will\n * parse avor data returned by blob query.\n *\n * @export\n * @class BlobQueryResponse\n * @implements {BlobDownloadResponseModel}\n */\nvar BlobQueryResponse = /** @class */ (function () {\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param {BlobQueryResponseModel} originalResponse\n * @param {BlobQuickQueryStreamOptions} [options={}]\n * @memberof BlobQueryResponse\n */\n function BlobQueryResponse(originalResponse, options) {\n if (options === void 0) { options = {}; }\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options);\n }\n Object.defineProperty(BlobQueryResponse.prototype, \"acceptRanges\", {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.acceptRanges;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"cacheControl\", {\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.cacheControl;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentDisposition\", {\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.contentDisposition;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentEncoding\", {\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.contentEncoding;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentLanguage\", {\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.contentLanguage;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobSequenceNumber\", {\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n * @type {(number | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.blobSequenceNumber;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobType\", {\n /**\n * The blob's type. 
Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n * @type {(BlobType | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.blobType;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentLength\", {\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n * @type {(number | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.contentLength;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentMD5\", {\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n * @type {(Uint8Array | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.contentMD5;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentRange\", {\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.contentRange;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentType\", {\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.contentType;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyCompletedOn\", {\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n * @type {(Date | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return undefined;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyId\", {\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.copyId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyProgress\", {\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. 
Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.copyProgress;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copySource\", {\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.copySource;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyStatus\", {\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n * @type {(CopyStatusType | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.copyStatus;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"copyStatusDescription\", {\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.copyStatusDescription;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"leaseDuration\", {\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n * @type {(LeaseDurationType | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.leaseDuration;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"leaseState\", {\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n * @type {(LeaseStateType | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.leaseState;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"leaseStatus\", {\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n * @type {(LeaseStatusType | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.leaseStatus;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"date\", {\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n * @type {(Date | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.date;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobCommittedBlockCount\", {\n /**\n * The number of committed blocks\n * present in the blob. 
This header is returned only for append blobs.\n *\n * @readonly\n * @type {(number | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.blobCommittedBlockCount;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"etag\", {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.etag;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"errorCode\", {\n /**\n * The error code.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.errorCode;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"isServerEncrypted\", {\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n * @type {(boolean | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.isServerEncrypted;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobContentMD5\", {\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n * @type {(Uint8Array | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.blobContentMD5;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"lastModified\", {\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n * @type {(Date | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.lastModified;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"metadata\", {\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n * @type {(Metadata | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.metadata;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"requestId\", {\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.requestId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"clientRequestId\", {\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.clientRequestId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"version\", {\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.version;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"encryptionKeySha256\", {\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n * @type {(string | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.encryptionKeySha256;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"contentCrc64\", {\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. 
If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n *\n * @type {(Uint8Array | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse.contentCrc64;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"blobBody\", {\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n * @type {(Promise | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return undefined;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"readableStreamBody\", {\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will parse avor data returned by blob query.\n *\n * @readonly\n * @type {(NodeJS.ReadableStream | undefined)}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return coreHttp.isNode ? this.blobDownloadStream : undefined;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobQueryResponse.prototype, \"_response\", {\n /**\n * The HTTP response.\n *\n * @type {HttpResponse}\n * @memberof BlobQueryResponse\n */\n get: function () {\n return this.originalResponse._response;\n },\n enumerable: false,\n configurable: true\n });\n return BlobQueryResponse;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.\n *\n * @export\n * @class StorageSharedKeyCredentialPolicy\n * @extends {CredentialPolicy}\n */\nvar StorageSharedKeyCredentialPolicy = /** @class */ (function (_super) {\n tslib.__extends(StorageSharedKeyCredentialPolicy, _super);\n /**\n * Creates an instance of StorageSharedKeyCredentialPolicy.\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @param {StorageSharedKeyCredential} factory\n * @memberof StorageSharedKeyCredentialPolicy\n */\n function StorageSharedKeyCredentialPolicy(nextPolicy, options, factory) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.factory = factory;\n return _this;\n }\n /**\n * Signs request.\n *\n * @protected\n * @param {WebResource} request\n * @returns {WebResource}\n * @memberof StorageSharedKeyCredentialPolicy\n */\n StorageSharedKeyCredentialPolicy.prototype.signRequest = function (request) {\n request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());\n if (request.body && typeof request.body === \"string\" && request.body.length > 0) {\n request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));\n }\n var stringToSign = [\n request.method.toUpperCase(),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),\n this.getHeaderValueToSign(request, HeaderConstants.DATE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),\n 
this.getHeaderValueToSign(request, HeaderConstants.RANGE)\n ].join(\"\\n\") +\n \"\\n\" +\n this.getCanonicalizedHeadersString(request) +\n this.getCanonicalizedResourceString(request);\n var signature = this.factory.computeHMACSHA256(stringToSign);\n request.headers.set(HeaderConstants.AUTHORIZATION, \"SharedKey \" + this.factory.accountName + \":\" + signature);\n // console.log(`[URL]:${request.url}`);\n // console.log(`[HEADERS]:${request.headers.toString()}`);\n // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);\n // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);\n return request;\n };\n /**\n * Retrieve header value according to shared key sign rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n *\n * @private\n * @param {WebResource} request\n * @param {string} headerName\n * @returns {string}\n * @memberof StorageSharedKeyCredentialPolicy\n */\n StorageSharedKeyCredentialPolicy.prototype.getHeaderValueToSign = function (request, headerName) {\n var value = request.headers.get(headerName);\n if (!value) {\n return \"\";\n }\n // When using version 2015-02-21 or later, if Content-Length is zero, then\n // set the Content-Length part of the StringToSign to an empty string.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n if (headerName === HeaderConstants.CONTENT_LENGTH && value === \"0\") {\n return \"\";\n }\n return value;\n };\n /**\n * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:\n * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.\n * 2. Convert each HTTP header name to lowercase.\n * 3. Sort the headers lexicographically by header name, in ascending order.\n * Each header may appear only once in the string.\n * 4. Replace any linear whitespace in the header value with a single space.\n * 5. Trim any whitespace around the colon in the header.\n * 6. 
Finally, append a new-line character to each canonicalized header in the resulting list.\n * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.\n *\n * @private\n * @param {WebResource} request\n * @returns {string}\n * @memberof StorageSharedKeyCredentialPolicy\n */\n StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedHeadersString = function (request) {\n var headersArray = request.headers.headersArray().filter(function (value) {\n return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);\n });\n headersArray.sort(function (a, b) {\n return a.name.toLowerCase().localeCompare(b.name.toLowerCase());\n });\n // Remove duplicate headers\n headersArray = headersArray.filter(function (value, index, array) {\n if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {\n return false;\n }\n return true;\n });\n var canonicalizedHeadersStringToSign = \"\";\n headersArray.forEach(function (header) {\n canonicalizedHeadersStringToSign += header.name\n .toLowerCase()\n .trimRight() + \":\" + header.value.trimLeft() + \"\\n\";\n });\n return canonicalizedHeadersStringToSign;\n };\n /**\n * Retrieves the webResource canonicalized resource string.\n *\n * @private\n * @param {WebResource} request\n * @returns {string}\n * @memberof StorageSharedKeyCredentialPolicy\n */\n StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedResourceString = function (request) {\n var path = getURLPath(request.url) || \"/\";\n var canonicalizedResourceString = \"\";\n canonicalizedResourceString += \"/\" + this.factory.accountName + path;\n var queries = getURLQueries(request.url);\n var lowercaseQueries = {};\n if (queries) {\n var queryKeys = [];\n for (var key in queries) {\n if (queries.hasOwnProperty(key)) {\n var lowercaseKey = key.toLowerCase();\n lowercaseQueries[lowercaseKey] = queries[key];\n queryKeys.push(lowercaseKey);\n }\n }\n queryKeys.sort();\n for (var _i = 0, queryKeys_1 = queryKeys; _i < queryKeys_1.length; _i++) {\n var key = queryKeys_1[_i];\n canonicalizedResourceString += \"\\n\" + key + \":\" + decodeURIComponent(lowercaseQueries[key]);\n }\n }\n return canonicalizedResourceString;\n };\n return StorageSharedKeyCredentialPolicy;\n}(CredentialPolicy));\n\n// Copyright (c) Microsoft Corporation. 
All rights reserved.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n *\n * @export\n * @class StorageSharedKeyCredential\n * @extends {Credential}\n */\nvar StorageSharedKeyCredential = /** @class */ (function (_super) {\n tslib.__extends(StorageSharedKeyCredential, _super);\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param {string} accountName\n * @param {string} accountKey\n * @memberof StorageSharedKeyCredential\n */\n function StorageSharedKeyCredential(accountName, accountKey) {\n var _this = _super.call(this) || this;\n _this.accountName = accountName;\n _this.accountKey = Buffer.from(accountKey, \"base64\");\n return _this;\n }\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param {RequestPolicy} nextPolicy\n * @param {RequestPolicyOptions} options\n * @returns {StorageSharedKeyCredentialPolicy}\n * @memberof StorageSharedKeyCredential\n */\n StorageSharedKeyCredential.prototype.create = function (nextPolicy, options) {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n };\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param {string} stringToSign\n * @returns {string}\n * @memberof StorageSharedKeyCredential\n */\n StorageSharedKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) {\n return crypto.createHmac(\"sha256\", this.accountKey)\n .update(stringToSign, \"utf8\")\n .digest(\"base64\");\n };\n return StorageSharedKeyCredential;\n}(Credential));\n\n/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n */\nvar packageName = \"azure-storage-blob\";\nvar packageVersion = \"12.4.1\";\nvar StorageClientContext = /** @class */ (function (_super) {\n tslib.__extends(StorageClientContext, _super);\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the targe of the desired\n * operation.\n * @param [options] The parameter options\n */\n function StorageClientContext(url, options) {\n var _this = this;\n if (url == undefined) {\n throw new Error(\"'url' cannot be null.\");\n }\n if (!options) {\n options = {};\n }\n if (!options.userAgent) {\n var defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = packageName + \"/\" + packageVersion + \" \" + defaultUserAgent;\n }\n _this = _super.call(this, undefined, options) || this;\n _this.version = '2020-04-08';\n _this.baseUri = \"{url}\";\n _this.requestContentType = \"application/json; charset=utf-8\";\n _this.url = url;\n return _this;\n }\n return StorageClientContext;\n}(coreHttp.ServiceClient));\n\n// Copyright (c) Microsoft Corporation. 
All rights reserved.\n(function (BlockBlobTier) {\n /**\n * Optimized for storing data that is accessed frequently.\n */\n BlockBlobTier[\"Hot\"] = \"Hot\";\n /**\n * Optimized for storing data that is infrequently accessed and stored for at least 30 days.\n */\n BlockBlobTier[\"Cool\"] = \"Cool\";\n /**\n * Optimized for storing data that is rarely accessed and stored for at least 180 days\n * with flexible latency requirements (on the order of hours).\n */\n BlockBlobTier[\"Archive\"] = \"Archive\";\n})(exports.BlockBlobTier || (exports.BlockBlobTier = {}));\n(function (PremiumPageBlobTier) {\n /**\n * P4 Tier.\n */\n PremiumPageBlobTier[\"P4\"] = \"P4\";\n /**\n * P6 Tier.\n */\n PremiumPageBlobTier[\"P6\"] = \"P6\";\n /**\n * P10 Tier.\n */\n PremiumPageBlobTier[\"P10\"] = \"P10\";\n /**\n * P15 Tier.\n */\n PremiumPageBlobTier[\"P15\"] = \"P15\";\n /**\n * P20 Tier.\n */\n PremiumPageBlobTier[\"P20\"] = \"P20\";\n /**\n * P30 Tier.\n */\n PremiumPageBlobTier[\"P30\"] = \"P30\";\n /**\n * P40 Tier.\n */\n PremiumPageBlobTier[\"P40\"] = \"P40\";\n /**\n * P50 Tier.\n */\n PremiumPageBlobTier[\"P50\"] = \"P50\";\n /**\n * P60 Tier.\n */\n PremiumPageBlobTier[\"P60\"] = \"P60\";\n /**\n * P70 Tier.\n */\n PremiumPageBlobTier[\"P70\"] = \"P70\";\n /**\n * P80 Tier.\n */\n PremiumPageBlobTier[\"P80\"] = \"P80\";\n})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {}));\nfunction toAccessTier(tier) {\n if (tier == undefined) {\n return undefined;\n }\n return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).\n}\nfunction ensureCpkIfSpecified(cpk, isHttps) {\n if (cpk && !isHttps) {\n throw new RangeError(\"Customer-provided encryption key must be used over HTTPS.\");\n }\n if (cpk && !cpk.encryptionAlgorithm) {\n cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;\n }\n}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response Model PageBlob Range response\n */\nfunction rangeResponseFromModel(response) {\n var pageRange = (response._response.parsedBody.pageRange || []).map(function (x) { return ({\n offset: x.start,\n count: x.end - x.start\n }); });\n var clearRange = (response._response.parsedBody.clearRange || []).map(function (x) { return ({\n offset: x.start,\n count: x.end - x.start\n }); });\n return tslib.__assign(tslib.__assign({}, response), { pageRange: pageRange,\n clearRange: clearRange, _response: tslib.__assign(tslib.__assign({}, response._response), { parsedBody: {\n pageRange: pageRange,\n clearRange: clearRange\n } }) });\n}\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * This is the poller returned by {@link BlobClient.beginCopyFromURL}.\n * This can not be instantiated directly outside of this package.\n *\n * @hidden\n */\nvar BlobBeginCopyFromUrlPoller = /** @class */ (function (_super) {\n tslib.__extends(BlobBeginCopyFromUrlPoller, _super);\n function BlobBeginCopyFromUrlPoller(options) {\n var _this = this;\n var blobClient = options.blobClient, copySource = options.copySource, _a = options.intervalInMs, intervalInMs = _a === void 0 ? 
15000 : _a, onProgress = options.onProgress, resumeFrom = options.resumeFrom, startCopyFromURLOptions = options.startCopyFromURLOptions;\n var state;\n if (resumeFrom) {\n state = JSON.parse(resumeFrom).state;\n }\n var operation = makeBlobBeginCopyFromURLPollOperation(tslib.__assign(tslib.__assign({}, state), { blobClient: blobClient,\n copySource: copySource,\n startCopyFromURLOptions: startCopyFromURLOptions }));\n _this = _super.call(this, operation) || this;\n if (typeof onProgress === \"function\") {\n _this.onProgress(onProgress);\n }\n _this.intervalInMs = intervalInMs;\n return _this;\n }\n BlobBeginCopyFromUrlPoller.prototype.delay = function () {\n return coreHttp.delay(this.intervalInMs);\n };\n return BlobBeginCopyFromUrlPoller;\n}(coreLro.Poller));\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nvar cancel = function cancel(options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var state, copyId;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n state = this.state;\n copyId = state.copyId;\n if (state.isCompleted) {\n return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];\n }\n if (!copyId) {\n state.isCancelled = true;\n return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];\n }\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n return [4 /*yield*/, state.blobClient.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal\n })];\n case 1:\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n _a.sent();\n state.isCancelled = true;\n return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];\n }\n });\n });\n};\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nvar update = function update(options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var state, blobClient, copySource, startCopyFromURLOptions, result, result, copyStatus, copyProgress, prevCopyProgress, err_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n state = this.state;\n blobClient = state.blobClient, copySource = state.copySource, startCopyFromURLOptions = state.startCopyFromURLOptions;\n if (!!state.isStarted) return [3 /*break*/, 2];\n state.isStarted = true;\n return [4 /*yield*/, blobClient.startCopyFromURL(copySource, startCopyFromURLOptions)];\n case 1:\n result = _a.sent();\n // copyId is needed to abort\n state.copyId = result.copyId;\n if (result.copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n return [3 /*break*/, 6];\n case 2:\n if (!!state.isCompleted) return [3 /*break*/, 6];\n _a.label = 3;\n case 3:\n _a.trys.push([3, 5, , 6]);\n return [4 /*yield*/, state.blobClient.getProperties({ abortSignal: options.abortSignal })];\n case 4:\n result = _a.sent();\n copyStatus = result.copyStatus, copyProgress = result.copyProgress;\n prevCopyProgress = state.copyProgress;\n if (copyProgress) {\n state.copyProgress = copyProgress;\n }\n if (copyStatus === \"pending\" &&\n copyProgress !== prevCopyProgress &&\n typeof options.fireProgress === 
\"function\") {\n // trigger in setTimeout, or swallow error?\n options.fireProgress(state);\n }\n else if (copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n else if (copyStatus === \"failed\") {\n state.error = new Error(\"Blob copy failed with reason: \\\"\" + (result.copyStatusDescription || \"unknown\") + \"\\\"\");\n state.isCompleted = true;\n }\n return [3 /*break*/, 6];\n case 5:\n err_1 = _a.sent();\n state.error = err_1;\n state.isCompleted = true;\n return [3 /*break*/, 6];\n case 6: return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];\n }\n });\n });\n};\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nvar toString = function toString() {\n return JSON.stringify({ state: this.state }, function (key, value) {\n // remove blobClient from serialized state since a client can't be hydrated from this info.\n if (key === \"blobClient\") {\n return undefined;\n }\n return value;\n });\n};\n/**\n * Creates a poll operation given the provided state.\n * @hidden\n */\nfunction makeBlobBeginCopyFromURLPollOperation(state) {\n return {\n state: tslib.__assign({}, state),\n cancel: cancel,\n toString: toString,\n update: update\n };\n}\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @export\n * @param {Range} iRange\n * @returns {string}\n */\nfunction rangeToString(iRange) {\n if (iRange.offset < 0) {\n throw new RangeError(\"Range.offset cannot be smaller than 0.\");\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\"Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.\");\n }\n return iRange.count\n ? \"bytes=\" + iRange.offset + \"-\" + (iRange.offset + iRange.count - 1)\n : \"bytes=\" + iRange.offset + \"-\";\n}\n\n// Copyright (c) Microsoft Corporation. 
All rights reserved.\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n *\n * @export\n * @class StorageClient\n */\nvar StorageClient = /** @class */ (function () {\n /**\n * Creates an instance of StorageClient.\n * @param {string} url url to resource\n * @param {Pipeline} pipeline request policy pipeline.\n * @memberof StorageClient\n */\n function StorageClient(url, pipeline) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions());\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n this.credential = new AnonymousCredential();\n for (var _i = 0, _a = this.pipeline.factories; _i < _a.length; _i++) {\n var factory = _a[_i];\n if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential) {\n this.credential = factory;\n }\n else if (coreHttp.isTokenCredential(factory.credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = factory.credential;\n }\n }\n // Override protocol layer's default content-type\n var storageClientContext = this.storageClientContext;\n storageClientContext.requestContentType = undefined;\n }\n return StorageClient;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * States for Batch.\n *\n * @enum {number}\n */\nvar BatchStates;\n(function (BatchStates) {\n BatchStates[BatchStates[\"Good\"] = 0] = \"Good\";\n BatchStates[BatchStates[\"Error\"] = 1] = \"Error\";\n})(BatchStates || (BatchStates = {}));\n/**\n * Batch provides basic parallel execution with concurrency limits.\n * Will stop execute left operations when one of the executed operation throws an error.\n * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.\n *\n * @export\n * @class Batch\n */\nvar Batch = /** @class */ (function () {\n /**\n * Creates an instance of Batch.\n * @param {number} [concurrency=5]\n * @memberof Batch\n */\n function Batch(concurrency) {\n if (concurrency === void 0) { concurrency = 5; }\n /**\n * Number of active operations under execution.\n *\n * @private\n * @type {number}\n * @memberof Batch\n */\n this.actives = 0;\n /**\n * Number of completed operations under execution.\n *\n * @private\n * @type {number}\n * @memberof Batch\n */\n this.completed = 0;\n /**\n * Offset of next operation to be executed.\n *\n * @private\n * @type {number}\n * @memberof Batch\n */\n this.offset = 0;\n /**\n * Operation array to be executed.\n *\n * @private\n * @type {Operation[]}\n * @memberof Batch\n */\n this.operations = [];\n /**\n * States of Batch. 
When an error happens, state will turn into error.\n * Batch will stop execute left operations.\n *\n * @private\n * @type {BatchStates}\n * @memberof Batch\n */\n this.state = BatchStates.Good;\n if (concurrency < 1) {\n throw new RangeError(\"concurrency must be larger than 0\");\n }\n this.concurrency = concurrency;\n this.emitter = new events.EventEmitter();\n }\n /**\n * Add a operation into queue.\n *\n * @param {Operation} operation\n * @memberof Batch\n */\n Batch.prototype.addOperation = function (operation) {\n var _this = this;\n this.operations.push(function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n var error_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n _a.trys.push([0, 2, , 3]);\n this.actives++;\n return [4 /*yield*/, operation()];\n case 1:\n _a.sent();\n this.actives--;\n this.completed++;\n this.parallelExecute();\n return [3 /*break*/, 3];\n case 2:\n error_1 = _a.sent();\n this.emitter.emit(\"error\", error_1);\n return [3 /*break*/, 3];\n case 3: return [2 /*return*/];\n }\n });\n }); });\n };\n /**\n * Start execute operations in the queue.\n *\n * @returns {Promise}\n * @memberof Batch\n */\n Batch.prototype.do = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n if (this.operations.length === 0) {\n return [2 /*return*/, Promise.resolve()];\n }\n this.parallelExecute();\n return [2 /*return*/, new Promise(function (resolve, reject) {\n _this.emitter.on(\"finish\", resolve);\n _this.emitter.on(\"error\", function (error) {\n _this.state = BatchStates.Error;\n reject(error);\n });\n })];\n });\n });\n };\n /**\n * Get next operation to be executed. Return null when reaching ends.\n *\n * @private\n * @returns {(Operation | null)}\n * @memberof Batch\n */\n Batch.prototype.nextOperation = function () {\n if (this.offset < this.operations.length) {\n return this.operations[this.offset++];\n }\n return null;\n };\n /**\n * Start execute operations. One one the most important difference between\n * this method with do() is that do() wraps as an sync method.\n *\n * @private\n * @returns {void}\n * @memberof Batch\n */\n Batch.prototype.parallelExecute = function () {\n if (this.state === BatchStates.Error) {\n return;\n }\n if (this.completed >= this.operations.length) {\n this.emitter.emit(\"finish\");\n return;\n }\n while (this.actives < this.concurrency) {\n var operation = this.nextOperation();\n if (operation) {\n operation();\n }\n else {\n return;\n }\n }\n };\n return Batch;\n}());\n\n// Copyright (c) Microsoft Corporation. 
All rights reserved.\n/**\n * This class generates a readable stream from the data in an array of buffers.\n *\n * @export\n * @class BuffersStream\n */\nvar BuffersStream = /** @class */ (function (_super) {\n tslib.__extends(BuffersStream, _super);\n /**\n * Creates an instance of BuffersStream that will emit the data\n * contained in the array of buffers.\n *\n * @param {Buffer[]} buffers Array of buffers containing the data\n * @param {number} byteLength The total length of data contained in the buffers\n * @memberof BuffersStream\n */\n function BuffersStream(buffers, byteLength, options) {\n var _this = _super.call(this, options) || this;\n _this.buffers = buffers;\n _this.byteLength = byteLength;\n _this.byteOffsetInCurrentBuffer = 0;\n _this.bufferIndex = 0;\n _this.pushedBytesLength = 0;\n // check byteLength is no larger than buffers[] total length\n var buffersLength = 0;\n for (var _i = 0, _a = _this.buffers; _i < _a.length; _i++) {\n var buf = _a[_i];\n buffersLength += buf.byteLength;\n }\n if (buffersLength < _this.byteLength) {\n throw new Error(\"Data size shouldn't be larger than the total length of buffers.\");\n }\n return _this;\n }\n /**\n * Internal _read() that will be called when the stream wants to pull more data in.\n *\n * @param {number} size Optional. The size of data to be read\n * @memberof BuffersStream\n */\n BuffersStream.prototype._read = function (size) {\n if (this.pushedBytesLength >= this.byteLength) {\n this.push(null);\n }\n if (!size) {\n size = this.readableHighWaterMark;\n }\n var outBuffers = [];\n var i = 0;\n while (i < size && this.pushedBytesLength < this.byteLength) {\n // The last buffer may be longer than the data it contains.\n var remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;\n var remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;\n var remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);\n if (remaining > size - i) {\n // chunkSize = size - i\n var end = this.byteOffsetInCurrentBuffer + size - i;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n this.pushedBytesLength += size - i;\n this.byteOffsetInCurrentBuffer = end;\n i = size;\n break;\n }\n else {\n // chunkSize = remaining\n var end = this.byteOffsetInCurrentBuffer + remaining;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n if (remaining === remainingCapacityInThisBuffer) {\n // this.buffers[this.bufferIndex] used up, shift to next one\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex++;\n }\n else {\n this.byteOffsetInCurrentBuffer = end;\n }\n this.pushedBytesLength += remaining;\n i += remaining;\n }\n }\n if (outBuffers.length > 1) {\n this.push(Buffer.concat(outBuffers));\n }\n else if (outBuffers.length === 1) {\n this.push(outBuffers[0]);\n }\n };\n return BuffersStream;\n}(stream.Readable));\n\n// Copyright (c) Microsoft Corporation. 
All rights reserved.\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n// Can't use import as Typescript doesn't recognize \"buffer\".\nvar maxBufferLength = require(\"buffer\").constants.MAX_LENGTH;\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n *\n * @export\n * @class BufferScheduler\n */\nvar PooledBuffer = /** @class */ (function () {\n function PooledBuffer(capacity, buffers, totalLength) {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n *\n * @private\n * @type {Buffer[]}\n * @memberof PooledBuffer\n */\n this.buffers = [];\n this.capacity = capacity;\n this._size = 0;\n // allocate\n var bufferNum = Math.ceil(capacity / maxBufferLength);\n for (var i = 0; i < bufferNum; i++) {\n var len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n if (buffers) {\n this.fill(buffers, totalLength);\n }\n }\n Object.defineProperty(PooledBuffer.prototype, \"size\", {\n /**\n * The size of the data contained in the pooled buffers.\n */\n get: function () {\n return this._size;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param {Buffer[]} buffers Input buffers containing the data to be filled in the pooled buffer\n * @param {number} totalLength Total length of the data to be filled in.\n *\n * @returns {void}\n * @memberof PooledBuffer\n */\n PooledBuffer.prototype.fill = function (buffers, totalLength) {\n this._size = Math.min(this.capacity, totalLength);\n var i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n var source = buffers[i];\n var target = this.buffers[j];\n var copiedNum = source.copy(target, targetOffset, sourceOffset);\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n };\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n * @returns {Readable}\n * @memberof PooledBuffer\n */\n PooledBuffer.prototype.getReadableStream = function () {\n return new BuffersStream(this.buffers, this.size);\n };\n return PooledBuffer;\n}());\n\n// Copyright (c) Microsoft Corporation. 
All rights reserved.\n/**\n * This class accepts a Node.js Readable stream as input, and keeps reading data\n * from the stream into the internal buffer structure, until it reaches maxBuffers.\n * Every available buffer will try to trigger outgoingHandler.\n *\n * The internal buffer structure includes an incoming buffer array, and a outgoing\n * buffer array. The incoming buffer array includes the \"empty\" buffers can be filled\n * with new incoming data. The outgoing array includes the filled buffers to be\n * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.\n *\n * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING\n *\n * NUM_OF_ALL_BUFFERS <= maxBuffers\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * 1. Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n * 2. concurrency should set a smaller value than maxBuffers, which is helpful to\n * reduce the possibility when a outgoing handler waits for the stream data.\n * in this situation, outgoing handlers are blocked.\n * Outgoing queue shouldn't be empty.\n * @export\n * @class BufferScheduler\n */\nvar BufferScheduler = /** @class */ (function () {\n /**\n * Creates an instance of BufferScheduler.\n *\n * @param {Readable} readable A Node.js Readable stream\n * @param {number} bufferSize Buffer size of every maintained buffer\n * @param {number} maxBuffers How many buffers can be allocated\n * @param {OutgoingHandler} outgoingHandler An async function scheduled to be\n * triggered when a buffer fully filled\n * with stream data\n * @param {number} concurrency Concurrency of executing outgoingHandlers (>0)\n * @param {string} [encoding] [Optional] Encoding of Readable stream when it's a string stream\n * @memberof BufferScheduler\n */\n function BufferScheduler(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) {\n /**\n * An internal event emitter.\n *\n * @private\n * @type {EventEmitter}\n * @memberof BufferScheduler\n */\n this.emitter = new events.EventEmitter();\n /**\n * An internal offset marker to track data offset in bytes of next outgoingHandler.\n *\n * @private\n * @type {number}\n * @memberof BufferScheduler\n */\n this.offset = 0;\n /**\n * An internal marker to track whether stream is end.\n *\n * @private\n * @type {boolean}\n * @memberof BufferScheduler\n */\n this.isStreamEnd = false;\n /**\n * An internal marker to track whether stream or outgoingHandler returns error.\n *\n * @private\n * @type {boolean}\n * @memberof BufferScheduler\n */\n this.isError = false;\n /**\n * How many handlers are executing.\n *\n * @private\n * @type {number}\n * @memberof BufferScheduler\n */\n this.executingOutgoingHandlers = 0;\n /**\n * How many buffers have been allocated.\n *\n * @private\n * @type {number}\n * @memberof BufferScheduler\n */\n this.numBuffers = 0;\n /**\n * Because this class doesn't know how much data every time stream pops, which\n * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache\n * data received from the stream, when data in unresolvedDataArray exceeds the\n * blockSize defined, it will try to concat a blockSize of buffer, fill into available\n * buffers from incoming and push to outgoing array.\n *\n * @private\n * @type {Buffer[]}\n * @memberof BufferScheduler\n */\n this.unresolvedDataArray = [];\n /**\n * How much data consisted in unresolvedDataArray.\n *\n * @private\n * @type {number}\n * @memberof BufferScheduler\n */\n this.unresolvedLength = 0;\n /**\n * The array includes all the available buffers can be used to fill data from stream.\n *\n * @private\n * @type {PooledBuffer[]}\n * @memberof BufferScheduler\n */\n this.incoming = [];\n /**\n * The array (queue) includes all the buffers filled from stream data.\n *\n * @private\n * @type {PooledBuffer[]}\n * @memberof BufferScheduler\n */\n this.outgoing = [];\n if (bufferSize <= 0) {\n throw new RangeError(\"bufferSize must be larger than 0, current is \" + bufferSize);\n }\n if (maxBuffers <= 0) {\n throw new RangeError(\"maxBuffers must be larger than 0, current is \" + maxBuffers);\n }\n if (concurrency <= 0) {\n throw new RangeError(\"concurrency must be larger than 0, current is \" + concurrency);\n }\n this.bufferSize = bufferSize;\n this.maxBuffers = maxBuffers;\n this.readable = readable;\n this.outgoingHandler = outgoingHandler;\n this.concurrency = concurrency;\n this.encoding = encoding;\n }\n /**\n * Start the scheduler, will return error when stream of any of the outgoingHandlers\n * returns error.\n *\n * @returns {Promise}\n * @memberof BufferScheduler\n */\n BufferScheduler.prototype.do = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve, reject) {\n _this.readable.on(\"data\", function (data) {\n data = typeof data === \"string\" ? Buffer.from(data, _this.encoding) : data;\n _this.appendUnresolvedData(data);\n if (!_this.resolveData()) {\n _this.readable.pause();\n }\n });\n _this.readable.on(\"error\", function (err) {\n _this.emitter.emit(\"error\", err);\n });\n _this.readable.on(\"end\", function () {\n _this.isStreamEnd = true;\n _this.emitter.emit(\"checkEnd\");\n });\n _this.emitter.on(\"error\", function (err) {\n _this.isError = true;\n _this.readable.pause();\n reject(err);\n });\n _this.emitter.on(\"checkEnd\", function () {\n if (_this.outgoing.length > 0) {\n _this.triggerOutgoingHandlers();\n return;\n }\n if (_this.isStreamEnd && _this.executingOutgoingHandlers === 0) {\n if (_this.unresolvedLength > 0 && _this.unresolvedLength < _this.bufferSize) {\n var buffer_1 = _this.shiftBufferFromUnresolvedDataArray();\n _this.outgoingHandler(function () { return buffer_1.getReadableStream(); }, buffer_1.size, _this.offset)\n .then(resolve)\n .catch(reject);\n }\n else if (_this.unresolvedLength >= _this.bufferSize) {\n return;\n }\n else {\n resolve();\n }\n }\n });\n })];\n });\n });\n };\n /**\n * Insert a new data into unresolved array.\n *\n * @private\n * @param {Buffer} data\n * @memberof BufferScheduler\n */\n BufferScheduler.prototype.appendUnresolvedData = function (data) {\n this.unresolvedDataArray.push(data);\n this.unresolvedLength += data.length;\n };\n /**\n * Try to shift a buffer with size in blockSize. 
The buffer returned may be less\n * than blockSize when data in unresolvedDataArray is less than bufferSize.\n *\n * @private\n * @returns {PooledBuffer}\n * @memberof BufferScheduler\n */\n BufferScheduler.prototype.shiftBufferFromUnresolvedDataArray = function (buffer) {\n if (!buffer) {\n buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);\n }\n else {\n buffer.fill(this.unresolvedDataArray, this.unresolvedLength);\n }\n this.unresolvedLength -= buffer.size;\n return buffer;\n };\n /**\n * Resolve data in unresolvedDataArray. For every buffer with size in blockSize\n * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,\n * then push it into outgoing to be handled by outgoing handler.\n *\n * Return false when available buffers in incoming are not enough, else true.\n *\n * @private\n * @returns {boolean} Return false when buffers in incoming are not enough, else true.\n * @memberof BufferScheduler\n */\n BufferScheduler.prototype.resolveData = function () {\n while (this.unresolvedLength >= this.bufferSize) {\n var buffer = void 0;\n if (this.incoming.length > 0) {\n buffer = this.incoming.shift();\n this.shiftBufferFromUnresolvedDataArray(buffer);\n }\n else {\n if (this.numBuffers < this.maxBuffers) {\n buffer = this.shiftBufferFromUnresolvedDataArray();\n this.numBuffers++;\n }\n else {\n // No available buffer, wait for buffer returned\n return false;\n }\n }\n this.outgoing.push(buffer);\n this.triggerOutgoingHandlers();\n }\n return true;\n };\n /**\n * Try to trigger a outgoing handler for every buffer in outgoing. Stop when\n * concurrency reaches.\n *\n * @private\n * @memberof BufferScheduler\n */\n BufferScheduler.prototype.triggerOutgoingHandlers = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var buffer;\n return tslib.__generator(this, function (_a) {\n do {\n if (this.executingOutgoingHandlers >= this.concurrency) {\n return [2 /*return*/];\n }\n buffer = this.outgoing.shift();\n if (buffer) {\n this.triggerOutgoingHandler(buffer);\n }\n } while (buffer);\n return [2 /*return*/];\n });\n });\n };\n /**\n * Trigger a outgoing handler for a buffer shifted from outgoing.\n *\n * @private\n * @param {Buffer} buffer\n * @returns {Promise}\n * @memberof BufferScheduler\n */\n BufferScheduler.prototype.triggerOutgoingHandler = function (buffer) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var bufferLength, err_1;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n bufferLength = buffer.size;\n this.executingOutgoingHandlers++;\n this.offset += bufferLength;\n _a.label = 1;\n case 1:\n _a.trys.push([1, 3, , 4]);\n return [4 /*yield*/, this.outgoingHandler(function () { return buffer.getReadableStream(); }, bufferLength, this.offset - bufferLength)];\n case 2:\n _a.sent();\n return [3 /*break*/, 4];\n case 3:\n err_1 = _a.sent();\n this.emitter.emit(\"error\", err_1);\n return [2 /*return*/];\n case 4:\n this.executingOutgoingHandlers--;\n this.reuseBuffer(buffer);\n this.emitter.emit(\"checkEnd\");\n return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Return buffer used by outgoing handler into incoming.\n *\n * @private\n * @param {Buffer} buffer\n * @memberof BufferScheduler\n */\n BufferScheduler.prototype.reuseBuffer = function (buffer) {\n this.incoming.push(buffer);\n if (!this.isError && this.resolveData() && !this.isStreamEnd) {\n this.readable.resume();\n }\n };\n return BufferScheduler;\n}());\n\n// Copyright (c) 
Microsoft Corporation.\n/**\n * Creates a span using the global tracer.\n * @param name The name of the operation being performed.\n * @param tracingOptions The options for the underlying http request.\n */\nfunction createSpan(operationName, tracingOptions) {\n if (tracingOptions === void 0) { tracingOptions = {}; }\n var tracer = coreTracing.getTracer();\n var spanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { kind: api.SpanKind.INTERNAL });\n var span = tracer.startSpan(\"Azure.Storage.Blob.\" + operationName, spanOptions);\n span.setAttribute(\"az.namespace\", \"Microsoft.Storage\");\n var newOptions = tracingOptions.spanOptions || {};\n if (span.isRecording()) {\n newOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { parent: span.context(), attributes: tslib.__assign(tslib.__assign({}, spanOptions.attributes), { \"az.namespace\": \"Microsoft.Storage\" }) });\n }\n return {\n span: span,\n spanOptions: newOptions\n };\n}\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * Reads a readable stream into buffer. Fill the buffer from offset to end.\n *\n * @export\n * @param {NodeJS.ReadableStream} stream A Node.js Readable stream\n * @param {Buffer} buffer Buffer to be filled, length must >= offset\n * @param {number} offset From which position in the buffer to be filled, inclusive\n * @param {number} end To which position in the buffer to be filled, exclusive\n * @param {string} [encoding] Encoding of the Readable stream\n * @returns {Promise}\n */\nfunction streamToBuffer(stream, buffer, offset, end, encoding) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var pos, count;\n return tslib.__generator(this, function (_a) {\n pos = 0;\n count = end - offset;\n return [2 /*return*/, new Promise(function (resolve, reject) {\n stream.on(\"readable\", function () {\n if (pos >= count) {\n resolve();\n return;\n }\n var chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n // How much data needed in this chunk\n var chunkLength = pos + chunk.length > count ? count - pos : chunk.length;\n buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);\n pos += chunkLength;\n });\n stream.on(\"end\", function () {\n if (pos < count) {\n reject(new Error(\"Stream drains before getting enough data needed. Data read: \" + pos + \", data need: \" + count));\n }\n resolve();\n });\n stream.on(\"error\", reject);\n })];\n });\n });\n}\n/**\n * Reads a readable stream into buffer entirely.\n *\n * @export\n * @param {NodeJS.ReadableStream} stream A Node.js Readable stream\n * @param {Buffer} buffer Buffer to be filled, length must >= offset\n * @param {string} [encoding] Encoding of the Readable stream\n * @returns {Promise} with the count of bytes read.\n * @throws {RangeError} If buffer size is not big enough.\n */\nfunction streamToBuffer2(stream, buffer, encoding) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var pos, bufferSize;\n return tslib.__generator(this, function (_a) {\n pos = 0;\n bufferSize = buffer.length;\n return [2 /*return*/, new Promise(function (resolve, reject) {\n stream.on(\"readable\", function () {\n var chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n if (pos + chunk.length > bufferSize) {\n reject(new Error(\"Stream exceeds buffer size. 
Buffer size: \" + bufferSize));\n return;\n }\n buffer.fill(chunk, pos, pos + chunk.length);\n pos += chunk.length;\n });\n stream.on(\"end\", function () {\n resolve(pos);\n });\n stream.on(\"error\", reject);\n })];\n });\n });\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed.\n *\n * @export\n * @param {NodeJS.ReadableStream} rs The read stream.\n * @param {string} file Destination file path.\n * @returns {Promise}\n */\nfunction readStreamToLocalFile(rs, file) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve, reject) {\n var ws = fs.createWriteStream(file);\n rs.on(\"error\", function (err) {\n reject(err);\n });\n ws.on(\"error\", function (err) {\n reject(err);\n });\n ws.on(\"close\", resolve);\n rs.pipe(ws);\n })];\n });\n });\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Promisified version of fs.stat().\n */\nvar fsStat = util.promisify(fs.stat);\nvar fsCreateReadStream = fs.createReadStream;\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting\n * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all\n * the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n *\n * @export\n * @class BlobSASPermissions\n */\nvar BlobSASPermissions = /** @class */ (function () {\n function BlobSASPermissions() {\n /**\n * Specifies Read access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.read = false;\n /**\n * Specifies Add access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.add = false;\n /**\n * Specifies Create access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.create = false;\n /**\n * Specifies Write access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.write = false;\n /**\n * Specifies Delete access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.delete = false;\n /**\n * Specifies Delete version access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.deleteVersion = false;\n /**\n * Specfies Tag access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.tag = false;\n /**\n * Specifies Move access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.move = false;\n /**\n * Specifies Execute access granted.\n *\n * @type {boolean}\n * @memberof BlobSASPermissions\n */\n this.execute = false;\n }\n /**\n * Creates a {@link BlobSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @static\n * @param {string} permissions\n * @returns {BlobSASPermissions}\n * @memberof BlobSASPermissions\n */\n BlobSASPermissions.parse = function (permissions) {\n var blobSASPermissions = new BlobSASPermissions();\n for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {\n var char = permissions_1[_i];\n switch (char) {\n case \"r\":\n blobSASPermissions.read = true;\n break;\n case \"a\":\n blobSASPermissions.add = true;\n break;\n case \"c\":\n blobSASPermissions.create = true;\n break;\n case \"w\":\n blobSASPermissions.write = true;\n break;\n case \"d\":\n blobSASPermissions.delete = true;\n break;\n case \"x\":\n blobSASPermissions.deleteVersion = true;\n break;\n case \"t\":\n blobSASPermissions.tag = true;\n break;\n case \"m\":\n blobSASPermissions.move = true;\n break;\n case \"e\":\n blobSASPermissions.execute = true;\n break;\n default:\n throw new RangeError(\"Invalid permission: \" + char);\n }\n }\n return blobSASPermissions;\n };\n /**\n * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @static\n * @param {BlobSASPermissionsLike} permissionLike\n * @returns {BlobSASPermissions}\n * @memberof BlobSASPermissions\n */\n BlobSASPermissions.from = function (permissionLike) {\n var blobSASPermissions = new BlobSASPermissions();\n if (permissionLike.read) {\n blobSASPermissions.read = true;\n }\n if (permissionLike.add) {\n blobSASPermissions.add = true;\n }\n if (permissionLike.create) {\n blobSASPermissions.create = true;\n }\n if (permissionLike.write) {\n blobSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n blobSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n blobSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n blobSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n blobSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n blobSASPermissions.execute = true;\n }\n return blobSASPermissions;\n };\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * @returns {string} A string which represents the BlobSASPermissions\n * @memberof BlobSASPermissions\n */\n BlobSASPermissions.prototype.toString = function () {\n var permissions = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n return permissions.join(\"\");\n };\n return BlobSASPermissions;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n/**\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.\n * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.\n * Once all the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. 
It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n *\n * @export\n * @class ContainerSASPermissions\n */\nvar ContainerSASPermissions = /** @class */ (function () {\n function ContainerSASPermissions() {\n /**\n * Specifies Read access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.read = false;\n /**\n * Specifies Add access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.add = false;\n /**\n * Specifies Create access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.create = false;\n /**\n * Specifies Write access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.write = false;\n /**\n * Specifies Delete access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.delete = false;\n /**\n * Specifies Delete version access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.deleteVersion = false;\n /**\n * Specifies List access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.list = false;\n /**\n * Specfies Tag access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.tag = false;\n /**\n * Specifies Move access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.move = false;\n /**\n * Specifies Execute access granted.\n *\n * @type {boolean}\n * @memberof ContainerSASPermissions\n */\n this.execute = false;\n }\n /**\n * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @static\n * @param {string} permissions\n * @returns {ContainerSASPermissions}\n * @memberof ContainerSASPermissions\n */\n ContainerSASPermissions.parse = function (permissions) {\n var containerSASPermissions = new ContainerSASPermissions();\n for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {\n var char = permissions_1[_i];\n switch (char) {\n case \"r\":\n containerSASPermissions.read = true;\n break;\n case \"a\":\n containerSASPermissions.add = true;\n break;\n case \"c\":\n containerSASPermissions.create = true;\n break;\n case \"w\":\n containerSASPermissions.write = true;\n break;\n case \"d\":\n containerSASPermissions.delete = true;\n break;\n case \"l\":\n containerSASPermissions.list = true;\n break;\n case \"t\":\n containerSASPermissions.tag = true;\n break;\n case \"x\":\n containerSASPermissions.deleteVersion = true;\n break;\n case \"m\":\n containerSASPermissions.move = true;\n break;\n case \"e\":\n containerSASPermissions.execute = true;\n break;\n default:\n throw new RangeError(\"Invalid permission \" + char);\n }\n }\n return containerSASPermissions;\n };\n /**\n * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @static\n * @param {ContainerSASPermissionsLike} permissionLike\n * @returns {ContainerSASPermissions}\n * @memberof ContainerSASPermissions\n */\n ContainerSASPermissions.from = function (permissionLike) {\n var containerSASPermissions = new ContainerSASPermissions();\n if (permissionLike.read) {\n containerSASPermissions.read = true;\n }\n if (permissionLike.add) {\n containerSASPermissions.add = true;\n }\n if 
(permissionLike.create) {\n containerSASPermissions.create = true;\n }\n if (permissionLike.write) {\n containerSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n containerSASPermissions.delete = true;\n }\n if (permissionLike.list) {\n containerSASPermissions.list = true;\n }\n if (permissionLike.deleteVersion) {\n containerSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n containerSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n containerSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n containerSASPermissions.execute = true;\n }\n return containerSASPermissions;\n };\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * The order of the characters should be as specified here to ensure correctness.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @returns {string}\n * @memberof ContainerSASPermissions\n */\n ContainerSASPermissions.prototype.toString = function () {\n var permissions = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n return permissions.join(\"\");\n };\n return ContainerSASPermissions;\n}());\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n *\n * @export\n * @class UserDelegationKeyCredential\n */\nvar UserDelegationKeyCredential = /** @class */ (function () {\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param {string} accountName\n * @param {UserDelegationKey} userDelegationKey\n * @memberof UserDelegationKeyCredential\n */\n function UserDelegationKeyCredential(accountName, userDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param {string} stringToSign\n * @returns {string}\n * @memberof UserDelegationKeyCredential\n */\n UserDelegationKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n return crypto.createHmac(\"sha256\", this.key)\n .update(stringToSign, \"utf8\")\n .digest(\"base64\");\n };\n return UserDelegationKeyCredential;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n/**\n * Generate SasIPRange format string. For example:\n *\n * \"8.8.8.8\" or \"1.1.1.1-255.255.255.255\"\n *\n * @export\n * @param {SasIPRange} ipRange\n * @returns {string}\n */\nfunction ipRangeToString(ipRange) {\n return ipRange.end ? ipRange.start + \"-\" + ipRange.end : ipRange.start;\n}\n\n// Copyright (c) Microsoft Corporation. 
All rights reserved.\n(function (SASProtocol) {\n /**\n * Protocol that allows HTTPS only\n */\n SASProtocol[\"Https\"] = \"https\";\n /**\n * Protocol that allows both HTTPS and HTTP\n */\n SASProtocol[\"HttpsAndHttp\"] = \"https,http\";\n})(exports.SASProtocol || (exports.SASProtocol = {}));\n/**\n * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly\n * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}\n * types. Once generated, it can be encoded into a {@code String} and appended to a URL directly (though caution should\n * be taken here in case there are existing query parameters, which might affect the appropriate means of appending\n * these query parameters).\n *\n * NOTE: Instances of this class are immutable.\n *\n * @export\n * @class SASQueryParameters\n */\nvar SASQueryParameters = /** @class */ (function () {\n function SASQueryParameters(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) {\n this.version = version;\n this.signature = signature;\n if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== \"string\") {\n // SASQueryParametersOptions\n this.permissions = permissionsOrOptions.permissions;\n this.services = permissionsOrOptions.services;\n this.resourceTypes = permissionsOrOptions.resourceTypes;\n this.protocol = permissionsOrOptions.protocol;\n this.startsOn = permissionsOrOptions.startsOn;\n this.expiresOn = permissionsOrOptions.expiresOn;\n this.ipRangeInner = permissionsOrOptions.ipRange;\n this.identifier = permissionsOrOptions.identifier;\n this.resource = permissionsOrOptions.resource;\n this.cacheControl = permissionsOrOptions.cacheControl;\n this.contentDisposition = permissionsOrOptions.contentDisposition;\n this.contentEncoding = permissionsOrOptions.contentEncoding;\n this.contentLanguage = permissionsOrOptions.contentLanguage;\n this.contentType = permissionsOrOptions.contentType;\n if (permissionsOrOptions.userDelegationKey) {\n this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;\n this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;\n this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;\n this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;\n this.signedService = permissionsOrOptions.userDelegationKey.signedService;\n this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;\n this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;\n this.correlationId = permissionsOrOptions.correlationId;\n }\n }\n else {\n this.services = services;\n this.resourceTypes = resourceTypes;\n this.expiresOn = expiresOn;\n this.permissions = permissionsOrOptions;\n this.protocol = protocol;\n this.startsOn = startsOn;\n this.ipRangeInner = ipRange;\n this.identifier = identifier;\n this.resource = resource;\n this.cacheControl = cacheControl;\n this.contentDisposition = contentDisposition;\n this.contentEncoding = contentEncoding;\n this.contentLanguage = contentLanguage;\n this.contentType = contentType;\n if (userDelegationKey) {\n this.signedOid = userDelegationKey.signedObjectId;\n this.signedTenantId = userDelegationKey.signedTenantId;\n this.signedStartsOn = 
userDelegationKey.signedStartsOn;\n this.signedExpiresOn = userDelegationKey.signedExpiresOn;\n this.signedService = userDelegationKey.signedService;\n this.signedVersion = userDelegationKey.signedVersion;\n this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;\n this.correlationId = correlationId;\n }\n }\n }\n Object.defineProperty(SASQueryParameters.prototype, \"ipRange\", {\n /**\n * Optional. IP range allowed for this SAS.\n *\n * @readonly\n * @type {(SasIPRange | undefined)}\n * @memberof SASQueryParameters\n */\n get: function () {\n if (this.ipRangeInner) {\n return {\n end: this.ipRangeInner.end,\n start: this.ipRangeInner.start\n };\n }\n return undefined;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Encodes all SAS query parameters into a string that can be appended to a URL.\n *\n * @returns {string}\n * @memberof SASQueryParameters\n */\n SASQueryParameters.prototype.toString = function () {\n var params = [\n \"sv\",\n \"ss\",\n \"srt\",\n \"spr\",\n \"st\",\n \"se\",\n \"sip\",\n \"si\",\n \"skoid\",\n \"sktid\",\n \"skt\",\n \"ske\",\n \"sks\",\n \"skv\",\n \"sr\",\n \"sp\",\n \"sig\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"saoid\",\n \"scid\"\n ];\n var queries = [];\n for (var _i = 0, params_1 = params; _i < params_1.length; _i++) {\n var param = params_1[_i];\n switch (param) {\n case \"sv\":\n this.tryAppendQueryParameter(queries, param, this.version);\n break;\n case \"ss\":\n this.tryAppendQueryParameter(queries, param, this.services);\n break;\n case \"srt\":\n this.tryAppendQueryParameter(queries, param, this.resourceTypes);\n break;\n case \"spr\":\n this.tryAppendQueryParameter(queries, param, this.protocol);\n break;\n case \"st\":\n this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined);\n break;\n case \"se\":\n this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined);\n break;\n case \"sip\":\n this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined);\n break;\n case \"si\":\n this.tryAppendQueryParameter(queries, param, this.identifier);\n break;\n case \"skoid\": // Signed object ID\n this.tryAppendQueryParameter(queries, param, this.signedOid);\n break;\n case \"sktid\": // Signed tenant ID\n this.tryAppendQueryParameter(queries, param, this.signedTenantId);\n break;\n case \"skt\": // Signed key start time\n this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined);\n break;\n case \"ske\": // Signed key expiry time\n this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined);\n break;\n case \"sks\": // Signed key service\n this.tryAppendQueryParameter(queries, param, this.signedService);\n break;\n case \"skv\": // Signed key version\n this.tryAppendQueryParameter(queries, param, this.signedVersion);\n break;\n case \"sr\":\n this.tryAppendQueryParameter(queries, param, this.resource);\n break;\n case \"sp\":\n this.tryAppendQueryParameter(queries, param, this.permissions);\n break;\n case \"sig\":\n this.tryAppendQueryParameter(queries, param, this.signature);\n break;\n case \"rscc\":\n this.tryAppendQueryParameter(queries, param, this.cacheControl);\n break;\n case \"rscd\":\n this.tryAppendQueryParameter(queries, param, this.contentDisposition);\n break;\n case \"rsce\":\n this.tryAppendQueryParameter(queries, param, this.contentEncoding);\n break;\n case \"rscl\":\n this.tryAppendQueryParameter(queries, param, this.contentLanguage);\n break;\n case \"rsct\":\n this.tryAppendQueryParameter(queries, param, this.contentType);\n break;\n case \"saoid\":\n this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);\n break;\n case \"scid\":\n this.tryAppendQueryParameter(queries, param, this.correlationId);\n break;\n }\n }\n return queries.join(\"&\");\n };\n /**\n * A private helper method used to filter and append query key/value pairs into an array.\n *\n * @private\n * @param {string[]} queries\n * @param {string} key\n * @param {string} [value]\n * @returns {void}\n * @memberof SASQueryParameters\n */\n SASQueryParameters.prototype.tryAppendQueryParameter = function (queries, key, value) {\n if (!value) {\n return;\n }\n key = encodeURIComponent(key);\n value = encodeURIComponent(value);\n if (key.length > 0 && value.length > 0) {\n queries.push(key + \"=\" + value);\n }\n };\n return SASQueryParameters;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\nfunction generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) {\n var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n var sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential\n ? 
sharedKeyCredentialOrUserDelegationKey\n : undefined;\n var userDelegationKeyCredential;\n if (sharedKeyCredential === undefined && accountName !== undefined) {\n userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey);\n }\n if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {\n throw TypeError(\"Invalid sharedKeyCredential, userDelegationKey or accountName.\");\n }\n // Version 2019-12-12 adds support for the blob tags permission.\n // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string\n if (version >= \"2018-11-09\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);\n }\n else {\n // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.\n if (version >= \"2020-02-10\") {\n return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential);\n }\n else {\n return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential);\n }\n }\n }\n if (version >= \"2015-04-05\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);\n }\n else {\n throw new RangeError(\"'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.\");\n }\n }\n throw new RangeError(\"'version' must be >= '2015-04-05'.\");\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param {BlobSASSignatureValues} blobSASSignatureValues\n * @param {StorageSharedKeyCredential} sharedKeyCredential\n * @returns {SASQueryParameters}\n */\nfunction generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n if (!blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) {\n throw new RangeError(\"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\");\n }\n var resource = \"c\";\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n }\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n var verifiedPermissions;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n else {\n verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n }\n // Signature is generated on the un-url-encoded values.\n var stringToSign = [\n verifiedPermissions ? 
verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\"\n ].join(\"\\n\");\n var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param {BlobSASSignatureValues} blobSASSignatureValues\n * @param {StorageSharedKeyCredential} sharedKeyCredential\n * @returns {SASQueryParameters}\n */\nfunction generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n if (!blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) {\n throw new RangeError(\"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\");\n }\n var resource = \"c\";\n var timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n }\n else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n var verifiedPermissions;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n else {\n verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n }\n // Signature is generated on the un-url-encoded values.\n var stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : \"\"\n ].join(\"\\n\");\n var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param {BlobSASSignatureValues} blobSASSignatureValues\n * @param {UserDelegationKeyCredential} userDelegationKeyCredential\n * @returns {SASQueryParameters}\n */\nfunction generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\");\n }\n var resource = \"c\";\n var timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n }\n else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n var verifiedPermissions;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n else {\n verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n }\n // Signature is generated on the un-url-encoded values.\n var stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n ].join(\"\\n\");\n var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey);\n}\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param {BlobSASSignatureValues} blobSASSignatureValues\n * @param {UserDelegationKeyCredential} userDelegationKeyCredential\n * @returns {SASQueryParameters}\n */\nfunction generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\");\n }\n var resource = \"c\";\n var timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n }\n else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n var verifiedPermissions;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n else {\n verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();\n }\n }\n // Signature is generated on the un-url-encoded values.\n var stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined,\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n ].join(\"\\n\");\n var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId);\n}\nfunction getCanonicalName(accountName, containerName, blobName) {\n // Container: \"/blob/account/containerName\"\n // Blob: \"/blob/account/containerName/blobName\"\n var elements = [\"/blob/\" + accountName + \"/\" + containerName];\n if (blobName) {\n elements.push(\"/\" + blobName);\n }\n return elements.join(\"\");\n}\nfunction SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) {\n var version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION;\n if (blobSASSignatureValues.snapshotTime && version < \"2018-11-09\") {\n throw RangeError(\"'version' must be >= '2018-11-09' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {\n throw RangeError(\"Must provide 'blobName' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.versionId && version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {\n throw RangeError(\"Must provide 'blobName' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'x' permission.\");\n }\n if (blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\") {\n throw RangeError(\"'version' must be >= '2019-12-12' when providing 't' permission.\");\n }\n if (version < \"2020-02-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.\");\n }\n if (version < \"2020-02-10\" &&\n (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.\");\n }\n blobSASSignatureValues.version = version;\n return blobSASSignatureValues;\n}\n\n/**\n * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,\n * append blob, or page blob.\n *\n * @export\n * @class BlobClient\n */\nvar BlobClient = /** @class */ (function (_super) {\n tslib.__extends(BlobClient, _super);\n function BlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {\n var _a;\n var _this = this;\n options = options || {};\n var pipeline;\n var url;\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var blobName = blobNameOrOptions;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n _this = _super.call(this, url, pipeline) || this;\n (_a = _this.getBlobAndContainerNamesFromUrl(), _this._name = _a.blobName, _this._containerName = _a.containerName);\n _this.blobContext = new Blob$1(_this.storageClientContext);\n _this._snapshot = getURLParameter(_this.url, URLConstants.Parameters.SNAPSHOT);\n _this._versionId = getURLParameter(_this.url, URLConstants.Parameters.VERSIONID);\n return _this;\n }\n Object.defineProperty(BlobClient.prototype, \"name\", {\n /**\n * The name of the blob.\n */\n get: function () {\n return this._name;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobClient.prototype, \"containerName\", {\n /**\n * The name of the storage container the blob is associated with.\n */\n get: function () {\n return this._containerName;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param {string} snapshot The snapshot timestamp.\n * @returns {BlobClient} A new BlobClient object identical to the source but with the specified snapshot timestamp\n * @memberof BlobClient\n */\n BlobClient.prototype.withSnapshot = function (snapshot) {\n return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);\n };\n /**\n * Creates a new BlobClient object pointing to a version of this blob.\n * Provide \"\" will remove the versionId and return a Client to the base blob.\n *\n * @param {string} versionId The versionId.\n * @returns {BlobClient} A new BlobClient object pointing to the version of this blob.\n * @memberof BlobClient\n */\n BlobClient.prototype.withVersion = function (versionId) {\n return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? 
undefined : versionId), this.pipeline);\n };\n /**\n * Creates a AppendBlobClient object.\n *\n * @returns {AppendBlobClient}\n * @memberof BlobClient\n */\n BlobClient.prototype.getAppendBlobClient = function () {\n return new AppendBlobClient(this.url, this.pipeline);\n };\n /**\n * Creates a BlockBlobClient object.\n *\n * @returns {BlockBlobClient}\n * @memberof BlobClient\n */\n BlobClient.prototype.getBlockBlobClient = function () {\n return new BlockBlobClient(this.url, this.pipeline);\n };\n /**\n * Creates a PageBlobClient object.\n *\n * @returns {PageBlobClient}\n * @memberof BlobClient\n */\n BlobClient.prototype.getPageBlobClient = function () {\n return new PageBlobClient(this.url, this.pipeline);\n };\n /**\n * Reads or downloads a blob from the system, including its metadata and properties.\n * You can also call Get Blob to read a snapshot.\n *\n * * In Node.js, data returns in a Readable stream readableStreamBody\n * * In browsers, data returns in a promise blobBody\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob\n *\n * @param {number} [offset] From which position of the blob to download, >= 0\n * @param {number} [count] How much data to be downloaded, > 0. Will download to the end when undefined\n * @param {BlobDownloadOptions} [options] Optional options to Blob Download operation.\n * @returns {Promise}\n * @memberof BlobClient\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);\n * console.log(\"Downloaded blob content:\", downloaded.toString());\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * Example usage (browser):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);\n * console.log(\n * \"Downloaded blob content\",\n * downloaded\n * );\n *\n * async function blobToString(blob: Blob): Promise {\n * const fileReader = new FileReader();\n * return new Promise((resolve, reject) => {\n * fileReader.onloadend = (ev: any) => {\n * resolve(ev.target!.result);\n * };\n * fileReader.onerror = reject;\n * fileReader.readAsText(blob);\n * });\n * }\n * ```\n */\n BlobClient.prototype.download = function (offset, count, options) {\n var _a;\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, res_1, wrappedRes, e_1;\n var _this = this;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n _b = createSpan(\"BlobClient-download\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.download({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress,\n range: offset === 0 && !count ? 
undefined : rangeToString({ offset: offset, count: count }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n spanOptions: spanOptions\n })];\n case 2:\n res_1 = _c.sent();\n wrappedRes = tslib.__assign(tslib.__assign({}, res_1), { _response: res_1._response, objectReplicationDestinationPolicyId: res_1.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res_1.objectReplicationRules) });\n // We support retrying when download stream unexpected ends in Node.js runtime\n // Following code shouldn't be bundled into browser build, however some\n // bundlers may try to bundle following code and \"FileReadResponse.ts\".\n // In this case, \"FileDownloadResponse.browser.ts\" will be used as a shim of \"FileDownloadResponse.ts\"\n // The config is in package.json \"browser\" field\n if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {\n // TODO: Default value or make it a required parameter?\n options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;\n }\n if (res_1.contentLength === undefined) {\n throw new RangeError(\"File download response doesn't contain valid content length header\");\n }\n if (!res_1.etag) {\n throw new RangeError(\"File download response doesn't contain valid etag header\");\n }\n return [2 /*return*/, new BlobDownloadResponse(wrappedRes, function (start) { return tslib.__awaiter(_this, void 0, void 0, function () {\n var updatedOptions;\n var _a;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n updatedOptions = {\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ifMatch: options.conditions.ifMatch || res_1.etag,\n ifModifiedSince: options.conditions.ifModifiedSince,\n ifNoneMatch: options.conditions.ifNoneMatch,\n ifUnmodifiedSince: options.conditions.ifUnmodifiedSince,\n ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions\n },\n range: rangeToString({\n count: offset + res_1.contentLength - start,\n offset: start\n }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey\n };\n return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal }, updatedOptions))];\n case 1: \n // Debug purpose only\n // console.log(\n // `Read from internal stream, range: ${\n // updatedOptions.range\n // }, options: ${JSON.stringify(updatedOptions)}`\n // );\n return [2 /*return*/, (_b.sent()).readableStreamBody];\n }\n });\n }); }, offset, res_1.contentLength, {\n maxRetryRequests: options.maxRetryRequests,\n onProgress: options.onProgress\n })];\n case 3:\n e_1 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns true if the Azure blob resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing blob might be deleted by other clients or\n * applications. 
Vice versa new blobs might be added by other clients or applications after this\n * function completes.\n *\n * @param {BlobExistsOptions} [options] options to Exists operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.exists = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_2;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobClient-exists\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.getProperties({\n abortSignal: options.abortSignal,\n customerProvidedKey: options.customerProvidedKey,\n conditions: options.conditions,\n tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })\n })];\n case 2:\n _b.sent();\n return [2 /*return*/, true];\n case 3:\n e_2 = _b.sent();\n if (e_2.statusCode === 404) {\n span.setStatus({\n code: api.CanonicalCode.NOT_FOUND,\n message: \"Expected exception when checking blob existence\"\n });\n return [2 /*return*/, false];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_2.message\n });\n throw e_2;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns all user-defined metadata, standard HTTP properties, and system properties\n * for the blob. It does not return the content of the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param {BlobGetPropertiesOptions} [options] Optional options to Get Properties operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.getProperties = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, res, e_3;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-getProperties\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blobContext.getProperties({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n cpkInfo: options.customerProvidedKey,\n spanOptions: spanOptions\n })];\n case 2:\n res = _c.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) })];\n case 3:\n e_3 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_3.message\n });\n throw e_3;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param {BlobDeleteOptions} [options] Optional options to Blob Delete operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.delete = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_4;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-delete\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.deleteMethod({\n abortSignal: options.abortSignal,\n deleteSnapshots: options.deleteSnapshots,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_4 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_4.message\n });\n throw e_4;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param {BlobDeleteOptions} [options] Optional options to Blob Delete operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.deleteIfExists = function (options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, spanOptions, res, e_5;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"BlobClient-deleteIfExists\", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_5 = _d.sent();\n if (((_a = e_5.details) === null || _a === void 0 ? void 0 : _a.errorCode) === \"BlobNotFound\") {\n span.setStatus({\n code: api.CanonicalCode.NOT_FOUND,\n message: \"Expected exception when deleting a blob or snapshot only if it exists.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_5.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_5.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_5.message\n });\n throw e_5;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Restores the contents and metadata of soft deleted blob and any associated\n * soft deleted snapshots. 
Undelete Blob is supported only on version 2017-07-29\n * or later.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob\n *\n * @param {BlobUndeleteOptions} [options] Optional options to Blob Undelete operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.undelete = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_6;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobClient-undelete\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.undelete({\n abortSignal: options.abortSignal,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_6 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_6.message\n });\n throw e_6;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets system properties on the blob.\n *\n * If no value provided, or no value provided for the specified blob HTTP headers,\n * these blob HTTP headers without a value will be cleared.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param {BlobHTTPHeaders} [blobHTTPHeaders] If no value provided, or no value provided for\n * the specified blob HTTP headers, these blob HTTP\n * headers without a value will be cleared.\n * @param {BlobSetHTTPHeadersOptions} [options] Optional options to Blob Set HTTP Headers operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.setHTTPHeaders = function (blobHTTPHeaders, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_7;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-setHTTPHeaders\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blobContext.setHTTPHeaders({\n abortSignal: options.abortSignal,\n blobHTTPHeaders: blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n cpkInfo: options.customerProvidedKey,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_7 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_7.message\n });\n throw e_7;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets user-defined metadata for the specified blob as one or more name-value pairs.\n *\n * If no option provided, or no metadata defined in the parameter, the blob\n * metadata will be removed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata\n *\n * @param {Metadata} [metadata] Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param {BlobSetMetadataOptions} [options] Optional options to Set Metadata operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.setMetadata = function (metadata, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_8;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-setMetadata\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blobContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: metadata,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_8 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_8.message\n });\n throw e_8;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets tags on the underlying blob.\n * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.\n * Valid tag key and value characters include lower and upper case letters, digits (0-9),\n * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').\n *\n * @param {Tags} tags\n * @param {BlobSetTagsOptions} [options={}]\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.setTags = function (tags, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_9;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-setTags\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.setTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n spanOptions: spanOptions,\n tags: toBlobTags(tags)\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_9 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_9.message\n });\n throw e_9;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the tags associated with the underlying blob.\n *\n * @param {BlobGetTagsOptions} [options={}]\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.getTags = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, response, wrappedResponse, e_10;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-getTags\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.getTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2:\n response = _c.sent();\n wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} });\n return [2 /*return*/, wrappedResponse];\n case 3:\n e_10 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_10.message\n });\n throw e_10;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the blob.\n *\n * @param {string} [proposeLeaseId] Initial proposed lease Id.\n * @returns {BlobLeaseClient} A new BlobLeaseClient object for managing leases on the blob.\n * @memberof BlobClient\n */\n BlobClient.prototype.getBlobLeaseClient = function (proposeLeaseId) {\n return new BlobLeaseClient(this, proposeLeaseId);\n };\n /**\n * Creates a read-only snapshot of a blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob\n *\n * @param {BlobCreateSnapshotOptions} [options] Optional options to the Blob Create Snapshot operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.createSnapshot = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_11;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-createSnapshot\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blobContext.createSnapshot({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_11 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_11.message\n });\n throw e_11;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * This method returns a long running operation poller that allows you to wait\n * indefinitely until the copy is completed.\n * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.\n * Note that the onProgress callback will not be invoked if the operation completes in the first\n * request, and attempting to cancel a completed copy will result in an error being thrown.\n *\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * Example using automatic polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using manual polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * while (!poller.isDone()) {\n * await poller.poll();\n * }\n * const result = copyPoller.getResult();\n * ```\n *\n * Example using progress updates:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * onProgress(state) {\n * console.log(`Progress: ${state.copyProgress}`);\n * }\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using a changing polling interval (default 15 seconds):\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * intervalInMs: 1000 // poll blob every 1 second for copy progress\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using copy cancellation:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * // cancel operation after starting it.\n * try {\n * await copyPoller.cancelOperation();\n * // calls to get the result now throw PollerCancelledError\n * await copyPoller.getResult();\n * } catch (err) {\n * if (err.name === 'PollerCancelledError') {\n * console.log('The copy was cancelled.');\n * }\n * }\n * ```\n *\n * @param {string} copySource url to the source Azure Blob/File.\n * @param {BlobBeginCopyFromURLOptions} [options] Optional options to the Blob Start Copy From URL operation.\n */\n BlobClient.prototype.beginCopyFromURL = function (copySource, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var client, poller;\n var _this = this;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n client = {\n abortCopyFromURL: function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n return _this.abortCopyFromURL.apply(_this, args);\n },\n getProperties: function () {\n var args = 
[];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n return _this.getProperties.apply(_this, args);\n },\n startCopyFromURL: function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n return _this.startCopyFromURL.apply(_this, args);\n }\n };\n poller = new BlobBeginCopyFromUrlPoller({\n blobClient: client,\n copySource: copySource,\n intervalInMs: options.intervalInMs,\n onProgress: options.onProgress,\n resumeFrom: options.resumeFrom,\n startCopyFromURLOptions: options\n });\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n return [4 /*yield*/, poller.poll()];\n case 1:\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n _a.sent();\n return [2 /*return*/, poller];\n }\n });\n });\n };\n /**\n * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero\n * length and full metadata. Version 2012-02-12 and newer.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob\n *\n * @param {string} copyId Id of the Copy From URL operation.\n * @param {BlobAbortCopyFromURLOptions} [options] Optional options to the Blob Abort Copy From URL operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.abortCopyFromURL = function (copyId, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_12;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobClient-abortCopyFromURL\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_12 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_12.message\n });\n throw e_12;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. 
It will not\n * return a response until the copy is complete.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url\n *\n * @param {string} copySource The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication\n * @param {BlobSyncCopyFromURLOptions} [options={}]\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.syncCopyFromURL = function (copySource, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_13;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-syncCopyFromURL\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.copyFromURL(copySource, {\n abortSignal: options.abortSignal,\n metadata: options.metadata,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince\n },\n sourceContentMD5: options.sourceContentMD5,\n blobTagsString: toBlobTagsString(options.tags),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_13 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_13.message\n });\n throw e_13;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier\n *\n * @param {BlockBlobTier | PremiumPageBlobTier | string} tier The tier to be set on the blob. Valid values are Hot, Cool, or Archive.\n * @param {BlobSetTierOptions} [options] Optional options to the Blob Set Tier operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.setAccessTier = function (tier, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_14;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-setAccessTier\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.setTier(toAccessTier(tier), {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n rehydratePriority: options.rehydratePriority,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_14 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_14.message\n });\n throw e_14;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n BlobClient.prototype.downloadToBuffer = function (param1, param2, param3, param4) {\n if (param4 === void 0) { param4 = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var buffer, offset, count, options, _a, span, spanOptions, response, transferProgress_1, batch, _loop_1, off, e_15;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n offset = 0;\n count = 0;\n options = param4;\n if (param1 instanceof Buffer) {\n buffer = param1;\n offset = param2 || 0;\n count = typeof param3 === \"number\" ? param3 : 0;\n }\n else {\n offset = typeof param1 === \"number\" ? param1 : 0;\n count = typeof param2 === \"number\" ? param2 : 0;\n options = param3 || {};\n }\n _a = createSpan(\"BlobClient-downloadToBuffer\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 5, 6, 7]);\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0) {\n throw new RangeError(\"blockSize option must be >= 0\");\n }\n if (options.blockSize === 0) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n if (offset < 0) {\n throw new RangeError(\"offset option must be >= 0\");\n }\n if (count && count <= 0) {\n throw new RangeError(\"count option must be > 0\");\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n if (!!count) return [3 /*break*/, 3];\n return [4 /*yield*/, this.getProperties(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n response = _b.sent();\n count = response.contentLength - offset;\n if (count < 0) {\n throw new RangeError(\"offset \" + offset + \" shouldn't be larger than blob size \" + response.contentLength);\n }\n _b.label = 3;\n case 3:\n // Allocate the buffer of size = count if the buffer is not provided\n if (!buffer) {\n try {\n buffer = Buffer.alloc(count);\n }\n catch (error) {\n throw new Error(\"Unable to allocate the buffer of size: \" + count + \"(in bytes). 
Please try passing your own buffer to the \\\"downloadToBuffer\\\" method or try using other methods like \\\"download\\\" or \\\"downloadToFile\\\".\\t \" + error.message);\n }\n }\n if (buffer.length < count) {\n throw new RangeError(\"The buffer's size should be equal to or larger than the request count of bytes: \" + count);\n }\n transferProgress_1 = 0;\n batch = new Batch(options.concurrency);\n _loop_1 = function (off) {\n batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n var chunkEnd, response, stream;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n chunkEnd = offset + count;\n if (off + options.blockSize < chunkEnd) {\n chunkEnd = off + options.blockSize;\n }\n return [4 /*yield*/, this.download(off, chunkEnd - off, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n maxRetryRequests: options.maxRetryRequestsPerBlock,\n customerProvidedKey: options.customerProvidedKey,\n tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })\n })];\n case 1:\n response = _a.sent();\n stream = response.readableStreamBody;\n return [4 /*yield*/, streamToBuffer(stream, buffer, off - offset, chunkEnd - offset)];\n case 2:\n _a.sent();\n // Update progress after block is downloaded, in case of block trying\n // Could provide finer grained progress updating inside HTTP requests,\n // only if convenience layer download try is enabled\n transferProgress_1 += chunkEnd - off;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress_1 });\n }\n return [2 /*return*/];\n }\n });\n }); });\n };\n for (off = offset; off < offset + count; off = off + options.blockSize) {\n _loop_1(off);\n }\n return [4 /*yield*/, batch.do()];\n case 4:\n _b.sent();\n return [2 /*return*/, buffer];\n case 5:\n e_15 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_15.message\n });\n throw e_15;\n case 6:\n span.end();\n return [7 /*endfinally*/];\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob to a local file.\n * Fails if the the given file path already exits.\n * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.\n *\n * @param {string} filePath\n * @param {number} [offset] From which position of the block blob to download.\n * @param {number} [count] How much data to be downloaded. 
Will download to the end when passing undefined.\n * @param {BlobDownloadOptions} [options] Options to Blob download options.\n * @returns {Promise} The response data for blob download operation,\n * but with readableStreamBody set to undefined since its\n * content is already read and written into a local file\n * at the specified path.\n * @memberof BlobClient\n */\n BlobClient.prototype.downloadToFile = function (filePath, offset, count, options) {\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, response, e_16;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobClient-downloadToFile\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 5, 6, 7]);\n return [4 /*yield*/, this.download(offset, count, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n response = _b.sent();\n if (!response.readableStreamBody) return [3 /*break*/, 4];\n return [4 /*yield*/, readStreamToLocalFile(response.readableStreamBody, filePath)];\n case 3:\n _b.sent();\n _b.label = 4;\n case 4:\n // The stream is no longer accessible so setting it to undefined.\n response.blobDownloadStream = undefined;\n return [2 /*return*/, response];\n case 5:\n e_16 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_16.message\n });\n throw e_16;\n case 6:\n span.end();\n return [7 /*endfinally*/];\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n BlobClient.prototype.getBlobAndContainerNamesFromUrl = function () {\n var containerName;\n var blobName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`\n // http://localhost:10001/devstoreaccount1/containername/blob\n var parsedUrl = coreHttp.URLBuilder.parse(this.url);\n if (parsedUrl.getHost().split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername/blob\".\n // .getPath() -> /containername/blob\n var pathComponents = parsedUrl.getPath().match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents[1];\n blobName = pathComponents[3];\n }\n else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/containername/blob\n // .getPath() -> /devstoreaccount1/containername/blob\n var pathComponents = parsedUrl.getPath().match(\"/([^/]*)/([^/]*)(/(.*))?\");\n containerName = pathComponents[2];\n blobName = pathComponents[4];\n }\n else {\n // \"https://customdomain.com/containername/blob\".\n // .getPath() -> /containername/blob\n var pathComponents = parsedUrl.getPath().match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents[1];\n blobName = pathComponents[3];\n }\n // decode the encoded blobName, containerName - to get all the special characters that might be present in them\n containerName = decodeURIComponent(containerName);\n blobName = decodeURIComponent(blobName);\n // Azure Storage Server will replace \"\\\" with \"/\" in the blob names\n // doing the same in the SDK side so that the user doesn't have to replace \"\\\" instances in the blobName\n blobName = blobName.replace(/\\\\/g, \"/\");\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n return { blobName: blobName, containerName: containerName };\n }\n catch (error) {\n throw new Error(\"Unable to extract blobName and containerName with provided information.\");\n }\n };\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * @param {string} copySource url to the source Azure Blob/File.\n * @param {BlobStartCopyFromURLOptions} [options] Optional options to the Blob Start Copy From URL operation.\n * @returns {Promise}\n * @memberof BlobClient\n */\n BlobClient.prototype.startCopyFromURL = function (copySource, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_17;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlobClient-startCopyFromURL\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blobContext.startCopyFromURL(copySource, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions.tagConditions\n },\n rehydratePriority: options.rehydratePriority,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n sealBlob: options.sealBlob,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_17 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_17.message\n });\n throw e_17;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Only available for BlobClient constructed with a shared key credential.\n *\n * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param {BlobGenerateSasUrlOptions} options Optional parameters.\n * @returns {Promise} The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n * @memberof BlobClient\n */\n BlobClient.prototype.generateSasUrl = function (options) {\n var _this = this;\n return new Promise(function (resolve) {\n if (!(_this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\"Can only generate the SAS when the client is initialized with a shared key credential\");\n }\n var sas = generateBlobSASQueryParameters(tslib.__assign({ containerName: _this._containerName, blobName: _this._name, snapshotTime: _this._snapshot, versionId: _this._versionId }, options), _this.credential).toString();\n resolve(appendToURLQuery(_this.url, sas));\n });\n };\n return BlobClient;\n}(StorageClient));\n/**\n * AppendBlobClient defines a set of operations applicable to append blobs.\n *\n * @export\n * @class AppendBlobClient\n * @extends {BlobClient}\n */\nvar AppendBlobClient = /** @class */ (function (_super) {\n tslib.__extends(AppendBlobClient, _super);\n function AppendBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {\n var _this = this;\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n var pipeline;\n var url;\n options = options || {};\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;\n url = urlOrConnectionString;\n options = blobNameOrOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof 
credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n // The second parameter is undefined. Use anonymous credential.\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var blobName = blobNameOrOptions;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n _this = _super.call(this, url, pipeline) || this;\n _this.appendBlobContext = new AppendBlob(_this.storageClientContext);\n return _this;\n }\n /**\n * Creates a new AppendBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param {string} snapshot The snapshot timestamp.\n * @returns {AppendBlobClient} A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.\n * @memberof AppendBlobClient\n */\n AppendBlobClient.prototype.withSnapshot = function (snapshot) {\n return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);\n };\n /**\n * Creates a 0-length append blob. 
Call AppendBlock to append data to an append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param {AppendBlobCreateOptions} [options] Options to the Append Block Create operation.\n * @returns {Promise}\n * @memberof AppendBlobClient\n *\n * Example usage:\n *\n * ```js\n * const appendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await appendBlobClient.create();\n * ```\n */\n AppendBlobClient.prototype.create = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_18;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"AppendBlobClient-create\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.appendBlobContext.create(0, {\n abortSignal: options.abortSignal,\n blobHTTPHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n blobTagsString: toBlobTagsString(options.tags),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_18 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_18.message\n });\n throw e_18;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.\n * If the blob with the same name already exists, the content of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param {AppendBlobCreateIfNotExistsOptions} [options]\n * @returns {Promise}\n * @memberof AppendBlobClient\n */\n AppendBlobClient.prototype.createIfNotExists = function (options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, spanOptions, conditions, res, e_19;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"AppendBlobClient-createIfNotExists\", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;\n conditions = { ifNoneMatch: ETagAny };\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_19 = _d.sent();\n if (((_a = e_19.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === \"BlobAlreadyExists\") {\n span.setStatus({\n code: api.CanonicalCode.ALREADY_EXISTS,\n message: \"Expected exception when creating a blob only if it does not already exist.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_19.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_19.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_19.message\n });\n throw e_19;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Seals the append blob, making it read only.\n *\n * @param {AppendBlobSealOptions} [options={}]\n * @returns {Promise}\n * @memberof AppendBlobClient\n */\n AppendBlobClient.prototype.seal = function (options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_20;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"AppendBlobClient-seal\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.appendBlobContext.seal({\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_20 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_20.message\n });\n throw e_20;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Commits a new block of data to the end of the existing append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/append-block\n *\n * @param {HttpRequestBody} body Data to be appended.\n * @param {number} contentLength Length of the body in bytes.\n * @param {AppendBlobAppendBlockOptions} [options] Options to the Append Block operation.\n * @returns {Promise}\n * @memberof AppendBlobClient\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello World!\";\n *\n * // Create a new append blob and append data to the blob.\n * const newAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await newAppendBlobClient.create();\n * await newAppendBlobClient.appendBlock(content, content.length);\n *\n * // Append data to an existing append blob.\n * const existingAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await existingAppendBlobClient.appendBlock(content, content.length);\n * ```\n */\n AppendBlobClient.prototype.appendBlock = function (body, contentLength, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_21;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"AppendBlobClient-appendBlock\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, 
this.appendBlobContext.appendBlock(body, contentLength, {\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n onUploadProgress: options.onProgress,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_21 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_21.message\n });\n throw e_21;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob\n * where the contents are read from a source url.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url\n *\n * @param {string} sourceURL\n * The url to the blob that will be the source of the copy. A source blob in the same storage account can\n * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob\n * must either be public or must be authenticated via a shared access signature. If the source blob is\n * public, no authentication is required to perform the operation.\n * @param {number} sourceOffset Offset in source to be appended\n * @param {number} count Number of bytes to be appended as a block\n * @param {AppendBlobAppendBlockFromURLOptions} [options={}]\n * @returns {Promise}\n * @memberof AppendBlobClient\n */\n AppendBlobClient.prototype.appendBlockFromURL = function (sourceURL, sourceOffset, count, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_22;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"AppendBlobClient-appendBlockFromURL\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {\n abortSignal: options.abortSignal,\n sourceRange: rangeToString({ offset: sourceOffset, count: count }),\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n appendPositionAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_22 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_22.message\n });\n throw e_22;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n return AppendBlobClient;\n}(BlobClient));\n/**\n * BlockBlobClient defines a set of operations applicable to block blobs.\n *\n * @export\n * @class BlockBlobClient\n * @extends {BlobClient}\n */\nvar BlockBlobClient = /** @class */ (function (_super) {\n tslib.__extends(BlockBlobClient, _super);\n function BlockBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {\n var _this = this;\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n var pipeline;\n var url;\n options = options || {};\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var blobName = blobNameOrOptions;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n _this = _super.call(this, url, pipeline) || this;\n _this.blockBlobContext = new BlockBlob(_this.storageClientContext);\n _this._blobContext = new Blob$1(_this.storageClientContext);\n return _this;\n }\n /**\n * Creates a new BlockBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a URL to the base blob.\n *\n * @param {string} snapshot The snapshot timestamp.\n * @returns {BlockBlobClient} A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.withSnapshot = function (snapshot) {\n return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);\n };\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Quick query for a JSON or CSV formatted blob.\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Query and convert a blob to a string\n * const queryBlockBlobResponse = await blockBlobClient.query(\"select * from BlobStorage\");\n * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();\n * console.log(\"Query blob content:\", downloaded);\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * @param {string} query\n * @param {BlockBlobQueryOptions} [options={}]\n * @returns {Promise}\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.query = function (query, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, response, e_23;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n _b = createSpan(\"BlockBlobClient-query\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._blobContext.query({\n abortSignal: options.abortSignal,\n queryRequest: {\n expression: query,\n inputSerialization: toQuerySerialization(options.inputTextConfiguration),\n outputSerialization: toQuerySerialization(options.outputTextConfiguration)\n },\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2:\n response = _c.sent();\n return [2 /*return*/, new BlobQueryResponse(response, {\n abortSignal: options.abortSignal,\n onProgress: options.onProgress,\n onError: options.onError\n })];\n case 3:\n e_23 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_23.message\n });\n throw e_23;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link stageBlock} and {@link commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link uploadFile},\n * {@link uploadStream} or {@link uploadBrowserData} for better performance\n * with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param {HttpRequestBody} body Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param {number} contentLength Length of body in bytes. 
Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param {BlockBlobUploadOptions} [options] Options to the Block Blob Upload operation.\n * @returns {Promise} Response data for the Block Blob Upload operation.\n * @memberof BlockBlobClient\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n BlockBlobClient.prototype.upload = function (body, contentLength, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_24;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"BlockBlobClient-upload\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.upload(body, contentLength, {\n abortSignal: options.abortSignal,\n blobHTTPHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n onUploadProgress: options.onProgress,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_24 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_24.message\n });\n throw e_24;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a new Block Blob where the contents of the blob are read from a given URL.\n * This API is supported beginning with the 2020-04-08 version. Partial updates\n * are not supported with Put Blob from URL; the content of an existing blob is overwritten with\n * the content of the new blob. To perform partial updates to a block blob’s contents using a\n * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.\n *\n * @param {string} sourceURL Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. 
Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param {BlockBlobSyncUploadFromURLOptions} [options={}] Optional parameters.\n * @returns Promise\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.syncUploadFromURL = function (sourceURL, options) {\n var _a, _b, _c, _d, _e;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _f, span, spanOptions, e_25;\n return tslib.__generator(this, function (_g) {\n switch (_g.label) {\n case 0:\n options.conditions = options.conditions || {};\n _f = createSpan(\"BlockBlobClient-syncUploadFromURL\", options.tracingOptions), span = _f.span, spanOptions = _f.spanOptions;\n _g.label = 1;\n case 1:\n _g.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.putBlobFromUrl(0, sourceURL, tslib.__assign(tslib.__assign({}, options), { leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: {\n sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch,\n sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince,\n sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch,\n sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince,\n sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions\n }, cpkInfo: options.customerProvidedKey, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), spanOptions: spanOptions }))];\n case 2: return [2 /*return*/, _g.sent()];\n case 3:\n e_25 = _g.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_25.message\n });\n throw e_25;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Uploads the specified block to the block blob's \"staging area\" to be later\n * committed by a call to commitBlockList.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block\n *\n * @param {string} blockId A 64-byte value that is base64-encoded\n * @param {HttpRequestBody} body Data to upload to the staging area.\n * @param {number} contentLength Number of bytes to upload.\n * @param {BlockBlobStageBlockOptions} [options] Options to the Block Blob Stage Block operation.\n * @returns {Promise} Response data for the Block Blob Stage Block operation.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.stageBlock = function (blockId, body, contentLength, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_26;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlockBlobClient-stageBlock\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.stageBlock(blockId, contentLength, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n onUploadProgress: options.onProgress,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_26 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_26.message\n });\n throw e_26;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Stage Block From URL operation creates a new block to be committed as part\n * of a blob where the contents are read from a URL.\n * This API is available starting in version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url\n *\n * @param {string} blockId A 64-byte value that is base64-encoded\n * @param {string} sourceURL Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param {number} [offset] From which position of the blob to download, >= 0\n * @param {number} [count] How much data to be downloaded, > 0. 
Will download to the end when undefined\n * @param {BlockBlobStageBlockFromURLOptions} [options={}] Options to the Block Blob Stage Block From URL operation.\n * @returns {Promise} Response data for the Block Blob Stage Block From URL operation.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.stageBlockFromURL = function (blockId, sourceURL, offset, count, options) {\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_27;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlockBlobClient-stageBlockFromURL\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_27 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_27.message\n });\n throw e_27;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Writes a blob by specifying the list of block IDs that make up the blob.\n * In order to be written as part of a blob, a block must have been successfully written\n * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to\n * update a blob by uploading only those blocks that have changed, then committing the new and existing\n * blocks together. Any blocks not specified in the block list and permanently deleted.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list\n *\n * @param {string[]} blocks Array of 64-byte value that is base64-encoded\n * @param {BlockBlobCommitBlockListOptions} [options] Options to the Block Blob Commit Block List operation.\n * @returns {Promise} Response data for the Block Blob Commit Block List operation.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.commitBlockList = function (blocks, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_28;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"BlockBlobClient-commitBlockList\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.blockBlobContext.commitBlockList({ latest: blocks }, {\n abortSignal: options.abortSignal,\n blobHTTPHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_28 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_28.message\n });\n throw e_28;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns the list of blocks that have been uploaded as part of a block blob\n * using the specified block list filter.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list\n *\n * @param {BlockListType} listType Specifies whether to return the list of committed blocks,\n * the list of uncommitted blocks, or both lists together.\n * @param {BlockBlobGetBlockListOptions} [options] Options to the Block Blob Get Block List operation.\n * @returns {Promise} Response data for the Block Blob Get Block List operation.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.getBlockList = function (listType, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, res, e_29;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"BlockBlobClient-getBlockList\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.blockBlobContext.getBlockList(listType, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2:\n res = _c.sent();\n if (!res.committedBlocks) {\n res.committedBlocks = [];\n }\n if (!res.uncommittedBlocks) {\n res.uncommittedBlocks = [];\n }\n return [2 /*return*/, res];\n case 3:\n e_29 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_29.message\n });\n throw e_29;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n // High level functions\n /**\n * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @export\n * @param {Buffer | Blob | ArrayBuffer | ArrayBufferView} data Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView\n * @param {BlockBlobParallelUploadOptions} [options]\n * @returns {Promise}\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.uploadData = function (data, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, buffer_1, browserBlob_1;\n return tslib.__generator(this, function (_b) {\n _a = createSpan(\"BlockBlobClient-uploadData\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n try {\n if (true) {\n if (data instanceof Buffer) {\n buffer_1 = data;\n }\n else if (data instanceof ArrayBuffer) {\n buffer_1 = Buffer.from(data);\n }\n else {\n data = data;\n buffer_1 = Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n return [2 /*return*/, this.uploadSeekableInternal(function (offset, size) { return buffer_1.slice(offset, offset + size); }, buffer_1.byteLength, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n }\n else {\n browserBlob_1 = new Blob([data]);\n return [2 /*return*/, this.uploadSeekableInternal(function (offset, size) { return browserBlob_1.slice(offset, offset + size); }, browserBlob_1.size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n }\n }\n catch (e) {\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e.message\n });\n throw e;\n }\n finally {\n span.end();\n }\n return [2 /*return*/];\n });\n });\n };\n /**\n * ONLY AVAILABLE IN BROWSERS.\n *\n * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.\n *\n * When buffer length <= 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call\n * {@link commitBlockList} to commit the block list.\n *\n * @deprecated Use {@link uploadData} instead.\n *\n * @export\n * @param {Blob | ArrayBuffer | ArrayBufferView} browserData Blob, File, ArrayBuffer or ArrayBufferView\n * @param {BlockBlobParallelUploadOptions} [options] Options to upload browser data.\n * @returns {Promise} Response data for the Blob Upload operation.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.uploadBrowserData = function (browserData, options) 
{\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, browserBlob_2, e_30;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlockBlobClient-uploadBrowserData\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n browserBlob_2 = new Blob([browserData]);\n return [4 /*yield*/, this.uploadSeekableInternal(function (offset, size) { return browserBlob_2.slice(offset, offset + size); }, browserBlob_2.size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_30 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_30.message\n });\n throw e_30;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n *\n * Uploads data to block blob. Requires a bodyFactory as the data source,\n * which need to return a {@link HttpRequestBody} object with the offset and size provided.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @param {(offset: number, size: number) => HttpRequestBody} bodyFactory\n * @param {number} size size of the data to upload.\n * @param {BlockBlobParallelUploadOptions} [options] Options to Upload to Block Blob operation.\n * @returns {Promise} Response data for the Blob Upload operation.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.uploadSeekableInternal = function (bodyFactory, size, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, numBlocks_1, blockList_1, blockIDPrefix_1, transferProgress_2, batch, _loop_2, i, e_31;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {\n throw new RangeError(\"blockSize option must be >= 0 and <= \" + BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES);\n }\n if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {\n options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;\n }\n if (options.maxSingleShotSize < 0 ||\n options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {\n throw new RangeError(\"maxSingleShotSize option must be >= 0 and <= \" + BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES);\n }\n if (options.blockSize === 0) {\n if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(size + \" is too larger to upload to a block blob.\");\n }\n if (size > options.maxSingleShotSize) {\n options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);\n if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n }\n }\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = 
createSpan(\"BlockBlobClient-uploadSeekableInternal\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 5, 6, 7]);\n if (!(size <= options.maxSingleShotSize)) return [3 /*break*/, 3];\n return [4 /*yield*/, this.upload(bodyFactory(0, size), size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n numBlocks_1 = Math.floor((size - 1) / options.blockSize) + 1;\n if (numBlocks_1 > BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(\"The buffer's size is too big or the BlockSize is too small;\" +\n (\"the number of blocks must be <= \" + BLOCK_BLOB_MAX_BLOCKS));\n }\n blockList_1 = [];\n blockIDPrefix_1 = coreHttp.generateUuid();\n transferProgress_2 = 0;\n batch = new Batch(options.concurrency);\n _loop_2 = function (i) {\n batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n var blockID, start, end, contentLength;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n blockID = generateBlockID(blockIDPrefix_1, i);\n start = options.blockSize * i;\n end = i === numBlocks_1 - 1 ? size : start + options.blockSize;\n contentLength = end - start;\n blockList_1.push(blockID);\n return [4 /*yield*/, this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })\n })];\n case 1:\n _a.sent();\n // Update progress after block is successfully uploaded to server, in case of block trying\n // TODO: Hook with convenience layer progress event in finer level\n transferProgress_2 += contentLength;\n if (options.onProgress) {\n options.onProgress({\n loadedBytes: transferProgress_2\n });\n }\n return [2 /*return*/];\n }\n });\n }); });\n };\n for (i = 0; i < numBlocks_1; i++) {\n _loop_2(i);\n }\n return [4 /*yield*/, batch.do()];\n case 4:\n _b.sent();\n return [2 /*return*/, this.commitBlockList(blockList_1, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 5:\n e_31 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_31.message\n });\n throw e_31;\n case 6:\n span.end();\n return [7 /*endfinally*/];\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a local file in blocks to a block blob.\n *\n * When file size <= 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList\n * to commit the block list.\n *\n * @param {string} filePath Full path of local file\n * @param {BlockBlobParallelUploadOptions} [options] Options to Upload to Block Blob operation.\n * @returns {(Promise)} Response data for the Blob Upload operation.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.uploadFile = function (filePath, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, size, e_32;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = 
createSpan(\"BlockBlobClient-uploadFile\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 4, 5, 6]);\n return [4 /*yield*/, fsStat(filePath)];\n case 2:\n size = (_b.sent()).size;\n return [4 /*yield*/, this.uploadSeekableInternal(function (offset, count) {\n return function () {\n return fsCreateReadStream(filePath, {\n autoClose: true,\n end: count ? offset + count - 1 : Infinity,\n start: offset\n });\n };\n }, size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 3: return [2 /*return*/, _b.sent()];\n case 4:\n e_32 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_32.message\n });\n throw e_32;\n case 5:\n span.end();\n return [7 /*endfinally*/];\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a Node.js Readable stream into block blob.\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * * Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n *\n * @param {Readable} stream Node.js Readable stream\n * @param {number} bufferSize Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB\n * @param {number} maxConcurrency Max concurrency indicates the max number of buffers that can be allocated,\n * positive correlation with max uploading concurrency. Default value is 5\n * @param {BlockBlobUploadStreamOptions} [options] Options to Upload Stream to Block Blob operation.\n * @returns {Promise} Response data for the Blob Upload operation.\n * @memberof BlockBlobClient\n */\n BlockBlobClient.prototype.uploadStream = function (stream, bufferSize, maxConcurrency, options) {\n if (bufferSize === void 0) { bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES; }\n if (maxConcurrency === void 0) { maxConcurrency = 5; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, blockNum_1, blockIDPrefix_2, transferProgress_3, blockList_2, scheduler, e_33;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"BlockBlobClient-uploadStream\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 4, 5, 6]);\n blockNum_1 = 0;\n blockIDPrefix_2 = coreHttp.generateUuid();\n transferProgress_3 = 0;\n blockList_2 = [];\n scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, function (body, length) { return tslib.__awaiter(_this, void 0, void 0, function () {\n var blockID;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n blockID = generateBlockID(blockIDPrefix_2, blockNum_1);\n blockList_2.push(blockID);\n blockNum_1++;\n return [4 /*yield*/, this.stageBlock(blockID, body, length, {\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })\n })];\n case 1:\n _a.sent();\n // Update progress after block is successfully uploaded to server, in case of block trying\n transferProgress_3 += length;\n if (options.onProgress) {\n 
options.onProgress({ loadedBytes: transferProgress_3 });\n }\n return [2 /*return*/];\n }\n });\n }); }, \n // concurrency should set a smaller value than maxConcurrency, which is helpful to\n // reduce the possibility when a outgoing handler waits for stream data, in\n // this situation, outgoing handlers are blocked.\n // Outgoing queue shouldn't be empty.\n Math.ceil((maxConcurrency / 4) * 3));\n return [4 /*yield*/, scheduler.do()];\n case 2:\n _b.sent();\n return [4 /*yield*/, this.commitBlockList(blockList_2, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 3: return [2 /*return*/, _b.sent()];\n case 4:\n e_33 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_33.message\n });\n throw e_33;\n case 5:\n span.end();\n return [7 /*endfinally*/];\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n return BlockBlobClient;\n}(BlobClient));\n/**\n * PageBlobClient defines a set of operations applicable to page blobs.\n *\n * @export\n * @class PageBlobClient\n * @extends {BlobClient}\n */\nvar PageBlobClient = /** @class */ (function (_super) {\n tslib.__extends(PageBlobClient, _super);\n function PageBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {\n var _this = this;\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n var pipeline;\n var url;\n options = options || {};\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var blobName = blobNameOrOptions;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n _this = _super.call(this, url, pipeline) || this;\n _this.pageBlobContext = new PageBlob(_this.storageClientContext);\n return _this;\n }\n /**\n * Creates a new PageBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param {string} snapshot The snapshot timestamp.\n * @returns {PageBlobClient} A new PageBlobClient object identical to the source but with the specified snapshot timestamp.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.withSnapshot = function (snapshot) {\n return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);\n };\n /**\n * Creates a page blob of the specified length. 
Call uploadPages to upload data\n * data to a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param {number} size size of the page blob.\n * @param {PageBlobCreateOptions} [options] Options to the Page Blob Create operation.\n * @returns {Promise} Response data for the Page Blob Create operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.create = function (size, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_34;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-create\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.pageBlobContext.create(0, size, {\n abortSignal: options.abortSignal,\n blobHTTPHeaders: options.blobHTTPHeaders,\n blobSequenceNumber: options.blobSequenceNumber,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_34 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_34.message\n });\n throw e_34;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a page blob of the specified length. Call uploadPages to upload data\n * data to a page blob. If the blob with the same name already exists, the content\n * of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param {number} size size of the page blob.\n * @param {PageBlobCreateIfNotExistsOptions} [options]\n * @returns {Promise}\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.createIfNotExists = function (size, options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, spanOptions, conditions, res, e_35;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"PageBlobClient-createIfNotExists\", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n conditions = { ifNoneMatch: ETagAny };\n return [4 /*yield*/, this.create(size, tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_35 = _d.sent();\n if (((_a = e_35.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === \"BlobAlreadyExists\") {\n span.setStatus({\n code: api.CanonicalCode.ALREADY_EXISTS,\n message: \"Expected exception when creating a blob only if it does not already exist.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_35.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_35.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_35.message\n });\n throw e_35;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param {HttpRequestBody} body Data to upload\n * @param {number} offset Offset of destination page blob\n * @param {number} count Content length of the body, also number of bytes to be uploaded\n * @param {PageBlobUploadPagesOptions} [options] Options to the Page Blob Upload Pages operation.\n * @returns {Promise} Response data for the Page Blob Upload Pages operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.uploadPages = function (body, offset, count, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_36;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-uploadPages\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.pageBlobContext.uploadPages(body, count, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n onUploadProgress: options.onProgress,\n range: rangeToString({ offset: offset, count: count }),\n sequenceNumberAccessConditions: options.conditions,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_36 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_36.message\n });\n throw e_36;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the\n * contents are read from a URL.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url\n *\n * @param {string} sourceURL Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication\n * @param {number} sourceOffset The source offset to copy from. 
Pass 0 to copy from the beginning of source page blob\n * @param {number} destOffset Offset of destination page blob\n * @param {number} count Number of bytes to be uploaded from source page blob\n * @param {PageBlobUploadPagesFromURLOptions} [options={}]\n * @returns {Promise}\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.uploadPagesFromURL = function (sourceURL, sourceOffset, destOffset, count, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_37;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n _b = createSpan(\"PageBlobClient-uploadPagesFromURL\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return [4 /*yield*/, this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count: count }), 0, rangeToString({ offset: destOffset, count: count }), {\n abortSignal: options.abortSignal,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n sequenceNumberAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_37 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_37.message\n });\n throw e_37;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Frees the specified pages from the page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param {number} [offset] Starting byte position of the pages to clear.\n * @param {number} [count] Number of bytes to clear.\n * @param {PageBlobClearPagesOptions} [options] Options to the Page Blob Clear Pages operation.\n * @returns {Promise} Response data for the Page Blob Clear Pages operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.clearPages = function (offset, count, options) {\n var _a;\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_38;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-clearPages\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext.clearPages(0, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, 
options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n range: rangeToString({ offset: offset, count: count }),\n sequenceNumberAccessConditions: options.conditions,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_38 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_38.message\n });\n throw e_38;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns the list of valid page ranges for a page blob or snapshot of a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param {number} [offset] Starting byte position of the page ranges.\n * @param {number} [count] Number of bytes to get.\n * @param {PageBlobGetPageRangesOptions} [options] Options to the Page Blob Get Ranges operation.\n * @returns {Promise} Response data for the Page Blob Get Ranges operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.getPageRanges = function (offset, count, options) {\n var _a;\n if (offset === void 0) { offset = 0; }\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_39;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-getPageRanges\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext\n .getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n range: rangeToString({ offset: offset, count: count }),\n spanOptions: spanOptions\n })\n .then(rangeResponseFromModel)];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_39 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_39.message\n });\n throw e_39;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param {number} offset Starting byte position of the page blob\n * @param {number} count Number of bytes to get ranges diff.\n * @param {string} prevSnapshot Timestamp of snapshot to retrieve the difference.\n * @param {PageBlobGetPageRangesDiffOptions} [options] Options to the Page Blob Get Page Ranges Diff operation.\n * @returns {Promise} Response data for the Page Blob Get Page Range Diff operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.getPageRangesDiff = function (offset, count, prevSnapshot, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_40;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-getPageRangesDiff\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n prevsnapshot: prevSnapshot,\n range: rangeToString({ offset: offset, count: count }),\n spanOptions: spanOptions\n })\n .then(rangeResponseFromModel)];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_40 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_40.message\n });\n throw e_40;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param {number} offset Starting byte position of the page blob\n * @param {number} count Number of bytes to get ranges diff.\n * @param {string} prevSnapshotUrl URL of snapshot to retrieve the difference.\n * @param {PageBlobGetPageRangesDiffOptions} [options] Options to the Page Blob Get Page Ranges Diff operation.\n * @returns {Promise} Response data for the Page Blob Get Page Range Diff operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.getPageRangesDiffForManagedDisks = function (offset, count, prevSnapshotUrl, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_41;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-GetPageRangesDiffForManagedDisks\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n prevSnapshotUrl: prevSnapshotUrl,\n range: rangeToString({ offset: offset, count: count }),\n spanOptions: spanOptions\n })\n .then(rangeResponseFromModel)];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_41 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_41.message\n });\n throw e_41;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Resizes the page blob to the specified size (which must be a multiple of 512).\n * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties\n *\n * @param {number} size Target size\n * @param {PageBlobResizeOptions} [options] Options to the Page Blob Resize operation.\n * @returns {Promise} Response data for the Page Blob Resize operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.resize = function (size, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_42;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-resize\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext.resize(size, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n encryptionScope: options.encryptionScope,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_42 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_42.message\n });\n throw e_42;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets a page blob's sequence number.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param {SequenceNumberActionType} sequenceNumberAction Indicates how the service should modify the blob's sequence number.\n * @param {number} [sequenceNumber] Required if sequenceNumberAction is max or update\n * @param {PageBlobUpdateSequenceNumberOptions} [options] Options to the Page Blob Update Sequence Number operation.\n * @returns {Promise} Response data for the Page Blob Update Sequence Number operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.updateSequenceNumber = function (sequenceNumberAction, sequenceNumber, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_43;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _b = createSpan(\"PageBlobClient-updateSequenceNumber\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {\n abortSignal: options.abortSignal,\n blobSequenceNumber: sequenceNumber,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = 
options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_43 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_43.message\n });\n throw e_43;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.\n * The snapshot is copied such that only the differential changes between the previously\n * copied snapshot are transferred to the destination.\n * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.\n * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob\n * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots\n *\n * @param {string} copySource Specifies the name of the source page blob snapshot. For example,\n * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param {PageBlobStartCopyIncrementalOptions} [options] Options to the Page Blob Copy Incremental operation.\n * @returns {Promise} Response data for the Page Blob Copy Incremental operation.\n * @memberof PageBlobClient\n */\n PageBlobClient.prototype.startCopyIncremental = function (copySource, options) {\n var _a;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _b, span, spanOptions, e_44;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n _b = createSpan(\"PageBlobClient-startCopyIncremental\", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.pageBlobContext.copyIncremental(copySource, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_44 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_44.message\n });\n throw e_44;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n return PageBlobClient;\n}(BlobClient));\n/**\n * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.\n *\n * @export\n * @class BlobLeaseClient\n */\nvar BlobLeaseClient = /** @class */ (function () {\n /**\n * Creates an instance of BlobLeaseClient.\n * @param {(ContainerClient | BlobClient)} client The client to make the lease operation requests.\n * @param {string} leaseId Initial proposed lease id.\n * @memberof BlobLeaseClient\n */\n function BlobLeaseClient(client, leaseId) {\n var clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions());\n this._url = client.url;\n if (client instanceof ContainerClient) {\n this._isContainer = true;\n this._containerOrBlobOperation = new Container(clientContext);\n }\n else {\n this._isContainer = false;\n this._containerOrBlobOperation = new Blob$1(clientContext);\n }\n if (!leaseId) {\n leaseId = coreHttp.generateUuid();\n }\n this._leaseId = leaseId;\n }\n Object.defineProperty(BlobLeaseClient.prototype, \"leaseId\", {\n /**\n * Gets the lease Id.\n *\n * @readonly\n * @memberof BlobLeaseClient\n * @type {string}\n */\n get: function () {\n return this._leaseId;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(BlobLeaseClient.prototype, \"url\", {\n /**\n * Gets the url.\n *\n * @readonly\n * @memberof BlobLeaseClient\n * @type {string}\n */\n get: function () {\n return this._url;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Establishes and manages a lock on a container for delete operations, or on a blob\n * for write and delete operations.\n * The lock duration can be 15 to 60 seconds, or can be infinite.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param {number} duration Must be between 15 to 60 seconds, or infinite (-1)\n * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.\n * @returns {Promise} Response data for acquire lease operation.\n * @memberof BlobLeaseClient\n */\n BlobLeaseClient.prototype.acquireLease = function (duration, options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, spanOptions, e_45;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-acquireLease\", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._containerOrBlobOperation.acquireLease({\n abortSignal: options.abortSignal,\n duration: duration,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),\n proposedLeaseId: this._leaseId,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _h.sent()];\n case 3:\n e_45 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_45.message\n });\n throw e_45;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * To change the ID of the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param {string} proposedLeaseId the proposed new lease Id.\n * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.\n * @returns {Promise} Response data for change lease operation.\n * @memberof BlobLeaseClient\n */\n BlobLeaseClient.prototype.changeLease = function (proposedLeaseId, options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, spanOptions, response, e_46;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-changeLease\", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2:\n response = _h.sent();\n this._leaseId = proposedLeaseId;\n return [2 /*return*/, response];\n case 3:\n e_46 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_46.message\n });\n throw e_46;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * To free the lease if it is no longer needed so that another client may\n * immediately acquire a lease against the container or the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.\n * @returns {Promise} Response data for release lease operation.\n * @memberof BlobLeaseClient\n */\n BlobLeaseClient.prototype.releaseLease = function (options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, spanOptions, e_47;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-releaseLease\", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._containerOrBlobOperation.releaseLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _h.sent()];\n case 3:\n e_47 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_47.message\n });\n throw e_47;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * To renew the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param {LeaseOperationOptions} [options={}] Optional option to configure lease management operations.\n * @returns {Promise} Response data for renew lease operation.\n * @memberof BlobLeaseClient\n */\n BlobLeaseClient.prototype.renewLease = function (options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, spanOptions, e_48;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-renewLease\", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this._containerOrBlobOperation.renewLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }),\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _h.sent()];\n case 3:\n e_48 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_48.message\n });\n throw e_48;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * To end the lease but ensure that another client cannot acquire a new lease\n * until the current lease period has expired.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @static\n * @param {number} breakPeriod Break period\n * @param {LeaseOperationOptions} [options={}] Optional options to configure lease management operations.\n * @returns {Promise} Response data for break lease operation.\n * @memberof BlobLeaseClient\n */\n BlobLeaseClient.prototype.breakLease = function (breakPeriod, options) {\n var _a, _b, _c, _d, _e, _f;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _g, span, spanOptions, operationOptions, e_49;\n return tslib.__generator(this, function (_h) {\n switch (_h.label) {\n case 0:\n _g = createSpan(\"BlobLeaseClient-breakLease\", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;\n if (this._isContainer &&\n ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||\n (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {\n throw new RangeError(\"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\");\n }\n _h.label = 1;\n case 1:\n _h.trys.push([1, 3, 4, 5]);\n operationOptions = {\n abortSignal: options.abortSignal,\n breakPeriod: breakPeriod,\n modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }),\n spanOptions: spanOptions\n };\n return [4 /*yield*/, this._containerOrBlobOperation.breakLease(operationOptions)];\n case 2: return [2 /*return*/, _h.sent()];\n case 3:\n e_49 = _h.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_49.message\n });\n throw e_49;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n return BlobLeaseClient;\n}());\n/**\n * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.\n *\n * @export\n * @class ContainerClient\n */\nvar ContainerClient = /** @class */ (function (_super) {\n tslib.__extends(ContainerClient, _super);\n function ContainerClient(urlOrConnectionString, credentialOrPipelineOrContainerName, options) {\n var _this = this;\n var pipeline;\n var url;\n options = options || {};\n if (credentialOrPipelineOrContainerName instanceof Pipeline) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n }\n else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n }\n else if (!credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\") {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else if (credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\") {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n var containerName = credentialOrPipelineOrContainerName;\n var extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n pipeline = newPipeline(sharedKeyCredential, options);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n }\n else {\n throw new Error(\"Expecting non-empty strings for containerName parameter\");\n }\n _this = _super.call(this, url, pipeline) || this;\n _this._containerName = _this.getContainerNameFromUrl();\n _this.containerContext = new Container(_this.storageClientContext);\n return _this;\n }\n Object.defineProperty(ContainerClient.prototype, \"containerName\", {\n /**\n * The name of the container.\n */\n get: function () {\n return this._containerName;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Creates a new container under the specified account. If the container with\n * the same name already exists, the operation fails.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param {ContainerCreateOptions} [options] Options to Container Create operation.\n * @returns {Promise}\n * @memberof ContainerClient\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * const createContainerResponse = await containerClient.create();\n * console.log(\"Container was created successfully\", createContainerResponse.requestId);\n * ```\n */\n ContainerClient.prototype.create = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_50;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-create\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.create(tslib.__assign(tslib.__assign({}, options), { spanOptions: spanOptions }))];\n case 2: \n // Spread operator in destructuring assignments,\n // this will filter out unwanted properties from the response object into result object\n return [2 /*return*/, _b.sent()];\n case 3:\n e_50 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_50.message\n });\n throw e_50;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a new container under the specified account. 
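For orientation, the constructor branches above correspond to call shapes like the following. This is an illustrative sketch, not part of the bundle; the connection string and container name are placeholders.

```js
// Illustrative only: placeholder connection string and container name.
const { ContainerClient, BlobServiceClient } = require("@azure/storage-blob");

// (connectionString, containerName) branch of the constructor:
const fromConnString = new ContainerClient(
  process.env.AZURE_STORAGE_CONNECTION_STRING,
  "my-container"
);

// Equivalent route via a service client:
const serviceClient = BlobServiceClient.fromConnectionString(
  process.env.AZURE_STORAGE_CONNECTION_STRING
);
const fromService = serviceClient.getContainerClient("my-container");
```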
If the container with\n * the same name already exists, it is not changed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param {ContainerCreateOptions} [options]\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.createIfNotExists = function (options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, spanOptions, res, e_51;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"ContainerClient-createIfNotExists\", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_51 = _d.sent();\n if (((_a = e_51.details) === null || _a === void 0 ? void 0 : _a.errorCode) === \"ContainerAlreadyExists\") {\n span.setStatus({\n code: api.CanonicalCode.ALREADY_EXISTS,\n message: \"Expected exception when creating a container only if it does not already exist.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_51.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_51.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_51.message\n });\n throw e_51;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns true if the Azure container resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing container might be deleted by other clients or\n * applications. 
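A usage sketch for the create-if-absent pattern implemented above (illustrative, not from the bundle; `containerClient` is assumed):

```js
// Illustrative only: assumes `containerClient` as above.
async function ensureContainer(containerClient) {
  const res = await containerClient.createIfNotExists();
  if (!res.succeeded) {
    console.log("Container already existed (ContainerAlreadyExists was swallowed)");
  }
  return containerClient.exists(); // subject to the race caveat in the JSDoc
}
```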
Vice versa new containers with the same name might be added by other clients or\n * applications after this function completes.\n *\n * @param {ContainerExistsOptions} [options={}]\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.exists = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_52;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-exists\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.getProperties({\n abortSignal: options.abortSignal,\n tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })\n })];\n case 2:\n _b.sent();\n return [2 /*return*/, true];\n case 3:\n e_52 = _b.sent();\n if (e_52.statusCode === 404) {\n span.setStatus({\n code: api.CanonicalCode.NOT_FOUND,\n message: \"Expected exception when checking container existence\"\n });\n return [2 /*return*/, false];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_52.message\n });\n throw e_52;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a {@link BlobClient}\n *\n * @param {string} blobName A blob name\n * @returns {BlobClient} A new BlobClient object for the given blob name.\n * @memberof ContainerClient\n */\n ContainerClient.prototype.getBlobClient = function (blobName) {\n return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n };\n /**\n * Creates an {@link AppendBlobClient}\n *\n * @param {string} blobName An append blob name\n * @returns {AppendBlobClient}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.getAppendBlobClient = function (blobName) {\n return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n };\n /**\n * Creates a {@link BlockBlobClient}\n *\n * @param {string} blobName A block blob name\n * @returns {BlockBlobClient}\n * @memberof ContainerClient\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n *\n * const blockBlobClient = containerClient.getBlockBlobClient(\"\");\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n ContainerClient.prototype.getBlockBlobClient = function (blobName) {\n return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n };\n /**\n * Creates a {@link PageBlobClient}\n *\n * @param {string} blobName A page blob name\n * @returns {PageBlobClient}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.getPageBlobClient = function (blobName) {\n return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n };\n /**\n * Returns all user-defined metadata and system properties for the specified\n * container. The data returned does not include the container's list of blobs.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. 
This differs from the metadata keys returned by\n * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param {ContainerGetPropertiesOptions} [options] Options to Container Get Properties operation.\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.getProperties = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_53;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"ContainerClient-getProperties\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.getProperties(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal }, options.conditions), { spanOptions: spanOptions }))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_53 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_53.message\n });\n throw e_53;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified container for deletion. The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param {ContainerDeleteMethodOptions} [options] Options to Container Delete operation.\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.delete = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_54;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"ContainerClient-delete\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.deleteMethod({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_54 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_54.message\n });\n throw e_54;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified container for deletion if it exists. 
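The `getProperties` and `delete` operations defined above combine as in this illustrative sketch (not part of the bundle; `containerClient` is assumed):

```js
// Illustrative only: assumes `containerClient`.
async function inspectAndRemove(containerClient) {
  const props = await containerClient.getProperties();
  console.log(props.lastModified, props.leaseState); // metadata keys arrive lower-cased
  const res = await containerClient.deleteIfExists();
  return res.succeeded; // false when the service reported ContainerNotFound
}
```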
The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param {ContainerDeleteMethodOptions} [options] Options to Container Delete operation.\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.deleteIfExists = function (options) {\n var _a, _b;\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _c, span, spanOptions, res, e_55;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _c = createSpan(\"ContainerClient-deleteIfExists\", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;\n _d.label = 1;\n case 1:\n _d.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n res = _d.sent();\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable\n })];\n case 3:\n e_55 = _d.sent();\n if (((_a = e_55.details) === null || _a === void 0 ? void 0 : _a.errorCode) === \"ContainerNotFound\") {\n span.setStatus({\n code: api.CanonicalCode.NOT_FOUND,\n message: \"Expected exception when deleting a container only if it exists.\"\n });\n return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_55.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_55.response })];\n }\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_55.message\n });\n throw e_55;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets one or more user-defined name-value pairs for the specified container.\n *\n * If no option provided, or no metadata defined in the parameter, the container\n * metadata will be removed.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata\n *\n * @param {Metadata} [metadata] Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param {ContainerSetMetadataOptions} [options] Options to Container Set Metadata operation.\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.setMetadata = function (metadata, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_56;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.conditions) {\n options.conditions = {};\n }\n if (options.conditions.ifUnmodifiedSince) {\n throw new RangeError(\"the IfUnmodifiedSince must have their default values because they are ignored by the blob service\");\n }\n _a = createSpan(\"ContainerClient-setMetadata\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: metadata,\n modifiedAccessConditions: options.conditions,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_56 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: 
e_56.message\n });\n throw e_56;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the permissions for the specified container. The permissions indicate\n * whether container data may be accessed publicly.\n *\n * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.\n * For example, new Date(\"2018-12-31T03:44:23.8827891Z\").toISOString() will get \"2018-12-31T03:44:23.882Z\".\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl\n *\n * @param {ContainerGetAccessPolicyOptions} [options] Options to Container Get Access Policy operation.\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.getAccessPolicy = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, response, res, _i, response_1, identifier, accessPolicy, e_57;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!options.conditions) {\n options.conditions = {};\n }\n _a = createSpan(\"ContainerClient-getAccessPolicy\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.getAccessPolicy({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n spanOptions: spanOptions\n })];\n case 2:\n response = _b.sent();\n res = {\n _response: response._response,\n blobPublicAccess: response.blobPublicAccess,\n date: response.date,\n etag: response.etag,\n errorCode: response.errorCode,\n lastModified: response.lastModified,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n signedIdentifiers: [],\n version: response.version\n };\n for (_i = 0, response_1 = response; _i < response_1.length; _i++) {\n identifier = response_1[_i];\n accessPolicy = undefined;\n if (identifier.accessPolicy) {\n accessPolicy = {\n permissions: identifier.accessPolicy.permissions\n };\n if (identifier.accessPolicy.expiresOn) {\n accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);\n }\n if (identifier.accessPolicy.startsOn) {\n accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);\n }\n }\n res.signedIdentifiers.push({\n accessPolicy: accessPolicy,\n id: identifier.id\n });\n }\n return [2 /*return*/, res];\n case 3:\n e_57 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_57.message\n });\n throw e_57;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets the permissions for the specified container. 
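The access-policy pair above (`getAccessPolicy` parsing dates back out, `setAccessPolicy` serializing them) can be exercised as follows. Illustrative only; the policy id, permissions, and expiry window are placeholders.

```js
// Illustrative only: assumes `containerClient`; the policy id is a placeholder.
async function grantReadPolicy(containerClient) {
  await containerClient.setAccessPolicy(undefined, [
    {
      id: "read-policy-1",
      accessPolicy: {
        permissions: "r",
        startsOn: new Date(),
        expiresOn: new Date(Date.now() + 3600 * 1000)
      }
    }
  ]);
  const acl = await containerClient.getAccessPolicy();
  return acl.signedIdentifiers; // startsOn/expiresOn parsed back into Date objects
}
```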
The permissions indicate\n * whether blobs in a container may be accessed publicly.\n *\n * When you set permissions for a container, the existing permissions are replaced.\n * If no access or containerAcl provided, the existing container ACL will be\n * removed.\n *\n * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.\n * During this interval, a shared access signature that is associated with the stored access policy will\n * fail with status code 403 (Forbidden), until the access policy becomes active.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n *\n * @param {PublicAccessType} [access] The level of public access to data in the container.\n * @param {SignedIdentifier[]} [containerAcl] Array of elements each having a unique Id and details of the access policy.\n * @param {ContainerSetAccessPolicyOptions} [options] Options to Container Set Access Policy operation.\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.setAccessPolicy = function (access, containerAcl, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, acl, _i, _b, identifier, e_58;\n return tslib.__generator(this, function (_c) {\n switch (_c.label) {\n case 0:\n options.conditions = options.conditions || {};\n _a = createSpan(\"ContainerClient-setAccessPolicy\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _c.label = 1;\n case 1:\n _c.trys.push([1, 3, 4, 5]);\n acl = [];\n for (_i = 0, _b = containerAcl || []; _i < _b.length; _i++) {\n identifier = _b[_i];\n acl.push({\n accessPolicy: {\n expiresOn: identifier.accessPolicy.expiresOn\n ? truncatedISO8061Date(identifier.accessPolicy.expiresOn)\n : \"\",\n permissions: identifier.accessPolicy.permissions,\n startsOn: identifier.accessPolicy.startsOn\n ? truncatedISO8061Date(identifier.accessPolicy.startsOn)\n : \"\"\n },\n id: identifier.id\n });\n }\n return [4 /*yield*/, this.containerContext.setAccessPolicy({\n abortSignal: options.abortSignal,\n access: access,\n containerAcl: acl,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _c.sent()];\n case 3:\n e_58 = _c.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_58.message\n });\n throw e_58;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the container.\n *\n * @param {string} [proposeLeaseId] Initial proposed lease Id.\n * @returns {BlobLeaseClient} A new BlobLeaseClient object for managing leases on the container.\n * @memberof ContainerClient\n */\n ContainerClient.prototype.getBlobLeaseClient = function (proposeLeaseId) {\n return new BlobLeaseClient(this, proposeLeaseId);\n };\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n *\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. 
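`getBlobLeaseClient`, defined just above, hands back the `BlobLeaseClient` from earlier in this bundle; a typical acquire/release pattern looks like the sketch below (illustrative only, `containerClient` assumed).

```js
// Illustrative only: assumes `containerClient`; lease duration must be 15-60 s or -1 (infinite).
async function withContainerLease(containerClient) {
  const leaseClient = containerClient.getBlobLeaseClient(); // lease id auto-generated
  await leaseClient.acquireLease(30);
  try {
    // ... operations that should run while holding the container lease ...
  } finally {
    await leaseClient.releaseLease();
  }
}
```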
To perform a partial update of a block blob's,\n * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},\n * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better\n * performance with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param {string} blobName Name of the block blob to create or update.\n * @param {HttpRequestBody} body Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param {number} contentLength Length of body in bytes. Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param {BlockBlobUploadOptions} [options] Options to configure the Block Blob Upload operation.\n * @returns {Promise<{ blockBlobClient: BlockBlobClient; response: BlockBlobUploadResponse }>} Block Blob upload response data and the corresponding BlockBlobClient instance.\n * @memberof ContainerClient\n */\n ContainerClient.prototype.uploadBlockBlob = function (blobName, body, contentLength, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, blockBlobClient, response, e_59;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-uploadBlockBlob\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n blockBlobClient = this.getBlockBlobClient(blobName);\n return [4 /*yield*/, blockBlobClient.upload(body, contentLength, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n response = _b.sent();\n return [2 /*return*/, {\n blockBlobClient: blockBlobClient,\n response: response\n }];\n case 3:\n e_59 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_59.message\n });\n throw e_59;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param {string} blobName\n * @param {ContainerDeleteBlobOptions} [options] Options to Blob Delete operation.\n * @returns {Promise} Block blob deletion response data.\n * @memberof ContainerClient\n */\n ContainerClient.prototype.deleteBlob = function (blobName, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, blobClient, e_60;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-deleteBlob\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n blobClient = this.getBlobClient(blobName);\n if (options.versionId) {\n blobClient = blobClient.withVersion(options.versionId);\n }\n return [4 /*yield*/, blobClient.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_60 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_60.message\n });\n throw e_60;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * listBlobFlatSegment returns a single segment of blobs starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call listBlobsFlatSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param {string} [marker] A string value that identifies the portion of the list to be returned with the next list operation.\n * @param {ContainerListBlobsSegmentOptions} [options] Options to Container List Blob Flat Segment operation.\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.listBlobFlatSegment = function (marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, response, wrappedResponse, e_61;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-listBlobFlatSegment\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.listBlobFlatSegment(tslib.__assign(tslib.__assign({ marker: marker }, options), { spanOptions: spanOptions }))];\n case 2:\n response = _b.sent();\n wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) {\n var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });\n return blobItem;\n }) }) });\n return [2 /*return*/, wrappedResponse];\n case 3:\n e_61 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_61.message\n });\n throw e_61;\n case 4:\n span.end();\n return [7 
/*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * listBlobHierarchySegment returns a single segment of blobs starting from\n * the specified Marker. Use an empty Marker to start enumeration from the\n * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment\n * again (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param {string} delimiter The character or string used to define the virtual hierarchy\n * @param {string} [marker] A string value that identifies the portion of the list to be returned with the next list operation.\n * @param {ContainerListBlobsSegmentOptions} [options] Options to Container List Blob Hierarchy Segment operation.\n * @returns {Promise}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.listBlobHierarchySegment = function (delimiter, marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, response, wrappedResponse, e_62;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"ContainerClient-listBlobHierarchySegment\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.containerContext.listBlobHierarchySegment(delimiter, tslib.__assign(tslib.__assign({ marker: marker }, options), { spanOptions: spanOptions }))];\n case 2:\n response = _b.sent();\n wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) {\n var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });\n return blobItem;\n }) }) });\n return [2 /*return*/, wrappedResponse];\n case 3:\n e_62 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_62.message\n });\n throw e_62;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse\n *\n * @private\n * @param {string} [marker] A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.\n * @returns {AsyncIterableIterator}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.listSegments = function (marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listSegments_1() {\n var listBlobsFlatSegmentResponse;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];\n _a.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(this.listBlobFlatSegment(marker, options))];\n case 2:\n listBlobsFlatSegmentResponse = _a.sent();\n marker = listBlobsFlatSegmentResponse.continuationToken;\n return [4 /*yield*/, tslib.__await(listBlobsFlatSegmentResponse)];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];\n case 4: return [4 /*yield*/, _a.sent()];\n case 5:\n _a.sent();\n _a.label = 6;\n case 6:\n if (marker) return [3 /*break*/, 1];\n _a.label = 7;\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator of {@link BlobItem} objects\n *\n * @private\n * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.\n * @returns {AsyncIterableIterator}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.listItems = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listItems_1() {\n var marker, _a, _b, listBlobsFlatSegmentResponse, e_63_1;\n var e_63, _c;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _d.trys.push([0, 7, 8, 13]);\n _a = tslib.__asyncValues(this.listSegments(marker, options));\n _d.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(_a.next())];\n case 2:\n if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];\n listBlobsFlatSegmentResponse = _b.value;\n return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)))];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];\n case 4:\n _d.sent();\n _d.label = 5;\n case 5: return [3 /*break*/, 1];\n case 6: return [3 /*break*/, 13];\n case 7:\n e_63_1 = _d.sent();\n e_63 = { error: e_63_1 };\n return [3 /*break*/, 13];\n case 8:\n _d.trys.push([8, , 11, 12]);\n if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];\n return [4 /*yield*/, tslib.__await(_c.call(_a))];\n case 9:\n _d.sent();\n _d.label = 10;\n case 10: return [3 /*break*/, 12];\n case 11:\n if (e_63) throw e_63.error;\n return [7 /*endfinally*/];\n case 12: return [7 /*endfinally*/];\n case 13: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an async iterable iterator to list all the blobs\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the containerClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\");`\n * let i = 1;\n * for await (const blob of containerClient.listBlobsFlat()) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = containerClient.listBlobsFlat();\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: 
${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * @param {ContainerListBlobsOptions} [options={}] Options to list blobs.\n * @returns {PagedAsyncIterableIterator} An asyncIterableIterator that supports paging.\n * @memberof ContainerClient\n */\n ContainerClient.prototype.listBlobsFlat = function (options) {\n var _a;\n var _this = this;\n if (options === void 0) { options = {}; }\n var include = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));\n // AsyncIterableIterator to iterate over blobs\n var iter = this.listItems(updatedOptions);\n return _a = {\n /**\n * @member {Promise} [next] The next method, part of the iteration protocol\n */\n next: function () {\n return iter.next();\n }\n },\n /**\n * @member {Symbol} [asyncIterator] The connection to the async iterator, part of the iteration protocol\n */\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n /**\n * @member {Function} [byPage] Return an AsyncIterableIterator that works a page at a time\n */\n _a.byPage = function (settings) {\n if (settings === void 0) { settings = {}; }\n return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions));\n },\n _a;\n };\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse\n *\n * @private\n * @param {string} delimiter The character or string used to define the virtual hierarchy\n * @param {string} [marker] A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. 
The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.\n * @returns {AsyncIterableIterator}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.listHierarchySegments = function (delimiter, marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listHierarchySegments_1() {\n var listBlobsHierarchySegmentResponse;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];\n _a.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options))];\n case 2:\n listBlobsHierarchySegmentResponse = _a.sent();\n marker = listBlobsHierarchySegmentResponse.continuationToken;\n return [4 /*yield*/, tslib.__await(listBlobsHierarchySegmentResponse)];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];\n case 4: return [4 /*yield*/, _a.sent()];\n case 5:\n _a.sent();\n _a.label = 6;\n case 6:\n if (marker) return [3 /*break*/, 1];\n _a.label = 7;\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.\n *\n * @private\n * @param {string} delimiter The character or string used to define the virtual hierarchy\n * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.\n * @returns {AsyncIterableIterator<{ kind: \"prefix\" } & BlobPrefix | { kind: \"blob\" } & BlobItem>}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.listItemsByHierarchy = function (delimiter, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listItemsByHierarchy_1() {\n var marker, _a, _b, listBlobsHierarchySegmentResponse, segment, _i, _c, prefix, _d, _e, blob, e_64_1;\n var e_64, _f;\n return tslib.__generator(this, function (_g) {\n switch (_g.label) {\n case 0:\n _g.trys.push([0, 14, 15, 20]);\n _a = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options));\n _g.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(_a.next())];\n case 2:\n if (!(_b = _g.sent(), !_b.done)) return [3 /*break*/, 13];\n listBlobsHierarchySegmentResponse = _b.value;\n segment = listBlobsHierarchySegmentResponse.segment;\n if (!segment.blobPrefixes) return [3 /*break*/, 7];\n _i = 0, _c = segment.blobPrefixes;\n _g.label = 3;\n case 3:\n if (!(_i < _c.length)) return [3 /*break*/, 7];\n prefix = _c[_i];\n return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: \"prefix\" }, prefix))];\n case 4: return [4 /*yield*/, _g.sent()];\n case 5:\n _g.sent();\n _g.label = 6;\n case 6:\n _i++;\n return [3 /*break*/, 3];\n case 7:\n _d = 0, _e = segment.blobItems;\n _g.label = 8;\n case 8:\n if (!(_d < _e.length)) return [3 /*break*/, 12];\n blob = _e[_d];\n return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: \"blob\" }, blob))];\n case 9: return [4 /*yield*/, _g.sent()];\n case 10:\n _g.sent();\n _g.label = 11;\n case 11:\n _d++;\n return [3 /*break*/, 8];\n case 12: return [3 /*break*/, 1];\n case 13: return [3 /*break*/, 20];\n case 14:\n e_64_1 = _g.sent();\n e_64 = { error: e_64_1 };\n return [3 /*break*/, 20];\n case 15:\n _g.trys.push([15, , 18, 19]);\n if (!(_b && !_b.done && (_f = 
_a.return))) return [3 /*break*/, 17];\n return [4 /*yield*/, tslib.__await(_f.call(_a))];\n case 16:\n _g.sent();\n _g.label = 17;\n case 17: return [3 /*break*/, 19];\n case 18:\n if (e_64) throw e_64.error;\n return [7 /*endfinally*/];\n case 19: return [7 /*endfinally*/];\n case 20: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an async iterable iterator to list all the blobs by hierarchy.\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * for await (const item of containerClient.listBlobsByHierarchy(\"/\")) {\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);\n * }\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let iter = containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix1/\" });\n * let entity = await iter.next();\n * while (!entity.done) {\n * let item = entity.value;\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);\n * }\n * entity = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page\");\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\").byPage()) {\n * const segment = response.segment;\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);\n * }\n * }\n * ```\n *\n * Example using paging with a max page size:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page, specifying a prefix and a max page size\");\n *\n * let i = 1;\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix2/sub1/\"}).byPage({ maxPageSize: 2 })) {\n * console.log(`Page ${i++}`);\n * const segment = response.segment;\n *\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n *\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);\n * }\n * }\n * ```\n *\n * @param {string} delimiter The character or string used to define the virtual hierarchy\n * @param {ContainerListBlobsOptions} [options={}] Options to list blobs operation.\n * @returns {(PagedAsyncIterableIterator<\n * { kind: \"prefix\" } & BlobPrefix | { kind: \"blob\" } & BlobItem,\n * ContainerListBlobHierarchySegmentResponse>)}\n * @memberof ContainerClient\n */\n ContainerClient.prototype.listBlobsByHierarchy = function (delimiter, options) {\n var _a;\n var _this = this;\n if (options === void 0) { options = {}; }\n if (delimiter === \"\") {\n throw new RangeError(\"delimiter should contain one or more characters\");\n }\n var include = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n 
include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));\n // AsyncIterableIterator to iterate over blob prefixes and blobs\n var iter = this.listItemsByHierarchy(delimiter, updatedOptions);\n return _a = {\n /**\n * @member {Promise} [next] The next method, part of the iteration protocol\n */\n next: function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, iter.next()];\n });\n });\n }\n },\n /**\n * @member {Symbol} [asyncIterator] The connection to the async iterator, part of the iteration protocol\n */\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n /**\n * @member {Function} [byPage] Return an AsyncIterableIterator that works a page at a time\n */\n _a.byPage = function (settings) {\n if (settings === void 0) { settings = {}; }\n return _this.listHierarchySegments(delimiter, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions));\n },\n _a;\n };\n ContainerClient.prototype.getContainerNameFromUrl = function () {\n var containerName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`\n // http://localhost:10001/devstoreaccount1/containername\n var parsedUrl = coreHttp.URLBuilder.parse(this.url);\n if (parsedUrl.getHost().split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername\".\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath().split(\"/\")[1];\n }\n else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername\n // .getPath() -> /devstoreaccount1/containername\n containerName = parsedUrl.getPath().split(\"/\")[2];\n }\n else {\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath().split(\"/\")[1];\n }\n // decode the encoded containerName - to get all the special characters that might be present in it\n containerName = decodeURIComponent(containerName);\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n return containerName;\n }\n catch (error) {\n throw new Error(\"Unable to extract containerName with provided information.\");\n }\n };\n /**\n * Only available for ContainerClient constructed with a shared key credential.\n *\n * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. 
The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param {ContainerGenerateSasUrlOptions} options Optional parameters.\n * @returns {Promise} The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n * @memberof ContainerClient\n */\n ContainerClient.prototype.generateSasUrl = function (options) {\n var _this = this;\n return new Promise(function (resolve) {\n if (!(_this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\"Can only generate the SAS when the client is initialized with a shared key credential\");\n }\n var sas = generateBlobSASQueryParameters(tslib.__assign({ containerName: _this._containerName }, options), _this.credential).toString();\n resolve(appendToURLQuery(_this.url, sas));\n });\n };\n return ContainerClient;\n}(StorageClient));\n\nfunction getBodyAsText(batchResponse) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var buffer, responseLength;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n return [4 /*yield*/, streamToBuffer2(batchResponse.readableStreamBody, buffer)];\n case 1:\n responseLength = _a.sent();\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n return [2 /*return*/, buffer.toString()];\n }\n });\n });\n}\nfunction utf8ByteLength(str) {\n return Buffer.byteLength(str);\n}\n\nvar HTTP_HEADER_DELIMITER = \": \";\nvar SPACE_DELIMITER = \" \";\nvar NOT_FOUND = -1;\n/**\n * Util class for parsing batch response.\n */\nvar BatchResponseParser = /** @class */ (function () {\n function BatchResponseParser(batchResponse, subRequests) {\n if (!batchResponse || !batchResponse.contentType) {\n // In special case(reported), server may return invalid content-type which could not be parsed.\n throw new RangeError(\"batchResponse is malformed or doesn't contain valid content-type.\");\n }\n if (!subRequests || subRequests.size === 0) {\n // This should be prevent during coding.\n throw new RangeError(\"Invalid state: subRequests is not provided or size is 0.\");\n }\n this.batchResponse = batchResponse;\n this.subRequests = subRequests;\n this.responseBatchBoundary = this.batchResponse.contentType.split(\"=\")[1];\n this.perResponsePrefix = \"--\" + this.responseBatchBoundary + HTTP_LINE_ENDING;\n this.batchResponseEnding = \"--\" + this.responseBatchBoundary + \"--\";\n }\n // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response\n BatchResponseParser.prototype.parseBatchResponse = function () {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var responseBodyAsText, subResponses, subResponseCount, deserializedSubResponses, subResponsesSucceededCount, subResponsesFailedCount, index, subResponse, deserializedSubResponse, responseLines, subRespHeaderStartFound, subRespHeaderEndFound, subRespFailed, contentId, _i, responseLines_1, responseLine, tokens, tokens;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse\n // sub request's response.\n if (this.batchResponse._response.status != HTTPURLConnection.HTTP_ACCEPTED) {\n throw new Error(\"Invalid state: batch request failed with status: '\" + 
this.batchResponse._response.status + \"'.\");\n }\n return [4 /*yield*/, getBodyAsText(this.batchResponse)];\n case 1:\n responseBodyAsText = _a.sent();\n subResponses = responseBodyAsText\n .split(this.batchResponseEnding)[0] // string after ending is useless\n .split(this.perResponsePrefix)\n .slice(1);\n subResponseCount = subResponses.length;\n // Defensive coding in case of potential error parsing.\n // Note: subResponseCount == 1 is special case where sub request is invalid.\n // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.\n // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.\n if (subResponseCount != this.subRequests.size && subResponseCount != 1) {\n throw new Error(\"Invalid state: sub responses' count is not equal to sub requests' count.\");\n }\n deserializedSubResponses = new Array(subResponseCount);\n subResponsesSucceededCount = 0;\n subResponsesFailedCount = 0;\n // Parse sub subResponses.\n for (index = 0; index < subResponseCount; index++) {\n subResponse = subResponses[index];\n deserializedSubResponse = {};\n deserializedSubResponse.headers = new coreHttp.HttpHeaders();\n responseLines = subResponse.split(\"\" + HTTP_LINE_ENDING);\n subRespHeaderStartFound = false;\n subRespHeaderEndFound = false;\n subRespFailed = false;\n contentId = NOT_FOUND;\n for (_i = 0, responseLines_1 = responseLines; _i < responseLines_1.length; _i++) {\n responseLine = responseLines_1[_i];\n if (!subRespHeaderStartFound) {\n // Convention line to indicate content ID\n if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {\n contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);\n }\n // Http version line with status code indicates the start of sub request's response.\n // Example: HTTP/1.1 202 Accepted\n if (responseLine.startsWith(HTTP_VERSION_1_1)) {\n subRespHeaderStartFound = true;\n tokens = responseLine.split(SPACE_DELIMITER);\n deserializedSubResponse.status = parseInt(tokens[1]);\n deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);\n }\n continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: *\n }\n if (responseLine.trim() === \"\") {\n // Sub response's header start line already found, and the first empty line indicates header end line found.\n if (!subRespHeaderEndFound) {\n subRespHeaderEndFound = true;\n }\n continue; // Skip empty line\n }\n // Note: when code reach here, it indicates subRespHeaderStartFound == true\n if (!subRespHeaderEndFound) {\n if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {\n // Defensive coding to prevent from missing valuable lines.\n throw new Error(\"Invalid state: find non-empty line '\" + responseLine + \"' without HTTP header delimiter '\" + HTTP_HEADER_DELIMITER + \"'.\");\n }\n tokens = responseLine.split(HTTP_HEADER_DELIMITER);\n deserializedSubResponse.headers.set(tokens[0], tokens[1]);\n if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {\n deserializedSubResponse.errorCode = tokens[1];\n subRespFailed = true;\n }\n }\n else {\n // Assemble body of sub response.\n if (!deserializedSubResponse.bodyAsText) {\n deserializedSubResponse.bodyAsText = \"\";\n }\n deserializedSubResponse.bodyAsText += responseLine;\n }\n } // Inner for end\n // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.\n // The Content-IDs are set to a valid index in the subrequests we sent. 
In the status code 202 path, we could expect it\n // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that\n // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose.\n if (contentId != NOT_FOUND &&\n Number.isInteger(contentId) &&\n contentId >= 0 &&\n contentId < this.subRequests.size &&\n deserializedSubResponses[contentId] === undefined) {\n deserializedSubResponse._request = this.subRequests.get(contentId);\n deserializedSubResponses[contentId] = deserializedSubResponse;\n }\n else {\n logger.error(\"subResponses[\" + index + \"] is dropped as the Content-ID is not found or invalid, Content-ID: \" + contentId);\n }\n if (subRespFailed) {\n subResponsesFailedCount++;\n }\n else {\n subResponsesSucceededCount++;\n }\n }\n return [2 /*return*/, {\n subResponses: deserializedSubResponses,\n subResponsesSucceededCount: subResponsesSucceededCount,\n subResponsesFailedCount: subResponsesFailedCount\n }];\n }\n });\n });\n };\n return BatchResponseParser;\n}());\n\nvar MutexLockStatus;\n(function (MutexLockStatus) {\n MutexLockStatus[MutexLockStatus[\"LOCKED\"] = 0] = \"LOCKED\";\n MutexLockStatus[MutexLockStatus[\"UNLOCKED\"] = 1] = \"UNLOCKED\";\n})(MutexLockStatus || (MutexLockStatus = {}));\n/**\n * An async mutex lock.\n *\n * @export\n * @class Mutex\n */\nvar Mutex = /** @class */ (function () {\n function Mutex() {\n }\n /**\n * Lock for a specific key. If the lock has been acquired by another customer, then\n * will wait until getting the lock.\n *\n * @static\n * @param {string} key lock key\n * @returns {Promise}\n * @memberof Mutex\n */\n Mutex.lock = function (key) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve) {\n if (_this.keys[key] === undefined || _this.keys[key] === MutexLockStatus.UNLOCKED) {\n _this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n }\n else {\n _this.onUnlockEvent(key, function () {\n _this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n });\n }\n })];\n });\n });\n };\n /**\n * Unlock a key.\n *\n * @static\n * @param {string} key\n * @returns {Promise}\n * @memberof Mutex\n */\n Mutex.unlock = function (key) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _this = this;\n return tslib.__generator(this, function (_a) {\n return [2 /*return*/, new Promise(function (resolve) {\n if (_this.keys[key] === MutexLockStatus.LOCKED) {\n _this.emitUnlockEvent(key);\n }\n delete _this.keys[key];\n resolve();\n })];\n });\n });\n };\n Mutex.onUnlockEvent = function (key, handler) {\n if (this.listeners[key] === undefined) {\n this.listeners[key] = [handler];\n }\n else {\n this.listeners[key].push(handler);\n }\n };\n Mutex.emitUnlockEvent = function (key) {\n var _this = this;\n if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {\n var handler_1 = this.listeners[key].shift();\n setImmediate(function () {\n handler_1.call(_this);\n });\n }\n };\n Mutex.keys = {};\n Mutex.listeners = {};\n return Mutex;\n}());\n\n/**\n * A BlobBatch represents an aggregated set of operations on blobs.\n * Currently, only `delete` and `setAccessTier` are supported.\n *\n * @export\n * @class BlobBatch\n */\nvar BlobBatch = /** @class */ (function () {\n function BlobBatch() {\n this.batch = \"batch\";\n this.batchRequest = new InnerBatchRequest();\n }\n /**\n * Get the value of 
Content-Type for a batch request.\n * The value must be multipart/mixed with a batch boundary.\n * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252\n */\n BlobBatch.prototype.getMultiPartContentType = function () {\n return this.batchRequest.getMultipartContentType();\n };\n /**\n * Get assembled HTTP request body for sub requests.\n */\n BlobBatch.prototype.getHttpRequestBody = function () {\n return this.batchRequest.getHttpRequestBody();\n };\n /**\n * Get sub requests that are added into the batch request.\n */\n BlobBatch.prototype.getSubRequests = function () {\n return this.batchRequest.getSubRequests();\n };\n BlobBatch.prototype.addSubRequestInternal = function (subRequest, assembleSubRequestFunc) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, Mutex.lock(this.batch)];\n case 1:\n _a.sent();\n _a.label = 2;\n case 2:\n _a.trys.push([2, , 4, 6]);\n this.batchRequest.preAddSubRequest(subRequest);\n return [4 /*yield*/, assembleSubRequestFunc()];\n case 3:\n _a.sent();\n this.batchRequest.postAddSubRequest(subRequest);\n return [3 /*break*/, 6];\n case 4: return [4 /*yield*/, Mutex.unlock(this.batch)];\n case 5:\n _a.sent();\n return [7 /*endfinally*/];\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n BlobBatch.prototype.setBatchType = function (batchType) {\n if (!this.batchType) {\n this.batchType = batchType;\n }\n if (this.batchType !== batchType) {\n throw new RangeError(\"BlobBatch only supports one operation type per batch and it already is being used for \" + this.batchType + \" operations.\");\n }\n };\n BlobBatch.prototype.deleteBlob = function (urlOrBlobClient, credentialOrOptions, options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var url, credential, _a, span, spanOptions, e_1;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (typeof urlOrBlobClient === \"string\" &&\n ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||\n credentialOrOptions instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrOptions))) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrOptions;\n }\n else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n options = credentialOrOptions;\n }\n else {\n throw new RangeError(\"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\");\n }\n if (!options) {\n options = {};\n }\n _a = createSpan(\"BatchDeleteRequest-addSubRequest\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n this.setBatchType(\"delete\");\n return [4 /*yield*/, this.addSubRequestInternal({\n url: url,\n credential: credential\n }, function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 1:\n _a.sent();\n return [2 /*return*/];\n }\n });\n }); })];\n case 2:\n _b.sent();\n return [3 /*break*/, 5];\n case 3:\n e_1 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n BlobBatch.prototype.setBlobAccessTier = function (urlOrBlobClient, credentialOrTier, tierOrOptions, options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var url, credential, tier, _a, span, spanOptions, e_2;\n var _this = this;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (typeof urlOrBlobClient === \"string\" &&\n ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||\n credentialOrTier instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrTier))) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrTier;\n tier = tierOrOptions;\n }\n else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n tier = credentialOrTier;\n options = tierOrOptions;\n }\n else {\n throw new RangeError(\"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\");\n }\n if (!options) {\n options = {};\n }\n _a = createSpan(\"BatchSetTierRequest-addSubRequest\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n this.setBatchType(\"setAccessTier\");\n return [4 /*yield*/, this.addSubRequestInternal({\n url: url,\n credential: credential\n }, function () { return tslib.__awaiter(_this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 1:\n _a.sent();\n return [2 /*return*/];\n }\n });\n }); })];\n case 2:\n _b.sent();\n return [3 /*break*/, 5];\n case 3:\n e_2 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_2.message\n });\n throw e_2;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n return BlobBatch;\n}());\n/**\n * Inner batch request class which is responsible for assembling and serializing sub requests.\n * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.\n */\nvar InnerBatchRequest = /** @class */ (function () {\n function InnerBatchRequest() {\n this.operationCount = 0;\n this.body = \"\";\n var tempGuid = coreHttp.generateUuid();\n // batch_{batchid}\n this.boundary = \"batch_\" + tempGuid;\n // --batch_{batchid}\n // Content-Type: application/http\n // Content-Transfer-Encoding: binary\n this.subRequestPrefix = \"--\" + this.boundary + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TYPE + \": application/http\" + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TRANSFER_ENCODING + \": binary\";\n // multipart/mixed; boundary=batch_{batchid}\n this.multipartContentType = \"multipart/mixed; boundary=\" + this.boundary;\n // --batch_{batchid}--\n this.batchRequestEnding = \"--\" + this.boundary + \"--\";\n this.subRequests = new Map();\n }\n /**\n * Create pipeline to assemble sub requests. The idea here is to use existing\n * credential and serialization/deserialization components, with additional policies to\n * filter unnecessary headers, assemble sub requests into request's body\n * and intercept request from going to wire.\n * @param {StorageSharedKeyCredential | AnonymousCredential | TokenCredential} credential Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the @azure/identity package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n InnerBatchRequest.prototype.createPipeline = function (credential) {\n var isAnonymousCreds = credential instanceof AnonymousCredential;\n var policyFactoryLength = 3 + (isAnonymousCreds ? 
0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]\n var factories = new Array(policyFactoryLength);\n factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer\n factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers\n if (!isAnonymousCreds) {\n factories[2] = coreHttp.isTokenCredential(credential)\n ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential)\n : credential;\n }\n factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire\n return new Pipeline(factories, {});\n };\n InnerBatchRequest.prototype.appendSubRequestToBody = function (request) {\n // Start to assemble sub request\n this.body += [\n this.subRequestPrefix,\n HeaderConstants.CONTENT_ID + \": \" + this.operationCount,\n \"\",\n request.method.toString() + \" \" + getURLPathAndQuery(request.url) + \" \" + HTTP_VERSION_1_1 + HTTP_LINE_ENDING // sub request start line with method\n ].join(HTTP_LINE_ENDING);\n for (var _i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) {\n var header = _a[_i];\n this.body += header.name + \": \" + header.value + HTTP_LINE_ENDING;\n }\n this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line\n // No body to assemble for current batch request support\n // End to assemble sub request\n };\n InnerBatchRequest.prototype.preAddSubRequest = function (subRequest) {\n if (this.operationCount >= BATCH_MAX_REQUEST) {\n throw new RangeError(\"Cannot exceed \" + BATCH_MAX_REQUEST + \" sub requests in a single batch\");\n }\n // Fast fail if url for sub request is invalid\n var path = getURLPath(subRequest.url);\n if (!path || path == \"\") {\n throw new RangeError(\"Invalid url for sub request: '\" + subRequest.url + \"'\");\n }\n };\n InnerBatchRequest.prototype.postAddSubRequest = function (subRequest) {\n this.subRequests.set(this.operationCount, subRequest);\n this.operationCount++;\n };\n // Return the http request body with assembling the ending line to the sub request body.\n InnerBatchRequest.prototype.getHttpRequestBody = function () {\n return \"\" + this.body + this.batchRequestEnding + HTTP_LINE_ENDING;\n };\n InnerBatchRequest.prototype.getMultipartContentType = function () {\n return this.multipartContentType;\n };\n InnerBatchRequest.prototype.getSubRequests = function () {\n return this.subRequests;\n };\n return InnerBatchRequest;\n}());\nvar BatchRequestAssemblePolicy = /** @class */ (function (_super) {\n tslib.__extends(BatchRequestAssemblePolicy, _super);\n function BatchRequestAssemblePolicy(batchRequest, nextPolicy, options) {\n var _this = _super.call(this, nextPolicy, options) || this;\n _this.dummyResponse = {\n request: new coreHttp.WebResource(),\n status: 200,\n headers: new coreHttp.HttpHeaders()\n };\n _this.batchRequest = batchRequest;\n return _this;\n }\n BatchRequestAssemblePolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0: return [4 /*yield*/, this.batchRequest.appendSubRequestToBody(request)];\n case 1:\n _a.sent();\n return [2 /*return*/, this.dummyResponse]; // Intercept request from going to wire\n }\n });\n });\n };\n return 
BatchRequestAssemblePolicy;\n}(coreHttp.BaseRequestPolicy));\nvar BatchRequestAssemblePolicyFactory = /** @class */ (function () {\n function BatchRequestAssemblePolicyFactory(batchRequest) {\n this.batchRequest = batchRequest;\n }\n BatchRequestAssemblePolicyFactory.prototype.create = function (nextPolicy, options) {\n return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);\n };\n return BatchRequestAssemblePolicyFactory;\n}());\nvar BatchHeaderFilterPolicy = /** @class */ (function (_super) {\n tslib.__extends(BatchHeaderFilterPolicy, _super);\n function BatchHeaderFilterPolicy(nextPolicy, options) {\n return _super.call(this, nextPolicy, options) || this;\n }\n BatchHeaderFilterPolicy.prototype.sendRequest = function (request) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var xMsHeaderName, _i, _a, header;\n return tslib.__generator(this, function (_b) {\n xMsHeaderName = \"\";\n for (_i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) {\n header = _a[_i];\n if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {\n xMsHeaderName = header.name;\n }\n }\n if (xMsHeaderName !== \"\") {\n request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.\n }\n return [2 /*return*/, this._nextPolicy.sendRequest(request)];\n });\n });\n };\n return BatchHeaderFilterPolicy;\n}(coreHttp.BaseRequestPolicy));\nvar BatchHeaderFilterPolicyFactory = /** @class */ (function () {\n function BatchHeaderFilterPolicyFactory() {\n }\n BatchHeaderFilterPolicyFactory.prototype.create = function (nextPolicy, options) {\n return new BatchHeaderFilterPolicy(nextPolicy, options);\n };\n return BatchHeaderFilterPolicyFactory;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n */\nvar BlobBatchClient = /** @class */ (function () {\n function BlobBatchClient(url, credentialOrPipeline, options) {\n var pipeline;\n if (credentialOrPipeline instanceof Pipeline) {\n pipeline = credentialOrPipeline;\n }\n else if (!credentialOrPipeline) {\n // no credential provided\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n else {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n var storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());\n this._serviceContext = new Service(storageClientContext);\n }\n /**\n * Creates a {@link BlobBatch}.\n * A BlobBatch represents an aggregated set of operations on blobs.\n */\n BlobBatchClient.prototype.createBatch = function () {\n return new BlobBatch();\n };\n BlobBatchClient.prototype.deleteBlobs = function (urlsOrBlobClients, credentialOrOptions, options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var batch, _i, urlsOrBlobClients_1, urlOrBlobClient;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n batch = new BlobBatch();\n _i = 0, urlsOrBlobClients_1 = urlsOrBlobClients;\n _a.label = 1;\n case 1:\n if (!(_i < urlsOrBlobClients_1.length)) return [3 /*break*/, 6];\n urlOrBlobClient = urlsOrBlobClients_1[_i];\n if (!(typeof urlOrBlobClient === \"string\")) return [3 /*break*/, 3];\n return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options)];\n case 2:\n _a.sent();\n return [3 /*break*/, 5];\n case 3: return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, 
credentialOrOptions)];\n case 4:\n _a.sent();\n _a.label = 5;\n case 5:\n _i++;\n return [3 /*break*/, 1];\n case 6: return [2 /*return*/, this.submitBatch(batch)];\n }\n });\n });\n };\n BlobBatchClient.prototype.setBlobsAccessTier = function (urlsOrBlobClients, credentialOrTier, tierOrOptions, options) {\n return tslib.__awaiter(this, void 0, void 0, function () {\n var batch, _i, urlsOrBlobClients_2, urlOrBlobClient;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n batch = new BlobBatch();\n _i = 0, urlsOrBlobClients_2 = urlsOrBlobClients;\n _a.label = 1;\n case 1:\n if (!(_i < urlsOrBlobClients_2.length)) return [3 /*break*/, 6];\n urlOrBlobClient = urlsOrBlobClients_2[_i];\n if (!(typeof urlOrBlobClient === \"string\")) return [3 /*break*/, 3];\n return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options)];\n case 2:\n _a.sent();\n return [3 /*break*/, 5];\n case 3: return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions)];\n case 4:\n _a.sent();\n _a.label = 5;\n case 5:\n _i++;\n return [3 /*break*/, 1];\n case 6: return [2 /*return*/, this.submitBatch(batch)];\n }\n });\n });\n };\n /**\n * Submit batch request which consists of multiple subrequests.\n *\n * Get `blobBatchClient` and other details before running the snippets.\n * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`\n *\n * Example usage:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.deleteBlob(urlInString0, credential0);\n * await batchRequest.deleteBlob(urlInString1, credential1, {\n * deleteSnapshots: \"include\"\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * Example using a lease:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.setBlobAccessTier(blockBlobClient0, \"Cool\");\n * await batchRequest.setBlobAccessTier(blockBlobClient1, \"Cool\", {\n * conditions: { leaseId: leaseId }\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @param {BlobBatch} batchRequest A set of Delete or SetTier operations.\n * @param {BlobBatchSubmitBatchOptionalParams} [options]\n * @returns {Promise}\n * @memberof BlobBatchClient\n */\n BlobBatchClient.prototype.submitBatch = function (batchRequest, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, batchRequestBody, rawBatchResponse, batchResponseParser, responseSummary, res, e_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n if (!batchRequest || batchRequest.getSubRequests().size == 0) {\n throw new RangeError(\"Batch request should contain one or more sub requests.\");\n }\n _a = createSpan(\"BlobBatchClient-submitBatch\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 4, 5, 6]);\n batchRequestBody = batchRequest.getHttpRequestBody();\n return [4 /*yield*/, this._serviceContext.submitBatch(batchRequestBody, utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), tslib.__assign(tslib.__assign({}, options), { spanOptions: spanOptions }))];\n case 2:\n rawBatchResponse = _b.sent();\n batchResponseParser 
= new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests());\n return [4 /*yield*/, batchResponseParser.parseBatchResponse()];\n case 3:\n responseSummary = _b.sent();\n res = {\n _response: rawBatchResponse._response,\n contentType: rawBatchResponse.contentType,\n errorCode: rawBatchResponse.errorCode,\n requestId: rawBatchResponse.requestId,\n clientRequestId: rawBatchResponse.clientRequestId,\n version: rawBatchResponse.version,\n subResponses: responseSummary.subResponses,\n subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,\n subResponsesFailedCount: responseSummary.subResponsesFailedCount\n };\n return [2 /*return*/, res];\n case 4:\n e_1 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 5:\n span.end();\n return [7 /*endfinally*/];\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n return BlobBatchClient;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the\n * values are set, this should be serialized with toString and set as the permissions field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n *\n * @export\n * @class AccountSASPermissions\n */\nvar AccountSASPermissions = /** @class */ (function () {\n function AccountSASPermissions() {\n /**\n * Permission to read resources and list queues and tables granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.read = false;\n /**\n * Permission to write resources granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.write = false;\n /**\n * Permission to create blobs and files granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.delete = false;\n /**\n * Permission to delete versions granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.deleteVersion = false;\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.list = false;\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.add = false;\n /**\n * Permission to create blobs and files granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.create = false;\n /**\n * Permissions to update messages and table entities granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.update = false;\n /**\n * Permission to get and delete messages granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.process = false;\n /**\n * Specfies Tag access granted.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.tag = false;\n /**\n * Permission to filter blobs.\n *\n * @type {boolean}\n * @memberof AccountSASPermissions\n */\n this.filter = false;\n }\n /**\n * Parse initializes the AccountSASPermissions fields from a string.\n *\n * @static\n * @param {string} permissions\n * @returns 
{AccountSASPermissions}\n * @memberof AccountSASPermissions\n */\n AccountSASPermissions.parse = function (permissions) {\n var accountSASPermissions = new AccountSASPermissions();\n for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {\n var c = permissions_1[_i];\n switch (c) {\n case \"r\":\n accountSASPermissions.read = true;\n break;\n case \"w\":\n accountSASPermissions.write = true;\n break;\n case \"d\":\n accountSASPermissions.delete = true;\n break;\n case \"x\":\n accountSASPermissions.deleteVersion = true;\n break;\n case \"l\":\n accountSASPermissions.list = true;\n break;\n case \"a\":\n accountSASPermissions.add = true;\n break;\n case \"c\":\n accountSASPermissions.create = true;\n break;\n case \"u\":\n accountSASPermissions.update = true;\n break;\n case \"p\":\n accountSASPermissions.process = true;\n break;\n case \"t\":\n accountSASPermissions.tag = true;\n break;\n case \"f\":\n accountSASPermissions.filter = true;\n break;\n default:\n throw new RangeError(\"Invalid permission character: \" + c);\n }\n }\n return accountSASPermissions;\n };\n /**\n * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @static\n * @param {AccountSASPermissionsLike} permissionLike\n * @returns {AccountSASPermissions}\n * @memberof AccountSASPermissions\n */\n AccountSASPermissions.from = function (permissionLike) {\n var accountSASPermissions = new AccountSASPermissions();\n if (permissionLike.read) {\n accountSASPermissions.read = true;\n }\n if (permissionLike.write) {\n accountSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n accountSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n accountSASPermissions.deleteVersion = true;\n }\n if (permissionLike.filter) {\n accountSASPermissions.filter = true;\n }\n if (permissionLike.tag) {\n accountSASPermissions.tag = true;\n }\n if (permissionLike.list) {\n accountSASPermissions.list = true;\n }\n if (permissionLike.add) {\n accountSASPermissions.add = true;\n }\n if (permissionLike.create) {\n accountSASPermissions.create = true;\n }\n if (permissionLike.update) {\n accountSASPermissions.update = true;\n }\n if (permissionLike.process) {\n accountSASPermissions.process = true;\n }\n return accountSASPermissions;\n };\n /**\n * Produces the SAS permissions string for an Azure Storage account.\n * Call this method to set AccountSASSignatureValues Permissions field.\n *\n * Using this method will guarantee the resource types are in\n * an order accepted by the service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @returns {string}\n * @memberof AccountSASPermissions\n */\n AccountSASPermissions.prototype.toString = function () {\n // The order of the characters should be as specified here to ensure correctness:\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n // Use a string array instead of string concatenating += operator for performance\n var permissions = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.filter) {\n permissions.push(\"f\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n 
permissions.push(\"c\");\n }\n if (this.update) {\n permissions.push(\"u\");\n }\n if (this.process) {\n permissions.push(\"p\");\n }\n return permissions.join(\"\");\n };\n return AccountSASPermissions;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the\n * values are set, this should be serialized with toString and set as the resources field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but\n * the order of the resources is particular and this class guarantees correctness.\n *\n * @export\n * @class AccountSASResourceTypes\n */\nvar AccountSASResourceTypes = /** @class */ (function () {\n function AccountSASResourceTypes() {\n /**\n * Permission to access service level APIs granted.\n *\n * @type {boolean}\n * @memberof AccountSASResourceTypes\n */\n this.service = false;\n /**\n * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.\n *\n * @type {boolean}\n * @memberof AccountSASResourceTypes\n */\n this.container = false;\n /**\n * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.\n *\n * @type {boolean}\n * @memberof AccountSASResourceTypes\n */\n this.object = false;\n }\n /**\n * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an\n * Error if it encounters a character that does not correspond to a valid resource type.\n *\n * @static\n * @param {string} resourceTypes\n * @returns {AccountSASResourceTypes}\n * @memberof AccountSASResourceTypes\n */\n AccountSASResourceTypes.parse = function (resourceTypes) {\n var accountSASResourceTypes = new AccountSASResourceTypes();\n for (var _i = 0, resourceTypes_1 = resourceTypes; _i < resourceTypes_1.length; _i++) {\n var c = resourceTypes_1[_i];\n switch (c) {\n case \"s\":\n accountSASResourceTypes.service = true;\n break;\n case \"c\":\n accountSASResourceTypes.container = true;\n break;\n case \"o\":\n accountSASResourceTypes.object = true;\n break;\n default:\n throw new RangeError(\"Invalid resource type: \" + c);\n }\n }\n return accountSASResourceTypes;\n };\n /**\n * Converts the given resource types to a string.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @returns {string}\n * @memberof AccountSASResourceTypes\n */\n AccountSASResourceTypes.prototype.toString = function () {\n var resourceTypes = [];\n if (this.service) {\n resourceTypes.push(\"s\");\n }\n if (this.container) {\n resourceTypes.push(\"c\");\n }\n if (this.object) {\n resourceTypes.push(\"o\");\n }\n return resourceTypes.join(\"\");\n };\n return AccountSASResourceTypes;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that service. 
Once all the\n * values are set, this should be serialized with toString and set as the services field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but\n * the order of the services is particular and this class guarantees correctness.\n *\n * @export\n * @class AccountSASServices\n */\nvar AccountSASServices = /** @class */ (function () {\n function AccountSASServices() {\n /**\n * Permission to access blob resources granted.\n *\n * @type {boolean}\n * @memberof AccountSASServices\n */\n this.blob = false;\n /**\n * Permission to access file resources granted.\n *\n * @type {boolean}\n * @memberof AccountSASServices\n */\n this.file = false;\n /**\n * Permission to access queue resources granted.\n *\n * @type {boolean}\n * @memberof AccountSASServices\n */\n this.queue = false;\n /**\n * Permission to access table resources granted.\n *\n * @type {boolean}\n * @memberof AccountSASServices\n */\n this.table = false;\n }\n /**\n * Creates an {@link AccountSASServices} from the specified services string. This method will throw an\n * Error if it encounters a character that does not correspond to a valid service.\n *\n * @static\n * @param {string} services\n * @returns {AccountSASServices}\n * @memberof AccountSASServices\n */\n AccountSASServices.parse = function (services) {\n var accountSASServices = new AccountSASServices();\n for (var _i = 0, services_1 = services; _i < services_1.length; _i++) {\n var c = services_1[_i];\n switch (c) {\n case \"b\":\n accountSASServices.blob = true;\n break;\n case \"f\":\n accountSASServices.file = true;\n break;\n case \"q\":\n accountSASServices.queue = true;\n break;\n case \"t\":\n accountSASServices.table = true;\n break;\n default:\n throw new RangeError(\"Invalid service character: \" + c);\n }\n }\n return accountSASServices;\n };\n /**\n * Converts the given services to a string.\n *\n * @returns {string}\n * @memberof AccountSASServices\n */\n AccountSASServices.prototype.toString = function () {\n var services = [];\n if (this.blob) {\n services.push(\"b\");\n }\n if (this.table) {\n services.push(\"t\");\n }\n if (this.queue) {\n services.push(\"q\");\n }\n if (this.file) {\n services.push(\"f\");\n }\n return services.join(\"\");\n };\n return AccountSASServices;\n}());\n\n// Copyright (c) Microsoft Corporation. All rights reserved.\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual\n * REST request.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @param {AccountSASSignatureValues} accountSASSignatureValues\n * @param {StorageSharedKeyCredential} sharedKeyCredential\n * @returns {SASQueryParameters}\n * @memberof AccountSASSignatureValues\n */\nfunction generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) {\n var version = accountSASSignatureValues.version\n ? 
accountSASSignatureValues.version\n : SERVICE_VERSION;\n if (accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'x' permission.\");\n }\n if (accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\") {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 't' permission.\");\n }\n if (accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.filter &&\n version < \"2019-12-12\") {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 'f' permission.\");\n }\n var parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString());\n var parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();\n var parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString();\n var stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n \"\" // Account SAS requires an additional newline character\n ].join(\"\\n\");\n var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange);\n}\n\n/**\n * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you\n * to manipulate blob containers.\n *\n * @export\n * @class BlobServiceClient\n */\nvar BlobServiceClient = /** @class */ (function (_super) {\n tslib.__extends(BlobServiceClient, _super);\n function BlobServiceClient(url, credentialOrPipeline, options) {\n var _this = this;\n var pipeline;\n if (credentialOrPipeline instanceof Pipeline) {\n pipeline = credentialOrPipeline;\n }\n else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||\n credentialOrPipeline instanceof AnonymousCredential ||\n coreHttp.isTokenCredential(credentialOrPipeline)) {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n else {\n // The second parameter is undefined. Use anonymous credential\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n _this = _super.call(this, url, pipeline) || this;\n _this.serviceContext = new Service(_this.storageClientContext);\n return _this;\n }\n /**\n *\n * Creates an instance of BlobServiceClient from connection string.\n *\n * @param {string} connectionString Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param {StoragePipelineOptions} [options] Optional. Options to configure the HTTP pipeline.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.fromConnectionString = function (connectionString, options) {\n options = options || {};\n var extractedCreds = extractConnectionStringParts(connectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n {\n var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);\n options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);\n var pipeline = newPipeline(sharedKeyCredential, options);\n return new BlobServiceClient(extractedCreds.url, pipeline);\n }\n }\n else if (extractedCreds.kind === \"SASConnString\") {\n var pipeline = newPipeline(new AnonymousCredential(), options);\n return new BlobServiceClient(extractedCreds.url + \"?\" + extractedCreds.accountSas, pipeline);\n }\n else {\n throw new Error(\"Connection string must be either an Account connection string or a SAS connection string\");\n }\n };\n /**\n * Creates a {@link ContainerClient} object\n *\n * @param {string} containerName A container name\n * @returns {ContainerClient} A new ContainerClient object for the given container name.\n * @memberof BlobServiceClient\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * ```\n */\n BlobServiceClient.prototype.getContainerClient = function (containerName) {\n return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline);\n };\n /**\n * Create a Blob container.\n *\n * @param {string} containerName Name of the container to create.\n * @param {ContainerCreateOptions} [options] Options to configure Container Create operation.\n * @returns {Promise<{ containerClient: ContainerClient; containerCreateResponse: ContainerCreateResponse }>} Container creation response and the corresponding container client.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.createContainer = function (containerName, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, containerClient, containerCreateResponse, e_1;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-createContainer\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n containerClient = this.getContainerClient(containerName);\n return [4 /*yield*/, containerClient.create(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n containerCreateResponse = _b.sent();\n return [2 /*return*/, {\n containerClient: containerClient,\n containerCreateResponse: containerCreateResponse\n }];\n case 3:\n e_1 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_1.message\n });\n throw e_1;\n case 
4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Deletes a Blob container.\n *\n * @param {string} containerName Name of the container to delete.\n * @param {ContainerDeleteMethodOptions} [options] Options to configure Container Delete operation.\n * @returns {Promise} Container deletion response.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.deleteContainer = function (containerName, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, containerClient, e_2;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-deleteContainer\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n containerClient = this.getContainerClient(containerName);\n return [4 /*yield*/, containerClient.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_2 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_2.message\n });\n throw e_2;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Restore a previously deleted Blob container.\n * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container.\n *\n * @param {string} deletedContainerName Name of the previously deleted container.\n * @param {string} deletedContainerVersion Version of the previously deleted container, used to uniquely identify the deleted container.\n * @returns {Promise} Container deletion response.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.undeleteContainer = function (deletedContainerName, deletedContainerVersion, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, containerClient, containerContext, containerUndeleteResponse, e_3;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-undeleteContainer\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName);\n containerContext = new Container(containerClient[\"storageClientContext\"]);\n return [4 /*yield*/, containerContext.restore(tslib.__assign(tslib.__assign({ deletedContainerName: deletedContainerName,\n deletedContainerVersion: deletedContainerVersion }, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];\n case 2:\n containerUndeleteResponse = _b.sent();\n return [2 /*return*/, { containerClient: containerClient, containerUndeleteResponse: containerUndeleteResponse }];\n case 3:\n e_3 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_3.message\n });\n throw e_3;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Gets the properties of a storage account’s Blob service, including properties\n * for Storage Analytics and CORS (Cross-Origin Resource 
Sharing) rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * @param {ServiceGetPropertiesOptions} [options] Options to the Service Get Properties operation.\n * @returns {Promise} Response data for the Service Get Properties operation.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.getProperties = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_4;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-getProperties\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.getProperties({\n abortSignal: options.abortSignal,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_4 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_4.message\n });\n throw e_4;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Sets properties for a storage account’s Blob service endpoint, including properties\n * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties}\n *\n * @param {BlobServiceProperties} properties\n * @param {ServiceSetPropertiesOptions} [options] Options to the Service Set Properties operation.\n * @returns {Promise} Response data for the Service Set Properties operation.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.setProperties = function (properties, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_5;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-setProperties\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.setProperties(properties, {\n abortSignal: options.abortSignal,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_5 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_5.message\n });\n throw e_5;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Retrieves statistics related to replication for the Blob service. 
It is only\n * available on the secondary location endpoint when read-access geo-redundant\n * replication is enabled for the storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats}\n *\n * @param {ServiceGetStatisticsOptions} [options] Options to the Service Get Statistics operation.\n * @returns {Promise} Response data for the Service Get Statistics operation.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.getStatistics = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_6;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-getStatistics\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.getStatistics({\n abortSignal: options.abortSignal,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_6 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_6.message\n });\n throw e_6;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Get Account Information operation returns the sku name and account kind\n * for the specified account.\n * The Get Account Information operation is available on service versions beginning\n * with version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information\n *\n * @param {ServiceGetAccountInfoOptions} [options] Options to the Service Get Account Info operation.\n * @returns {Promise} Response data for the Service Get Account Info operation.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.getAccountInfo = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_7;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-getAccountInfo\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.getAccountInfo({\n abortSignal: options.abortSignal,\n spanOptions: spanOptions\n })];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_7 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_7.message\n });\n throw e_7;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns a list of the containers under the specified account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2\n *\n * @param {string} [marker] A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param {ServiceListContainersSegmentOptions} [options] Options to the Service List Container Segment operation.\n * @returns {Promise} Response data for the Service List Container Segment operation.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.listContainersSegment = function (marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, e_8;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-listContainersSegment\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.listContainersSegment(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, marker: marker }, options), { include: typeof options.include === \"string\" ? [options.include] : options.include, spanOptions: spanOptions }))];\n case 2: return [2 /*return*/, _b.sent()];\n case 3:\n e_8 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_8.message\n });\n throw e_8;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags\n * match a given search expression. Filter blobs searches across all containers within a\n * storage account but can be scoped within the expression to a single container.\n *\n * @private\n * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param {string} [marker] A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to find blobs by tags.\n * @returns {Promise}\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.findBlobsByTagsSegment = function (tagFilterSqlExpression, marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, response, wrappedResponse, e_9;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-findBlobsByTagsSegment\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker: marker,\n maxPageSize: options.maxPageSize,\n spanOptions: spanOptions\n })];\n case 2:\n response = _b.sent();\n wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, blobs: response.blobs.map(function (blob) {\n var _a;\n var tagValue = \"\";\n if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return tslib.__assign(tslib.__assign({}, blob), { tags: toTags(blob.tags), tagValue: tagValue });\n }) });\n return [2 /*return*/, wrappedResponse];\n case 3:\n e_9 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_9.message\n });\n throw e_9;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.\n *\n * @private\n * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param {string} [marker] A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to find blobs by tags.\n * @returns {AsyncIterableIterator}\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.findBlobsByTagsSegments = function (tagFilterSqlExpression, marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsSegments_1() {\n var response;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!(!!marker || marker === undefined)) return [3 /*break*/, 6];\n _a.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options))];\n case 2:\n response = _a.sent();\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n return [4 /*yield*/, tslib.__await(response)];\n case 3: return [4 /*yield*/, _a.sent()];\n case 4:\n _a.sent();\n _a.label = 5;\n case 5:\n if (marker) return [3 /*break*/, 1];\n _a.label = 6;\n case 6: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @private\n * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to findBlobsByTagsItems.\n * @returns {AsyncIterableIterator}\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.findBlobsByTagsItems = function (tagFilterSqlExpression, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsItems_1() {\n var marker, _a, _b, segment, e_10_1;\n var e_10, _c;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _d.trys.push([0, 7, 8, 13]);\n _a = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options));\n _d.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(_a.next())];\n case 2:\n if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];\n segment = _b.value;\n return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)))];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];\n case 4:\n _d.sent();\n _d.label = 5;\n case 5: return [3 /*break*/, 1];\n case 6: return [3 /*break*/, 13];\n case 7:\n e_10_1 = _d.sent();\n e_10 = { error: e_10_1 };\n return [3 /*break*/, 13];\n case 8:\n _d.trys.push([8, , 11, 12]);\n if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];\n return [4 /*yield*/, tslib.__await(_c.call(_a))];\n case 9:\n _d.sent();\n _d.label = 10;\n case 10: return [3 /*break*/, 12];\n case 11:\n if (e_10) throw e_10.error;\n return [7 /*endfinally*/];\n case 12: return [7 /*endfinally*/];\n case 13: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * Example using `for await` syntax:\n 
*\n * ```js\n * let i = 1;\n * for await (const blob of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param {ServiceFindBlobByTagsOptions} [options={}] Options to find blobs by tags.\n * @returns {PagedAsyncIterableIterator}\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.findBlobsByTags = function (tagFilterSqlExpression, options) {\n var _a;\n var _this = this;\n if (options === void 0) { options = {}; }\n // AsyncIterableIterator to iterate over blobs\n var listSegmentOptions = tslib.__assign({}, options);\n var iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return _a = {\n /**\n * @member {Promise} [next] The next method, part of the iteration protocol\n */\n next: function () {\n return iter.next();\n }\n },\n /**\n * @member {Symbol} [asyncIterator] The connection to the async iterator, part of the iteration protocol\n */\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n /**\n * @member {Function} [byPage] Return an AsyncIterableIterator that works a page at a time\n */\n _a.byPage = function (settings) {\n if (settings === void 0) { settings = {}; }\n return _this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));\n },\n _a;\n };\n /**\n * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses\n *\n * @private\n * @param {string} [marker] A string value that identifies the portion of\n 
* the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param {ServiceListContainersSegmentOptions} [options] Options to list containers operation.\n * @returns {AsyncIterableIterator}\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.listSegments = function (marker, options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listSegments_1() {\n var listContainersSegmentResponse;\n return tslib.__generator(this, function (_a) {\n switch (_a.label) {\n case 0:\n if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];\n _a.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(this.listContainersSegment(marker, options))];\n case 2:\n listContainersSegmentResponse = _a.sent();\n listContainersSegmentResponse.containerItems =\n listContainersSegmentResponse.containerItems || [];\n marker = listContainersSegmentResponse.continuationToken;\n return [4 /*yield*/, tslib.__await(listContainersSegmentResponse)];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];\n case 4: return [4 /*yield*/, _a.sent()];\n case 5:\n _a.sent();\n _a.label = 6;\n case 6:\n if (marker) return [3 /*break*/, 1];\n _a.label = 7;\n case 7: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an AsyncIterableIterator for Container Items\n *\n * @private\n * @param {ServiceListContainersSegmentOptions} [options] Options to list containers operation.\n * @returns {AsyncIterableIterator}\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.listItems = function (options) {\n if (options === void 0) { options = {}; }\n return tslib.__asyncGenerator(this, arguments, function listItems_1() {\n var marker, _a, _b, segment, e_11_1;\n var e_11, _c;\n return tslib.__generator(this, function (_d) {\n switch (_d.label) {\n case 0:\n _d.trys.push([0, 7, 8, 13]);\n _a = tslib.__asyncValues(this.listSegments(marker, options));\n _d.label = 1;\n case 1: return [4 /*yield*/, tslib.__await(_a.next())];\n case 2:\n if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];\n segment = _b.value;\n return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems)))];\n case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];\n case 4:\n _d.sent();\n _d.label = 5;\n case 5: return [3 /*break*/, 1];\n case 6: return [3 /*break*/, 13];\n case 7:\n e_11_1 = _d.sent();\n e_11 = { error: e_11_1 };\n return [3 /*break*/, 13];\n case 8:\n _d.trys.push([8, , 11, 12]);\n if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];\n return [4 /*yield*/, tslib.__await(_c.call(_a))];\n case 9:\n _d.sent();\n _d.label = 10;\n case 10: return [3 /*break*/, 12];\n case 11:\n if (e_11) throw e_11.error;\n return [7 /*endfinally*/];\n case 12: return [7 /*endfinally*/];\n case 13: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Returns an async iterable iterator to list all the containers\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the containers in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * 
for await (const container of blobServiceClient.listContainers()) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.listContainers();\n * let containerItem = await iter.next();\n * while (!containerItem.done) {\n * console.log(`Container ${i++}: ${containerItem.value.name}`);\n * containerItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .listContainers()\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * ```\n *\n * @param {ServiceListContainersOptions} [options={}] Options to list containers.\n * @returns {PagedAsyncIterableIterator} An asyncIterableIterator that supports paging.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.listContainers = function (options) {\n var _a;\n var _this = this;\n if (options === void 0) { options = {}; }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n var include = [];\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n // AsyncIterableIterator to iterate over containers\n var listSegmentOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));\n var iter = this.listItems(listSegmentOptions);\n return _a = {\n /**\n * @member {Promise} [next] The next method, part of the iteration protocol\n */\n next: function () {\n return iter.next();\n }\n },\n /**\n * @member {Symbol} [asyncIterator] The connection to the async iterator, part of the iteration protocol\n */\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n /**\n * @member {Function} [byPage] Return an AsyncIterableIterator that works a page at a time\n */\n _a.byPage = function (settings) {\n if (settings === void 0) { settings = {}; }\n return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));\n },\n _a;\n };\n /**\n * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).\n *\n * Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using\n * bearer token authentication.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key\n *\n * @param {Date} startsOn The start time for the user delegation SAS. Must be within 7 days of the current time\n * @param {Date} expiresOn The end time for the user delegation SAS. Must be within 7 days of the current time\n * @returns {Promise}\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.getUserDelegationKey = function (startsOn, expiresOn, options) {\n if (options === void 0) { options = {}; }\n return tslib.__awaiter(this, void 0, void 0, function () {\n var _a, span, spanOptions, response, userDelegationKey, res, e_12;\n return tslib.__generator(this, function (_b) {\n switch (_b.label) {\n case 0:\n _a = createSpan(\"BlobServiceClient-getUserDelegationKey\", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;\n _b.label = 1;\n case 1:\n _b.trys.push([1, 3, 4, 5]);\n return [4 /*yield*/, this.serviceContext.getUserDelegationKey({\n startsOn: truncatedISO8061Date(startsOn, false),\n expiresOn: truncatedISO8061Date(expiresOn, false)\n }, {\n abortSignal: options.abortSignal,\n spanOptions: spanOptions\n })];\n case 2:\n response = _b.sent();\n userDelegationKey = {\n signedObjectId: response.signedObjectId,\n signedTenantId: response.signedTenantId,\n signedStartsOn: new Date(response.signedStartsOn),\n signedExpiresOn: new Date(response.signedExpiresOn),\n signedService: response.signedService,\n signedVersion: response.signedVersion,\n value: response.value\n };\n res = tslib.__assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey);\n return [2 /*return*/, res];\n case 3:\n e_12 = _b.sent();\n span.setStatus({\n code: api.CanonicalCode.UNKNOWN,\n message: e_12.message\n });\n throw e_12;\n case 4:\n span.end();\n return [7 /*endfinally*/];\n case 5: return [2 /*return*/];\n }\n });\n });\n };\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns {BlobBatchClient} A new BlobBatchClient object for this service.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.getBlobBatchClient = function () {\n return new BlobBatchClient(this.url, this.pipeline);\n };\n /**\n * Only available for BlobServiceClient constructed with a shared key credential.\n *\n * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas\n *\n * @param {Date} expiresOn Optional. The time at which the shared access signature becomes invalid. 
Default to an hour later if not provided.\n * @param {AccountSASPermissions} [permissions=AccountSASPermissions.parse(\"r\")] Specifies the list of permissions to be associated with the SAS.\n * @param {string} [resourceTypes=\"sco\"] Specifies the resource types associated with the shared access signature.\n * @param {ServiceGenerateAccountSasUrlOptions} [options={}] Optional parameters.\n * @returns {string} An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n * @memberof BlobServiceClient\n */\n BlobServiceClient.prototype.generateAccountSasUrl = function (expiresOn, permissions, resourceTypes, options) {\n if (permissions === void 0) { permissions = AccountSASPermissions.parse(\"r\"); }\n if (resourceTypes === void 0) { resourceTypes = \"sco\"; }\n if (options === void 0) { options = {}; }\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw RangeError(\"Can only generate the account SAS when the client is initialized with a shared key credential\");\n }\n if (expiresOn === undefined) {\n var now = new Date();\n expiresOn = new Date(now.getTime() + 3600 * 1000);\n }\n var sas = generateAccountSASQueryParameters(tslib.__assign({ permissions: permissions,\n expiresOn: expiresOn,\n resourceTypes: resourceTypes, services: AccountSASServices.parse(\"b\").toString() }, options), this.credential).toString();\n return appendToURLQuery(this.url, sas);\n };\n return BlobServiceClient;\n}(StorageClient));\n\nObject.defineProperty(exports, 'BaseRequestPolicy', {\n enumerable: true,\n get: function () {\n return coreHttp.BaseRequestPolicy;\n }\n});\nObject.defineProperty(exports, 'HttpHeaders', {\n enumerable: true,\n get: function () {\n return coreHttp.HttpHeaders;\n }\n});\nObject.defineProperty(exports, 'RequestPolicyOptions', {\n enumerable: true,\n get: function () {\n return coreHttp.RequestPolicyOptions;\n }\n});\nObject.defineProperty(exports, 'RestError', {\n enumerable: true,\n get: function () {\n return coreHttp.RestError;\n }\n});\nObject.defineProperty(exports, 'WebResource', {\n enumerable: true,\n get: function () {\n return coreHttp.WebResource;\n }\n});\nObject.defineProperty(exports, 'deserializationPolicy', {\n enumerable: true,\n get: function () {\n return coreHttp.deserializationPolicy;\n }\n});\nexports.AccountSASPermissions = AccountSASPermissions;\nexports.AccountSASResourceTypes = AccountSASResourceTypes;\nexports.AccountSASServices = AccountSASServices;\nexports.AnonymousCredential = AnonymousCredential;\nexports.AnonymousCredentialPolicy = AnonymousCredentialPolicy;\nexports.AppendBlobClient = AppendBlobClient;\nexports.BlobBatch = BlobBatch;\nexports.BlobBatchClient = BlobBatchClient;\nexports.BlobClient = BlobClient;\nexports.BlobLeaseClient = BlobLeaseClient;\nexports.BlobSASPermissions = BlobSASPermissions;\nexports.BlobServiceClient = BlobServiceClient;\nexports.BlockBlobClient = BlockBlobClient;\nexports.ContainerClient = ContainerClient;\nexports.ContainerSASPermissions = ContainerSASPermissions;\nexports.Credential = Credential;\nexports.CredentialPolicy = CredentialPolicy;\nexports.PageBlobClient = PageBlobClient;\nexports.Pipeline = Pipeline;\nexports.SASQueryParameters = SASQueryParameters;\nexports.StorageBrowserPolicy = StorageBrowserPolicy;\nexports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory;\nexports.StorageOAuthScopes = StorageOAuthScopes;\nexports.StorageRetryPolicy = StorageRetryPolicy;\nexports.StorageRetryPolicyFactory = 
StorageRetryPolicyFactory;\nexports.StorageSharedKeyCredential = StorageSharedKeyCredential;\nexports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy;\nexports.generateAccountSASQueryParameters = generateAccountSASQueryParameters;\nexports.generateBlobSASQueryParameters = generateBlobSASQueryParameters;\nexports.logger = logger;\nexports.newPipeline = newPipeline;\n//# sourceMappingURL=index.js.map\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from) {\r\n for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)\r\n to[j] = from[i];\r\n return to;\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ContextAPI = void 0;\nvar context_base_1 = require(\"@opentelemetry/context-base\");\nvar global_utils_1 = require(\"./global-utils\");\nvar NOOP_CONTEXT_MANAGER = new context_base_1.NoopContextManager();\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nvar ContextAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function ContextAPI() {\n }\n /** Get the singleton instance of the Context API */\n ContextAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n return this._instance;\n };\n /**\n * Set the current context manager. 
Returns the initialized context manager\n */\n ContextAPI.prototype.setGlobalContextManager = function (contextManager) {\n if (global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY]) {\n // global context manager has already been set\n return this._getContextManager();\n }\n global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, contextManager, NOOP_CONTEXT_MANAGER);\n return contextManager;\n };\n /**\n * Get the currently active context\n */\n ContextAPI.prototype.active = function () {\n return this._getContextManager().active();\n };\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n */\n ContextAPI.prototype.with = function (context, fn) {\n return this._getContextManager().with(context, fn);\n };\n /**\n * Bind a context to a target function or event emitter\n *\n * @param target function or event emitter to bind\n * @param context context to bind to the event emitter or function. Defaults to the currently active context\n */\n ContextAPI.prototype.bind = function (target, context) {\n if (context === void 0) { context = this.active(); }\n return this._getContextManager().bind(target, context);\n };\n ContextAPI.prototype._getContextManager = function () {\n var _a, _b;\n return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? _b : NOOP_CONTEXT_MANAGER);\n };\n /** Disable and remove the global context manager */\n ContextAPI.prototype.disable = function () {\n this._getContextManager().disable();\n delete global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY];\n };\n return ContextAPI;\n}());\nexports.ContextAPI = ContextAPI;\n//# sourceMappingURL=context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.API_BACKWARDS_COMPATIBILITY_VERSION = exports.makeGetter = exports._global = exports.GLOBAL_TRACE_API_KEY = exports.GLOBAL_PROPAGATION_API_KEY = exports.GLOBAL_METRICS_API_KEY = exports.GLOBAL_CONTEXT_MANAGER_API_KEY = void 0;\nvar platform_1 = require(\"../platform\");\nexports.GLOBAL_CONTEXT_MANAGER_API_KEY = Symbol.for('io.opentelemetry.js.api.context');\nexports.GLOBAL_METRICS_API_KEY = Symbol.for('io.opentelemetry.js.api.metrics');\nexports.GLOBAL_PROPAGATION_API_KEY = Symbol.for('io.opentelemetry.js.api.propagation');\nexports.GLOBAL_TRACE_API_KEY = Symbol.for('io.opentelemetry.js.api.trace');\nexports._global = platform_1._globalThis;\n/**\n * Make a function which accepts a version integer and returns the instance of an API if the version\n * is compatible, or a fallback version (usually NOOP) 
if it is not.\n *\n * @param requiredVersion Backwards compatibility version which is required to return the instance\n * @param instance Instance which should be returned if the required version is compatible\n * @param fallback Fallback instance, usually NOOP, which will be returned if the required version is not compatible\n */\nfunction makeGetter(requiredVersion, instance, fallback) {\n return function (version) {\n return version === requiredVersion ? instance : fallback;\n };\n}\nexports.makeGetter = makeGetter;\n/**\n * A number which should be incremented each time a backwards incompatible\n * change is made to the API. This number is used when an API package\n * attempts to access the global API to ensure it is getting a compatible\n * version. If the global API is not compatible with the API package\n * attempting to get it, a NOOP API implementation will be returned.\n */\nexports.API_BACKWARDS_COMPATIBILITY_VERSION = 0;\n//# sourceMappingURL=global-utils.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MetricsAPI = void 0;\nvar NoopMeterProvider_1 = require(\"../metrics/NoopMeterProvider\");\nvar global_utils_1 = require(\"./global-utils\");\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Metrics API\n */\nvar MetricsAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function MetricsAPI() {\n }\n /** Get the singleton instance of the Metrics API */\n MetricsAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new MetricsAPI();\n }\n return this._instance;\n };\n /**\n * Set the current global meter. Returns the initialized global meter provider.\n */\n MetricsAPI.prototype.setGlobalMeterProvider = function (provider) {\n if (global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY]) {\n // global meter provider has already been set\n return this.getMeterProvider();\n }\n global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, provider, NoopMeterProvider_1.NOOP_METER_PROVIDER);\n return provider;\n };\n /**\n * Returns the global meter provider.\n */\n MetricsAPI.prototype.getMeterProvider = function () {\n var _a, _b;\n return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? 
_b : NoopMeterProvider_1.NOOP_METER_PROVIDER);\n };\n /**\n * Returns a meter from the global meter provider.\n */\n MetricsAPI.prototype.getMeter = function (name, version) {\n return this.getMeterProvider().getMeter(name, version);\n };\n /** Remove the global meter provider */\n MetricsAPI.prototype.disable = function () {\n delete global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY];\n };\n return MetricsAPI;\n}());\nexports.MetricsAPI = MetricsAPI;\n//# sourceMappingURL=metrics.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PropagationAPI = void 0;\nvar getter_1 = require(\"../context/propagation/getter\");\nvar NoopHttpTextPropagator_1 = require(\"../context/propagation/NoopHttpTextPropagator\");\nvar setter_1 = require(\"../context/propagation/setter\");\nvar context_1 = require(\"./context\");\nvar global_utils_1 = require(\"./global-utils\");\nvar contextApi = context_1.ContextAPI.getInstance();\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nvar PropagationAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function PropagationAPI() {\n }\n /** Get the singleton instance of the Propagator API */\n PropagationAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n return this._instance;\n };\n /**\n * Set the current propagator. Returns the initialized propagator\n */\n PropagationAPI.prototype.setGlobalPropagator = function (propagator) {\n if (global_utils_1._global[global_utils_1.GLOBAL_PROPAGATION_API_KEY]) {\n // global propagator has already been set\n return this._getGlobalPropagator();\n }\n global_utils_1._global[global_utils_1.GLOBAL_PROPAGATION_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, propagator, NoopHttpTextPropagator_1.NOOP_HTTP_TEXT_PROPAGATOR);\n return propagator;\n };\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n * @param context Context carrying tracing data to inject. Defaults to the currently active context.\n */\n PropagationAPI.prototype.inject = function (carrier, setter, context) {\n if (setter === void 0) { setter = setter_1.defaultSetter; }\n if (context === void 0) { context = contextApi.active(); }\n return this._getGlobalPropagator().inject(context, carrier, setter);\n };\n /**\n * Extract context from a carrier\n *\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n * @param context Context which the newly created context will inherit from. 
Defaults to the currently active context.\n */\n PropagationAPI.prototype.extract = function (carrier, getter, context) {\n if (getter === void 0) { getter = getter_1.defaultGetter; }\n if (context === void 0) { context = contextApi.active(); }\n return this._getGlobalPropagator().extract(context, carrier, getter);\n };\n /** Remove the global propagator */\n PropagationAPI.prototype.disable = function () {\n delete global_utils_1._global[global_utils_1.GLOBAL_PROPAGATION_API_KEY];\n };\n PropagationAPI.prototype._getGlobalPropagator = function () {\n var _a, _b;\n return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_PROPAGATION_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? _b : NoopHttpTextPropagator_1.NOOP_HTTP_TEXT_PROPAGATOR);\n };\n return PropagationAPI;\n}());\nexports.PropagationAPI = PropagationAPI;\n//# sourceMappingURL=propagation.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TraceAPI = void 0;\nvar NoopTracerProvider_1 = require(\"../trace/NoopTracerProvider\");\nvar global_utils_1 = require(\"./global-utils\");\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nvar TraceAPI = /** @class */ (function () {\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n function TraceAPI() {\n }\n /** Get the singleton instance of the Trace API */\n TraceAPI.getInstance = function () {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n return this._instance;\n };\n /**\n * Set the current global tracer. Returns the initialized global tracer provider\n */\n TraceAPI.prototype.setGlobalTracerProvider = function (provider) {\n if (global_utils_1._global[global_utils_1.GLOBAL_TRACE_API_KEY]) {\n // global tracer provider has already been set\n return this.getTracerProvider();\n }\n global_utils_1._global[global_utils_1.GLOBAL_TRACE_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, provider, NoopTracerProvider_1.NOOP_TRACER_PROVIDER);\n return this.getTracerProvider();\n };\n /**\n * Returns the global tracer provider.\n */\n TraceAPI.prototype.getTracerProvider = function () {\n var _a, _b;\n return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_TRACE_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? 
_b : NoopTracerProvider_1.NOOP_TRACER_PROVIDER);\n };\n /**\n * Returns a tracer from the global tracer provider.\n */\n TraceAPI.prototype.getTracer = function (name, version) {\n return this.getTracerProvider().getTracer(name, version);\n };\n /** Remove the global tracer provider */\n TraceAPI.prototype.disable = function () {\n delete global_utils_1._global[global_utils_1.GLOBAL_TRACE_API_KEY];\n };\n return TraceAPI;\n}());\nexports.TraceAPI = TraceAPI;\n//# sourceMappingURL=trace.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Logger.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Time.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=HttpTextPropagator.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_HTTP_TEXT_PROPAGATOR = exports.NoopHttpTextPropagator = void 0;\n/**\n * No-op implementations of {@link HttpTextPropagator}.\n */\nvar NoopHttpTextPropagator = /** @class */ (function () {\n function NoopHttpTextPropagator() {\n }\n /** Noop inject function does nothing */\n NoopHttpTextPropagator.prototype.inject = function (context, carrier, setter) { };\n /** Noop extract function does nothing and returns the input context */\n NoopHttpTextPropagator.prototype.extract = function (context, carrier, getter) {\n return context;\n };\n return NoopHttpTextPropagator;\n}());\nexports.NoopHttpTextPropagator = NoopHttpTextPropagator;\nexports.NOOP_HTTP_TEXT_PROPAGATOR = new NoopHttpTextPropagator();\n//# 
sourceMappingURL=NoopHttpTextPropagator.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.defaultGetter = void 0;\n/**\n * Default getter which just does a simple property access. Returns\n * undefined if the key is not set.\n *\n * @param carrier\n * @param key\n */\nfunction defaultGetter(carrier, key) {\n return carrier[key];\n}\nexports.defaultGetter = defaultGetter;\n//# sourceMappingURL=getter.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.defaultSetter = void 0;\n/**\n * Default setter which sets value via direct property access\n *\n * @param carrier\n * @param key\n */\nfunction defaultSetter(carrier, key, value) {\n carrier[key] = value;\n}\nexports.defaultSetter = defaultSetter;\n//# sourceMappingURL=setter.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=CorrelationContext.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.EntryTtl = void 0;\n/**\n * EntryTtl is an integer that represents number of hops an entry can propagate.\n *\n * For now, ONLY special values (0 and -1) are supported.\n */\nvar EntryTtl;\n(function (EntryTtl) {\n /**\n * NO_PROPAGATION is considered to have local context and is used within the\n * process it created.\n */\n EntryTtl[EntryTtl[\"NO_PROPAGATION\"] = 0] = \"NO_PROPAGATION\";\n /** UNLIMITED_PROPAGATION can propagate unlimited hops. 
*/\n EntryTtl[EntryTtl[\"UNLIMITED_PROPAGATION\"] = -1] = \"UNLIMITED_PROPAGATION\";\n})(EntryTtl = exports.EntryTtl || (exports.EntryTtl = {}));\n//# sourceMappingURL=EntryValue.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.propagation = exports.metrics = exports.trace = exports.context = void 0;\n__exportStar(require(\"./common/Logger\"), exports);\n__exportStar(require(\"./common/Time\"), exports);\n__exportStar(require(\"./context/propagation/getter\"), exports);\n__exportStar(require(\"./context/propagation/HttpTextPropagator\"), exports);\n__exportStar(require(\"./context/propagation/NoopHttpTextPropagator\"), exports);\n__exportStar(require(\"./context/propagation/setter\"), exports);\n__exportStar(require(\"./correlation_context/CorrelationContext\"), exports);\n__exportStar(require(\"./correlation_context/EntryValue\"), exports);\n__exportStar(require(\"./metrics/BatchObserverResult\"), exports);\n__exportStar(require(\"./metrics/BoundInstrument\"), exports);\n__exportStar(require(\"./metrics/Meter\"), exports);\n__exportStar(require(\"./metrics/MeterProvider\"), exports);\n__exportStar(require(\"./metrics/Metric\"), exports);\n__exportStar(require(\"./metrics/NoopMeter\"), exports);\n__exportStar(require(\"./metrics/NoopMeterProvider\"), exports);\n__exportStar(require(\"./metrics/Observation\"), exports);\n__exportStar(require(\"./metrics/ObserverResult\"), exports);\n__exportStar(require(\"./trace/attributes\"), exports);\n__exportStar(require(\"./trace/Event\"), exports);\n__exportStar(require(\"./trace/instrumentation/Plugin\"), exports);\n__exportStar(require(\"./trace/link_context\"), exports);\n__exportStar(require(\"./trace/link\"), exports);\n__exportStar(require(\"./trace/NoopSpan\"), exports);\n__exportStar(require(\"./trace/NoopTracer\"), exports);\n__exportStar(require(\"./trace/NoopTracerProvider\"), exports);\n__exportStar(require(\"./trace/Sampler\"), exports);\n__exportStar(require(\"./trace/SamplingResult\"), exports);\n__exportStar(require(\"./trace/span_context\"), exports);\n__exportStar(require(\"./trace/span_kind\"), exports);\n__exportStar(require(\"./trace/span\"), exports);\n__exportStar(require(\"./trace/SpanOptions\"), exports);\n__exportStar(require(\"./trace/status\"), exports);\n__exportStar(require(\"./trace/TimedEvent\"), exports);\n__exportStar(require(\"./trace/trace_flags\"), 
exports);\n__exportStar(require(\"./trace/trace_state\"), exports);\n__exportStar(require(\"./trace/tracer_provider\"), exports);\n__exportStar(require(\"./trace/tracer\"), exports);\nvar context_base_1 = require(\"@opentelemetry/context-base\");\nObject.defineProperty(exports, \"Context\", { enumerable: true, get: function () { return context_base_1.Context; } });\nvar context_1 = require(\"./api/context\");\n/** Entrypoint for context API */\nexports.context = context_1.ContextAPI.getInstance();\nvar trace_1 = require(\"./api/trace\");\n/** Entrypoint for trace API */\nexports.trace = trace_1.TraceAPI.getInstance();\nvar metrics_1 = require(\"./api/metrics\");\n/** Entrypoint for metrics API */\nexports.metrics = metrics_1.MetricsAPI.getInstance();\nvar propagation_1 = require(\"./api/propagation\");\n/** Entrypoint for propagation API */\nexports.propagation = propagation_1.PropagationAPI.getInstance();\nexports.default = {\n trace: exports.trace,\n metrics: exports.metrics,\n context: exports.context,\n propagation: exports.propagation,\n};\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=BatchObserverResult.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=BoundInstrument.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Meter.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may 
obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=MeterProvider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ValueType = void 0;\n/** The Type of value. It describes how the data is reported. */\nvar ValueType;\n(function (ValueType) {\n ValueType[ValueType[\"INT\"] = 0] = \"INT\";\n ValueType[ValueType[\"DOUBLE\"] = 1] = \"DOUBLE\";\n})(ValueType = exports.ValueType || (exports.ValueType = {}));\n//# sourceMappingURL=Metric.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __extends = (this && this.__extends) || (function () {\n var extendStatics = function (d, b) {\n extendStatics = Object.setPrototypeOf ||\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\n function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };\n return extendStatics(d, b);\n };\n return function (d, b) {\n extendStatics(d, b);\n function __() { this.constructor = d; }\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\n };\n})();\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_BATCH_OBSERVER_METRIC = exports.NOOP_SUM_OBSERVER_METRIC = exports.NOOP_UP_DOWN_SUM_OBSERVER_METRIC = exports.NOOP_VALUE_OBSERVER_METRIC = exports.NOOP_BOUND_BASE_OBSERVER = exports.NOOP_VALUE_RECORDER_METRIC = exports.NOOP_BOUND_VALUE_RECORDER = exports.NOOP_COUNTER_METRIC = exports.NOOP_BOUND_COUNTER = exports.NOOP_METER = exports.NoopBoundBaseObserver = exports.NoopBoundValueRecorder = exports.NoopBoundCounter = exports.NoopBatchObserverMetric = exports.NoopBaseObserverMetric = exports.NoopValueRecorderMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0;\n/**\n * NoopMeter is a noop implementation of the {@link Meter} interface. 
It reuses\n * constant NoopMetrics for all of its methods.\n */\nvar NoopMeter = /** @class */ (function () {\n function NoopMeter() {\n }\n /**\n * Returns constant noop value recorder.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n NoopMeter.prototype.createValueRecorder = function (name, options) {\n return exports.NOOP_VALUE_RECORDER_METRIC;\n };\n /**\n * Returns a constant noop counter.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n NoopMeter.prototype.createCounter = function (name, options) {\n return exports.NOOP_COUNTER_METRIC;\n };\n /**\n * Returns a constant noop UpDownCounter.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n NoopMeter.prototype.createUpDownCounter = function (name, options) {\n return exports.NOOP_COUNTER_METRIC;\n };\n /**\n * Returns constant noop value observer.\n * @param name the name of the metric.\n * @param [options] the metric options.\n * @param [callback] the value observer callback\n */\n NoopMeter.prototype.createValueObserver = function (name, options, callback) {\n return exports.NOOP_VALUE_OBSERVER_METRIC;\n };\n /**\n * Returns constant noop batch observer.\n * @param name the name of the metric.\n * @param callback the batch observer callback\n */\n NoopMeter.prototype.createBatchObserver = function (name, callback) {\n return exports.NOOP_BATCH_OBSERVER_METRIC;\n };\n return NoopMeter;\n}());\nexports.NoopMeter = NoopMeter;\nvar NoopMetric = /** @class */ (function () {\n function NoopMetric(instrument) {\n this._instrument = instrument;\n }\n /**\n * Returns a Bound Instrument associated with specified Labels.\n * It is recommended to keep a reference to the Bound Instrument instead of\n * always calling this method for every operations.\n * @param labels key-values pairs that are associated with a specific metric\n * that you want to record.\n */\n NoopMetric.prototype.bind = function (labels) {\n return this._instrument;\n };\n /**\n * Removes the Binding from the metric, if it is present.\n * @param labels key-values pairs that are associated with a specific metric.\n */\n NoopMetric.prototype.unbind = function (labels) {\n return;\n };\n /**\n * Clears all timeseries from the Metric.\n */\n NoopMetric.prototype.clear = function () {\n return;\n };\n return NoopMetric;\n}());\nexports.NoopMetric = NoopMetric;\nvar NoopCounterMetric = /** @class */ (function (_super) {\n __extends(NoopCounterMetric, _super);\n function NoopCounterMetric() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n NoopCounterMetric.prototype.add = function (value, labels) {\n this.bind(labels).add(value);\n };\n return NoopCounterMetric;\n}(NoopMetric));\nexports.NoopCounterMetric = NoopCounterMetric;\nvar NoopValueRecorderMetric = /** @class */ (function (_super) {\n __extends(NoopValueRecorderMetric, _super);\n function NoopValueRecorderMetric() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n NoopValueRecorderMetric.prototype.record = function (value, labels, correlationContext, spanContext) {\n if (typeof correlationContext === 'undefined') {\n this.bind(labels).record(value);\n }\n else if (typeof spanContext === 'undefined') {\n this.bind(labels).record(value, correlationContext);\n }\n else {\n this.bind(labels).record(value, correlationContext, spanContext);\n }\n };\n return NoopValueRecorderMetric;\n}(NoopMetric));\nexports.NoopValueRecorderMetric = NoopValueRecorderMetric;\nvar 
NoopBaseObserverMetric = /** @class */ (function (_super) {\n __extends(NoopBaseObserverMetric, _super);\n function NoopBaseObserverMetric() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n NoopBaseObserverMetric.prototype.observation = function () {\n return {\n observer: this,\n value: 0,\n };\n };\n return NoopBaseObserverMetric;\n}(NoopMetric));\nexports.NoopBaseObserverMetric = NoopBaseObserverMetric;\nvar NoopBatchObserverMetric = /** @class */ (function (_super) {\n __extends(NoopBatchObserverMetric, _super);\n function NoopBatchObserverMetric() {\n return _super !== null && _super.apply(this, arguments) || this;\n }\n return NoopBatchObserverMetric;\n}(NoopMetric));\nexports.NoopBatchObserverMetric = NoopBatchObserverMetric;\nvar NoopBoundCounter = /** @class */ (function () {\n function NoopBoundCounter() {\n }\n NoopBoundCounter.prototype.add = function (value) {\n return;\n };\n return NoopBoundCounter;\n}());\nexports.NoopBoundCounter = NoopBoundCounter;\nvar NoopBoundValueRecorder = /** @class */ (function () {\n function NoopBoundValueRecorder() {\n }\n NoopBoundValueRecorder.prototype.record = function (value, correlationContext, spanContext) {\n return;\n };\n return NoopBoundValueRecorder;\n}());\nexports.NoopBoundValueRecorder = NoopBoundValueRecorder;\nvar NoopBoundBaseObserver = /** @class */ (function () {\n function NoopBoundBaseObserver() {\n }\n NoopBoundBaseObserver.prototype.update = function (value) { };\n return NoopBoundBaseObserver;\n}());\nexports.NoopBoundBaseObserver = NoopBoundBaseObserver;\nexports.NOOP_METER = new NoopMeter();\nexports.NOOP_BOUND_COUNTER = new NoopBoundCounter();\nexports.NOOP_COUNTER_METRIC = new NoopCounterMetric(exports.NOOP_BOUND_COUNTER);\nexports.NOOP_BOUND_VALUE_RECORDER = new NoopBoundValueRecorder();\nexports.NOOP_VALUE_RECORDER_METRIC = new NoopValueRecorderMetric(exports.NOOP_BOUND_VALUE_RECORDER);\nexports.NOOP_BOUND_BASE_OBSERVER = new NoopBoundBaseObserver();\nexports.NOOP_VALUE_OBSERVER_METRIC = new NoopBaseObserverMetric(exports.NOOP_BOUND_BASE_OBSERVER);\nexports.NOOP_UP_DOWN_SUM_OBSERVER_METRIC = new NoopBaseObserverMetric(exports.NOOP_BOUND_BASE_OBSERVER);\nexports.NOOP_SUM_OBSERVER_METRIC = new NoopBaseObserverMetric(exports.NOOP_BOUND_BASE_OBSERVER);\nexports.NOOP_BATCH_OBSERVER_METRIC = new NoopBatchObserverMetric();\n//# sourceMappingURL=NoopMeter.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_METER_PROVIDER = exports.NoopMeterProvider = void 0;\nvar NoopMeter_1 = require(\"./NoopMeter\");\n/**\n * An implementation of the {@link MeterProvider} which returns an impotent Meter\n * for all calls to `getMeter`\n */\nvar NoopMeterProvider = /** @class */ (function () {\n function NoopMeterProvider() {\n }\n NoopMeterProvider.prototype.getMeter = function (_name, _version) {\n return NoopMeter_1.NOOP_METER;\n };\n return 
NoopMeterProvider;\n}());\nexports.NoopMeterProvider = NoopMeterProvider;\nexports.NOOP_METER_PROVIDER = new NoopMeterProvider();\n//# sourceMappingURL=NoopMeterProvider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Observation.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=ObserverResult.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./node\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports._globalThis = void 0;\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexports._globalThis = typeof globalThis === 'object' ? globalThis : global;\n//# sourceMappingURL=globalThis.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./globalThis\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Event.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_SPAN = exports.NoopSpan = exports.INVALID_SPAN_ID = exports.INVALID_TRACE_ID = void 0;\nvar trace_flags_1 = require(\"./trace_flags\");\nexports.INVALID_TRACE_ID = '0';\nexports.INVALID_SPAN_ID = '0';\nvar INVALID_SPAN_CONTEXT = {\n traceId: exports.INVALID_TRACE_ID,\n spanId: exports.INVALID_SPAN_ID,\n traceFlags: trace_flags_1.TraceFlags.NONE,\n};\n/**\n * The NoopSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nvar NoopSpan = /** @class */ (function () {\n function NoopSpan(_spanContext) {\n if (_spanContext === void 0) { _spanContext = INVALID_SPAN_CONTEXT; }\n this._spanContext = _spanContext;\n }\n // Returns a SpanContext.\n NoopSpan.prototype.context = function () {\n return this._spanContext;\n };\n // By default does nothing\n NoopSpan.prototype.setAttribute = function (key, value) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.setAttributes = function (attributes) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.addEvent = function (name, attributes) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.setStatus = function (status) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.updateName = function (name) {\n return this;\n };\n // By default does nothing\n NoopSpan.prototype.end = function (endTime) { };\n // isRecording always returns false for noopSpan.\n NoopSpan.prototype.isRecording = function () {\n return false;\n };\n return NoopSpan;\n}());\nexports.NoopSpan = NoopSpan;\nexports.NOOP_SPAN = new NoopSpan();\n//# sourceMappingURL=NoopSpan.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_TRACER = exports.NoopTracer = void 0;\nvar NoopSpan_1 = require(\"./NoopSpan\");\n/**\n * No-op implementations of {@link Tracer}.\n */\nvar NoopTracer = /** @class */ (function () {\n function NoopTracer() {\n }\n NoopTracer.prototype.getCurrentSpan = function () {\n return NoopSpan_1.NOOP_SPAN;\n };\n // startSpan starts a noop span.\n NoopTracer.prototype.startSpan = function (name, options) {\n return NoopSpan_1.NOOP_SPAN;\n };\n NoopTracer.prototype.withSpan = function (span, fn) {\n return fn();\n };\n NoopTracer.prototype.bind = function (target, span) {\n return target;\n };\n return NoopTracer;\n}());\nexports.NoopTracer = NoopTracer;\nexports.NOOP_TRACER = new NoopTracer();\n//# sourceMappingURL=NoopTracer.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NOOP_TRACER_PROVIDER = exports.NoopTracerProvider = void 0;\nvar NoopTracer_1 = require(\"./NoopTracer\");\n/**\n * An implementation of the {@link 
TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nvar NoopTracerProvider = /** @class */ (function () {\n function NoopTracerProvider() {\n }\n NoopTracerProvider.prototype.getTracer = function (_name, _version) {\n return NoopTracer_1.NOOP_TRACER;\n };\n return NoopTracerProvider;\n}());\nexports.NoopTracerProvider = NoopTracerProvider;\nexports.NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n//# sourceMappingURL=NoopTracerProvider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Sampler.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SamplingDecision = void 0;\n/**\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nvar SamplingDecision;\n(function (SamplingDecision) {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n SamplingDecision[SamplingDecision[\"NOT_RECORD\"] = 0] = \"NOT_RECORD\";\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n SamplingDecision[SamplingDecision[\"RECORD\"] = 1] = \"RECORD\";\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n SamplingDecision[SamplingDecision[\"RECORD_AND_SAMPLED\"] = 2] = \"RECORD_AND_SAMPLED\";\n})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {}));\n//# sourceMappingURL=SamplingResult.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: 
true });\n//# sourceMappingURL=SpanOptions.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=TimedEvent.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=attributes.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=Plugin.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=link.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=link_context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License 
is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=span.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=span_context.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SpanKind = void 0;\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar SpanKind;\n(function (SpanKind) {\n /** Default value. Indicates that the span is used internally. */\n SpanKind[SpanKind[\"INTERNAL\"] = 0] = \"INTERNAL\";\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SpanKind[SpanKind[\"SERVER\"] = 1] = \"SERVER\";\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n SpanKind[SpanKind[\"CLIENT\"] = 2] = \"CLIENT\";\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n SpanKind[SpanKind[\"PRODUCER\"] = 3] = \"PRODUCER\";\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n SpanKind[SpanKind[\"CONSUMER\"] = 4] = \"CONSUMER\";\n})(SpanKind = exports.SpanKind || (exports.SpanKind = {}));\n//# sourceMappingURL=span_kind.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CanonicalCode = void 0;\n/**\n * An enumeration of canonical status codes.\n */\nvar CanonicalCode;\n(function (CanonicalCode) {\n /**\n * Not an error; returned on success\n */\n CanonicalCode[CanonicalCode[\"OK\"] = 0] = \"OK\";\n /**\n * The operation was cancelled (typically by the caller).\n */\n CanonicalCode[CanonicalCode[\"CANCELLED\"] = 1] = \"CANCELLED\";\n /**\n * Unknown error. 
An example of where this error may be returned is\n * if a status value received from another address space belongs to\n * an error-space that is not known in this address space. Also\n * errors raised by APIs that do not return enough error information\n * may be converted to this error.\n */\n CanonicalCode[CanonicalCode[\"UNKNOWN\"] = 2] = \"UNKNOWN\";\n /**\n * Client specified an invalid argument. Note that this differs\n * from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments\n * that are problematic regardless of the state of the system\n * (e.g., a malformed file name).\n */\n CanonicalCode[CanonicalCode[\"INVALID_ARGUMENT\"] = 3] = \"INVALID_ARGUMENT\";\n /**\n * Deadline expired before operation could complete. For operations\n * that change the state of the system, this error may be returned\n * even if the operation has completed successfully. For example, a\n * successful response from a server could have been delayed long\n * enough for the deadline to expire.\n */\n CanonicalCode[CanonicalCode[\"DEADLINE_EXCEEDED\"] = 4] = \"DEADLINE_EXCEEDED\";\n /**\n * Some requested entity (e.g., file or directory) was not found.\n */\n CanonicalCode[CanonicalCode[\"NOT_FOUND\"] = 5] = \"NOT_FOUND\";\n /**\n * Some entity that we attempted to create (e.g., file or directory)\n * already exists.\n */\n CanonicalCode[CanonicalCode[\"ALREADY_EXISTS\"] = 6] = \"ALREADY_EXISTS\";\n /**\n * The caller does not have permission to execute the specified\n * operation. PERMISSION_DENIED must not be used for rejections\n * caused by exhausting some resource (use RESOURCE_EXHAUSTED\n * instead for those errors). PERMISSION_DENIED must not be\n * used if the caller can not be identified (use UNAUTHENTICATED\n * instead for those errors).\n */\n CanonicalCode[CanonicalCode[\"PERMISSION_DENIED\"] = 7] = \"PERMISSION_DENIED\";\n /**\n * Some resource has been exhausted, perhaps a per-user quota, or\n * perhaps the entire file system is out of space.\n */\n CanonicalCode[CanonicalCode[\"RESOURCE_EXHAUSTED\"] = 8] = \"RESOURCE_EXHAUSTED\";\n /**\n * Operation was rejected because the system is not in a state\n * required for the operation's execution. For example, directory\n * to be deleted may be non-empty, an rmdir operation is applied to\n * a non-directory, etc.\n *\n * A litmus test that may help a service implementor in deciding\n * between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:\n *\n * - Use UNAVAILABLE if the client can retry just the failing call.\n * - Use ABORTED if the client should retry at a higher-level\n * (e.g., restarting a read-modify-write sequence).\n * - Use FAILED_PRECONDITION if the client should not retry until\n * the system state has been explicitly fixed. E.g., if an \"rmdir\"\n * fails because the directory is non-empty, FAILED_PRECONDITION\n * should be returned since the client should not retry unless\n * they have first fixed up the directory by deleting files from it.\n * - Use FAILED_PRECONDITION if the client performs conditional\n * REST Get/Update/Delete on a resource and the resource on the\n * server does not match the condition. 
E.g., conflicting\n * read-modify-write on the same resource.\n */\n CanonicalCode[CanonicalCode[\"FAILED_PRECONDITION\"] = 9] = \"FAILED_PRECONDITION\";\n /**\n * The operation was aborted, typically due to a concurrency issue\n * like sequencer check failures, transaction aborts, etc.\n *\n * See litmus test above for deciding between FAILED_PRECONDITION,\n * ABORTED, and UNAVAILABLE.\n */\n CanonicalCode[CanonicalCode[\"ABORTED\"] = 10] = \"ABORTED\";\n /**\n * Operation was attempted past the valid range. E.g., seeking or\n * reading past end of file.\n *\n * Unlike INVALID_ARGUMENT, this error indicates a problem that may\n * be fixed if the system state changes. For example, a 32-bit file\n * system will generate INVALID_ARGUMENT if asked to read at an\n * offset that is not in the range [0,2^32-1], but it will generate\n * OUT_OF_RANGE if asked to read from an offset past the current\n * file size.\n *\n * There is a fair bit of overlap between FAILED_PRECONDITION and\n * OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific\n * error) when it applies so that callers who are iterating through\n * a space can easily look for an OUT_OF_RANGE error to detect when\n * they are done.\n */\n CanonicalCode[CanonicalCode[\"OUT_OF_RANGE\"] = 11] = \"OUT_OF_RANGE\";\n /**\n * Operation is not implemented or not supported/enabled in this service.\n */\n CanonicalCode[CanonicalCode[\"UNIMPLEMENTED\"] = 12] = \"UNIMPLEMENTED\";\n /**\n * Internal errors. Means some invariants expected by underlying\n * system has been broken. If you see one of these errors,\n * something is very broken.\n */\n CanonicalCode[CanonicalCode[\"INTERNAL\"] = 13] = \"INTERNAL\";\n /**\n * The service is currently unavailable. This is a most likely a\n * transient condition and may be corrected by retrying with\n * a backoff.\n *\n * See litmus test above for deciding between FAILED_PRECONDITION,\n * ABORTED, and UNAVAILABLE.\n */\n CanonicalCode[CanonicalCode[\"UNAVAILABLE\"] = 14] = \"UNAVAILABLE\";\n /**\n * Unrecoverable data loss or corruption.\n */\n CanonicalCode[CanonicalCode[\"DATA_LOSS\"] = 15] = \"DATA_LOSS\";\n /**\n * The request does not have valid authentication credentials for the\n * operation.\n */\n CanonicalCode[CanonicalCode[\"UNAUTHENTICATED\"] = 16] = \"UNAUTHENTICATED\";\n})(CanonicalCode = exports.CanonicalCode || (exports.CanonicalCode = {}));\n//# sourceMappingURL=status.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TraceFlags = void 0;\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar TraceFlags;\n(function (TraceFlags) {\n /** Represents no flag set. */\n TraceFlags[TraceFlags[\"NONE\"] = 0] = \"NONE\";\n /** Bit to represent whether trace is sampled in trace flags. 
*/\n TraceFlags[TraceFlags[\"SAMPLED\"] = 1] = \"SAMPLED\";\n})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {}));\n//# sourceMappingURL=trace_flags.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=trace_state.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=tracer.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=tracer_provider.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NoopContextManager = void 0;\nvar context_1 = require(\"./context\");\nvar NoopContextManager = /** @class */ (function () {\n function NoopContextManager() {\n }\n NoopContextManager.prototype.active = function () {\n return context_1.Context.ROOT_CONTEXT;\n };\n NoopContextManager.prototype.with = function (context, fn) {\n return fn();\n };\n NoopContextManager.prototype.bind = function (target, context) {\n return target;\n };\n NoopContextManager.prototype.enable = function () {\n return this;\n };\n NoopContextManager.prototype.disable = function () {\n return this;\n };\n return NoopContextManager;\n}());\nexports.NoopContextManager = NoopContextManager;\n//# sourceMappingURL=NoopContextManager.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License 
at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar Context = /** @class */ (function () {\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n function Context(parentContext) {\n this._currentContext = parentContext ? new Map(parentContext) : new Map();\n }\n /** Get a key to uniquely identify a context value */\n Context.createKey = function (description) {\n return Symbol(description);\n };\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n Context.prototype.getValue = function (key) {\n return this._currentContext.get(key);\n };\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n Context.prototype.setValue = function (key, value) {\n var context = new Context(this._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n Context.prototype.deleteValue = function (key) {\n var context = new Context(this._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n /** The root context is used as the default parent context when there is no active context */\n Context.ROOT_CONTEXT = new Context();\n /**\n * This is another identifier to the root context which allows developers to easily search the\n * codebase for direct uses of context which need to be removed in later PRs.\n *\n * It's existence is temporary and it should be removed when all references are fixed.\n */\n Context.TODO = Context.ROOT_CONTEXT;\n return Context;\n}());\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./types\"), exports);\n__exportStar(require(\"./context\"), exports);\n__exportStar(require(\"./NoopContextManager\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=types.js.map","/**\n * @author Toru Nagashima \n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar eventTargetShim = require('event-target-shim');\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nclass AbortSignal extends eventTargetShim.EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n constructor() {\n super();\n throw new TypeError(\"AbortSignal cannot be constructed directly\");\n }\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n get aborted() {\n const aborted = abortedFlags.get(this);\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? 
\"null\" : typeof this}`);\n }\n return aborted;\n }\n}\neventTargetShim.defineEventAttribute(AbortSignal.prototype, \"abort\");\n/**\n * Create an AbortSignal object.\n */\nfunction createAbortSignal() {\n const signal = Object.create(AbortSignal.prototype);\n eventTargetShim.EventTarget.call(signal);\n abortedFlags.set(signal, false);\n return signal;\n}\n/**\n * Abort a given signal.\n */\nfunction abortSignal(signal) {\n if (abortedFlags.get(signal) !== false) {\n return;\n }\n abortedFlags.set(signal, true);\n signal.dispatchEvent({ type: \"abort\" });\n}\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap();\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n});\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n });\n}\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nclass AbortController {\n /**\n * Initialize this controller.\n */\n constructor() {\n signals.set(this, createAbortSignal());\n }\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n get signal() {\n return getSignal(this);\n }\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n abort() {\n abortSignal(getSignal(this));\n }\n}\n/**\n * Associated signals.\n */\nconst signals = new WeakMap();\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller) {\n const signal = signals.get(controller);\n if (signal == null) {\n throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? 
\"null\" : typeof controller}`);\n }\n return signal;\n}\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n});\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n });\n}\n\nexports.AbortController = AbortController;\nexports.AbortSignal = AbortSignal;\nexports.default = AbortController;\n\nmodule.exports = AbortController\nmodule.exports.AbortController = module.exports[\"default\"] = AbortController\nmodule.exports.AbortSignal = AbortSignal\n//# sourceMappingURL=abort-controller.js.map\n","module.exports =\n{\n parallel : require('./parallel.js'),\n serial : require('./serial.js'),\n serialOrdered : require('./serialOrdered.js')\n};\n","// API\nmodule.exports = abort;\n\n/**\n * Aborts leftover active jobs\n *\n * @param {object} state - current state object\n */\nfunction abort(state)\n{\n Object.keys(state.jobs).forEach(clean.bind(state));\n\n // reset leftover jobs\n state.jobs = {};\n}\n\n/**\n * Cleans up leftover job by invoking abort function for the provided job id\n *\n * @this state\n * @param {string|number} key - job id to abort\n */\nfunction clean(key)\n{\n if (typeof this.jobs[key] == 'function')\n {\n this.jobs[key]();\n }\n}\n","var defer = require('./defer.js');\n\n// API\nmodule.exports = async;\n\n/**\n * Runs provided callback asynchronously\n * even if callback itself is not\n *\n * @param {function} callback - callback to invoke\n * @returns {function} - augmented callback\n */\nfunction async(callback)\n{\n var isAsync = false;\n\n // check if async happened\n defer(function() { isAsync = true; });\n\n return function async_callback(err, result)\n {\n if (isAsync)\n {\n callback(err, result);\n }\n else\n {\n defer(function nextTick_callback()\n {\n callback(err, result);\n });\n }\n };\n}\n","module.exports = defer;\n\n/**\n * Runs provided function on next iteration of the event loop\n *\n * @param {function} fn - function to run\n */\nfunction defer(fn)\n{\n var nextTick = typeof setImmediate == 'function'\n ? setImmediate\n : (\n typeof process == 'object' && typeof process.nextTick == 'function'\n ? process.nextTick\n : null\n );\n\n if (nextTick)\n {\n nextTick(fn);\n }\n else\n {\n setTimeout(fn, 0);\n }\n}\n","var async = require('./async.js')\n , abort = require('./abort.js')\n ;\n\n// API\nmodule.exports = iterate;\n\n/**\n * Iterates over each job object\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {object} state - current job status\n * @param {function} callback - invoked when all elements processed\n */\nfunction iterate(list, iterator, state, callback)\n{\n // store current index\n var key = state['keyedList'] ? 
state['keyedList'][state.index] : state.index;\n\n state.jobs[key] = runJob(iterator, key, list[key], function(error, output)\n {\n // don't repeat yourself\n // skip secondary callbacks\n if (!(key in state.jobs))\n {\n return;\n }\n\n // clean up jobs\n delete state.jobs[key];\n\n if (error)\n {\n // don't process rest of the results\n // stop still active jobs\n // and reset the list\n abort(state);\n }\n else\n {\n state.results[key] = output;\n }\n\n // return salvaged results\n callback(error, state.results);\n });\n}\n\n/**\n * Runs iterator over provided job element\n *\n * @param {function} iterator - iterator to invoke\n * @param {string|number} key - key/index of the element in the list of jobs\n * @param {mixed} item - job description\n * @param {function} callback - invoked after iterator is done with the job\n * @returns {function|mixed} - job abort function or something else\n */\nfunction runJob(iterator, key, item, callback)\n{\n var aborter;\n\n // allow shortcut if iterator expects only two arguments\n if (iterator.length == 2)\n {\n aborter = iterator(item, async(callback));\n }\n // otherwise go with full three arguments\n else\n {\n aborter = iterator(item, key, async(callback));\n }\n\n return aborter;\n}\n","// API\nmodule.exports = state;\n\n/**\n * Creates initial state object\n * for iteration over list\n *\n * @param {array|object} list - list to iterate over\n * @param {function|null} sortMethod - function to use for keys sort,\n * or `null` to keep them as is\n * @returns {object} - initial state object\n */\nfunction state(list, sortMethod)\n{\n var isNamedList = !Array.isArray(list)\n , initState =\n {\n index : 0,\n keyedList: isNamedList || sortMethod ? Object.keys(list) : null,\n jobs : {},\n results : isNamedList ? {} : [],\n size : isNamedList ? Object.keys(list).length : list.length\n }\n ;\n\n if (sortMethod)\n {\n // sort array keys based on it's values\n // sort object's keys just on own merit\n initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b)\n {\n return sortMethod(list[a], list[b]);\n });\n }\n\n return initState;\n}\n","var abort = require('./abort.js')\n , async = require('./async.js')\n ;\n\n// API\nmodule.exports = terminator;\n\n/**\n * Terminates jobs in the attached state context\n *\n * @this AsyncKitState#\n * @param {function} callback - final callback to invoke after termination\n */\nfunction terminator(callback)\n{\n if (!Object.keys(this.jobs).length)\n {\n return;\n }\n\n // fast forward iteration index\n this.index = this.size;\n\n // abort jobs\n abort(this);\n\n // send back results we have so far\n async(callback)(null, this.results);\n}\n","var iterate = require('./lib/iterate.js')\n , initState = require('./lib/state.js')\n , terminator = require('./lib/terminator.js')\n ;\n\n// Public API\nmodule.exports = parallel;\n\n/**\n * Runs iterator over provided array elements in parallel\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} callback - invoked when all elements processed\n * @returns {function} - jobs terminator\n */\nfunction parallel(list, iterator, callback)\n{\n var state = initState(list);\n\n while (state.index < (state['keyedList'] || list).length)\n {\n iterate(list, iterator, state, function(error, result)\n {\n if (error)\n {\n callback(error, result);\n return;\n }\n\n // looks like it's the last one\n if (Object.keys(state.jobs).length === 0)\n {\n callback(null, state.results);\n return;\n }\n });\n\n state.index++;\n }\n\n return terminator.bind(state, callback);\n}\n","var serialOrdered = require('./serialOrdered.js');\n\n// Public API\nmodule.exports = serial;\n\n/**\n * Runs iterator over provided array elements in series\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} callback - invoked when all elements processed\n * @returns {function} - jobs terminator\n */\nfunction serial(list, iterator, callback)\n{\n return serialOrdered(list, iterator, null, callback);\n}\n","var iterate = require('./lib/iterate.js')\n , initState = require('./lib/state.js')\n , terminator = require('./lib/terminator.js')\n ;\n\n// Public API\nmodule.exports = serialOrdered;\n// sorting helpers\nmodule.exports.ascending = ascending;\nmodule.exports.descending = descending;\n\n/**\n * Runs iterator over provided sorted array elements in series\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} sortMethod - custom sort function\n * @param {function} callback - invoked when all elements processed\n * @returns {function} - jobs terminator\n */\nfunction serialOrdered(list, iterator, sortMethod, callback)\n{\n var state = initState(list, sortMethod);\n\n iterate(list, iterator, state, function iteratorHandler(error, result)\n {\n if (error)\n {\n callback(error, result);\n return;\n }\n\n state.index++;\n\n // are we there yet?\n if (state.index < (state['keyedList'] || list).length)\n {\n iterate(list, iterator, state, iteratorHandler);\n return;\n }\n\n // done here\n callback(null, state.results);\n });\n\n return terminator.bind(state, callback);\n}\n\n/*\n * -- Sort methods\n */\n\n/**\n * sort helper to sort array elements in ascending order\n *\n * @param {mixed} a - an item to compare\n * @param {mixed} b - an item to compare\n * @returns {number} - comparison result\n 
*/\nfunction ascending(a, b)\n{\n return a < b ? -1 : a > b ? 1 : 0;\n}\n\n/**\n * sort helper to sort array elements in descending order\n *\n * @param {mixed} a - an item to compare\n * @param {mixed} b - an item to compare\n * @returns {number} - comparison result\n */\nfunction descending(a, b)\n{\n return -1 * ascending(a, b);\n}\n","'use strict';\nmodule.exports = balanced;\nfunction balanced(a, b, str) {\n if (a instanceof RegExp) a = maybeMatch(a, str);\n if (b instanceof RegExp) b = maybeMatch(b, str);\n\n var r = range(a, b, str);\n\n return r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + a.length, r[1]),\n post: str.slice(r[1] + b.length)\n };\n}\n\nfunction maybeMatch(reg, str) {\n var m = str.match(reg);\n return m ? m[0] : null;\n}\n\nbalanced.range = range;\nfunction range(a, b, str) {\n var begs, beg, left, right, result;\n var ai = str.indexOf(a);\n var bi = str.indexOf(b, ai + 1);\n var i = ai;\n\n if (ai >= 0 && bi > 0) {\n begs = [];\n left = str.length;\n\n while (i >= 0 && !result) {\n if (i == ai) {\n begs.push(i);\n ai = str.indexOf(a, i + 1);\n } else if (begs.length == 1) {\n result = [ begs.pop(), bi ];\n } else {\n beg = begs.pop();\n if (beg < left) {\n left = beg;\n right = bi;\n }\n\n bi = str.indexOf(b, i + 1);\n }\n\n i = ai < bi && ai >= 0 ? ai : bi;\n }\n\n if (begs.length) {\n result = [ left, right ];\n }\n }\n\n return result;\n}\n","var bigInt = (function (undefined) {\r\n \"use strict\";\r\n\r\n var BASE = 1e7,\r\n LOG_BASE = 7,\r\n MAX_INT = 9007199254740992,\r\n MAX_INT_ARR = smallToArray(MAX_INT),\r\n DEFAULT_ALPHABET = \"0123456789abcdefghijklmnopqrstuvwxyz\";\r\n\r\n var supportsNativeBigInt = typeof BigInt === \"function\";\r\n\r\n function Integer(v, radix, alphabet, caseSensitive) {\r\n if (typeof v === \"undefined\") return Integer[0];\r\n if (typeof radix !== \"undefined\") return +radix === 10 && !alphabet ? 
parseValue(v) : parseBase(v, radix, alphabet, caseSensitive);\r\n return parseValue(v);\r\n }\r\n\r\n function BigInteger(value, sign) {\r\n this.value = value;\r\n this.sign = sign;\r\n this.isSmall = false;\r\n }\r\n BigInteger.prototype = Object.create(Integer.prototype);\r\n\r\n function SmallInteger(value) {\r\n this.value = value;\r\n this.sign = value < 0;\r\n this.isSmall = true;\r\n }\r\n SmallInteger.prototype = Object.create(Integer.prototype);\r\n\r\n function NativeBigInt(value) {\r\n this.value = value;\r\n }\r\n NativeBigInt.prototype = Object.create(Integer.prototype);\r\n\r\n function isPrecise(n) {\r\n return -MAX_INT < n && n < MAX_INT;\r\n }\r\n\r\n function smallToArray(n) { // For performance reasons doesn't reference BASE, need to change this function if BASE changes\r\n if (n < 1e7)\r\n return [n];\r\n if (n < 1e14)\r\n return [n % 1e7, Math.floor(n / 1e7)];\r\n return [n % 1e7, Math.floor(n / 1e7) % 1e7, Math.floor(n / 1e14)];\r\n }\r\n\r\n function arrayToSmall(arr) { // If BASE changes this function may need to change\r\n trim(arr);\r\n var length = arr.length;\r\n if (length < 4 && compareAbs(arr, MAX_INT_ARR) < 0) {\r\n switch (length) {\r\n case 0: return 0;\r\n case 1: return arr[0];\r\n case 2: return arr[0] + arr[1] * BASE;\r\n default: return arr[0] + (arr[1] + arr[2] * BASE) * BASE;\r\n }\r\n }\r\n return arr;\r\n }\r\n\r\n function trim(v) {\r\n var i = v.length;\r\n while (v[--i] === 0);\r\n v.length = i + 1;\r\n }\r\n\r\n function createArray(length) { // function shamelessly stolen from Yaffle's library https://github.com/Yaffle/BigInteger\r\n var x = new Array(length);\r\n var i = -1;\r\n while (++i < length) {\r\n x[i] = 0;\r\n }\r\n return x;\r\n }\r\n\r\n function truncate(n) {\r\n if (n > 0) return Math.floor(n);\r\n return Math.ceil(n);\r\n }\r\n\r\n function add(a, b) { // assumes a and b are arrays with a.length >= b.length\r\n var l_a = a.length,\r\n l_b = b.length,\r\n r = new Array(l_a),\r\n carry = 0,\r\n base = BASE,\r\n sum, i;\r\n for (i = 0; i < l_b; i++) {\r\n sum = a[i] + b[i] + carry;\r\n carry = sum >= base ? 1 : 0;\r\n r[i] = sum - carry * base;\r\n }\r\n while (i < l_a) {\r\n sum = a[i] + carry;\r\n carry = sum === base ? 
1 : 0;\r\n r[i++] = sum - carry * base;\r\n }\r\n if (carry > 0) r.push(carry);\r\n return r;\r\n }\r\n\r\n function addAny(a, b) {\r\n if (a.length >= b.length) return add(a, b);\r\n return add(b, a);\r\n }\r\n\r\n function addSmall(a, carry) { // assumes a is array, carry is number with 0 <= carry < MAX_INT\r\n var l = a.length,\r\n r = new Array(l),\r\n base = BASE,\r\n sum, i;\r\n for (i = 0; i < l; i++) {\r\n sum = a[i] - base + carry;\r\n carry = Math.floor(sum / base);\r\n r[i] = sum - carry * base;\r\n carry += 1;\r\n }\r\n while (carry > 0) {\r\n r[i++] = carry % base;\r\n carry = Math.floor(carry / base);\r\n }\r\n return r;\r\n }\r\n\r\n BigInteger.prototype.add = function (v) {\r\n var n = parseValue(v);\r\n if (this.sign !== n.sign) {\r\n return this.subtract(n.negate());\r\n }\r\n var a = this.value, b = n.value;\r\n if (n.isSmall) {\r\n return new BigInteger(addSmall(a, Math.abs(b)), this.sign);\r\n }\r\n return new BigInteger(addAny(a, b), this.sign);\r\n };\r\n BigInteger.prototype.plus = BigInteger.prototype.add;\r\n\r\n SmallInteger.prototype.add = function (v) {\r\n var n = parseValue(v);\r\n var a = this.value;\r\n if (a < 0 !== n.sign) {\r\n return this.subtract(n.negate());\r\n }\r\n var b = n.value;\r\n if (n.isSmall) {\r\n if (isPrecise(a + b)) return new SmallInteger(a + b);\r\n b = smallToArray(Math.abs(b));\r\n }\r\n return new BigInteger(addSmall(b, Math.abs(a)), a < 0);\r\n };\r\n SmallInteger.prototype.plus = SmallInteger.prototype.add;\r\n\r\n NativeBigInt.prototype.add = function (v) {\r\n return new NativeBigInt(this.value + parseValue(v).value);\r\n }\r\n NativeBigInt.prototype.plus = NativeBigInt.prototype.add;\r\n\r\n function subtract(a, b) { // assumes a and b are arrays with a >= b\r\n var a_l = a.length,\r\n b_l = b.length,\r\n r = new Array(a_l),\r\n borrow = 0,\r\n base = BASE,\r\n i, difference;\r\n for (i = 0; i < b_l; i++) {\r\n difference = a[i] - borrow - b[i];\r\n if (difference < 0) {\r\n difference += base;\r\n borrow = 1;\r\n } else borrow = 0;\r\n r[i] = difference;\r\n }\r\n for (i = b_l; i < a_l; i++) {\r\n difference = a[i] - borrow;\r\n if (difference < 0) difference += base;\r\n else {\r\n r[i++] = difference;\r\n break;\r\n }\r\n r[i] = difference;\r\n }\r\n for (; i < a_l; i++) {\r\n r[i] = a[i];\r\n }\r\n trim(r);\r\n return r;\r\n }\r\n\r\n function subtractAny(a, b, sign) {\r\n var value;\r\n if (compareAbs(a, b) >= 0) {\r\n value = subtract(a, b);\r\n } else {\r\n value = subtract(b, a);\r\n sign = !sign;\r\n }\r\n value = arrayToSmall(value);\r\n if (typeof value === \"number\") {\r\n if (sign) value = -value;\r\n return new SmallInteger(value);\r\n }\r\n return new BigInteger(value, sign);\r\n }\r\n\r\n function subtractSmall(a, b, sign) { // assumes a is array, b is number with 0 <= b < MAX_INT\r\n var l = a.length,\r\n r = new Array(l),\r\n carry = -b,\r\n base = BASE,\r\n i, difference;\r\n for (i = 0; i < l; i++) {\r\n difference = a[i] + carry;\r\n carry = Math.floor(difference / base);\r\n difference %= base;\r\n r[i] = difference < 0 ? 
difference + base : difference;\r\n }\r\n r = arrayToSmall(r);\r\n if (typeof r === \"number\") {\r\n if (sign) r = -r;\r\n return new SmallInteger(r);\r\n } return new BigInteger(r, sign);\r\n }\r\n\r\n BigInteger.prototype.subtract = function (v) {\r\n var n = parseValue(v);\r\n if (this.sign !== n.sign) {\r\n return this.add(n.negate());\r\n }\r\n var a = this.value, b = n.value;\r\n if (n.isSmall)\r\n return subtractSmall(a, Math.abs(b), this.sign);\r\n return subtractAny(a, b, this.sign);\r\n };\r\n BigInteger.prototype.minus = BigInteger.prototype.subtract;\r\n\r\n SmallInteger.prototype.subtract = function (v) {\r\n var n = parseValue(v);\r\n var a = this.value;\r\n if (a < 0 !== n.sign) {\r\n return this.add(n.negate());\r\n }\r\n var b = n.value;\r\n if (n.isSmall) {\r\n return new SmallInteger(a - b);\r\n }\r\n return subtractSmall(b, Math.abs(a), a >= 0);\r\n };\r\n SmallInteger.prototype.minus = SmallInteger.prototype.subtract;\r\n\r\n NativeBigInt.prototype.subtract = function (v) {\r\n return new NativeBigInt(this.value - parseValue(v).value);\r\n }\r\n NativeBigInt.prototype.minus = NativeBigInt.prototype.subtract;\r\n\r\n BigInteger.prototype.negate = function () {\r\n return new BigInteger(this.value, !this.sign);\r\n };\r\n SmallInteger.prototype.negate = function () {\r\n var sign = this.sign;\r\n var small = new SmallInteger(-this.value);\r\n small.sign = !sign;\r\n return small;\r\n };\r\n NativeBigInt.prototype.negate = function () {\r\n return new NativeBigInt(-this.value);\r\n }\r\n\r\n BigInteger.prototype.abs = function () {\r\n return new BigInteger(this.value, false);\r\n };\r\n SmallInteger.prototype.abs = function () {\r\n return new SmallInteger(Math.abs(this.value));\r\n };\r\n NativeBigInt.prototype.abs = function () {\r\n return new NativeBigInt(this.value >= 0 ? 
this.value : -this.value);\r\n }\r\n\r\n\r\n function multiplyLong(a, b) {\r\n var a_l = a.length,\r\n b_l = b.length,\r\n l = a_l + b_l,\r\n r = createArray(l),\r\n base = BASE,\r\n product, carry, i, a_i, b_j;\r\n for (i = 0; i < a_l; ++i) {\r\n a_i = a[i];\r\n for (var j = 0; j < b_l; ++j) {\r\n b_j = b[j];\r\n product = a_i * b_j + r[i + j];\r\n carry = Math.floor(product / base);\r\n r[i + j] = product - carry * base;\r\n r[i + j + 1] += carry;\r\n }\r\n }\r\n trim(r);\r\n return r;\r\n }\r\n\r\n function multiplySmall(a, b) { // assumes a is array, b is number with |b| < BASE\r\n var l = a.length,\r\n r = new Array(l),\r\n base = BASE,\r\n carry = 0,\r\n product, i;\r\n for (i = 0; i < l; i++) {\r\n product = a[i] * b + carry;\r\n carry = Math.floor(product / base);\r\n r[i] = product - carry * base;\r\n }\r\n while (carry > 0) {\r\n r[i++] = carry % base;\r\n carry = Math.floor(carry / base);\r\n }\r\n return r;\r\n }\r\n\r\n function shiftLeft(x, n) {\r\n var r = [];\r\n while (n-- > 0) r.push(0);\r\n return r.concat(x);\r\n }\r\n\r\n function multiplyKaratsuba(x, y) {\r\n var n = Math.max(x.length, y.length);\r\n\r\n if (n <= 30) return multiplyLong(x, y);\r\n n = Math.ceil(n / 2);\r\n\r\n var b = x.slice(n),\r\n a = x.slice(0, n),\r\n d = y.slice(n),\r\n c = y.slice(0, n);\r\n\r\n var ac = multiplyKaratsuba(a, c),\r\n bd = multiplyKaratsuba(b, d),\r\n abcd = multiplyKaratsuba(addAny(a, b), addAny(c, d));\r\n\r\n var product = addAny(addAny(ac, shiftLeft(subtract(subtract(abcd, ac), bd), n)), shiftLeft(bd, 2 * n));\r\n trim(product);\r\n return product;\r\n }\r\n\r\n // The following function is derived from a surface fit of a graph plotting the performance difference\r\n // between long multiplication and karatsuba multiplication versus the lengths of the two arrays.\r\n function useKaratsuba(l1, l2) {\r\n return -0.012 * l1 - 0.012 * l2 + 0.000015 * l1 * l2 > 0;\r\n }\r\n\r\n BigInteger.prototype.multiply = function (v) {\r\n var n = parseValue(v),\r\n a = this.value, b = n.value,\r\n sign = this.sign !== n.sign,\r\n abs;\r\n if (n.isSmall) {\r\n if (b === 0) return Integer[0];\r\n if (b === 1) return this;\r\n if (b === -1) return this.negate();\r\n abs = Math.abs(b);\r\n if (abs < BASE) {\r\n return new BigInteger(multiplySmall(a, abs), sign);\r\n }\r\n b = smallToArray(abs);\r\n }\r\n if (useKaratsuba(a.length, b.length)) // Karatsuba is only faster for certain array sizes\r\n return new BigInteger(multiplyKaratsuba(a, b), sign);\r\n return new BigInteger(multiplyLong(a, b), sign);\r\n };\r\n\r\n BigInteger.prototype.times = BigInteger.prototype.multiply;\r\n\r\n function multiplySmallAndArray(a, b, sign) { // a >= 0\r\n if (a < BASE) {\r\n return new BigInteger(multiplySmall(b, a), sign);\r\n }\r\n return new BigInteger(multiplyLong(b, smallToArray(a)), sign);\r\n }\r\n SmallInteger.prototype._multiplyBySmall = function (a) {\r\n if (isPrecise(a.value * this.value)) {\r\n return new SmallInteger(a.value * this.value);\r\n }\r\n return multiplySmallAndArray(Math.abs(a.value), smallToArray(Math.abs(this.value)), this.sign !== a.sign);\r\n };\r\n BigInteger.prototype._multiplyBySmall = function (a) {\r\n if (a.value === 0) return Integer[0];\r\n if (a.value === 1) return this;\r\n if (a.value === -1) return this.negate();\r\n return multiplySmallAndArray(Math.abs(a.value), this.value, this.sign !== a.sign);\r\n };\r\n SmallInteger.prototype.multiply = function (v) {\r\n return parseValue(v)._multiplyBySmall(this);\r\n };\r\n SmallInteger.prototype.times = 
SmallInteger.prototype.multiply;\r\n\r\n NativeBigInt.prototype.multiply = function (v) {\r\n return new NativeBigInt(this.value * parseValue(v).value);\r\n }\r\n NativeBigInt.prototype.times = NativeBigInt.prototype.multiply;\r\n\r\n function square(a) {\r\n //console.assert(2 * BASE * BASE < MAX_INT);\r\n var l = a.length,\r\n r = createArray(l + l),\r\n base = BASE,\r\n product, carry, i, a_i, a_j;\r\n for (i = 0; i < l; i++) {\r\n a_i = a[i];\r\n carry = 0 - a_i * a_i;\r\n for (var j = i; j < l; j++) {\r\n a_j = a[j];\r\n product = 2 * (a_i * a_j) + r[i + j] + carry;\r\n carry = Math.floor(product / base);\r\n r[i + j] = product - carry * base;\r\n }\r\n r[i + l] = carry;\r\n }\r\n trim(r);\r\n return r;\r\n }\r\n\r\n BigInteger.prototype.square = function () {\r\n return new BigInteger(square(this.value), false);\r\n };\r\n\r\n SmallInteger.prototype.square = function () {\r\n var value = this.value * this.value;\r\n if (isPrecise(value)) return new SmallInteger(value);\r\n return new BigInteger(square(smallToArray(Math.abs(this.value))), false);\r\n };\r\n\r\n NativeBigInt.prototype.square = function (v) {\r\n return new NativeBigInt(this.value * this.value);\r\n }\r\n\r\n function divMod1(a, b) { // Left over from previous version. Performs faster than divMod2 on smaller input sizes.\r\n var a_l = a.length,\r\n b_l = b.length,\r\n base = BASE,\r\n result = createArray(b.length),\r\n divisorMostSignificantDigit = b[b_l - 1],\r\n // normalization\r\n lambda = Math.ceil(base / (2 * divisorMostSignificantDigit)),\r\n remainder = multiplySmall(a, lambda),\r\n divisor = multiplySmall(b, lambda),\r\n quotientDigit, shift, carry, borrow, i, l, q;\r\n if (remainder.length <= a_l) remainder.push(0);\r\n divisor.push(0);\r\n divisorMostSignificantDigit = divisor[b_l - 1];\r\n for (shift = a_l - b_l; shift >= 0; shift--) {\r\n quotientDigit = base - 1;\r\n if (remainder[shift + b_l] !== divisorMostSignificantDigit) {\r\n quotientDigit = Math.floor((remainder[shift + b_l] * base + remainder[shift + b_l - 1]) / divisorMostSignificantDigit);\r\n }\r\n // quotientDigit <= base - 1\r\n carry = 0;\r\n borrow = 0;\r\n l = divisor.length;\r\n for (i = 0; i < l; i++) {\r\n carry += quotientDigit * divisor[i];\r\n q = Math.floor(carry / base);\r\n borrow += remainder[shift + i] - (carry - q * base);\r\n carry = q;\r\n if (borrow < 0) {\r\n remainder[shift + i] = borrow + base;\r\n borrow = -1;\r\n } else {\r\n remainder[shift + i] = borrow;\r\n borrow = 0;\r\n }\r\n }\r\n while (borrow !== 0) {\r\n quotientDigit -= 1;\r\n carry = 0;\r\n for (i = 0; i < l; i++) {\r\n carry += remainder[shift + i] - base + divisor[i];\r\n if (carry < 0) {\r\n remainder[shift + i] = carry + base;\r\n carry = 0;\r\n } else {\r\n remainder[shift + i] = carry;\r\n carry = 1;\r\n }\r\n }\r\n borrow += carry;\r\n }\r\n result[shift] = quotientDigit;\r\n }\r\n // denormalization\r\n remainder = divModSmall(remainder, lambda)[0];\r\n return [arrayToSmall(result), arrayToSmall(remainder)];\r\n }\r\n\r\n function divMod2(a, b) { // Implementation idea shamelessly stolen from Silent Matt's library http://silentmatt.com/biginteger/\r\n // Performs faster than divMod1 on larger input sizes.\r\n var a_l = a.length,\r\n b_l = b.length,\r\n result = [],\r\n part = [],\r\n base = BASE,\r\n guess, xlen, highx, highy, check;\r\n while (a_l) {\r\n part.unshift(a[--a_l]);\r\n trim(part);\r\n if (compareAbs(part, b) < 0) {\r\n result.push(0);\r\n continue;\r\n }\r\n xlen = part.length;\r\n highx = part[xlen - 1] * base + part[xlen - 2];\r\n 
highy = b[b_l - 1] * base + b[b_l - 2];\r\n if (xlen > b_l) {\r\n highx = (highx + 1) * base;\r\n }\r\n guess = Math.ceil(highx / highy);\r\n do {\r\n check = multiplySmall(b, guess);\r\n if (compareAbs(check, part) <= 0) break;\r\n guess--;\r\n } while (guess);\r\n result.push(guess);\r\n part = subtract(part, check);\r\n }\r\n result.reverse();\r\n return [arrayToSmall(result), arrayToSmall(part)];\r\n }\r\n\r\n function divModSmall(value, lambda) {\r\n var length = value.length,\r\n quotient = createArray(length),\r\n base = BASE,\r\n i, q, remainder, divisor;\r\n remainder = 0;\r\n for (i = length - 1; i >= 0; --i) {\r\n divisor = remainder * base + value[i];\r\n q = truncate(divisor / lambda);\r\n remainder = divisor - q * lambda;\r\n quotient[i] = q | 0;\r\n }\r\n return [quotient, remainder | 0];\r\n }\r\n\r\n function divModAny(self, v) {\r\n var value, n = parseValue(v);\r\n if (supportsNativeBigInt) {\r\n return [new NativeBigInt(self.value / n.value), new NativeBigInt(self.value % n.value)];\r\n }\r\n var a = self.value, b = n.value;\r\n var quotient;\r\n if (b === 0) throw new Error(\"Cannot divide by zero\");\r\n if (self.isSmall) {\r\n if (n.isSmall) {\r\n return [new SmallInteger(truncate(a / b)), new SmallInteger(a % b)];\r\n }\r\n return [Integer[0], self];\r\n }\r\n if (n.isSmall) {\r\n if (b === 1) return [self, Integer[0]];\r\n if (b == -1) return [self.negate(), Integer[0]];\r\n var abs = Math.abs(b);\r\n if (abs < BASE) {\r\n value = divModSmall(a, abs);\r\n quotient = arrayToSmall(value[0]);\r\n var remainder = value[1];\r\n if (self.sign) remainder = -remainder;\r\n if (typeof quotient === \"number\") {\r\n if (self.sign !== n.sign) quotient = -quotient;\r\n return [new SmallInteger(quotient), new SmallInteger(remainder)];\r\n }\r\n return [new BigInteger(quotient, self.sign !== n.sign), new SmallInteger(remainder)];\r\n }\r\n b = smallToArray(abs);\r\n }\r\n var comparison = compareAbs(a, b);\r\n if (comparison === -1) return [Integer[0], self];\r\n if (comparison === 0) return [Integer[self.sign === n.sign ? 
1 : -1], Integer[0]];\r\n\r\n // divMod1 is faster on smaller input sizes\r\n if (a.length + b.length <= 200)\r\n value = divMod1(a, b);\r\n else value = divMod2(a, b);\r\n\r\n quotient = value[0];\r\n var qSign = self.sign !== n.sign,\r\n mod = value[1],\r\n mSign = self.sign;\r\n if (typeof quotient === \"number\") {\r\n if (qSign) quotient = -quotient;\r\n quotient = new SmallInteger(quotient);\r\n } else quotient = new BigInteger(quotient, qSign);\r\n if (typeof mod === \"number\") {\r\n if (mSign) mod = -mod;\r\n mod = new SmallInteger(mod);\r\n } else mod = new BigInteger(mod, mSign);\r\n return [quotient, mod];\r\n }\r\n\r\n BigInteger.prototype.divmod = function (v) {\r\n var result = divModAny(this, v);\r\n return {\r\n quotient: result[0],\r\n remainder: result[1]\r\n };\r\n };\r\n NativeBigInt.prototype.divmod = SmallInteger.prototype.divmod = BigInteger.prototype.divmod;\r\n\r\n\r\n BigInteger.prototype.divide = function (v) {\r\n return divModAny(this, v)[0];\r\n };\r\n NativeBigInt.prototype.over = NativeBigInt.prototype.divide = function (v) {\r\n return new NativeBigInt(this.value / parseValue(v).value);\r\n };\r\n SmallInteger.prototype.over = SmallInteger.prototype.divide = BigInteger.prototype.over = BigInteger.prototype.divide;\r\n\r\n BigInteger.prototype.mod = function (v) {\r\n return divModAny(this, v)[1];\r\n };\r\n NativeBigInt.prototype.mod = NativeBigInt.prototype.remainder = function (v) {\r\n return new NativeBigInt(this.value % parseValue(v).value);\r\n };\r\n SmallInteger.prototype.remainder = SmallInteger.prototype.mod = BigInteger.prototype.remainder = BigInteger.prototype.mod;\r\n\r\n BigInteger.prototype.pow = function (v) {\r\n var n = parseValue(v),\r\n a = this.value,\r\n b = n.value,\r\n value, x, y;\r\n if (b === 0) return Integer[1];\r\n if (a === 0) return Integer[0];\r\n if (a === 1) return Integer[1];\r\n if (a === -1) return n.isEven() ? Integer[1] : Integer[-1];\r\n if (n.sign) {\r\n return Integer[0];\r\n }\r\n if (!n.isSmall) throw new Error(\"The exponent \" + n.toString() + \" is too large.\");\r\n if (this.isSmall) {\r\n if (isPrecise(value = Math.pow(a, b)))\r\n return new SmallInteger(truncate(value));\r\n }\r\n x = this;\r\n y = Integer[1];\r\n while (true) {\r\n if (b & 1 === 1) {\r\n y = y.times(x);\r\n --b;\r\n }\r\n if (b === 0) break;\r\n b /= 2;\r\n x = x.square();\r\n }\r\n return y;\r\n };\r\n SmallInteger.prototype.pow = BigInteger.prototype.pow;\r\n\r\n NativeBigInt.prototype.pow = function (v) {\r\n var n = parseValue(v);\r\n var a = this.value, b = n.value;\r\n var _0 = BigInt(0), _1 = BigInt(1), _2 = BigInt(2);\r\n if (b === _0) return Integer[1];\r\n if (a === _0) return Integer[0];\r\n if (a === _1) return Integer[1];\r\n if (a === BigInt(-1)) return n.isEven() ? 
Integer[1] : Integer[-1];\r\n if (n.isNegative()) return new NativeBigInt(_0);\r\n var x = this;\r\n var y = Integer[1];\r\n while (true) {\r\n if ((b & _1) === _1) {\r\n y = y.times(x);\r\n --b;\r\n }\r\n if (b === _0) break;\r\n b /= _2;\r\n x = x.square();\r\n }\r\n return y;\r\n }\r\n\r\n BigInteger.prototype.modPow = function (exp, mod) {\r\n exp = parseValue(exp);\r\n mod = parseValue(mod);\r\n if (mod.isZero()) throw new Error(\"Cannot take modPow with modulus 0\");\r\n var r = Integer[1],\r\n base = this.mod(mod);\r\n if (exp.isNegative()) {\r\n exp = exp.multiply(Integer[-1]);\r\n base = base.modInv(mod);\r\n }\r\n while (exp.isPositive()) {\r\n if (base.isZero()) return Integer[0];\r\n if (exp.isOdd()) r = r.multiply(base).mod(mod);\r\n exp = exp.divide(2);\r\n base = base.square().mod(mod);\r\n }\r\n return r;\r\n };\r\n NativeBigInt.prototype.modPow = SmallInteger.prototype.modPow = BigInteger.prototype.modPow;\r\n\r\n function compareAbs(a, b) {\r\n if (a.length !== b.length) {\r\n return a.length > b.length ? 1 : -1;\r\n }\r\n for (var i = a.length - 1; i >= 0; i--) {\r\n if (a[i] !== b[i]) return a[i] > b[i] ? 1 : -1;\r\n }\r\n return 0;\r\n }\r\n\r\n BigInteger.prototype.compareAbs = function (v) {\r\n var n = parseValue(v),\r\n a = this.value,\r\n b = n.value;\r\n if (n.isSmall) return 1;\r\n return compareAbs(a, b);\r\n };\r\n SmallInteger.prototype.compareAbs = function (v) {\r\n var n = parseValue(v),\r\n a = Math.abs(this.value),\r\n b = n.value;\r\n if (n.isSmall) {\r\n b = Math.abs(b);\r\n return a === b ? 0 : a > b ? 1 : -1;\r\n }\r\n return -1;\r\n };\r\n NativeBigInt.prototype.compareAbs = function (v) {\r\n var a = this.value;\r\n var b = parseValue(v).value;\r\n a = a >= 0 ? a : -a;\r\n b = b >= 0 ? b : -b;\r\n return a === b ? 0 : a > b ? 1 : -1;\r\n }\r\n\r\n BigInteger.prototype.compare = function (v) {\r\n // See discussion about comparison with Infinity:\r\n // https://github.com/peterolson/BigInteger.js/issues/61\r\n if (v === Infinity) {\r\n return -1;\r\n }\r\n if (v === -Infinity) {\r\n return 1;\r\n }\r\n\r\n var n = parseValue(v),\r\n a = this.value,\r\n b = n.value;\r\n if (this.sign !== n.sign) {\r\n return n.sign ? 1 : -1;\r\n }\r\n if (n.isSmall) {\r\n return this.sign ? -1 : 1;\r\n }\r\n return compareAbs(a, b) * (this.sign ? -1 : 1);\r\n };\r\n BigInteger.prototype.compareTo = BigInteger.prototype.compare;\r\n\r\n SmallInteger.prototype.compare = function (v) {\r\n if (v === Infinity) {\r\n return -1;\r\n }\r\n if (v === -Infinity) {\r\n return 1;\r\n }\r\n\r\n var n = parseValue(v),\r\n a = this.value,\r\n b = n.value;\r\n if (n.isSmall) {\r\n return a == b ? 0 : a > b ? 1 : -1;\r\n }\r\n if (a < 0 !== n.sign) {\r\n return a < 0 ? -1 : 1;\r\n }\r\n return a < 0 ? 1 : -1;\r\n };\r\n SmallInteger.prototype.compareTo = SmallInteger.prototype.compare;\r\n\r\n NativeBigInt.prototype.compare = function (v) {\r\n if (v === Infinity) {\r\n return -1;\r\n }\r\n if (v === -Infinity) {\r\n return 1;\r\n }\r\n var a = this.value;\r\n var b = parseValue(v).value;\r\n return a === b ? 0 : a > b ? 
1 : -1;\r\n }\r\n NativeBigInt.prototype.compareTo = NativeBigInt.prototype.compare;\r\n\r\n BigInteger.prototype.equals = function (v) {\r\n return this.compare(v) === 0;\r\n };\r\n NativeBigInt.prototype.eq = NativeBigInt.prototype.equals = SmallInteger.prototype.eq = SmallInteger.prototype.equals = BigInteger.prototype.eq = BigInteger.prototype.equals;\r\n\r\n BigInteger.prototype.notEquals = function (v) {\r\n return this.compare(v) !== 0;\r\n };\r\n NativeBigInt.prototype.neq = NativeBigInt.prototype.notEquals = SmallInteger.prototype.neq = SmallInteger.prototype.notEquals = BigInteger.prototype.neq = BigInteger.prototype.notEquals;\r\n\r\n BigInteger.prototype.greater = function (v) {\r\n return this.compare(v) > 0;\r\n };\r\n NativeBigInt.prototype.gt = NativeBigInt.prototype.greater = SmallInteger.prototype.gt = SmallInteger.prototype.greater = BigInteger.prototype.gt = BigInteger.prototype.greater;\r\n\r\n BigInteger.prototype.lesser = function (v) {\r\n return this.compare(v) < 0;\r\n };\r\n NativeBigInt.prototype.lt = NativeBigInt.prototype.lesser = SmallInteger.prototype.lt = SmallInteger.prototype.lesser = BigInteger.prototype.lt = BigInteger.prototype.lesser;\r\n\r\n BigInteger.prototype.greaterOrEquals = function (v) {\r\n return this.compare(v) >= 0;\r\n };\r\n NativeBigInt.prototype.geq = NativeBigInt.prototype.greaterOrEquals = SmallInteger.prototype.geq = SmallInteger.prototype.greaterOrEquals = BigInteger.prototype.geq = BigInteger.prototype.greaterOrEquals;\r\n\r\n BigInteger.prototype.lesserOrEquals = function (v) {\r\n return this.compare(v) <= 0;\r\n };\r\n NativeBigInt.prototype.leq = NativeBigInt.prototype.lesserOrEquals = SmallInteger.prototype.leq = SmallInteger.prototype.lesserOrEquals = BigInteger.prototype.leq = BigInteger.prototype.lesserOrEquals;\r\n\r\n BigInteger.prototype.isEven = function () {\r\n return (this.value[0] & 1) === 0;\r\n };\r\n SmallInteger.prototype.isEven = function () {\r\n return (this.value & 1) === 0;\r\n };\r\n NativeBigInt.prototype.isEven = function () {\r\n return (this.value & BigInt(1)) === BigInt(0);\r\n }\r\n\r\n BigInteger.prototype.isOdd = function () {\r\n return (this.value[0] & 1) === 1;\r\n };\r\n SmallInteger.prototype.isOdd = function () {\r\n return (this.value & 1) === 1;\r\n };\r\n NativeBigInt.prototype.isOdd = function () {\r\n return (this.value & BigInt(1)) === BigInt(1);\r\n }\r\n\r\n BigInteger.prototype.isPositive = function () {\r\n return !this.sign;\r\n };\r\n SmallInteger.prototype.isPositive = function () {\r\n return this.value > 0;\r\n };\r\n NativeBigInt.prototype.isPositive = SmallInteger.prototype.isPositive;\r\n\r\n BigInteger.prototype.isNegative = function () {\r\n return this.sign;\r\n };\r\n SmallInteger.prototype.isNegative = function () {\r\n return this.value < 0;\r\n };\r\n NativeBigInt.prototype.isNegative = SmallInteger.prototype.isNegative;\r\n\r\n BigInteger.prototype.isUnit = function () {\r\n return false;\r\n };\r\n SmallInteger.prototype.isUnit = function () {\r\n return Math.abs(this.value) === 1;\r\n };\r\n NativeBigInt.prototype.isUnit = function () {\r\n return this.abs().value === BigInt(1);\r\n }\r\n\r\n BigInteger.prototype.isZero = function () {\r\n return false;\r\n };\r\n SmallInteger.prototype.isZero = function () {\r\n return this.value === 0;\r\n };\r\n NativeBigInt.prototype.isZero = function () {\r\n return this.value === BigInt(0);\r\n }\r\n\r\n BigInteger.prototype.isDivisibleBy = function (v) {\r\n var n = parseValue(v);\r\n if (n.isZero()) return false;\r\n if 
(n.isUnit()) return true;\r\n if (n.compareAbs(2) === 0) return this.isEven();\r\n return this.mod(n).isZero();\r\n };\r\n NativeBigInt.prototype.isDivisibleBy = SmallInteger.prototype.isDivisibleBy = BigInteger.prototype.isDivisibleBy;\r\n\r\n function isBasicPrime(v) {\r\n var n = v.abs();\r\n if (n.isUnit()) return false;\r\n if (n.equals(2) || n.equals(3) || n.equals(5)) return true;\r\n if (n.isEven() || n.isDivisibleBy(3) || n.isDivisibleBy(5)) return false;\r\n if (n.lesser(49)) return true;\r\n // we don't know if it's prime: let the other functions figure it out\r\n }\r\n\r\n function millerRabinTest(n, a) {\r\n var nPrev = n.prev(),\r\n b = nPrev,\r\n r = 0,\r\n d, t, i, x;\r\n while (b.isEven()) b = b.divide(2), r++;\r\n next: for (i = 0; i < a.length; i++) {\r\n if (n.lesser(a[i])) continue;\r\n x = bigInt(a[i]).modPow(b, n);\r\n if (x.isUnit() || x.equals(nPrev)) continue;\r\n for (d = r - 1; d != 0; d--) {\r\n x = x.square().mod(n);\r\n if (x.isUnit()) return false;\r\n if (x.equals(nPrev)) continue next;\r\n }\r\n return false;\r\n }\r\n return true;\r\n }\r\n\r\n // Set \"strict\" to true to force GRH-supported lower bound of 2*log(N)^2\r\n BigInteger.prototype.isPrime = function (strict) {\r\n var isPrime = isBasicPrime(this);\r\n if (isPrime !== undefined) return isPrime;\r\n var n = this.abs();\r\n var bits = n.bitLength();\r\n if (bits <= 64)\r\n return millerRabinTest(n, [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37]);\r\n var logN = Math.log(2) * bits.toJSNumber();\r\n var t = Math.ceil((strict === true) ? (2 * Math.pow(logN, 2)) : logN);\r\n for (var a = [], i = 0; i < t; i++) {\r\n a.push(bigInt(i + 2));\r\n }\r\n return millerRabinTest(n, a);\r\n };\r\n NativeBigInt.prototype.isPrime = SmallInteger.prototype.isPrime = BigInteger.prototype.isPrime;\r\n\r\n BigInteger.prototype.isProbablePrime = function (iterations, rng) {\r\n var isPrime = isBasicPrime(this);\r\n if (isPrime !== undefined) return isPrime;\r\n var n = this.abs();\r\n var t = iterations === undefined ? 
5 : iterations;\r\n for (var a = [], i = 0; i < t; i++) {\r\n a.push(bigInt.randBetween(2, n.minus(2), rng));\r\n }\r\n return millerRabinTest(n, a);\r\n };\r\n NativeBigInt.prototype.isProbablePrime = SmallInteger.prototype.isProbablePrime = BigInteger.prototype.isProbablePrime;\r\n\r\n BigInteger.prototype.modInv = function (n) {\r\n var t = bigInt.zero, newT = bigInt.one, r = parseValue(n), newR = this.abs(), q, lastT, lastR;\r\n while (!newR.isZero()) {\r\n q = r.divide(newR);\r\n lastT = t;\r\n lastR = r;\r\n t = newT;\r\n r = newR;\r\n newT = lastT.subtract(q.multiply(newT));\r\n newR = lastR.subtract(q.multiply(newR));\r\n }\r\n if (!r.isUnit()) throw new Error(this.toString() + \" and \" + n.toString() + \" are not co-prime\");\r\n if (t.compare(0) === -1) {\r\n t = t.add(n);\r\n }\r\n if (this.isNegative()) {\r\n return t.negate();\r\n }\r\n return t;\r\n };\r\n\r\n NativeBigInt.prototype.modInv = SmallInteger.prototype.modInv = BigInteger.prototype.modInv;\r\n\r\n BigInteger.prototype.next = function () {\r\n var value = this.value;\r\n if (this.sign) {\r\n return subtractSmall(value, 1, this.sign);\r\n }\r\n return new BigInteger(addSmall(value, 1), this.sign);\r\n };\r\n SmallInteger.prototype.next = function () {\r\n var value = this.value;\r\n if (value + 1 < MAX_INT) return new SmallInteger(value + 1);\r\n return new BigInteger(MAX_INT_ARR, false);\r\n };\r\n NativeBigInt.prototype.next = function () {\r\n return new NativeBigInt(this.value + BigInt(1));\r\n }\r\n\r\n BigInteger.prototype.prev = function () {\r\n var value = this.value;\r\n if (this.sign) {\r\n return new BigInteger(addSmall(value, 1), true);\r\n }\r\n return subtractSmall(value, 1, this.sign);\r\n };\r\n SmallInteger.prototype.prev = function () {\r\n var value = this.value;\r\n if (value - 1 > -MAX_INT) return new SmallInteger(value - 1);\r\n return new BigInteger(MAX_INT_ARR, true);\r\n };\r\n NativeBigInt.prototype.prev = function () {\r\n return new NativeBigInt(this.value - BigInt(1));\r\n }\r\n\r\n var powersOfTwo = [1];\r\n while (2 * powersOfTwo[powersOfTwo.length - 1] <= BASE) powersOfTwo.push(2 * powersOfTwo[powersOfTwo.length - 1]);\r\n var powers2Length = powersOfTwo.length, highestPower2 = powersOfTwo[powers2Length - 1];\r\n\r\n function shift_isSmall(n) {\r\n return Math.abs(n) <= BASE;\r\n }\r\n\r\n BigInteger.prototype.shiftLeft = function (v) {\r\n var n = parseValue(v).toJSNumber();\r\n if (!shift_isSmall(n)) {\r\n throw new Error(String(n) + \" is too large for shifting.\");\r\n }\r\n if (n < 0) return this.shiftRight(-n);\r\n var result = this;\r\n if (result.isZero()) return result;\r\n while (n >= powers2Length) {\r\n result = result.multiply(highestPower2);\r\n n -= powers2Length - 1;\r\n }\r\n return result.multiply(powersOfTwo[n]);\r\n };\r\n NativeBigInt.prototype.shiftLeft = SmallInteger.prototype.shiftLeft = BigInteger.prototype.shiftLeft;\r\n\r\n BigInteger.prototype.shiftRight = function (v) {\r\n var remQuo;\r\n var n = parseValue(v).toJSNumber();\r\n if (!shift_isSmall(n)) {\r\n throw new Error(String(n) + \" is too large for shifting.\");\r\n }\r\n if (n < 0) return this.shiftLeft(-n);\r\n var result = this;\r\n while (n >= powers2Length) {\r\n if (result.isZero() || (result.isNegative() && result.isUnit())) return result;\r\n remQuo = divModAny(result, highestPower2);\r\n result = remQuo[1].isNegative() ? remQuo[0].prev() : remQuo[0];\r\n n -= powers2Length - 1;\r\n }\r\n remQuo = divModAny(result, powersOfTwo[n]);\r\n return remQuo[1].isNegative() ? 
remQuo[0].prev() : remQuo[0];\r\n };\r\n NativeBigInt.prototype.shiftRight = SmallInteger.prototype.shiftRight = BigInteger.prototype.shiftRight;\r\n\r\n function bitwise(x, y, fn) {\r\n y = parseValue(y);\r\n var xSign = x.isNegative(), ySign = y.isNegative();\r\n var xRem = xSign ? x.not() : x,\r\n yRem = ySign ? y.not() : y;\r\n var xDigit = 0, yDigit = 0;\r\n var xDivMod = null, yDivMod = null;\r\n var result = [];\r\n while (!xRem.isZero() || !yRem.isZero()) {\r\n xDivMod = divModAny(xRem, highestPower2);\r\n xDigit = xDivMod[1].toJSNumber();\r\n if (xSign) {\r\n xDigit = highestPower2 - 1 - xDigit; // two's complement for negative numbers\r\n }\r\n\r\n yDivMod = divModAny(yRem, highestPower2);\r\n yDigit = yDivMod[1].toJSNumber();\r\n if (ySign) {\r\n yDigit = highestPower2 - 1 - yDigit; // two's complement for negative numbers\r\n }\r\n\r\n xRem = xDivMod[0];\r\n yRem = yDivMod[0];\r\n result.push(fn(xDigit, yDigit));\r\n }\r\n var sum = fn(xSign ? 1 : 0, ySign ? 1 : 0) !== 0 ? bigInt(-1) : bigInt(0);\r\n for (var i = result.length - 1; i >= 0; i -= 1) {\r\n sum = sum.multiply(highestPower2).add(bigInt(result[i]));\r\n }\r\n return sum;\r\n }\r\n\r\n BigInteger.prototype.not = function () {\r\n return this.negate().prev();\r\n };\r\n NativeBigInt.prototype.not = SmallInteger.prototype.not = BigInteger.prototype.not;\r\n\r\n BigInteger.prototype.and = function (n) {\r\n return bitwise(this, n, function (a, b) { return a & b; });\r\n };\r\n NativeBigInt.prototype.and = SmallInteger.prototype.and = BigInteger.prototype.and;\r\n\r\n BigInteger.prototype.or = function (n) {\r\n return bitwise(this, n, function (a, b) { return a | b; });\r\n };\r\n NativeBigInt.prototype.or = SmallInteger.prototype.or = BigInteger.prototype.or;\r\n\r\n BigInteger.prototype.xor = function (n) {\r\n return bitwise(this, n, function (a, b) { return a ^ b; });\r\n };\r\n NativeBigInt.prototype.xor = SmallInteger.prototype.xor = BigInteger.prototype.xor;\r\n\r\n var LOBMASK_I = 1 << 30, LOBMASK_BI = (BASE & -BASE) * (BASE & -BASE) | LOBMASK_I;\r\n function roughLOB(n) { // get lowestOneBit (rough)\r\n // SmallInteger: return Min(lowestOneBit(n), 1 << 30)\r\n // BigInteger: return Min(lowestOneBit(n), 1 << 14) [BASE=1e7]\r\n var v = n.value,\r\n x = typeof v === \"number\" ? v | LOBMASK_I :\r\n typeof v === \"bigint\" ? v | BigInt(LOBMASK_I) :\r\n v[0] + v[1] * BASE | LOBMASK_BI;\r\n return x & -x;\r\n }\r\n\r\n function integerLogarithm(value, base) {\r\n if (base.compareTo(value) <= 0) {\r\n var tmp = integerLogarithm(value, base.square(base));\r\n var p = tmp.p;\r\n var e = tmp.e;\r\n var t = p.multiply(base);\r\n return t.compareTo(value) <= 0 ? { p: t, e: e * 2 + 1 } : { p: p, e: e * 2 };\r\n }\r\n return { p: bigInt(1), e: 0 };\r\n }\r\n\r\n BigInteger.prototype.bitLength = function () {\r\n var n = this;\r\n if (n.compareTo(bigInt(0)) < 0) {\r\n n = n.negate().subtract(bigInt(1));\r\n }\r\n if (n.compareTo(bigInt(0)) === 0) {\r\n return bigInt(0);\r\n }\r\n return bigInt(integerLogarithm(n, bigInt(2)).e).add(bigInt(1));\r\n }\r\n NativeBigInt.prototype.bitLength = SmallInteger.prototype.bitLength = BigInteger.prototype.bitLength;\r\n\r\n function max(a, b) {\r\n a = parseValue(a);\r\n b = parseValue(b);\r\n return a.greater(b) ? a : b;\r\n }\r\n function min(a, b) {\r\n a = parseValue(a);\r\n b = parseValue(b);\r\n return a.lesser(b) ? 
a : b;\r\n }\r\n function gcd(a, b) {\r\n a = parseValue(a).abs();\r\n b = parseValue(b).abs();\r\n if (a.equals(b)) return a;\r\n if (a.isZero()) return b;\r\n if (b.isZero()) return a;\r\n var c = Integer[1], d, t;\r\n while (a.isEven() && b.isEven()) {\r\n d = min(roughLOB(a), roughLOB(b));\r\n a = a.divide(d);\r\n b = b.divide(d);\r\n c = c.multiply(d);\r\n }\r\n while (a.isEven()) {\r\n a = a.divide(roughLOB(a));\r\n }\r\n do {\r\n while (b.isEven()) {\r\n b = b.divide(roughLOB(b));\r\n }\r\n if (a.greater(b)) {\r\n t = b; b = a; a = t;\r\n }\r\n b = b.subtract(a);\r\n } while (!b.isZero());\r\n return c.isUnit() ? a : a.multiply(c);\r\n }\r\n function lcm(a, b) {\r\n a = parseValue(a).abs();\r\n b = parseValue(b).abs();\r\n return a.divide(gcd(a, b)).multiply(b);\r\n }\r\n function randBetween(a, b, rng) {\r\n a = parseValue(a);\r\n b = parseValue(b);\r\n var usedRNG = rng || Math.random;\r\n var low = min(a, b), high = max(a, b);\r\n var range = high.subtract(low).add(1);\r\n if (range.isSmall) return low.add(Math.floor(usedRNG() * range));\r\n var digits = toBase(range, BASE).value;\r\n var result = [], restricted = true;\r\n for (var i = 0; i < digits.length; i++) {\r\n var top = restricted ? digits[i] : BASE;\r\n var digit = truncate(usedRNG() * top);\r\n result.push(digit);\r\n if (digit < top) restricted = false;\r\n }\r\n return low.add(Integer.fromArray(result, BASE, false));\r\n }\r\n\r\n var parseBase = function (text, base, alphabet, caseSensitive) {\r\n alphabet = alphabet || DEFAULT_ALPHABET;\r\n text = String(text);\r\n if (!caseSensitive) {\r\n text = text.toLowerCase();\r\n alphabet = alphabet.toLowerCase();\r\n }\r\n var length = text.length;\r\n var i;\r\n var absBase = Math.abs(base);\r\n var alphabetValues = {};\r\n for (i = 0; i < alphabet.length; i++) {\r\n alphabetValues[alphabet[i]] = i;\r\n }\r\n for (i = 0; i < length; i++) {\r\n var c = text[i];\r\n if (c === \"-\") continue;\r\n if (c in alphabetValues) {\r\n if (alphabetValues[c] >= absBase) {\r\n if (c === \"1\" && absBase === 1) continue;\r\n throw new Error(c + \" is not a valid digit in base \" + base + \".\");\r\n }\r\n }\r\n }\r\n base = parseValue(base);\r\n var digits = [];\r\n var isNegative = text[0] === \"-\";\r\n for (i = isNegative ? 1 : 0; i < text.length; i++) {\r\n var c = text[i];\r\n if (c in alphabetValues) digits.push(parseValue(alphabetValues[c]));\r\n else if (c === \"<\") {\r\n var start = i;\r\n do { i++; } while (text[i] !== \">\" && i < text.length);\r\n digits.push(parseValue(text.slice(start + 1, i)));\r\n }\r\n else throw new Error(c + \" is not a valid character\");\r\n }\r\n return parseBaseFromArray(digits, base, isNegative);\r\n };\r\n\r\n function parseBaseFromArray(digits, base, isNegative) {\r\n var val = Integer[0], pow = Integer[1], i;\r\n for (i = digits.length - 1; i >= 0; i--) {\r\n val = val.add(digits[i].times(pow));\r\n pow = pow.times(base);\r\n }\r\n return isNegative ? 
val.negate() : val;\r\n }\r\n\r\n function stringify(digit, alphabet) {\r\n alphabet = alphabet || DEFAULT_ALPHABET;\r\n if (digit < alphabet.length) {\r\n return alphabet[digit];\r\n }\r\n return \"<\" + digit + \">\";\r\n }\r\n\r\n function toBase(n, base) {\r\n base = bigInt(base);\r\n if (base.isZero()) {\r\n if (n.isZero()) return { value: [0], isNegative: false };\r\n throw new Error(\"Cannot convert nonzero numbers to base 0.\");\r\n }\r\n if (base.equals(-1)) {\r\n if (n.isZero()) return { value: [0], isNegative: false };\r\n if (n.isNegative())\r\n return {\r\n value: [].concat.apply([], Array.apply(null, Array(-n.toJSNumber()))\r\n .map(Array.prototype.valueOf, [1, 0])\r\n ),\r\n isNegative: false\r\n };\r\n\r\n var arr = Array.apply(null, Array(n.toJSNumber() - 1))\r\n .map(Array.prototype.valueOf, [0, 1]);\r\n arr.unshift([1]);\r\n return {\r\n value: [].concat.apply([], arr),\r\n isNegative: false\r\n };\r\n }\r\n\r\n var neg = false;\r\n if (n.isNegative() && base.isPositive()) {\r\n neg = true;\r\n n = n.abs();\r\n }\r\n if (base.isUnit()) {\r\n if (n.isZero()) return { value: [0], isNegative: false };\r\n\r\n return {\r\n value: Array.apply(null, Array(n.toJSNumber()))\r\n .map(Number.prototype.valueOf, 1),\r\n isNegative: neg\r\n };\r\n }\r\n var out = [];\r\n var left = n, divmod;\r\n while (left.isNegative() || left.compareAbs(base) >= 0) {\r\n divmod = left.divmod(base);\r\n left = divmod.quotient;\r\n var digit = divmod.remainder;\r\n if (digit.isNegative()) {\r\n digit = base.minus(digit).abs();\r\n left = left.next();\r\n }\r\n out.push(digit.toJSNumber());\r\n }\r\n out.push(left.toJSNumber());\r\n return { value: out.reverse(), isNegative: neg };\r\n }\r\n\r\n function toBaseString(n, base, alphabet) {\r\n var arr = toBase(n, base);\r\n return (arr.isNegative ? \"-\" : \"\") + arr.value.map(function (x) {\r\n return stringify(x, alphabet);\r\n }).join('');\r\n }\r\n\r\n BigInteger.prototype.toArray = function (radix) {\r\n return toBase(this, radix);\r\n };\r\n\r\n SmallInteger.prototype.toArray = function (radix) {\r\n return toBase(this, radix);\r\n };\r\n\r\n NativeBigInt.prototype.toArray = function (radix) {\r\n return toBase(this, radix);\r\n };\r\n\r\n BigInteger.prototype.toString = function (radix, alphabet) {\r\n if (radix === undefined) radix = 10;\r\n if (radix !== 10) return toBaseString(this, radix, alphabet);\r\n var v = this.value, l = v.length, str = String(v[--l]), zeros = \"0000000\", digit;\r\n while (--l >= 0) {\r\n digit = String(v[l]);\r\n str += zeros.slice(digit.length) + digit;\r\n }\r\n var sign = this.sign ? 
\"-\" : \"\";\r\n return sign + str;\r\n };\r\n\r\n SmallInteger.prototype.toString = function (radix, alphabet) {\r\n if (radix === undefined) radix = 10;\r\n if (radix != 10) return toBaseString(this, radix, alphabet);\r\n return String(this.value);\r\n };\r\n\r\n NativeBigInt.prototype.toString = SmallInteger.prototype.toString;\r\n\r\n NativeBigInt.prototype.toJSON = BigInteger.prototype.toJSON = SmallInteger.prototype.toJSON = function () { return this.toString(); }\r\n\r\n BigInteger.prototype.valueOf = function () {\r\n return parseInt(this.toString(), 10);\r\n };\r\n BigInteger.prototype.toJSNumber = BigInteger.prototype.valueOf;\r\n\r\n SmallInteger.prototype.valueOf = function () {\r\n return this.value;\r\n };\r\n SmallInteger.prototype.toJSNumber = SmallInteger.prototype.valueOf;\r\n NativeBigInt.prototype.valueOf = NativeBigInt.prototype.toJSNumber = function () {\r\n return parseInt(this.toString(), 10);\r\n }\r\n\r\n function parseStringValue(v) {\r\n if (isPrecise(+v)) {\r\n var x = +v;\r\n if (x === truncate(x))\r\n return supportsNativeBigInt ? new NativeBigInt(BigInt(x)) : new SmallInteger(x);\r\n throw new Error(\"Invalid integer: \" + v);\r\n }\r\n var sign = v[0] === \"-\";\r\n if (sign) v = v.slice(1);\r\n var split = v.split(/e/i);\r\n if (split.length > 2) throw new Error(\"Invalid integer: \" + split.join(\"e\"));\r\n if (split.length === 2) {\r\n var exp = split[1];\r\n if (exp[0] === \"+\") exp = exp.slice(1);\r\n exp = +exp;\r\n if (exp !== truncate(exp) || !isPrecise(exp)) throw new Error(\"Invalid integer: \" + exp + \" is not a valid exponent.\");\r\n var text = split[0];\r\n var decimalPlace = text.indexOf(\".\");\r\n if (decimalPlace >= 0) {\r\n exp -= text.length - decimalPlace - 1;\r\n text = text.slice(0, decimalPlace) + text.slice(decimalPlace + 1);\r\n }\r\n if (exp < 0) throw new Error(\"Cannot include negative exponent part for integers\");\r\n text += (new Array(exp + 1)).join(\"0\");\r\n v = text;\r\n }\r\n var isValid = /^([0-9][0-9]*)$/.test(v);\r\n if (!isValid) throw new Error(\"Invalid integer: \" + v);\r\n if (supportsNativeBigInt) {\r\n return new NativeBigInt(BigInt(sign ? 
\"-\" + v : v));\r\n }\r\n var r = [], max = v.length, l = LOG_BASE, min = max - l;\r\n while (max > 0) {\r\n r.push(+v.slice(min, max));\r\n min -= l;\r\n if (min < 0) min = 0;\r\n max -= l;\r\n }\r\n trim(r);\r\n return new BigInteger(r, sign);\r\n }\r\n\r\n function parseNumberValue(v) {\r\n if (supportsNativeBigInt) {\r\n return new NativeBigInt(BigInt(v));\r\n }\r\n if (isPrecise(v)) {\r\n if (v !== truncate(v)) throw new Error(v + \" is not an integer.\");\r\n return new SmallInteger(v);\r\n }\r\n return parseStringValue(v.toString());\r\n }\r\n\r\n function parseValue(v) {\r\n if (typeof v === \"number\") {\r\n return parseNumberValue(v);\r\n }\r\n if (typeof v === \"string\") {\r\n return parseStringValue(v);\r\n }\r\n if (typeof v === \"bigint\") {\r\n return new NativeBigInt(v);\r\n }\r\n return v;\r\n }\r\n // Pre-define numbers in range [-999,999]\r\n for (var i = 0; i < 1000; i++) {\r\n Integer[i] = parseValue(i);\r\n if (i > 0) Integer[-i] = parseValue(-i);\r\n }\r\n // Backwards compatibility\r\n Integer.one = Integer[1];\r\n Integer.zero = Integer[0];\r\n Integer.minusOne = Integer[-1];\r\n Integer.max = max;\r\n Integer.min = min;\r\n Integer.gcd = gcd;\r\n Integer.lcm = lcm;\r\n Integer.isInstance = function (x) { return x instanceof BigInteger || x instanceof SmallInteger || x instanceof NativeBigInt; };\r\n Integer.randBetween = randBetween;\r\n\r\n Integer.fromArray = function (digits, base, isNegative) {\r\n return parseBaseFromArray(digits.map(parseValue), parseValue(base || 10), isNegative);\r\n };\r\n\r\n return Integer;\r\n})();\r\n\r\n// Node.js check\r\nif (typeof module !== \"undefined\" && module.hasOwnProperty(\"exports\")) {\r\n module.exports = bigInt;\r\n}\r\n\r\n//amd check\r\nif (typeof define === \"function\" && define.amd) {\r\n define( function () {\r\n return bigInt;\r\n });\r\n}\r\n","var Chainsaw = require('chainsaw');\nvar EventEmitter = require('events').EventEmitter;\nvar Buffers = require('buffers');\nvar Vars = require('./lib/vars.js');\nvar Stream = require('stream').Stream;\n\nexports = module.exports = function (bufOrEm, eventName) {\n if (Buffer.isBuffer(bufOrEm)) {\n return exports.parse(bufOrEm);\n }\n \n var s = exports.stream();\n if (bufOrEm && bufOrEm.pipe) {\n bufOrEm.pipe(s);\n }\n else if (bufOrEm) {\n bufOrEm.on(eventName || 'data', function (buf) {\n s.write(buf);\n });\n \n bufOrEm.on('end', function () {\n s.end();\n });\n }\n return s;\n};\n\nexports.stream = function (input) {\n if (input) return exports.apply(null, arguments);\n \n var pending = null;\n function getBytes (bytes, cb, skip) {\n pending = {\n bytes : bytes,\n skip : skip,\n cb : function (buf) {\n pending = null;\n cb(buf);\n },\n };\n dispatch();\n }\n \n var offset = null;\n function dispatch () {\n if (!pending) {\n if (caughtEnd) done = true;\n return;\n }\n if (typeof pending === 'function') {\n pending();\n }\n else {\n var bytes = offset + pending.bytes;\n \n if (buffers.length >= bytes) {\n var buf;\n if (offset == null) {\n buf = buffers.splice(0, bytes);\n if (!pending.skip) {\n buf = buf.slice();\n }\n }\n else {\n if (!pending.skip) {\n buf = buffers.slice(offset, bytes);\n }\n offset = bytes;\n }\n \n if (pending.skip) {\n pending.cb();\n }\n else {\n pending.cb(buf);\n }\n }\n }\n }\n \n function builder (saw) {\n function next () { if (!done) saw.next() }\n \n var self = words(function (bytes, cb) {\n return function (name) {\n getBytes(bytes, function (buf) {\n vars.set(name, cb(buf));\n next();\n });\n };\n });\n \n self.tap = function (cb) 
{\n saw.nest(cb, vars.store);\n };\n \n self.into = function (key, cb) {\n if (!vars.get(key)) vars.set(key, {});\n var parent = vars;\n vars = Vars(parent.get(key));\n \n saw.nest(function () {\n cb.apply(this, arguments);\n this.tap(function () {\n vars = parent;\n });\n }, vars.store);\n };\n \n self.flush = function () {\n vars.store = {};\n next();\n };\n \n self.loop = function (cb) {\n var end = false;\n \n saw.nest(false, function loop () {\n this.vars = vars.store;\n cb.call(this, function () {\n end = true;\n next();\n }, vars.store);\n this.tap(function () {\n if (end) saw.next()\n else loop.call(this)\n }.bind(this));\n }, vars.store);\n };\n \n self.buffer = function (name, bytes) {\n if (typeof bytes === 'string') {\n bytes = vars.get(bytes);\n }\n \n getBytes(bytes, function (buf) {\n vars.set(name, buf);\n next();\n });\n };\n \n self.skip = function (bytes) {\n if (typeof bytes === 'string') {\n bytes = vars.get(bytes);\n }\n \n getBytes(bytes, function () {\n next();\n });\n };\n \n self.scan = function find (name, search) {\n if (typeof search === 'string') {\n search = new Buffer(search);\n }\n else if (!Buffer.isBuffer(search)) {\n throw new Error('search must be a Buffer or a string');\n }\n \n var taken = 0;\n pending = function () {\n var pos = buffers.indexOf(search, offset + taken);\n var i = pos-offset-taken;\n if (pos !== -1) {\n pending = null;\n if (offset != null) {\n vars.set(\n name,\n buffers.slice(offset, offset + taken + i)\n );\n offset += taken + i + search.length;\n }\n else {\n vars.set(\n name,\n buffers.slice(0, taken + i)\n );\n buffers.splice(0, taken + i + search.length);\n }\n next();\n dispatch();\n } else {\n i = Math.max(buffers.length - search.length - offset - taken, 0);\n\t\t\t\t}\n taken += i;\n };\n dispatch();\n };\n \n self.peek = function (cb) {\n offset = 0;\n saw.nest(function () {\n cb.call(this, vars.store);\n this.tap(function () {\n offset = null;\n });\n });\n };\n \n return self;\n };\n \n var stream = Chainsaw.light(builder);\n stream.writable = true;\n \n var buffers = Buffers();\n \n stream.write = function (buf) {\n buffers.push(buf);\n dispatch();\n };\n \n var vars = Vars();\n \n var done = false, caughtEnd = false;\n stream.end = function () {\n caughtEnd = true;\n };\n \n stream.pipe = Stream.prototype.pipe;\n Object.getOwnPropertyNames(EventEmitter.prototype).forEach(function (name) {\n stream[name] = EventEmitter.prototype[name];\n });\n \n return stream;\n};\n\nexports.parse = function parse (buffer) {\n var self = words(function (bytes, cb) {\n return function (name) {\n if (offset + bytes <= buffer.length) {\n var buf = buffer.slice(offset, offset + bytes);\n offset += bytes;\n vars.set(name, cb(buf));\n }\n else {\n vars.set(name, null);\n }\n return self;\n };\n });\n \n var offset = 0;\n var vars = Vars();\n self.vars = vars.store;\n \n self.tap = function (cb) {\n cb.call(self, vars.store);\n return self;\n };\n \n self.into = function (key, cb) {\n if (!vars.get(key)) {\n vars.set(key, {});\n }\n var parent = vars;\n vars = Vars(parent.get(key));\n cb.call(self, vars.store);\n vars = parent;\n return self;\n };\n \n self.loop = function (cb) {\n var end = false;\n var ender = function () { end = true };\n while (end === false) {\n cb.call(self, ender, vars.store);\n }\n return self;\n };\n \n self.buffer = function (name, size) {\n if (typeof size === 'string') {\n size = vars.get(size);\n }\n var buf = buffer.slice(offset, Math.min(buffer.length, offset + size));\n offset += size;\n vars.set(name, buf);\n 
\n return self;\n };\n \n self.skip = function (bytes) {\n if (typeof bytes === 'string') {\n bytes = vars.get(bytes);\n }\n offset += bytes;\n \n return self;\n };\n \n self.scan = function (name, search) {\n if (typeof search === 'string') {\n search = new Buffer(search);\n }\n else if (!Buffer.isBuffer(search)) {\n throw new Error('search must be a Buffer or a string');\n }\n vars.set(name, null);\n \n // simple but slow string search\n for (var i = 0; i + offset <= buffer.length - search.length + 1; i++) {\n for (\n var j = 0;\n j < search.length && buffer[offset+i+j] === search[j];\n j++\n );\n if (j === search.length) break;\n }\n \n vars.set(name, buffer.slice(offset, offset + i));\n offset += i + search.length;\n return self;\n };\n \n self.peek = function (cb) {\n var was = offset;\n cb.call(self, vars.store);\n offset = was;\n return self;\n };\n \n self.flush = function () {\n vars.store = {};\n return self;\n };\n \n self.eof = function () {\n return offset >= buffer.length;\n };\n \n return self;\n};\n\n// convert byte strings to unsigned little endian numbers\nfunction decodeLEu (bytes) {\n var acc = 0;\n for (var i = 0; i < bytes.length; i++) {\n acc += Math.pow(256,i) * bytes[i];\n }\n return acc;\n}\n\n// convert byte strings to unsigned big endian numbers\nfunction decodeBEu (bytes) {\n var acc = 0;\n for (var i = 0; i < bytes.length; i++) {\n acc += Math.pow(256, bytes.length - i - 1) * bytes[i];\n }\n return acc;\n}\n\n// convert byte strings to signed big endian numbers\nfunction decodeBEs (bytes) {\n var val = decodeBEu(bytes);\n if ((bytes[0] & 0x80) == 0x80) {\n val -= Math.pow(256, bytes.length);\n }\n return val;\n}\n\n// convert byte strings to signed little endian numbers\nfunction decodeLEs (bytes) {\n var val = decodeLEu(bytes);\n if ((bytes[bytes.length - 1] & 0x80) == 0x80) {\n val -= Math.pow(256, bytes.length);\n }\n return val;\n}\n\nfunction words (decode) {\n var self = {};\n \n [ 1, 2, 4, 8 ].forEach(function (bytes) {\n var bits = bytes * 8;\n \n self['word' + bits + 'le']\n = self['word' + bits + 'lu']\n = decode(bytes, decodeLEu);\n \n self['word' + bits + 'ls']\n = decode(bytes, decodeLEs);\n \n self['word' + bits + 'be']\n = self['word' + bits + 'bu']\n = decode(bytes, decodeBEu);\n \n self['word' + bits + 'bs']\n = decode(bytes, decodeBEs);\n });\n \n // word8be(n) == word8le(n) for all n\n self.word8 = self.word8u = self.word8be;\n self.word8s = self.word8bs;\n \n return self;\n}\n","module.exports = function (store) {\n function getset (name, value) {\n var node = vars.store;\n var keys = name.split('.');\n keys.slice(0,-1).forEach(function (k) {\n if (node[k] === undefined) node[k] = {};\n node = node[k]\n });\n var key = keys[keys.length - 1];\n if (arguments.length == 1) {\n return node[key];\n }\n else {\n return node[key] = value;\n }\n }\n \n var vars = {\n get : function (name) {\n return getset(name);\n },\n set : function (name, value) {\n return getset(name, value);\n },\n store : store || {},\n };\n return vars;\n};\n","\"use strict\";\nmodule.exports = function(Promise) {\nvar SomePromiseArray = Promise._SomePromiseArray;\nfunction any(promises) {\n var ret = new SomePromiseArray(promises);\n var promise = ret.promise();\n ret.setHowMany(1);\n ret.setUnwrap();\n ret.init();\n return promise;\n}\n\nPromise.any = function (promises) {\n return any(promises);\n};\n\nPromise.prototype.any = function () {\n return any(this);\n};\n\n};\n","\"use strict\";\nvar firstLineError;\ntry {throw new Error(); } catch (e) {firstLineError = e;}\nvar 
schedule = require(\"./schedule\");\nvar Queue = require(\"./queue\");\nvar util = require(\"./util\");\n\nfunction Async() {\n this._customScheduler = false;\n this._isTickUsed = false;\n this._lateQueue = new Queue(16);\n this._normalQueue = new Queue(16);\n this._haveDrainedQueues = false;\n this._trampolineEnabled = true;\n var self = this;\n this.drainQueues = function () {\n self._drainQueues();\n };\n this._schedule = schedule;\n}\n\nAsync.prototype.setScheduler = function(fn) {\n var prev = this._schedule;\n this._schedule = fn;\n this._customScheduler = true;\n return prev;\n};\n\nAsync.prototype.hasCustomScheduler = function() {\n return this._customScheduler;\n};\n\nAsync.prototype.enableTrampoline = function() {\n this._trampolineEnabled = true;\n};\n\nAsync.prototype.disableTrampolineIfNecessary = function() {\n if (util.hasDevTools) {\n this._trampolineEnabled = false;\n }\n};\n\nAsync.prototype.haveItemsQueued = function () {\n return this._isTickUsed || this._haveDrainedQueues;\n};\n\n\nAsync.prototype.fatalError = function(e, isNode) {\n if (isNode) {\n process.stderr.write(\"Fatal \" + (e instanceof Error ? e.stack : e) +\n \"\\n\");\n process.exit(2);\n } else {\n this.throwLater(e);\n }\n};\n\nAsync.prototype.throwLater = function(fn, arg) {\n if (arguments.length === 1) {\n arg = fn;\n fn = function () { throw arg; };\n }\n if (typeof setTimeout !== \"undefined\") {\n setTimeout(function() {\n fn(arg);\n }, 0);\n } else try {\n this._schedule(function() {\n fn(arg);\n });\n } catch (e) {\n throw new Error(\"No async scheduler available\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n};\n\nfunction AsyncInvokeLater(fn, receiver, arg) {\n this._lateQueue.push(fn, receiver, arg);\n this._queueTick();\n}\n\nfunction AsyncInvoke(fn, receiver, arg) {\n this._normalQueue.push(fn, receiver, arg);\n this._queueTick();\n}\n\nfunction AsyncSettlePromises(promise) {\n this._normalQueue._pushOne(promise);\n this._queueTick();\n}\n\nif (!util.hasDevTools) {\n Async.prototype.invokeLater = AsyncInvokeLater;\n Async.prototype.invoke = AsyncInvoke;\n Async.prototype.settlePromises = AsyncSettlePromises;\n} else {\n Async.prototype.invokeLater = function (fn, receiver, arg) {\n if (this._trampolineEnabled) {\n AsyncInvokeLater.call(this, fn, receiver, arg);\n } else {\n this._schedule(function() {\n setTimeout(function() {\n fn.call(receiver, arg);\n }, 100);\n });\n }\n };\n\n Async.prototype.invoke = function (fn, receiver, arg) {\n if (this._trampolineEnabled) {\n AsyncInvoke.call(this, fn, receiver, arg);\n } else {\n this._schedule(function() {\n fn.call(receiver, arg);\n });\n }\n };\n\n Async.prototype.settlePromises = function(promise) {\n if (this._trampolineEnabled) {\n AsyncSettlePromises.call(this, promise);\n } else {\n this._schedule(function() {\n promise._settlePromises();\n });\n }\n };\n}\n\nAsync.prototype._drainQueue = function(queue) {\n while (queue.length() > 0) {\n var fn = queue.shift();\n if (typeof fn !== \"function\") {\n fn._settlePromises();\n continue;\n }\n var receiver = queue.shift();\n var arg = queue.shift();\n fn.call(receiver, arg);\n }\n};\n\nAsync.prototype._drainQueues = function () {\n this._drainQueue(this._normalQueue);\n this._reset();\n this._haveDrainedQueues = true;\n this._drainQueue(this._lateQueue);\n};\n\nAsync.prototype._queueTick = function () {\n if (!this._isTickUsed) {\n this._isTickUsed = true;\n this._schedule(this.drainQueues);\n }\n};\n\nAsync.prototype._reset = function () {\n this._isTickUsed = 
false;\n};\n\nmodule.exports = Async;\nmodule.exports.firstLineError = firstLineError;\n","\"use strict\";\nmodule.exports = function(Promise, INTERNAL, tryConvertToPromise, debug) {\nvar calledBind = false;\nvar rejectThis = function(_, e) {\n this._reject(e);\n};\n\nvar targetRejected = function(e, context) {\n context.promiseRejectionQueued = true;\n context.bindingPromise._then(rejectThis, rejectThis, null, this, e);\n};\n\nvar bindingResolved = function(thisArg, context) {\n if (((this._bitField & 50397184) === 0)) {\n this._resolveCallback(context.target);\n }\n};\n\nvar bindingRejected = function(e, context) {\n if (!context.promiseRejectionQueued) this._reject(e);\n};\n\nPromise.prototype.bind = function (thisArg) {\n if (!calledBind) {\n calledBind = true;\n Promise.prototype._propagateFrom = debug.propagateFromFunction();\n Promise.prototype._boundValue = debug.boundValueFunction();\n }\n var maybePromise = tryConvertToPromise(thisArg);\n var ret = new Promise(INTERNAL);\n ret._propagateFrom(this, 1);\n var target = this._target();\n ret._setBoundTo(maybePromise);\n if (maybePromise instanceof Promise) {\n var context = {\n promiseRejectionQueued: false,\n promise: ret,\n target: target,\n bindingPromise: maybePromise\n };\n target._then(INTERNAL, targetRejected, undefined, ret, context);\n maybePromise._then(\n bindingResolved, bindingRejected, undefined, ret, context);\n ret._setOnCancel(maybePromise);\n } else {\n ret._resolveCallback(target);\n }\n return ret;\n};\n\nPromise.prototype._setBoundTo = function (obj) {\n if (obj !== undefined) {\n this._bitField = this._bitField | 2097152;\n this._boundTo = obj;\n } else {\n this._bitField = this._bitField & (~2097152);\n }\n};\n\nPromise.prototype._isBound = function () {\n return (this._bitField & 2097152) === 2097152;\n};\n\nPromise.bind = function (thisArg, value) {\n return Promise.resolve(value).bind(thisArg);\n};\n};\n","\"use strict\";\nvar old;\nif (typeof Promise !== \"undefined\") old = Promise;\nfunction noConflict() {\n try { if (Promise === bluebird) Promise = old; }\n catch (e) {}\n return bluebird;\n}\nvar bluebird = require(\"./promise\")();\nbluebird.noConflict = noConflict;\nmodule.exports = bluebird;\n","\"use strict\";\nvar cr = Object.create;\nif (cr) {\n var callerCache = cr(null);\n var getterCache = cr(null);\n callerCache[\" size\"] = getterCache[\" size\"] = 0;\n}\n\nmodule.exports = function(Promise) {\nvar util = require(\"./util\");\nvar canEvaluate = util.canEvaluate;\nvar isIdentifier = util.isIdentifier;\n\nvar getMethodCaller;\nvar getGetter;\nif (!false) {\nvar makeMethodCaller = function (methodName) {\n return new Function(\"ensureMethod\", \" \\n\\\n return function(obj) { \\n\\\n 'use strict' \\n\\\n var len = this.length; \\n\\\n ensureMethod(obj, 'methodName'); \\n\\\n switch(len) { \\n\\\n case 1: return obj.methodName(this[0]); \\n\\\n case 2: return obj.methodName(this[0], this[1]); \\n\\\n case 3: return obj.methodName(this[0], this[1], this[2]); \\n\\\n case 0: return obj.methodName(); \\n\\\n default: \\n\\\n return obj.methodName.apply(obj, this); \\n\\\n } \\n\\\n }; \\n\\\n \".replace(/methodName/g, methodName))(ensureMethod);\n};\n\nvar makeGetter = function (propertyName) {\n return new Function(\"obj\", \" \\n\\\n 'use strict'; \\n\\\n return obj.propertyName; \\n\\\n \".replace(\"propertyName\", propertyName));\n};\n\nvar getCompiled = function(name, compiler, cache) {\n var ret = cache[name];\n if (typeof ret !== \"function\") {\n if (!isIdentifier(name)) {\n return null;\n 
}\n ret = compiler(name);\n cache[name] = ret;\n cache[\" size\"]++;\n if (cache[\" size\"] > 512) {\n var keys = Object.keys(cache);\n for (var i = 0; i < 256; ++i) delete cache[keys[i]];\n cache[\" size\"] = keys.length - 256;\n }\n }\n return ret;\n};\n\ngetMethodCaller = function(name) {\n return getCompiled(name, makeMethodCaller, callerCache);\n};\n\ngetGetter = function(name) {\n return getCompiled(name, makeGetter, getterCache);\n};\n}\n\nfunction ensureMethod(obj, methodName) {\n var fn;\n if (obj != null) fn = obj[methodName];\n if (typeof fn !== \"function\") {\n var message = \"Object \" + util.classString(obj) + \" has no method '\" +\n util.toString(methodName) + \"'\";\n throw new Promise.TypeError(message);\n }\n return fn;\n}\n\nfunction caller(obj) {\n var methodName = this.pop();\n var fn = ensureMethod(obj, methodName);\n return fn.apply(obj, this);\n}\nPromise.prototype.call = function (methodName) {\n var $_len = arguments.length;var args = new Array(Math.max($_len - 1, 0)); for(var $_i = 1; $_i < $_len; ++$_i) {args[$_i - 1] = arguments[$_i];};\n if (!false) {\n if (canEvaluate) {\n var maybeCaller = getMethodCaller(methodName);\n if (maybeCaller !== null) {\n return this._then(\n maybeCaller, undefined, undefined, args, undefined);\n }\n }\n }\n args.push(methodName);\n return this._then(caller, undefined, undefined, args, undefined);\n};\n\nfunction namedGetter(obj) {\n return obj[this];\n}\nfunction indexedGetter(obj) {\n var index = +this;\n if (index < 0) index = Math.max(0, index + obj.length);\n return obj[index];\n}\nPromise.prototype.get = function (propertyName) {\n var isIndex = (typeof propertyName === \"number\");\n var getter;\n if (!isIndex) {\n if (canEvaluate) {\n var maybeGetter = getGetter(propertyName);\n getter = maybeGetter !== null ? 
maybeGetter : namedGetter;\n } else {\n getter = namedGetter;\n }\n } else {\n getter = indexedGetter;\n }\n return this._then(getter, undefined, undefined, propertyName, undefined);\n};\n};\n","\"use strict\";\nmodule.exports = function(Promise, PromiseArray, apiRejection, debug) {\nvar util = require(\"./util\");\nvar tryCatch = util.tryCatch;\nvar errorObj = util.errorObj;\nvar async = Promise._async;\n\nPromise.prototype[\"break\"] = Promise.prototype.cancel = function() {\n if (!debug.cancellation()) return this._warn(\"cancellation is disabled\");\n\n var promise = this;\n var child = promise;\n while (promise._isCancellable()) {\n if (!promise._cancelBy(child)) {\n if (child._isFollowing()) {\n child._followee().cancel();\n } else {\n child._cancelBranched();\n }\n break;\n }\n\n var parent = promise._cancellationParent;\n if (parent == null || !parent._isCancellable()) {\n if (promise._isFollowing()) {\n promise._followee().cancel();\n } else {\n promise._cancelBranched();\n }\n break;\n } else {\n if (promise._isFollowing()) promise._followee().cancel();\n promise._setWillBeCancelled();\n child = promise;\n promise = parent;\n }\n }\n};\n\nPromise.prototype._branchHasCancelled = function() {\n this._branchesRemainingToCancel--;\n};\n\nPromise.prototype._enoughBranchesHaveCancelled = function() {\n return this._branchesRemainingToCancel === undefined ||\n this._branchesRemainingToCancel <= 0;\n};\n\nPromise.prototype._cancelBy = function(canceller) {\n if (canceller === this) {\n this._branchesRemainingToCancel = 0;\n this._invokeOnCancel();\n return true;\n } else {\n this._branchHasCancelled();\n if (this._enoughBranchesHaveCancelled()) {\n this._invokeOnCancel();\n return true;\n }\n }\n return false;\n};\n\nPromise.prototype._cancelBranched = function() {\n if (this._enoughBranchesHaveCancelled()) {\n this._cancel();\n }\n};\n\nPromise.prototype._cancel = function() {\n if (!this._isCancellable()) return;\n this._setCancelled();\n async.invoke(this._cancelPromises, this, undefined);\n};\n\nPromise.prototype._cancelPromises = function() {\n if (this._length() > 0) this._settlePromises();\n};\n\nPromise.prototype._unsetOnCancel = function() {\n this._onCancelField = undefined;\n};\n\nPromise.prototype._isCancellable = function() {\n return this.isPending() && !this._isCancelled();\n};\n\nPromise.prototype.isCancellable = function() {\n return this.isPending() && !this.isCancelled();\n};\n\nPromise.prototype._doInvokeOnCancel = function(onCancelCallback, internalOnly) {\n if (util.isArray(onCancelCallback)) {\n for (var i = 0; i < onCancelCallback.length; ++i) {\n this._doInvokeOnCancel(onCancelCallback[i], internalOnly);\n }\n } else if (onCancelCallback !== undefined) {\n if (typeof onCancelCallback === \"function\") {\n if (!internalOnly) {\n var e = tryCatch(onCancelCallback).call(this._boundValue());\n if (e === errorObj) {\n this._attachExtraTrace(e.e);\n async.throwLater(e.e);\n }\n }\n } else {\n onCancelCallback._resultCancelled(this);\n }\n }\n};\n\nPromise.prototype._invokeOnCancel = function() {\n var onCancelCallback = this._onCancel();\n this._unsetOnCancel();\n async.invoke(this._doInvokeOnCancel, this, onCancelCallback);\n};\n\nPromise.prototype._invokeInternalOnCancel = function() {\n if (this._isCancellable()) {\n this._doInvokeOnCancel(this._onCancel(), true);\n this._unsetOnCancel();\n }\n};\n\nPromise.prototype._resultCancelled = function() {\n this.cancel();\n};\n\n};\n","\"use strict\";\nmodule.exports = function(NEXT_FILTER) {\nvar util = 
require(\"./util\");\nvar getKeys = require(\"./es5\").keys;\nvar tryCatch = util.tryCatch;\nvar errorObj = util.errorObj;\n\nfunction catchFilter(instances, cb, promise) {\n return function(e) {\n var boundTo = promise._boundValue();\n predicateLoop: for (var i = 0; i < instances.length; ++i) {\n var item = instances[i];\n\n if (item === Error ||\n (item != null && item.prototype instanceof Error)) {\n if (e instanceof item) {\n return tryCatch(cb).call(boundTo, e);\n }\n } else if (typeof item === \"function\") {\n var matchesPredicate = tryCatch(item).call(boundTo, e);\n if (matchesPredicate === errorObj) {\n return matchesPredicate;\n } else if (matchesPredicate) {\n return tryCatch(cb).call(boundTo, e);\n }\n } else if (util.isObject(e)) {\n var keys = getKeys(item);\n for (var j = 0; j < keys.length; ++j) {\n var key = keys[j];\n if (item[key] != e[key]) {\n continue predicateLoop;\n }\n }\n return tryCatch(cb).call(boundTo, e);\n }\n }\n return NEXT_FILTER;\n };\n}\n\nreturn catchFilter;\n};\n","\"use strict\";\nmodule.exports = function(Promise) {\nvar longStackTraces = false;\nvar contextStack = [];\n\nPromise.prototype._promiseCreated = function() {};\nPromise.prototype._pushContext = function() {};\nPromise.prototype._popContext = function() {return null;};\nPromise._peekContext = Promise.prototype._peekContext = function() {};\n\nfunction Context() {\n this._trace = new Context.CapturedTrace(peekContext());\n}\nContext.prototype._pushContext = function () {\n if (this._trace !== undefined) {\n this._trace._promiseCreated = null;\n contextStack.push(this._trace);\n }\n};\n\nContext.prototype._popContext = function () {\n if (this._trace !== undefined) {\n var trace = contextStack.pop();\n var ret = trace._promiseCreated;\n trace._promiseCreated = null;\n return ret;\n }\n return null;\n};\n\nfunction createContext() {\n if (longStackTraces) return new Context();\n}\n\nfunction peekContext() {\n var lastIndex = contextStack.length - 1;\n if (lastIndex >= 0) {\n return contextStack[lastIndex];\n }\n return undefined;\n}\nContext.CapturedTrace = null;\nContext.create = createContext;\nContext.deactivateLongStackTraces = function() {};\nContext.activateLongStackTraces = function() {\n var Promise_pushContext = Promise.prototype._pushContext;\n var Promise_popContext = Promise.prototype._popContext;\n var Promise_PeekContext = Promise._peekContext;\n var Promise_peekContext = Promise.prototype._peekContext;\n var Promise_promiseCreated = Promise.prototype._promiseCreated;\n Context.deactivateLongStackTraces = function() {\n Promise.prototype._pushContext = Promise_pushContext;\n Promise.prototype._popContext = Promise_popContext;\n Promise._peekContext = Promise_PeekContext;\n Promise.prototype._peekContext = Promise_peekContext;\n Promise.prototype._promiseCreated = Promise_promiseCreated;\n longStackTraces = false;\n };\n longStackTraces = true;\n Promise.prototype._pushContext = Context.prototype._pushContext;\n Promise.prototype._popContext = Context.prototype._popContext;\n Promise._peekContext = Promise.prototype._peekContext = peekContext;\n Promise.prototype._promiseCreated = function() {\n var ctx = this._peekContext();\n if (ctx && ctx._promiseCreated == null) ctx._promiseCreated = this;\n };\n};\nreturn Context;\n};\n","\"use strict\";\nmodule.exports = function(Promise, Context) {\nvar getDomain = Promise._getDomain;\nvar async = Promise._async;\nvar Warning = require(\"./errors\").Warning;\nvar util = require(\"./util\");\nvar canAttachTrace = util.canAttachTrace;\nvar 
unhandledRejectionHandled;\nvar possiblyUnhandledRejection;\nvar bluebirdFramePattern =\n /[\\\\\\/]bluebird[\\\\\\/]js[\\\\\\/](release|debug|instrumented)/;\nvar nodeFramePattern = /\\((?:timers\\.js):\\d+:\\d+\\)/;\nvar parseLinePattern = /[\\/<\\(](.+?):(\\d+):(\\d+)\\)?\\s*$/;\nvar stackFramePattern = null;\nvar formatStack = null;\nvar indentStackFrames = false;\nvar printWarning;\nvar debugging = !!(util.env(\"BLUEBIRD_DEBUG\") != 0 &&\n (false ||\n util.env(\"BLUEBIRD_DEBUG\") ||\n util.env(\"NODE_ENV\") === \"development\"));\n\nvar warnings = !!(util.env(\"BLUEBIRD_WARNINGS\") != 0 &&\n (debugging || util.env(\"BLUEBIRD_WARNINGS\")));\n\nvar longStackTraces = !!(util.env(\"BLUEBIRD_LONG_STACK_TRACES\") != 0 &&\n (debugging || util.env(\"BLUEBIRD_LONG_STACK_TRACES\")));\n\nvar wForgottenReturn = util.env(\"BLUEBIRD_W_FORGOTTEN_RETURN\") != 0 &&\n (warnings || !!util.env(\"BLUEBIRD_W_FORGOTTEN_RETURN\"));\n\nPromise.prototype.suppressUnhandledRejections = function() {\n var target = this._target();\n target._bitField = ((target._bitField & (~1048576)) |\n 524288);\n};\n\nPromise.prototype._ensurePossibleRejectionHandled = function () {\n if ((this._bitField & 524288) !== 0) return;\n this._setRejectionIsUnhandled();\n async.invokeLater(this._notifyUnhandledRejection, this, undefined);\n};\n\nPromise.prototype._notifyUnhandledRejectionIsHandled = function () {\n fireRejectionEvent(\"rejectionHandled\",\n unhandledRejectionHandled, undefined, this);\n};\n\nPromise.prototype._setReturnedNonUndefined = function() {\n this._bitField = this._bitField | 268435456;\n};\n\nPromise.prototype._returnedNonUndefined = function() {\n return (this._bitField & 268435456) !== 0;\n};\n\nPromise.prototype._notifyUnhandledRejection = function () {\n if (this._isRejectionUnhandled()) {\n var reason = this._settledValue();\n this._setUnhandledRejectionIsNotified();\n fireRejectionEvent(\"unhandledRejection\",\n possiblyUnhandledRejection, reason, this);\n }\n};\n\nPromise.prototype._setUnhandledRejectionIsNotified = function () {\n this._bitField = this._bitField | 262144;\n};\n\nPromise.prototype._unsetUnhandledRejectionIsNotified = function () {\n this._bitField = this._bitField & (~262144);\n};\n\nPromise.prototype._isUnhandledRejectionNotified = function () {\n return (this._bitField & 262144) > 0;\n};\n\nPromise.prototype._setRejectionIsUnhandled = function () {\n this._bitField = this._bitField | 1048576;\n};\n\nPromise.prototype._unsetRejectionIsUnhandled = function () {\n this._bitField = this._bitField & (~1048576);\n if (this._isUnhandledRejectionNotified()) {\n this._unsetUnhandledRejectionIsNotified();\n this._notifyUnhandledRejectionIsHandled();\n }\n};\n\nPromise.prototype._isRejectionUnhandled = function () {\n return (this._bitField & 1048576) > 0;\n};\n\nPromise.prototype._warn = function(message, shouldUseOwnTrace, promise) {\n return warn(message, shouldUseOwnTrace, promise || this);\n};\n\nPromise.onPossiblyUnhandledRejection = function (fn) {\n var domain = getDomain();\n possiblyUnhandledRejection =\n typeof fn === \"function\" ? (domain === null ?\n fn : util.domainBind(domain, fn))\n : undefined;\n};\n\nPromise.onUnhandledRejectionHandled = function (fn) {\n var domain = getDomain();\n unhandledRejectionHandled =\n typeof fn === \"function\" ? 
(domain === null ?\n fn : util.domainBind(domain, fn))\n : undefined;\n};\n\nvar disableLongStackTraces = function() {};\nPromise.longStackTraces = function () {\n if (async.haveItemsQueued() && !config.longStackTraces) {\n throw new Error(\"cannot enable long stack traces after promises have been created\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n if (!config.longStackTraces && longStackTracesIsSupported()) {\n var Promise_captureStackTrace = Promise.prototype._captureStackTrace;\n var Promise_attachExtraTrace = Promise.prototype._attachExtraTrace;\n config.longStackTraces = true;\n disableLongStackTraces = function() {\n if (async.haveItemsQueued() && !config.longStackTraces) {\n throw new Error(\"cannot enable long stack traces after promises have been created\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n Promise.prototype._captureStackTrace = Promise_captureStackTrace;\n Promise.prototype._attachExtraTrace = Promise_attachExtraTrace;\n Context.deactivateLongStackTraces();\n async.enableTrampoline();\n config.longStackTraces = false;\n };\n Promise.prototype._captureStackTrace = longStackTracesCaptureStackTrace;\n Promise.prototype._attachExtraTrace = longStackTracesAttachExtraTrace;\n Context.activateLongStackTraces();\n async.disableTrampolineIfNecessary();\n }\n};\n\nPromise.hasLongStackTraces = function () {\n return config.longStackTraces && longStackTracesIsSupported();\n};\n\nvar fireDomEvent = (function() {\n try {\n if (typeof CustomEvent === \"function\") {\n var event = new CustomEvent(\"CustomEvent\");\n util.global.dispatchEvent(event);\n return function(name, event) {\n var domEvent = new CustomEvent(name.toLowerCase(), {\n detail: event,\n cancelable: true\n });\n return !util.global.dispatchEvent(domEvent);\n };\n } else if (typeof Event === \"function\") {\n var event = new Event(\"CustomEvent\");\n util.global.dispatchEvent(event);\n return function(name, event) {\n var domEvent = new Event(name.toLowerCase(), {\n cancelable: true\n });\n domEvent.detail = event;\n return !util.global.dispatchEvent(domEvent);\n };\n } else {\n var event = document.createEvent(\"CustomEvent\");\n event.initCustomEvent(\"testingtheevent\", false, true, {});\n util.global.dispatchEvent(event);\n return function(name, event) {\n var domEvent = document.createEvent(\"CustomEvent\");\n domEvent.initCustomEvent(name.toLowerCase(), false, true,\n event);\n return !util.global.dispatchEvent(domEvent);\n };\n }\n } catch (e) {}\n return function() {\n return false;\n };\n})();\n\nvar fireGlobalEvent = (function() {\n if (util.isNode) {\n return function() {\n return process.emit.apply(process, arguments);\n };\n } else {\n if (!util.global) {\n return function() {\n return false;\n };\n }\n return function(name) {\n var methodName = \"on\" + name.toLowerCase();\n var method = util.global[methodName];\n if (!method) return false;\n method.apply(util.global, [].slice.call(arguments, 1));\n return true;\n };\n }\n})();\n\nfunction generatePromiseLifecycleEventObject(name, promise) {\n return {promise: promise};\n}\n\nvar eventToObjectGenerator = {\n promiseCreated: generatePromiseLifecycleEventObject,\n promiseFulfilled: generatePromiseLifecycleEventObject,\n promiseRejected: generatePromiseLifecycleEventObject,\n promiseResolved: generatePromiseLifecycleEventObject,\n promiseCancelled: generatePromiseLifecycleEventObject,\n promiseChained: function(name, promise, child) {\n return {promise: promise, child: child};\n },\n warning: function(name, warning) {\n return {warning: 
warning};\n },\n unhandledRejection: function (name, reason, promise) {\n return {reason: reason, promise: promise};\n },\n rejectionHandled: generatePromiseLifecycleEventObject\n};\n\nvar activeFireEvent = function (name) {\n var globalEventFired = false;\n try {\n globalEventFired = fireGlobalEvent.apply(null, arguments);\n } catch (e) {\n async.throwLater(e);\n globalEventFired = true;\n }\n\n var domEventFired = false;\n try {\n domEventFired = fireDomEvent(name,\n eventToObjectGenerator[name].apply(null, arguments));\n } catch (e) {\n async.throwLater(e);\n domEventFired = true;\n }\n\n return domEventFired || globalEventFired;\n};\n\nPromise.config = function(opts) {\n opts = Object(opts);\n if (\"longStackTraces\" in opts) {\n if (opts.longStackTraces) {\n Promise.longStackTraces();\n } else if (!opts.longStackTraces && Promise.hasLongStackTraces()) {\n disableLongStackTraces();\n }\n }\n if (\"warnings\" in opts) {\n var warningsOption = opts.warnings;\n config.warnings = !!warningsOption;\n wForgottenReturn = config.warnings;\n\n if (util.isObject(warningsOption)) {\n if (\"wForgottenReturn\" in warningsOption) {\n wForgottenReturn = !!warningsOption.wForgottenReturn;\n }\n }\n }\n if (\"cancellation\" in opts && opts.cancellation && !config.cancellation) {\n if (async.haveItemsQueued()) {\n throw new Error(\n \"cannot enable cancellation after promises are in use\");\n }\n Promise.prototype._clearCancellationData =\n cancellationClearCancellationData;\n Promise.prototype._propagateFrom = cancellationPropagateFrom;\n Promise.prototype._onCancel = cancellationOnCancel;\n Promise.prototype._setOnCancel = cancellationSetOnCancel;\n Promise.prototype._attachCancellationCallback =\n cancellationAttachCancellationCallback;\n Promise.prototype._execute = cancellationExecute;\n propagateFromFunction = cancellationPropagateFrom;\n config.cancellation = true;\n }\n if (\"monitoring\" in opts) {\n if (opts.monitoring && !config.monitoring) {\n config.monitoring = true;\n Promise.prototype._fireEvent = activeFireEvent;\n } else if (!opts.monitoring && config.monitoring) {\n config.monitoring = false;\n Promise.prototype._fireEvent = defaultFireEvent;\n }\n }\n return Promise;\n};\n\nfunction defaultFireEvent() { return false; }\n\nPromise.prototype._fireEvent = defaultFireEvent;\nPromise.prototype._execute = function(executor, resolve, reject) {\n try {\n executor(resolve, reject);\n } catch (e) {\n return e;\n }\n};\nPromise.prototype._onCancel = function () {};\nPromise.prototype._setOnCancel = function (handler) { ; };\nPromise.prototype._attachCancellationCallback = function(onCancel) {\n ;\n};\nPromise.prototype._captureStackTrace = function () {};\nPromise.prototype._attachExtraTrace = function () {};\nPromise.prototype._clearCancellationData = function() {};\nPromise.prototype._propagateFrom = function (parent, flags) {\n ;\n ;\n};\n\nfunction cancellationExecute(executor, resolve, reject) {\n var promise = this;\n try {\n executor(resolve, reject, function(onCancel) {\n if (typeof onCancel !== \"function\") {\n throw new TypeError(\"onCancel must be a function, got: \" +\n util.toString(onCancel));\n }\n promise._attachCancellationCallback(onCancel);\n });\n } catch (e) {\n return e;\n }\n}\n\nfunction cancellationAttachCancellationCallback(onCancel) {\n if (!this._isCancellable()) return this;\n\n var previousOnCancel = this._onCancel();\n if (previousOnCancel !== undefined) {\n if (util.isArray(previousOnCancel)) {\n previousOnCancel.push(onCancel);\n } else {\n 
this._setOnCancel([previousOnCancel, onCancel]);\n }\n } else {\n this._setOnCancel(onCancel);\n }\n}\n\nfunction cancellationOnCancel() {\n return this._onCancelField;\n}\n\nfunction cancellationSetOnCancel(onCancel) {\n this._onCancelField = onCancel;\n}\n\nfunction cancellationClearCancellationData() {\n this._cancellationParent = undefined;\n this._onCancelField = undefined;\n}\n\nfunction cancellationPropagateFrom(parent, flags) {\n if ((flags & 1) !== 0) {\n this._cancellationParent = parent;\n var branchesRemainingToCancel = parent._branchesRemainingToCancel;\n if (branchesRemainingToCancel === undefined) {\n branchesRemainingToCancel = 0;\n }\n parent._branchesRemainingToCancel = branchesRemainingToCancel + 1;\n }\n if ((flags & 2) !== 0 && parent._isBound()) {\n this._setBoundTo(parent._boundTo);\n }\n}\n\nfunction bindingPropagateFrom(parent, flags) {\n if ((flags & 2) !== 0 && parent._isBound()) {\n this._setBoundTo(parent._boundTo);\n }\n}\nvar propagateFromFunction = bindingPropagateFrom;\n\nfunction boundValueFunction() {\n var ret = this._boundTo;\n if (ret !== undefined) {\n if (ret instanceof Promise) {\n if (ret.isFulfilled()) {\n return ret.value();\n } else {\n return undefined;\n }\n }\n }\n return ret;\n}\n\nfunction longStackTracesCaptureStackTrace() {\n this._trace = new CapturedTrace(this._peekContext());\n}\n\nfunction longStackTracesAttachExtraTrace(error, ignoreSelf) {\n if (canAttachTrace(error)) {\n var trace = this._trace;\n if (trace !== undefined) {\n if (ignoreSelf) trace = trace._parent;\n }\n if (trace !== undefined) {\n trace.attachExtraTrace(error);\n } else if (!error.__stackCleaned__) {\n var parsed = parseStackAndMessage(error);\n util.notEnumerableProp(error, \"stack\",\n parsed.message + \"\\n\" + parsed.stack.join(\"\\n\"));\n util.notEnumerableProp(error, \"__stackCleaned__\", true);\n }\n }\n}\n\nfunction checkForgottenReturns(returnValue, promiseCreated, name, promise,\n parent) {\n if (returnValue === undefined && promiseCreated !== null &&\n wForgottenReturn) {\n if (parent !== undefined && parent._returnedNonUndefined()) return;\n if ((promise._bitField & 65535) === 0) return;\n\n if (name) name = name + \" \";\n var handlerLine = \"\";\n var creatorLine = \"\";\n if (promiseCreated._trace) {\n var traceLines = promiseCreated._trace.stack.split(\"\\n\");\n var stack = cleanStack(traceLines);\n for (var i = stack.length - 1; i >= 0; --i) {\n var line = stack[i];\n if (!nodeFramePattern.test(line)) {\n var lineMatches = line.match(parseLinePattern);\n if (lineMatches) {\n handlerLine = \"at \" + lineMatches[1] +\n \":\" + lineMatches[2] + \":\" + lineMatches[3] + \" \";\n }\n break;\n }\n }\n\n if (stack.length > 0) {\n var firstUserLine = stack[0];\n for (var i = 0; i < traceLines.length; ++i) {\n\n if (traceLines[i] === firstUserLine) {\n if (i > 0) {\n creatorLine = \"\\n\" + traceLines[i - 1];\n }\n break;\n }\n }\n\n }\n }\n var msg = \"a promise was created in a \" + name +\n \"handler \" + handlerLine + \"but was not returned from it, \" +\n \"see http://goo.gl/rRqMUw\" +\n creatorLine;\n promise._warn(msg, true, promiseCreated);\n }\n}\n\nfunction deprecated(name, replacement) {\n var message = name +\n \" is deprecated and will be removed in a future version.\";\n if (replacement) message += \" Use \" + replacement + \" instead.\";\n return warn(message);\n}\n\nfunction warn(message, shouldUseOwnTrace, promise) {\n if (!config.warnings) return;\n var warning = new Warning(message);\n var ctx;\n if (shouldUseOwnTrace) {\n 
promise._attachExtraTrace(warning);\n } else if (config.longStackTraces && (ctx = Promise._peekContext())) {\n ctx.attachExtraTrace(warning);\n } else {\n var parsed = parseStackAndMessage(warning);\n warning.stack = parsed.message + \"\\n\" + parsed.stack.join(\"\\n\");\n }\n\n if (!activeFireEvent(\"warning\", warning)) {\n formatAndLogError(warning, \"\", true);\n }\n}\n\nfunction reconstructStack(message, stacks) {\n for (var i = 0; i < stacks.length - 1; ++i) {\n stacks[i].push(\"From previous event:\");\n stacks[i] = stacks[i].join(\"\\n\");\n }\n if (i < stacks.length) {\n stacks[i] = stacks[i].join(\"\\n\");\n }\n return message + \"\\n\" + stacks.join(\"\\n\");\n}\n\nfunction removeDuplicateOrEmptyJumps(stacks) {\n for (var i = 0; i < stacks.length; ++i) {\n if (stacks[i].length === 0 ||\n ((i + 1 < stacks.length) && stacks[i][0] === stacks[i+1][0])) {\n stacks.splice(i, 1);\n i--;\n }\n }\n}\n\nfunction removeCommonRoots(stacks) {\n var current = stacks[0];\n for (var i = 1; i < stacks.length; ++i) {\n var prev = stacks[i];\n var currentLastIndex = current.length - 1;\n var currentLastLine = current[currentLastIndex];\n var commonRootMeetPoint = -1;\n\n for (var j = prev.length - 1; j >= 0; --j) {\n if (prev[j] === currentLastLine) {\n commonRootMeetPoint = j;\n break;\n }\n }\n\n for (var j = commonRootMeetPoint; j >= 0; --j) {\n var line = prev[j];\n if (current[currentLastIndex] === line) {\n current.pop();\n currentLastIndex--;\n } else {\n break;\n }\n }\n current = prev;\n }\n}\n\nfunction cleanStack(stack) {\n var ret = [];\n for (var i = 0; i < stack.length; ++i) {\n var line = stack[i];\n var isTraceLine = \" (No stack trace)\" === line ||\n stackFramePattern.test(line);\n var isInternalFrame = isTraceLine && shouldIgnore(line);\n if (isTraceLine && !isInternalFrame) {\n if (indentStackFrames && line.charAt(0) !== \" \") {\n line = \" \" + line;\n }\n ret.push(line);\n }\n }\n return ret;\n}\n\nfunction stackFramesAsArray(error) {\n var stack = error.stack.replace(/\\s+$/g, \"\").split(\"\\n\");\n for (var i = 0; i < stack.length; ++i) {\n var line = stack[i];\n if (\" (No stack trace)\" === line || stackFramePattern.test(line)) {\n break;\n }\n }\n if (i > 0 && error.name != \"SyntaxError\") {\n stack = stack.slice(i);\n }\n return stack;\n}\n\nfunction parseStackAndMessage(error) {\n var stack = error.stack;\n var message = error.toString();\n stack = typeof stack === \"string\" && stack.length > 0\n ? stackFramesAsArray(error) : [\" (No stack trace)\"];\n return {\n message: message,\n stack: error.name == \"SyntaxError\" ? 
stack : cleanStack(stack)\n };\n}\n\nfunction formatAndLogError(error, title, isSoft) {\n if (typeof console !== \"undefined\") {\n var message;\n if (util.isObject(error)) {\n var stack = error.stack;\n message = title + formatStack(stack, error);\n } else {\n message = title + String(error);\n }\n if (typeof printWarning === \"function\") {\n printWarning(message, isSoft);\n } else if (typeof console.log === \"function\" ||\n typeof console.log === \"object\") {\n console.log(message);\n }\n }\n}\n\nfunction fireRejectionEvent(name, localHandler, reason, promise) {\n var localEventFired = false;\n try {\n if (typeof localHandler === \"function\") {\n localEventFired = true;\n if (name === \"rejectionHandled\") {\n localHandler(promise);\n } else {\n localHandler(reason, promise);\n }\n }\n } catch (e) {\n async.throwLater(e);\n }\n\n if (name === \"unhandledRejection\") {\n if (!activeFireEvent(name, reason, promise) && !localEventFired) {\n formatAndLogError(reason, \"Unhandled rejection \");\n }\n } else {\n activeFireEvent(name, promise);\n }\n}\n\nfunction formatNonError(obj) {\n var str;\n if (typeof obj === \"function\") {\n str = \"[function \" +\n (obj.name || \"anonymous\") +\n \"]\";\n } else {\n str = obj && typeof obj.toString === \"function\"\n ? obj.toString() : util.toString(obj);\n var ruselessToString = /\\[object [a-zA-Z0-9$_]+\\]/;\n if (ruselessToString.test(str)) {\n try {\n var newStr = JSON.stringify(obj);\n str = newStr;\n }\n catch(e) {\n\n }\n }\n if (str.length === 0) {\n str = \"(empty array)\";\n }\n }\n return (\"(<\" + snip(str) + \">, no stack trace)\");\n}\n\nfunction snip(str) {\n var maxChars = 41;\n if (str.length < maxChars) {\n return str;\n }\n return str.substr(0, maxChars - 3) + \"...\";\n}\n\nfunction longStackTracesIsSupported() {\n return typeof captureStackTrace === \"function\";\n}\n\nvar shouldIgnore = function() { return false; };\nvar parseLineInfoRegex = /[\\/<\\(]([^:\\/]+):(\\d+):(?:\\d+)\\)?\\s*$/;\nfunction parseLineInfo(line) {\n var matches = line.match(parseLineInfoRegex);\n if (matches) {\n return {\n fileName: matches[1],\n line: parseInt(matches[2], 10)\n };\n }\n}\n\nfunction setBounds(firstLineError, lastLineError) {\n if (!longStackTracesIsSupported()) return;\n var firstStackLines = firstLineError.stack.split(\"\\n\");\n var lastStackLines = lastLineError.stack.split(\"\\n\");\n var firstIndex = -1;\n var lastIndex = -1;\n var firstFileName;\n var lastFileName;\n for (var i = 0; i < firstStackLines.length; ++i) {\n var result = parseLineInfo(firstStackLines[i]);\n if (result) {\n firstFileName = result.fileName;\n firstIndex = result.line;\n break;\n }\n }\n for (var i = 0; i < lastStackLines.length; ++i) {\n var result = parseLineInfo(lastStackLines[i]);\n if (result) {\n lastFileName = result.fileName;\n lastIndex = result.line;\n break;\n }\n }\n if (firstIndex < 0 || lastIndex < 0 || !firstFileName || !lastFileName ||\n firstFileName !== lastFileName || firstIndex >= lastIndex) {\n return;\n }\n\n shouldIgnore = function(line) {\n if (bluebirdFramePattern.test(line)) return true;\n var info = parseLineInfo(line);\n if (info) {\n if (info.fileName === firstFileName &&\n (firstIndex <= info.line && info.line <= lastIndex)) {\n return true;\n }\n }\n return false;\n };\n}\n\nfunction CapturedTrace(parent) {\n this._parent = parent;\n this._promisesCreated = 0;\n var length = this._length = 1 + (parent === undefined ? 
0 : parent._length);\n captureStackTrace(this, CapturedTrace);\n if (length > 32) this.uncycle();\n}\nutil.inherits(CapturedTrace, Error);\nContext.CapturedTrace = CapturedTrace;\n\nCapturedTrace.prototype.uncycle = function() {\n var length = this._length;\n if (length < 2) return;\n var nodes = [];\n var stackToIndex = {};\n\n for (var i = 0, node = this; node !== undefined; ++i) {\n nodes.push(node);\n node = node._parent;\n }\n length = this._length = i;\n for (var i = length - 1; i >= 0; --i) {\n var stack = nodes[i].stack;\n if (stackToIndex[stack] === undefined) {\n stackToIndex[stack] = i;\n }\n }\n for (var i = 0; i < length; ++i) {\n var currentStack = nodes[i].stack;\n var index = stackToIndex[currentStack];\n if (index !== undefined && index !== i) {\n if (index > 0) {\n nodes[index - 1]._parent = undefined;\n nodes[index - 1]._length = 1;\n }\n nodes[i]._parent = undefined;\n nodes[i]._length = 1;\n var cycleEdgeNode = i > 0 ? nodes[i - 1] : this;\n\n if (index < length - 1) {\n cycleEdgeNode._parent = nodes[index + 1];\n cycleEdgeNode._parent.uncycle();\n cycleEdgeNode._length =\n cycleEdgeNode._parent._length + 1;\n } else {\n cycleEdgeNode._parent = undefined;\n cycleEdgeNode._length = 1;\n }\n var currentChildLength = cycleEdgeNode._length + 1;\n for (var j = i - 2; j >= 0; --j) {\n nodes[j]._length = currentChildLength;\n currentChildLength++;\n }\n return;\n }\n }\n};\n\nCapturedTrace.prototype.attachExtraTrace = function(error) {\n if (error.__stackCleaned__) return;\n this.uncycle();\n var parsed = parseStackAndMessage(error);\n var message = parsed.message;\n var stacks = [parsed.stack];\n\n var trace = this;\n while (trace !== undefined) {\n stacks.push(cleanStack(trace.stack.split(\"\\n\")));\n trace = trace._parent;\n }\n removeCommonRoots(stacks);\n removeDuplicateOrEmptyJumps(stacks);\n util.notEnumerableProp(error, \"stack\", reconstructStack(message, stacks));\n util.notEnumerableProp(error, \"__stackCleaned__\", true);\n};\n\nvar captureStackTrace = (function stackDetection() {\n var v8stackFramePattern = /^\\s*at\\s*/;\n var v8stackFormatter = function(stack, error) {\n if (typeof stack === \"string\") return stack;\n\n if (error.name !== undefined &&\n error.message !== undefined) {\n return error.toString();\n }\n return formatNonError(error);\n };\n\n if (typeof Error.stackTraceLimit === \"number\" &&\n typeof Error.captureStackTrace === \"function\") {\n Error.stackTraceLimit += 6;\n stackFramePattern = v8stackFramePattern;\n formatStack = v8stackFormatter;\n var captureStackTrace = Error.captureStackTrace;\n\n shouldIgnore = function(line) {\n return bluebirdFramePattern.test(line);\n };\n return function(receiver, ignoreUntil) {\n Error.stackTraceLimit += 6;\n captureStackTrace(receiver, ignoreUntil);\n Error.stackTraceLimit -= 6;\n };\n }\n var err = new Error();\n\n if (typeof err.stack === \"string\" &&\n err.stack.split(\"\\n\")[0].indexOf(\"stackDetection@\") >= 0) {\n stackFramePattern = /@/;\n formatStack = v8stackFormatter;\n indentStackFrames = true;\n return function captureStackTrace(o) {\n o.stack = new Error().stack;\n };\n }\n\n var hasStackAfterThrow;\n try { throw new Error(); }\n catch(e) {\n hasStackAfterThrow = (\"stack\" in e);\n }\n if (!(\"stack\" in err) && hasStackAfterThrow &&\n typeof Error.stackTraceLimit === \"number\") {\n stackFramePattern = v8stackFramePattern;\n formatStack = v8stackFormatter;\n return function captureStackTrace(o) {\n Error.stackTraceLimit += 6;\n try { throw new Error(); }\n catch(e) { o.stack = 
e.stack; }\n Error.stackTraceLimit -= 6;\n };\n }\n\n formatStack = function(stack, error) {\n if (typeof stack === \"string\") return stack;\n\n if ((typeof error === \"object\" ||\n typeof error === \"function\") &&\n error.name !== undefined &&\n error.message !== undefined) {\n return error.toString();\n }\n return formatNonError(error);\n };\n\n return null;\n\n})([]);\n\nif (typeof console !== \"undefined\" && typeof console.warn !== \"undefined\") {\n printWarning = function (message) {\n console.warn(message);\n };\n if (util.isNode && process.stderr.isTTY) {\n printWarning = function(message, isSoft) {\n var color = isSoft ? \"\\u001b[33m\" : \"\\u001b[31m\";\n console.warn(color + message + \"\\u001b[0m\\n\");\n };\n } else if (!util.isNode && typeof (new Error().stack) === \"string\") {\n printWarning = function(message, isSoft) {\n console.warn(\"%c\" + message,\n isSoft ? \"color: darkorange\" : \"color: red\");\n };\n }\n}\n\nvar config = {\n warnings: warnings,\n longStackTraces: false,\n cancellation: false,\n monitoring: false\n};\n\nif (longStackTraces) Promise.longStackTraces();\n\nreturn {\n longStackTraces: function() {\n return config.longStackTraces;\n },\n warnings: function() {\n return config.warnings;\n },\n cancellation: function() {\n return config.cancellation;\n },\n monitoring: function() {\n return config.monitoring;\n },\n propagateFromFunction: function() {\n return propagateFromFunction;\n },\n boundValueFunction: function() {\n return boundValueFunction;\n },\n checkForgottenReturns: checkForgottenReturns,\n setBounds: setBounds,\n warn: warn,\n deprecated: deprecated,\n CapturedTrace: CapturedTrace,\n fireDomEvent: fireDomEvent,\n fireGlobalEvent: fireGlobalEvent\n};\n};\n","\"use strict\";\nmodule.exports = function(Promise) {\nfunction returner() {\n return this.value;\n}\nfunction thrower() {\n throw this.reason;\n}\n\nPromise.prototype[\"return\"] =\nPromise.prototype.thenReturn = function (value) {\n if (value instanceof Promise) value.suppressUnhandledRejections();\n return this._then(\n returner, undefined, undefined, {value: value}, undefined);\n};\n\nPromise.prototype[\"throw\"] =\nPromise.prototype.thenThrow = function (reason) {\n return this._then(\n thrower, undefined, undefined, {reason: reason}, undefined);\n};\n\nPromise.prototype.catchThrow = function (reason) {\n if (arguments.length <= 1) {\n return this._then(\n undefined, thrower, undefined, {reason: reason}, undefined);\n } else {\n var _reason = arguments[1];\n var handler = function() {throw _reason;};\n return this.caught(reason, handler);\n }\n};\n\nPromise.prototype.catchReturn = function (value) {\n if (arguments.length <= 1) {\n if (value instanceof Promise) value.suppressUnhandledRejections();\n return this._then(\n undefined, returner, undefined, {value: value}, undefined);\n } else {\n var _value = arguments[1];\n if (_value instanceof Promise) _value.suppressUnhandledRejections();\n var handler = function() {return _value;};\n return this.caught(value, handler);\n }\n};\n};\n","\"use strict\";\nmodule.exports = function(Promise, INTERNAL) {\nvar PromiseReduce = Promise.reduce;\nvar PromiseAll = Promise.all;\n\nfunction promiseAllThis() {\n return PromiseAll(this);\n}\n\nfunction PromiseMapSeries(promises, fn) {\n return PromiseReduce(promises, fn, INTERNAL, INTERNAL);\n}\n\nPromise.prototype.each = function (fn) {\n return PromiseReduce(this, fn, INTERNAL, 0)\n ._then(promiseAllThis, undefined, undefined, this, undefined);\n};\n\nPromise.prototype.mapSeries = function 
(fn) {\n return PromiseReduce(this, fn, INTERNAL, INTERNAL);\n};\n\nPromise.each = function (promises, fn) {\n return PromiseReduce(promises, fn, INTERNAL, 0)\n ._then(promiseAllThis, undefined, undefined, promises, undefined);\n};\n\nPromise.mapSeries = PromiseMapSeries;\n};\n\n","\"use strict\";\nvar es5 = require(\"./es5\");\nvar Objectfreeze = es5.freeze;\nvar util = require(\"./util\");\nvar inherits = util.inherits;\nvar notEnumerableProp = util.notEnumerableProp;\n\nfunction subError(nameProperty, defaultMessage) {\n function SubError(message) {\n if (!(this instanceof SubError)) return new SubError(message);\n notEnumerableProp(this, \"message\",\n typeof message === \"string\" ? message : defaultMessage);\n notEnumerableProp(this, \"name\", nameProperty);\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n } else {\n Error.call(this);\n }\n }\n inherits(SubError, Error);\n return SubError;\n}\n\nvar _TypeError, _RangeError;\nvar Warning = subError(\"Warning\", \"warning\");\nvar CancellationError = subError(\"CancellationError\", \"cancellation error\");\nvar TimeoutError = subError(\"TimeoutError\", \"timeout error\");\nvar AggregateError = subError(\"AggregateError\", \"aggregate error\");\ntry {\n _TypeError = TypeError;\n _RangeError = RangeError;\n} catch(e) {\n _TypeError = subError(\"TypeError\", \"type error\");\n _RangeError = subError(\"RangeError\", \"range error\");\n}\n\nvar methods = (\"join pop push shift unshift slice filter forEach some \" +\n \"every map indexOf lastIndexOf reduce reduceRight sort reverse\").split(\" \");\n\nfor (var i = 0; i < methods.length; ++i) {\n if (typeof Array.prototype[methods[i]] === \"function\") {\n AggregateError.prototype[methods[i]] = Array.prototype[methods[i]];\n }\n}\n\nes5.defineProperty(AggregateError.prototype, \"length\", {\n value: 0,\n configurable: false,\n writable: true,\n enumerable: true\n});\nAggregateError.prototype[\"isOperational\"] = true;\nvar level = 0;\nAggregateError.prototype.toString = function() {\n var indent = Array(level * 4 + 1).join(\" \");\n var ret = \"\\n\" + indent + \"AggregateError of:\" + \"\\n\";\n level++;\n indent = Array(level * 4 + 1).join(\" \");\n for (var i = 0; i < this.length; ++i) {\n var str = this[i] === this ? 
\"[Circular AggregateError]\" : this[i] + \"\";\n var lines = str.split(\"\\n\");\n for (var j = 0; j < lines.length; ++j) {\n lines[j] = indent + lines[j];\n }\n str = lines.join(\"\\n\");\n ret += str + \"\\n\";\n }\n level--;\n return ret;\n};\n\nfunction OperationalError(message) {\n if (!(this instanceof OperationalError))\n return new OperationalError(message);\n notEnumerableProp(this, \"name\", \"OperationalError\");\n notEnumerableProp(this, \"message\", message);\n this.cause = message;\n this[\"isOperational\"] = true;\n\n if (message instanceof Error) {\n notEnumerableProp(this, \"message\", message.message);\n notEnumerableProp(this, \"stack\", message.stack);\n } else if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n}\ninherits(OperationalError, Error);\n\nvar errorTypes = Error[\"__BluebirdErrorTypes__\"];\nif (!errorTypes) {\n errorTypes = Objectfreeze({\n CancellationError: CancellationError,\n TimeoutError: TimeoutError,\n OperationalError: OperationalError,\n RejectionError: OperationalError,\n AggregateError: AggregateError\n });\n es5.defineProperty(Error, \"__BluebirdErrorTypes__\", {\n value: errorTypes,\n writable: false,\n enumerable: false,\n configurable: false\n });\n}\n\nmodule.exports = {\n Error: Error,\n TypeError: _TypeError,\n RangeError: _RangeError,\n CancellationError: errorTypes.CancellationError,\n OperationalError: errorTypes.OperationalError,\n TimeoutError: errorTypes.TimeoutError,\n AggregateError: errorTypes.AggregateError,\n Warning: Warning\n};\n","var isES5 = (function(){\n \"use strict\";\n return this === undefined;\n})();\n\nif (isES5) {\n module.exports = {\n freeze: Object.freeze,\n defineProperty: Object.defineProperty,\n getDescriptor: Object.getOwnPropertyDescriptor,\n keys: Object.keys,\n names: Object.getOwnPropertyNames,\n getPrototypeOf: Object.getPrototypeOf,\n isArray: Array.isArray,\n isES5: isES5,\n propertyIsWritable: function(obj, prop) {\n var descriptor = Object.getOwnPropertyDescriptor(obj, prop);\n return !!(!descriptor || descriptor.writable || descriptor.set);\n }\n };\n} else {\n var has = {}.hasOwnProperty;\n var str = {}.toString;\n var proto = {}.constructor.prototype;\n\n var ObjectKeys = function (o) {\n var ret = [];\n for (var key in o) {\n if (has.call(o, key)) {\n ret.push(key);\n }\n }\n return ret;\n };\n\n var ObjectGetDescriptor = function(o, key) {\n return {value: o[key]};\n };\n\n var ObjectDefineProperty = function (o, key, desc) {\n o[key] = desc.value;\n return o;\n };\n\n var ObjectFreeze = function (obj) {\n return obj;\n };\n\n var ObjectGetPrototypeOf = function (obj) {\n try {\n return Object(obj).constructor.prototype;\n }\n catch (e) {\n return proto;\n }\n };\n\n var ArrayIsArray = function (obj) {\n try {\n return str.call(obj) === \"[object Array]\";\n }\n catch(e) {\n return false;\n }\n };\n\n module.exports = {\n isArray: ArrayIsArray,\n keys: ObjectKeys,\n names: ObjectKeys,\n defineProperty: ObjectDefineProperty,\n getDescriptor: ObjectGetDescriptor,\n freeze: ObjectFreeze,\n getPrototypeOf: ObjectGetPrototypeOf,\n isES5: isES5,\n propertyIsWritable: function() {\n return true;\n }\n };\n}\n","\"use strict\";\nmodule.exports = function(Promise, INTERNAL) {\nvar PromiseMap = Promise.map;\n\nPromise.prototype.filter = function (fn, options) {\n return PromiseMap(this, fn, options, INTERNAL);\n};\n\nPromise.filter = function (promises, fn, options) {\n return PromiseMap(promises, fn, options, INTERNAL);\n};\n};\n","\"use strict\";\nmodule.exports = 
function(Promise, tryConvertToPromise) {\nvar util = require(\"./util\");\nvar CancellationError = Promise.CancellationError;\nvar errorObj = util.errorObj;\n\nfunction PassThroughHandlerContext(promise, type, handler) {\n this.promise = promise;\n this.type = type;\n this.handler = handler;\n this.called = false;\n this.cancelPromise = null;\n}\n\nPassThroughHandlerContext.prototype.isFinallyHandler = function() {\n return this.type === 0;\n};\n\nfunction FinallyHandlerCancelReaction(finallyHandler) {\n this.finallyHandler = finallyHandler;\n}\n\nFinallyHandlerCancelReaction.prototype._resultCancelled = function() {\n checkCancel(this.finallyHandler);\n};\n\nfunction checkCancel(ctx, reason) {\n if (ctx.cancelPromise != null) {\n if (arguments.length > 1) {\n ctx.cancelPromise._reject(reason);\n } else {\n ctx.cancelPromise._cancel();\n }\n ctx.cancelPromise = null;\n return true;\n }\n return false;\n}\n\nfunction succeed() {\n return finallyHandler.call(this, this.promise._target()._settledValue());\n}\nfunction fail(reason) {\n if (checkCancel(this, reason)) return;\n errorObj.e = reason;\n return errorObj;\n}\nfunction finallyHandler(reasonOrValue) {\n var promise = this.promise;\n var handler = this.handler;\n\n if (!this.called) {\n this.called = true;\n var ret = this.isFinallyHandler()\n ? handler.call(promise._boundValue())\n : handler.call(promise._boundValue(), reasonOrValue);\n if (ret !== undefined) {\n promise._setReturnedNonUndefined();\n var maybePromise = tryConvertToPromise(ret, promise);\n if (maybePromise instanceof Promise) {\n if (this.cancelPromise != null) {\n if (maybePromise._isCancelled()) {\n var reason =\n new CancellationError(\"late cancellation observer\");\n promise._attachExtraTrace(reason);\n errorObj.e = reason;\n return errorObj;\n } else if (maybePromise.isPending()) {\n maybePromise._attachCancellationCallback(\n new FinallyHandlerCancelReaction(this));\n }\n }\n return maybePromise._then(\n succeed, fail, undefined, this, undefined);\n }\n }\n }\n\n if (promise.isRejected()) {\n checkCancel(this);\n errorObj.e = reasonOrValue;\n return errorObj;\n } else {\n checkCancel(this);\n return reasonOrValue;\n }\n}\n\nPromise.prototype._passThrough = function(handler, type, success, fail) {\n if (typeof handler !== \"function\") return this.then();\n return this._then(success,\n fail,\n undefined,\n new PassThroughHandlerContext(this, type, handler),\n undefined);\n};\n\nPromise.prototype.lastly =\nPromise.prototype[\"finally\"] = function (handler) {\n return this._passThrough(handler,\n 0,\n finallyHandler,\n finallyHandler);\n};\n\nPromise.prototype.tap = function (handler) {\n return this._passThrough(handler, 1, finallyHandler);\n};\n\nreturn PassThroughHandlerContext;\n};\n","\"use strict\";\nmodule.exports = function(Promise,\n apiRejection,\n INTERNAL,\n tryConvertToPromise,\n Proxyable,\n debug) {\nvar errors = require(\"./errors\");\nvar TypeError = errors.TypeError;\nvar util = require(\"./util\");\nvar errorObj = util.errorObj;\nvar tryCatch = util.tryCatch;\nvar yieldHandlers = [];\n\nfunction promiseFromYieldHandler(value, yieldHandlers, traceParent) {\n for (var i = 0; i < yieldHandlers.length; ++i) {\n traceParent._pushContext();\n var result = tryCatch(yieldHandlers[i])(value);\n traceParent._popContext();\n if (result === errorObj) {\n traceParent._pushContext();\n var ret = Promise.reject(errorObj.e);\n traceParent._popContext();\n return ret;\n }\n var maybePromise = tryConvertToPromise(result, traceParent);\n if (maybePromise instanceof 
Promise) return maybePromise;\n }\n return null;\n}\n\nfunction PromiseSpawn(generatorFunction, receiver, yieldHandler, stack) {\n if (debug.cancellation()) {\n var internal = new Promise(INTERNAL);\n var _finallyPromise = this._finallyPromise = new Promise(INTERNAL);\n this._promise = internal.lastly(function() {\n return _finallyPromise;\n });\n internal._captureStackTrace();\n internal._setOnCancel(this);\n } else {\n var promise = this._promise = new Promise(INTERNAL);\n promise._captureStackTrace();\n }\n this._stack = stack;\n this._generatorFunction = generatorFunction;\n this._receiver = receiver;\n this._generator = undefined;\n this._yieldHandlers = typeof yieldHandler === \"function\"\n ? [yieldHandler].concat(yieldHandlers)\n : yieldHandlers;\n this._yieldedPromise = null;\n this._cancellationPhase = false;\n}\nutil.inherits(PromiseSpawn, Proxyable);\n\nPromiseSpawn.prototype._isResolved = function() {\n return this._promise === null;\n};\n\nPromiseSpawn.prototype._cleanup = function() {\n this._promise = this._generator = null;\n if (debug.cancellation() && this._finallyPromise !== null) {\n this._finallyPromise._fulfill();\n this._finallyPromise = null;\n }\n};\n\nPromiseSpawn.prototype._promiseCancelled = function() {\n if (this._isResolved()) return;\n var implementsReturn = typeof this._generator[\"return\"] !== \"undefined\";\n\n var result;\n if (!implementsReturn) {\n var reason = new Promise.CancellationError(\n \"generator .return() sentinel\");\n Promise.coroutine.returnSentinel = reason;\n this._promise._attachExtraTrace(reason);\n this._promise._pushContext();\n result = tryCatch(this._generator[\"throw\"]).call(this._generator,\n reason);\n this._promise._popContext();\n } else {\n this._promise._pushContext();\n result = tryCatch(this._generator[\"return\"]).call(this._generator,\n undefined);\n this._promise._popContext();\n }\n this._cancellationPhase = true;\n this._yieldedPromise = null;\n this._continue(result);\n};\n\nPromiseSpawn.prototype._promiseFulfilled = function(value) {\n this._yieldedPromise = null;\n this._promise._pushContext();\n var result = tryCatch(this._generator.next).call(this._generator, value);\n this._promise._popContext();\n this._continue(result);\n};\n\nPromiseSpawn.prototype._promiseRejected = function(reason) {\n this._yieldedPromise = null;\n this._promise._attachExtraTrace(reason);\n this._promise._pushContext();\n var result = tryCatch(this._generator[\"throw\"])\n .call(this._generator, reason);\n this._promise._popContext();\n this._continue(result);\n};\n\nPromiseSpawn.prototype._resultCancelled = function() {\n if (this._yieldedPromise instanceof Promise) {\n var promise = this._yieldedPromise;\n this._yieldedPromise = null;\n promise.cancel();\n }\n};\n\nPromiseSpawn.prototype.promise = function () {\n return this._promise;\n};\n\nPromiseSpawn.prototype._run = function () {\n this._generator = this._generatorFunction.call(this._receiver);\n this._receiver =\n this._generatorFunction = undefined;\n this._promiseFulfilled(undefined);\n};\n\nPromiseSpawn.prototype._continue = function (result) {\n var promise = this._promise;\n if (result === errorObj) {\n this._cleanup();\n if (this._cancellationPhase) {\n return promise.cancel();\n } else {\n return promise._rejectCallback(result.e, false);\n }\n }\n\n var value = result.value;\n if (result.done === true) {\n this._cleanup();\n if (this._cancellationPhase) {\n return promise.cancel();\n } else {\n return promise._resolveCallback(value);\n }\n } else {\n var maybePromise = 
tryConvertToPromise(value, this._promise);\n if (!(maybePromise instanceof Promise)) {\n maybePromise =\n promiseFromYieldHandler(maybePromise,\n this._yieldHandlers,\n this._promise);\n if (maybePromise === null) {\n this._promiseRejected(\n new TypeError(\n \"A value %s was yielded that could not be treated as a promise\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\\u000a\".replace(\"%s\", value) +\n \"From coroutine:\\u000a\" +\n this._stack.split(\"\\n\").slice(1, -7).join(\"\\n\")\n )\n );\n return;\n }\n }\n maybePromise = maybePromise._target();\n var bitField = maybePromise._bitField;\n ;\n if (((bitField & 50397184) === 0)) {\n this._yieldedPromise = maybePromise;\n maybePromise._proxy(this, null);\n } else if (((bitField & 33554432) !== 0)) {\n Promise._async.invoke(\n this._promiseFulfilled, this, maybePromise._value()\n );\n } else if (((bitField & 16777216) !== 0)) {\n Promise._async.invoke(\n this._promiseRejected, this, maybePromise._reason()\n );\n } else {\n this._promiseCancelled();\n }\n }\n};\n\nPromise.coroutine = function (generatorFunction, options) {\n if (typeof generatorFunction !== \"function\") {\n throw new TypeError(\"generatorFunction must be a function\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n var yieldHandler = Object(options).yieldHandler;\n var PromiseSpawn$ = PromiseSpawn;\n var stack = new Error().stack;\n return function () {\n var generator = generatorFunction.apply(this, arguments);\n var spawn = new PromiseSpawn$(undefined, undefined, yieldHandler,\n stack);\n var ret = spawn.promise();\n spawn._generator = generator;\n spawn._promiseFulfilled(undefined);\n return ret;\n };\n};\n\nPromise.coroutine.addYieldHandler = function(fn) {\n if (typeof fn !== \"function\") {\n throw new TypeError(\"expecting a function but got \" + util.classString(fn));\n }\n yieldHandlers.push(fn);\n};\n\nPromise.spawn = function (generatorFunction) {\n debug.deprecated(\"Promise.spawn()\", \"Promise.coroutine()\");\n if (typeof generatorFunction !== \"function\") {\n return apiRejection(\"generatorFunction must be a function\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n var spawn = new PromiseSpawn(generatorFunction, this);\n var ret = spawn.promise();\n spawn._run(Promise.spawn);\n return ret;\n};\n};\n","\"use strict\";\nmodule.exports =\nfunction(Promise, PromiseArray, tryConvertToPromise, INTERNAL, async,\n getDomain) {\nvar util = require(\"./util\");\nvar canEvaluate = util.canEvaluate;\nvar tryCatch = util.tryCatch;\nvar errorObj = util.errorObj;\nvar reject;\n\nif (!false) {\nif (canEvaluate) {\n var thenCallback = function(i) {\n return new Function(\"value\", \"holder\", \" \\n\\\n 'use strict'; \\n\\\n holder.pIndex = value; \\n\\\n holder.checkFulfillment(this); \\n\\\n \".replace(/Index/g, i));\n };\n\n var promiseSetter = function(i) {\n return new Function(\"promise\", \"holder\", \" \\n\\\n 'use strict'; \\n\\\n holder.pIndex = promise; \\n\\\n \".replace(/Index/g, i));\n };\n\n var generateHolderClass = function(total) {\n var props = new Array(total);\n for (var i = 0; i < props.length; ++i) {\n props[i] = \"this.p\" + (i+1);\n }\n var assignment = props.join(\" = \") + \" = null;\";\n var cancellationCode= \"var promise;\\n\" + props.map(function(prop) {\n return \" \\n\\\n promise = \" + prop + \"; \\n\\\n if (promise instanceof Promise) { \\n\\\n promise.cancel(); \\n\\\n } \\n\\\n \";\n }).join(\"\\n\");\n var passedArguments = props.join(\", \");\n var name = \"Holder$\" + total;\n\n\n var code = \"return function(tryCatch, 
errorObj, Promise, async) { \\n\\\n 'use strict'; \\n\\\n function [TheName](fn) { \\n\\\n [TheProperties] \\n\\\n this.fn = fn; \\n\\\n this.asyncNeeded = true; \\n\\\n this.now = 0; \\n\\\n } \\n\\\n \\n\\\n [TheName].prototype._callFunction = function(promise) { \\n\\\n promise._pushContext(); \\n\\\n var ret = tryCatch(this.fn)([ThePassedArguments]); \\n\\\n promise._popContext(); \\n\\\n if (ret === errorObj) { \\n\\\n promise._rejectCallback(ret.e, false); \\n\\\n } else { \\n\\\n promise._resolveCallback(ret); \\n\\\n } \\n\\\n }; \\n\\\n \\n\\\n [TheName].prototype.checkFulfillment = function(promise) { \\n\\\n var now = ++this.now; \\n\\\n if (now === [TheTotal]) { \\n\\\n if (this.asyncNeeded) { \\n\\\n async.invoke(this._callFunction, this, promise); \\n\\\n } else { \\n\\\n this._callFunction(promise); \\n\\\n } \\n\\\n \\n\\\n } \\n\\\n }; \\n\\\n \\n\\\n [TheName].prototype._resultCancelled = function() { \\n\\\n [CancellationCode] \\n\\\n }; \\n\\\n \\n\\\n return [TheName]; \\n\\\n }(tryCatch, errorObj, Promise, async); \\n\\\n \";\n\n code = code.replace(/\\[TheName\\]/g, name)\n .replace(/\\[TheTotal\\]/g, total)\n .replace(/\\[ThePassedArguments\\]/g, passedArguments)\n .replace(/\\[TheProperties\\]/g, assignment)\n .replace(/\\[CancellationCode\\]/g, cancellationCode);\n\n return new Function(\"tryCatch\", \"errorObj\", \"Promise\", \"async\", code)\n (tryCatch, errorObj, Promise, async);\n };\n\n var holderClasses = [];\n var thenCallbacks = [];\n var promiseSetters = [];\n\n for (var i = 0; i < 8; ++i) {\n holderClasses.push(generateHolderClass(i + 1));\n thenCallbacks.push(thenCallback(i + 1));\n promiseSetters.push(promiseSetter(i + 1));\n }\n\n reject = function (reason) {\n this._reject(reason);\n };\n}}\n\nPromise.join = function () {\n var last = arguments.length - 1;\n var fn;\n if (last > 0 && typeof arguments[last] === \"function\") {\n fn = arguments[last];\n if (!false) {\n if (last <= 8 && canEvaluate) {\n var ret = new Promise(INTERNAL);\n ret._captureStackTrace();\n var HolderClass = holderClasses[last - 1];\n var holder = new HolderClass(fn);\n var callbacks = thenCallbacks;\n\n for (var i = 0; i < last; ++i) {\n var maybePromise = tryConvertToPromise(arguments[i], ret);\n if (maybePromise instanceof Promise) {\n maybePromise = maybePromise._target();\n var bitField = maybePromise._bitField;\n ;\n if (((bitField & 50397184) === 0)) {\n maybePromise._then(callbacks[i], reject,\n undefined, ret, holder);\n promiseSetters[i](maybePromise, holder);\n holder.asyncNeeded = false;\n } else if (((bitField & 33554432) !== 0)) {\n callbacks[i].call(ret,\n maybePromise._value(), holder);\n } else if (((bitField & 16777216) !== 0)) {\n ret._reject(maybePromise._reason());\n } else {\n ret._cancel();\n }\n } else {\n callbacks[i].call(ret, maybePromise, holder);\n }\n }\n\n if (!ret._isFateSealed()) {\n if (holder.asyncNeeded) {\n var domain = getDomain();\n if (domain !== null) {\n holder.fn = util.domainBind(domain, holder.fn);\n }\n }\n ret._setAsyncGuaranteed();\n ret._setOnCancel(holder);\n }\n return ret;\n }\n }\n }\n var $_len = arguments.length;var args = new Array($_len); for(var $_i = 0; $_i < $_len; ++$_i) {args[$_i] = arguments[$_i];};\n if (fn) args.pop();\n var ret = new PromiseArray(args).promise();\n return fn !== undefined ? 
ret.spread(fn) : ret;\n};\n\n};\n","\"use strict\";\nmodule.exports = function(Promise,\n PromiseArray,\n apiRejection,\n tryConvertToPromise,\n INTERNAL,\n debug) {\nvar getDomain = Promise._getDomain;\nvar util = require(\"./util\");\nvar tryCatch = util.tryCatch;\nvar errorObj = util.errorObj;\nvar async = Promise._async;\n\nfunction MappingPromiseArray(promises, fn, limit, _filter) {\n this.constructor$(promises);\n this._promise._captureStackTrace();\n var domain = getDomain();\n this._callback = domain === null ? fn : util.domainBind(domain, fn);\n this._preservedValues = _filter === INTERNAL\n ? new Array(this.length())\n : null;\n this._limit = limit;\n this._inFlight = 0;\n this._queue = [];\n async.invoke(this._asyncInit, this, undefined);\n}\nutil.inherits(MappingPromiseArray, PromiseArray);\n\nMappingPromiseArray.prototype._asyncInit = function() {\n this._init$(undefined, -2);\n};\n\nMappingPromiseArray.prototype._init = function () {};\n\nMappingPromiseArray.prototype._promiseFulfilled = function (value, index) {\n var values = this._values;\n var length = this.length();\n var preservedValues = this._preservedValues;\n var limit = this._limit;\n\n if (index < 0) {\n index = (index * -1) - 1;\n values[index] = value;\n if (limit >= 1) {\n this._inFlight--;\n this._drainQueue();\n if (this._isResolved()) return true;\n }\n } else {\n if (limit >= 1 && this._inFlight >= limit) {\n values[index] = value;\n this._queue.push(index);\n return false;\n }\n if (preservedValues !== null) preservedValues[index] = value;\n\n var promise = this._promise;\n var callback = this._callback;\n var receiver = promise._boundValue();\n promise._pushContext();\n var ret = tryCatch(callback).call(receiver, value, index, length);\n var promiseCreated = promise._popContext();\n debug.checkForgottenReturns(\n ret,\n promiseCreated,\n preservedValues !== null ? 
\"Promise.filter\" : \"Promise.map\",\n promise\n );\n if (ret === errorObj) {\n this._reject(ret.e);\n return true;\n }\n\n var maybePromise = tryConvertToPromise(ret, this._promise);\n if (maybePromise instanceof Promise) {\n maybePromise = maybePromise._target();\n var bitField = maybePromise._bitField;\n ;\n if (((bitField & 50397184) === 0)) {\n if (limit >= 1) this._inFlight++;\n values[index] = maybePromise;\n maybePromise._proxy(this, (index + 1) * -1);\n return false;\n } else if (((bitField & 33554432) !== 0)) {\n ret = maybePromise._value();\n } else if (((bitField & 16777216) !== 0)) {\n this._reject(maybePromise._reason());\n return true;\n } else {\n this._cancel();\n return true;\n }\n }\n values[index] = ret;\n }\n var totalResolved = ++this._totalResolved;\n if (totalResolved >= length) {\n if (preservedValues !== null) {\n this._filter(values, preservedValues);\n } else {\n this._resolve(values);\n }\n return true;\n }\n return false;\n};\n\nMappingPromiseArray.prototype._drainQueue = function () {\n var queue = this._queue;\n var limit = this._limit;\n var values = this._values;\n while (queue.length > 0 && this._inFlight < limit) {\n if (this._isResolved()) return;\n var index = queue.pop();\n this._promiseFulfilled(values[index], index);\n }\n};\n\nMappingPromiseArray.prototype._filter = function (booleans, values) {\n var len = values.length;\n var ret = new Array(len);\n var j = 0;\n for (var i = 0; i < len; ++i) {\n if (booleans[i]) ret[j++] = values[i];\n }\n ret.length = j;\n this._resolve(ret);\n};\n\nMappingPromiseArray.prototype.preservedValues = function () {\n return this._preservedValues;\n};\n\nfunction map(promises, fn, options, _filter) {\n if (typeof fn !== \"function\") {\n return apiRejection(\"expecting a function but got \" + util.classString(fn));\n }\n\n var limit = 0;\n if (options !== undefined) {\n if (typeof options === \"object\" && options !== null) {\n if (typeof options.concurrency !== \"number\") {\n return Promise.reject(\n new TypeError(\"'concurrency' must be a number but it is \" +\n util.classString(options.concurrency)));\n }\n limit = options.concurrency;\n } else {\n return Promise.reject(new TypeError(\n \"options argument must be an object but it is \" +\n util.classString(options)));\n }\n }\n limit = typeof limit === \"number\" &&\n isFinite(limit) && limit >= 1 ? 
limit : 0;\n return new MappingPromiseArray(promises, fn, limit, _filter).promise();\n}\n\nPromise.prototype.map = function (fn, options) {\n return map(this, fn, options, null);\n};\n\nPromise.map = function (promises, fn, options, _filter) {\n return map(promises, fn, options, _filter);\n};\n\n\n};\n","\"use strict\";\nmodule.exports =\nfunction(Promise, INTERNAL, tryConvertToPromise, apiRejection, debug) {\nvar util = require(\"./util\");\nvar tryCatch = util.tryCatch;\n\nPromise.method = function (fn) {\n if (typeof fn !== \"function\") {\n throw new Promise.TypeError(\"expecting a function but got \" + util.classString(fn));\n }\n return function () {\n var ret = new Promise(INTERNAL);\n ret._captureStackTrace();\n ret._pushContext();\n var value = tryCatch(fn).apply(this, arguments);\n var promiseCreated = ret._popContext();\n debug.checkForgottenReturns(\n value, promiseCreated, \"Promise.method\", ret);\n ret._resolveFromSyncValue(value);\n return ret;\n };\n};\n\nPromise.attempt = Promise[\"try\"] = function (fn) {\n if (typeof fn !== \"function\") {\n return apiRejection(\"expecting a function but got \" + util.classString(fn));\n }\n var ret = new Promise(INTERNAL);\n ret._captureStackTrace();\n ret._pushContext();\n var value;\n if (arguments.length > 1) {\n debug.deprecated(\"calling Promise.try with more than 1 argument\");\n var arg = arguments[1];\n var ctx = arguments[2];\n value = util.isArray(arg) ? tryCatch(fn).apply(ctx, arg)\n : tryCatch(fn).call(ctx, arg);\n } else {\n value = tryCatch(fn)();\n }\n var promiseCreated = ret._popContext();\n debug.checkForgottenReturns(\n value, promiseCreated, \"Promise.try\", ret);\n ret._resolveFromSyncValue(value);\n return ret;\n};\n\nPromise.prototype._resolveFromSyncValue = function (value) {\n if (value === util.errorObj) {\n this._rejectCallback(value.e, false);\n } else {\n this._resolveCallback(value, true);\n }\n};\n};\n","\"use strict\";\nvar util = require(\"./util\");\nvar maybeWrapAsError = util.maybeWrapAsError;\nvar errors = require(\"./errors\");\nvar OperationalError = errors.OperationalError;\nvar es5 = require(\"./es5\");\n\nfunction isUntypedError(obj) {\n return obj instanceof Error &&\n es5.getPrototypeOf(obj) === Error.prototype;\n}\n\nvar rErrorKey = /^(?:name|message|stack|cause)$/;\nfunction wrapAsOperationalError(obj) {\n var ret;\n if (isUntypedError(obj)) {\n ret = new OperationalError(obj);\n ret.name = obj.name;\n ret.message = obj.message;\n ret.stack = obj.stack;\n var keys = es5.keys(obj);\n for (var i = 0; i < keys.length; ++i) {\n var key = keys[i];\n if (!rErrorKey.test(key)) {\n ret[key] = obj[key];\n }\n }\n return ret;\n }\n util.markAsOriginatingFromRejection(obj);\n return obj;\n}\n\nfunction nodebackForPromise(promise, multiArgs) {\n return function(err, value) {\n if (promise === null) return;\n if (err) {\n var wrapped = wrapAsOperationalError(maybeWrapAsError(err));\n promise._attachExtraTrace(wrapped);\n promise._reject(wrapped);\n } else if (!multiArgs) {\n promise._fulfill(value);\n } else {\n var $_len = arguments.length;var args = new Array(Math.max($_len - 1, 0)); for(var $_i = 1; $_i < $_len; ++$_i) {args[$_i - 1] = arguments[$_i];};\n promise._fulfill(args);\n }\n promise = null;\n };\n}\n\nmodule.exports = nodebackForPromise;\n","\"use strict\";\nmodule.exports = function(Promise) {\nvar util = require(\"./util\");\nvar async = Promise._async;\nvar tryCatch = util.tryCatch;\nvar errorObj = util.errorObj;\n\nfunction spreadAdapter(val, nodeback) {\n var promise = this;\n if 
(!util.isArray(val)) return successAdapter.call(promise, val, nodeback);\n var ret =\n tryCatch(nodeback).apply(promise._boundValue(), [null].concat(val));\n if (ret === errorObj) {\n async.throwLater(ret.e);\n }\n}\n\nfunction successAdapter(val, nodeback) {\n var promise = this;\n var receiver = promise._boundValue();\n var ret = val === undefined\n ? tryCatch(nodeback).call(receiver, null)\n : tryCatch(nodeback).call(receiver, null, val);\n if (ret === errorObj) {\n async.throwLater(ret.e);\n }\n}\nfunction errorAdapter(reason, nodeback) {\n var promise = this;\n if (!reason) {\n var newReason = new Error(reason + \"\");\n newReason.cause = reason;\n reason = newReason;\n }\n var ret = tryCatch(nodeback).call(promise._boundValue(), reason);\n if (ret === errorObj) {\n async.throwLater(ret.e);\n }\n}\n\nPromise.prototype.asCallback = Promise.prototype.nodeify = function (nodeback,\n options) {\n if (typeof nodeback == \"function\") {\n var adapter = successAdapter;\n if (options !== undefined && Object(options).spread) {\n adapter = spreadAdapter;\n }\n this._then(\n adapter,\n errorAdapter,\n undefined,\n this,\n nodeback\n );\n }\n return this;\n};\n};\n","\"use strict\";\nmodule.exports = function() {\nvar makeSelfResolutionError = function () {\n return new TypeError(\"circular promise resolution chain\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n};\nvar reflectHandler = function() {\n return new Promise.PromiseInspection(this._target());\n};\nvar apiRejection = function(msg) {\n return Promise.reject(new TypeError(msg));\n};\nfunction Proxyable() {}\nvar UNDEFINED_BINDING = {};\nvar util = require(\"./util\");\n\nvar getDomain;\nif (util.isNode) {\n getDomain = function() {\n var ret = process.domain;\n if (ret === undefined) ret = null;\n return ret;\n };\n} else {\n getDomain = function() {\n return null;\n };\n}\nutil.notEnumerableProp(Promise, \"_getDomain\", getDomain);\n\nvar es5 = require(\"./es5\");\nvar Async = require(\"./async\");\nvar async = new Async();\nes5.defineProperty(Promise, \"_async\", {value: async});\nvar errors = require(\"./errors\");\nvar TypeError = Promise.TypeError = errors.TypeError;\nPromise.RangeError = errors.RangeError;\nvar CancellationError = Promise.CancellationError = errors.CancellationError;\nPromise.TimeoutError = errors.TimeoutError;\nPromise.OperationalError = errors.OperationalError;\nPromise.RejectionError = errors.OperationalError;\nPromise.AggregateError = errors.AggregateError;\nvar INTERNAL = function(){};\nvar APPLY = {};\nvar NEXT_FILTER = {};\nvar tryConvertToPromise = require(\"./thenables\")(Promise, INTERNAL);\nvar PromiseArray =\n require(\"./promise_array\")(Promise, INTERNAL,\n tryConvertToPromise, apiRejection, Proxyable);\nvar Context = require(\"./context\")(Promise);\n /*jshint unused:false*/\nvar createContext = Context.create;\nvar debug = require(\"./debuggability\")(Promise, Context);\nvar CapturedTrace = debug.CapturedTrace;\nvar PassThroughHandlerContext =\n require(\"./finally\")(Promise, tryConvertToPromise);\nvar catchFilter = require(\"./catch_filter\")(NEXT_FILTER);\nvar nodebackForPromise = require(\"./nodeback\");\nvar errorObj = util.errorObj;\nvar tryCatch = util.tryCatch;\nfunction check(self, executor) {\n if (typeof executor !== \"function\") {\n throw new TypeError(\"expecting a function but got \" + util.classString(executor));\n }\n if (self.constructor !== Promise) {\n throw new TypeError(\"the promise constructor cannot be invoked directly\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n 
}\n}\n\nfunction Promise(executor) {\n this._bitField = 0;\n this._fulfillmentHandler0 = undefined;\n this._rejectionHandler0 = undefined;\n this._promise0 = undefined;\n this._receiver0 = undefined;\n if (executor !== INTERNAL) {\n check(this, executor);\n this._resolveFromExecutor(executor);\n }\n this._promiseCreated();\n this._fireEvent(\"promiseCreated\", this);\n}\n\nPromise.prototype.toString = function () {\n return \"[object Promise]\";\n};\n\nPromise.prototype.caught = Promise.prototype[\"catch\"] = function (fn) {\n var len = arguments.length;\n if (len > 1) {\n var catchInstances = new Array(len - 1),\n j = 0, i;\n for (i = 0; i < len - 1; ++i) {\n var item = arguments[i];\n if (util.isObject(item)) {\n catchInstances[j++] = item;\n } else {\n return apiRejection(\"expecting an object but got \" +\n \"A catch statement predicate \" + util.classString(item));\n }\n }\n catchInstances.length = j;\n fn = arguments[i];\n return this.then(undefined, catchFilter(catchInstances, fn, this));\n }\n return this.then(undefined, fn);\n};\n\nPromise.prototype.reflect = function () {\n return this._then(reflectHandler,\n reflectHandler, undefined, this, undefined);\n};\n\nPromise.prototype.then = function (didFulfill, didReject) {\n if (debug.warnings() && arguments.length > 0 &&\n typeof didFulfill !== \"function\" &&\n typeof didReject !== \"function\") {\n var msg = \".then() only accepts functions but was passed: \" +\n util.classString(didFulfill);\n if (arguments.length > 1) {\n msg += \", \" + util.classString(didReject);\n }\n this._warn(msg);\n }\n return this._then(didFulfill, didReject, undefined, undefined, undefined);\n};\n\nPromise.prototype.done = function (didFulfill, didReject) {\n var promise =\n this._then(didFulfill, didReject, undefined, undefined, undefined);\n promise._setIsFinal();\n};\n\nPromise.prototype.spread = function (fn) {\n if (typeof fn !== \"function\") {\n return apiRejection(\"expecting a function but got \" + util.classString(fn));\n }\n return this.all()._then(fn, undefined, undefined, APPLY, undefined);\n};\n\nPromise.prototype.toJSON = function () {\n var ret = {\n isFulfilled: false,\n isRejected: false,\n fulfillmentValue: undefined,\n rejectionReason: undefined\n };\n if (this.isFulfilled()) {\n ret.fulfillmentValue = this.value();\n ret.isFulfilled = true;\n } else if (this.isRejected()) {\n ret.rejectionReason = this.reason();\n ret.isRejected = true;\n }\n return ret;\n};\n\nPromise.prototype.all = function () {\n if (arguments.length > 0) {\n this._warn(\".all() was passed arguments but it does not take any\");\n }\n return new PromiseArray(this).promise();\n};\n\nPromise.prototype.error = function (fn) {\n return this.caught(util.originatesFromRejection, fn);\n};\n\nPromise.getNewLibraryCopy = module.exports;\n\nPromise.is = function (val) {\n return val instanceof Promise;\n};\n\nPromise.fromNode = Promise.fromCallback = function(fn) {\n var ret = new Promise(INTERNAL);\n ret._captureStackTrace();\n var multiArgs = arguments.length > 1 ? 
!!Object(arguments[1]).multiArgs\n : false;\n var result = tryCatch(fn)(nodebackForPromise(ret, multiArgs));\n if (result === errorObj) {\n ret._rejectCallback(result.e, true);\n }\n if (!ret._isFateSealed()) ret._setAsyncGuaranteed();\n return ret;\n};\n\nPromise.all = function (promises) {\n return new PromiseArray(promises).promise();\n};\n\nPromise.cast = function (obj) {\n var ret = tryConvertToPromise(obj);\n if (!(ret instanceof Promise)) {\n ret = new Promise(INTERNAL);\n ret._captureStackTrace();\n ret._setFulfilled();\n ret._rejectionHandler0 = obj;\n }\n return ret;\n};\n\nPromise.resolve = Promise.fulfilled = Promise.cast;\n\nPromise.reject = Promise.rejected = function (reason) {\n var ret = new Promise(INTERNAL);\n ret._captureStackTrace();\n ret._rejectCallback(reason, true);\n return ret;\n};\n\nPromise.setScheduler = function(fn) {\n if (typeof fn !== \"function\") {\n throw new TypeError(\"expecting a function but got \" + util.classString(fn));\n }\n return async.setScheduler(fn);\n};\n\nPromise.prototype._then = function (\n didFulfill,\n didReject,\n _, receiver,\n internalData\n) {\n var haveInternalData = internalData !== undefined;\n var promise = haveInternalData ? internalData : new Promise(INTERNAL);\n var target = this._target();\n var bitField = target._bitField;\n\n if (!haveInternalData) {\n promise._propagateFrom(this, 3);\n promise._captureStackTrace();\n if (receiver === undefined &&\n ((this._bitField & 2097152) !== 0)) {\n if (!((bitField & 50397184) === 0)) {\n receiver = this._boundValue();\n } else {\n receiver = target === this ? undefined : this._boundTo;\n }\n }\n this._fireEvent(\"promiseChained\", this, promise);\n }\n\n var domain = getDomain();\n if (!((bitField & 50397184) === 0)) {\n var handler, value, settler = target._settlePromiseCtx;\n if (((bitField & 33554432) !== 0)) {\n value = target._rejectionHandler0;\n handler = didFulfill;\n } else if (((bitField & 16777216) !== 0)) {\n value = target._fulfillmentHandler0;\n handler = didReject;\n target._unsetRejectionIsUnhandled();\n } else {\n settler = target._settlePromiseLateCancellationObserver;\n value = new CancellationError(\"late cancellation observer\");\n target._attachExtraTrace(value);\n handler = didReject;\n }\n\n async.invoke(settler, target, {\n handler: domain === null ? 
handler\n : (typeof handler === \"function\" &&\n util.domainBind(domain, handler)),\n promise: promise,\n receiver: receiver,\n value: value\n });\n } else {\n target._addCallbacks(didFulfill, didReject, promise, receiver, domain);\n }\n\n return promise;\n};\n\nPromise.prototype._length = function () {\n return this._bitField & 65535;\n};\n\nPromise.prototype._isFateSealed = function () {\n return (this._bitField & 117506048) !== 0;\n};\n\nPromise.prototype._isFollowing = function () {\n return (this._bitField & 67108864) === 67108864;\n};\n\nPromise.prototype._setLength = function (len) {\n this._bitField = (this._bitField & -65536) |\n (len & 65535);\n};\n\nPromise.prototype._setFulfilled = function () {\n this._bitField = this._bitField | 33554432;\n this._fireEvent(\"promiseFulfilled\", this);\n};\n\nPromise.prototype._setRejected = function () {\n this._bitField = this._bitField | 16777216;\n this._fireEvent(\"promiseRejected\", this);\n};\n\nPromise.prototype._setFollowing = function () {\n this._bitField = this._bitField | 67108864;\n this._fireEvent(\"promiseResolved\", this);\n};\n\nPromise.prototype._setIsFinal = function () {\n this._bitField = this._bitField | 4194304;\n};\n\nPromise.prototype._isFinal = function () {\n return (this._bitField & 4194304) > 0;\n};\n\nPromise.prototype._unsetCancelled = function() {\n this._bitField = this._bitField & (~65536);\n};\n\nPromise.prototype._setCancelled = function() {\n this._bitField = this._bitField | 65536;\n this._fireEvent(\"promiseCancelled\", this);\n};\n\nPromise.prototype._setWillBeCancelled = function() {\n this._bitField = this._bitField | 8388608;\n};\n\nPromise.prototype._setAsyncGuaranteed = function() {\n if (async.hasCustomScheduler()) return;\n this._bitField = this._bitField | 134217728;\n};\n\nPromise.prototype._receiverAt = function (index) {\n var ret = index === 0 ? 
this._receiver0 : this[\n index * 4 - 4 + 3];\n if (ret === UNDEFINED_BINDING) {\n return undefined;\n } else if (ret === undefined && this._isBound()) {\n return this._boundValue();\n }\n return ret;\n};\n\nPromise.prototype._promiseAt = function (index) {\n return this[\n index * 4 - 4 + 2];\n};\n\nPromise.prototype._fulfillmentHandlerAt = function (index) {\n return this[\n index * 4 - 4 + 0];\n};\n\nPromise.prototype._rejectionHandlerAt = function (index) {\n return this[\n index * 4 - 4 + 1];\n};\n\nPromise.prototype._boundValue = function() {};\n\nPromise.prototype._migrateCallback0 = function (follower) {\n var bitField = follower._bitField;\n var fulfill = follower._fulfillmentHandler0;\n var reject = follower._rejectionHandler0;\n var promise = follower._promise0;\n var receiver = follower._receiverAt(0);\n if (receiver === undefined) receiver = UNDEFINED_BINDING;\n this._addCallbacks(fulfill, reject, promise, receiver, null);\n};\n\nPromise.prototype._migrateCallbackAt = function (follower, index) {\n var fulfill = follower._fulfillmentHandlerAt(index);\n var reject = follower._rejectionHandlerAt(index);\n var promise = follower._promiseAt(index);\n var receiver = follower._receiverAt(index);\n if (receiver === undefined) receiver = UNDEFINED_BINDING;\n this._addCallbacks(fulfill, reject, promise, receiver, null);\n};\n\nPromise.prototype._addCallbacks = function (\n fulfill,\n reject,\n promise,\n receiver,\n domain\n) {\n var index = this._length();\n\n if (index >= 65535 - 4) {\n index = 0;\n this._setLength(0);\n }\n\n if (index === 0) {\n this._promise0 = promise;\n this._receiver0 = receiver;\n if (typeof fulfill === \"function\") {\n this._fulfillmentHandler0 =\n domain === null ? fulfill : util.domainBind(domain, fulfill);\n }\n if (typeof reject === \"function\") {\n this._rejectionHandler0 =\n domain === null ? reject : util.domainBind(domain, reject);\n }\n } else {\n var base = index * 4 - 4;\n this[base + 2] = promise;\n this[base + 3] = receiver;\n if (typeof fulfill === \"function\") {\n this[base + 0] =\n domain === null ? fulfill : util.domainBind(domain, fulfill);\n }\n if (typeof reject === \"function\") {\n this[base + 1] =\n domain === null ? 
reject : util.domainBind(domain, reject);\n }\n }\n this._setLength(index + 1);\n return index;\n};\n\nPromise.prototype._proxy = function (proxyable, arg) {\n this._addCallbacks(undefined, undefined, arg, proxyable, null);\n};\n\nPromise.prototype._resolveCallback = function(value, shouldBind) {\n if (((this._bitField & 117506048) !== 0)) return;\n if (value === this)\n return this._rejectCallback(makeSelfResolutionError(), false);\n var maybePromise = tryConvertToPromise(value, this);\n if (!(maybePromise instanceof Promise)) return this._fulfill(value);\n\n if (shouldBind) this._propagateFrom(maybePromise, 2);\n\n var promise = maybePromise._target();\n\n if (promise === this) {\n this._reject(makeSelfResolutionError());\n return;\n }\n\n var bitField = promise._bitField;\n if (((bitField & 50397184) === 0)) {\n var len = this._length();\n if (len > 0) promise._migrateCallback0(this);\n for (var i = 1; i < len; ++i) {\n promise._migrateCallbackAt(this, i);\n }\n this._setFollowing();\n this._setLength(0);\n this._setFollowee(promise);\n } else if (((bitField & 33554432) !== 0)) {\n this._fulfill(promise._value());\n } else if (((bitField & 16777216) !== 0)) {\n this._reject(promise._reason());\n } else {\n var reason = new CancellationError(\"late cancellation observer\");\n promise._attachExtraTrace(reason);\n this._reject(reason);\n }\n};\n\nPromise.prototype._rejectCallback =\nfunction(reason, synchronous, ignoreNonErrorWarnings) {\n var trace = util.ensureErrorObject(reason);\n var hasStack = trace === reason;\n if (!hasStack && !ignoreNonErrorWarnings && debug.warnings()) {\n var message = \"a promise was rejected with a non-error: \" +\n util.classString(reason);\n this._warn(message, true);\n }\n this._attachExtraTrace(trace, synchronous ? 
hasStack : false);\n this._reject(reason);\n};\n\nPromise.prototype._resolveFromExecutor = function (executor) {\n var promise = this;\n this._captureStackTrace();\n this._pushContext();\n var synchronous = true;\n var r = this._execute(executor, function(value) {\n promise._resolveCallback(value);\n }, function (reason) {\n promise._rejectCallback(reason, synchronous);\n });\n synchronous = false;\n this._popContext();\n\n if (r !== undefined) {\n promise._rejectCallback(r, true);\n }\n};\n\nPromise.prototype._settlePromiseFromHandler = function (\n handler, receiver, value, promise\n) {\n var bitField = promise._bitField;\n if (((bitField & 65536) !== 0)) return;\n promise._pushContext();\n var x;\n if (receiver === APPLY) {\n if (!value || typeof value.length !== \"number\") {\n x = errorObj;\n x.e = new TypeError(\"cannot .spread() a non-array: \" +\n util.classString(value));\n } else {\n x = tryCatch(handler).apply(this._boundValue(), value);\n }\n } else {\n x = tryCatch(handler).call(receiver, value);\n }\n var promiseCreated = promise._popContext();\n bitField = promise._bitField;\n if (((bitField & 65536) !== 0)) return;\n\n if (x === NEXT_FILTER) {\n promise._reject(value);\n } else if (x === errorObj) {\n promise._rejectCallback(x.e, false);\n } else {\n debug.checkForgottenReturns(x, promiseCreated, \"\", promise, this);\n promise._resolveCallback(x);\n }\n};\n\nPromise.prototype._target = function() {\n var ret = this;\n while (ret._isFollowing()) ret = ret._followee();\n return ret;\n};\n\nPromise.prototype._followee = function() {\n return this._rejectionHandler0;\n};\n\nPromise.prototype._setFollowee = function(promise) {\n this._rejectionHandler0 = promise;\n};\n\nPromise.prototype._settlePromise = function(promise, handler, receiver, value) {\n var isPromise = promise instanceof Promise;\n var bitField = this._bitField;\n var asyncGuaranteed = ((bitField & 134217728) !== 0);\n if (((bitField & 65536) !== 0)) {\n if (isPromise) promise._invokeInternalOnCancel();\n\n if (receiver instanceof PassThroughHandlerContext &&\n receiver.isFinallyHandler()) {\n receiver.cancelPromise = promise;\n if (tryCatch(handler).call(receiver, value) === errorObj) {\n promise._reject(errorObj.e);\n }\n } else if (handler === reflectHandler) {\n promise._fulfill(reflectHandler.call(receiver));\n } else if (receiver instanceof Proxyable) {\n receiver._promiseCancelled(promise);\n } else if (isPromise || promise instanceof PromiseArray) {\n promise._cancel();\n } else {\n receiver.cancel();\n }\n } else if (typeof handler === \"function\") {\n if (!isPromise) {\n handler.call(receiver, value, promise);\n } else {\n if (asyncGuaranteed) promise._setAsyncGuaranteed();\n this._settlePromiseFromHandler(handler, receiver, value, promise);\n }\n } else if (receiver instanceof Proxyable) {\n if (!receiver._isResolved()) {\n if (((bitField & 33554432) !== 0)) {\n receiver._promiseFulfilled(value, promise);\n } else {\n receiver._promiseRejected(value, promise);\n }\n }\n } else if (isPromise) {\n if (asyncGuaranteed) promise._setAsyncGuaranteed();\n if (((bitField & 33554432) !== 0)) {\n promise._fulfill(value);\n } else {\n promise._reject(value);\n }\n }\n};\n\nPromise.prototype._settlePromiseLateCancellationObserver = function(ctx) {\n var handler = ctx.handler;\n var promise = ctx.promise;\n var receiver = ctx.receiver;\n var value = ctx.value;\n if (typeof handler === \"function\") {\n if (!(promise instanceof Promise)) {\n handler.call(receiver, value, promise);\n } else {\n 
this._settlePromiseFromHandler(handler, receiver, value, promise);\n }\n } else if (promise instanceof Promise) {\n promise._reject(value);\n }\n};\n\nPromise.prototype._settlePromiseCtx = function(ctx) {\n this._settlePromise(ctx.promise, ctx.handler, ctx.receiver, ctx.value);\n};\n\nPromise.prototype._settlePromise0 = function(handler, value, bitField) {\n var promise = this._promise0;\n var receiver = this._receiverAt(0);\n this._promise0 = undefined;\n this._receiver0 = undefined;\n this._settlePromise(promise, handler, receiver, value);\n};\n\nPromise.prototype._clearCallbackDataAtIndex = function(index) {\n var base = index * 4 - 4;\n this[base + 2] =\n this[base + 3] =\n this[base + 0] =\n this[base + 1] = undefined;\n};\n\nPromise.prototype._fulfill = function (value) {\n var bitField = this._bitField;\n if (((bitField & 117506048) >>> 16)) return;\n if (value === this) {\n var err = makeSelfResolutionError();\n this._attachExtraTrace(err);\n return this._reject(err);\n }\n this._setFulfilled();\n this._rejectionHandler0 = value;\n\n if ((bitField & 65535) > 0) {\n if (((bitField & 134217728) !== 0)) {\n this._settlePromises();\n } else {\n async.settlePromises(this);\n }\n }\n};\n\nPromise.prototype._reject = function (reason) {\n var bitField = this._bitField;\n if (((bitField & 117506048) >>> 16)) return;\n this._setRejected();\n this._fulfillmentHandler0 = reason;\n\n if (this._isFinal()) {\n return async.fatalError(reason, util.isNode);\n }\n\n if ((bitField & 65535) > 0) {\n async.settlePromises(this);\n } else {\n this._ensurePossibleRejectionHandled();\n }\n};\n\nPromise.prototype._fulfillPromises = function (len, value) {\n for (var i = 1; i < len; i++) {\n var handler = this._fulfillmentHandlerAt(i);\n var promise = this._promiseAt(i);\n var receiver = this._receiverAt(i);\n this._clearCallbackDataAtIndex(i);\n this._settlePromise(promise, handler, receiver, value);\n }\n};\n\nPromise.prototype._rejectPromises = function (len, reason) {\n for (var i = 1; i < len; i++) {\n var handler = this._rejectionHandlerAt(i);\n var promise = this._promiseAt(i);\n var receiver = this._receiverAt(i);\n this._clearCallbackDataAtIndex(i);\n this._settlePromise(promise, handler, receiver, reason);\n }\n};\n\nPromise.prototype._settlePromises = function () {\n var bitField = this._bitField;\n var len = (bitField & 65535);\n\n if (len > 0) {\n if (((bitField & 16842752) !== 0)) {\n var reason = this._fulfillmentHandler0;\n this._settlePromise0(this._rejectionHandler0, reason, bitField);\n this._rejectPromises(len, reason);\n } else {\n var value = this._rejectionHandler0;\n this._settlePromise0(this._fulfillmentHandler0, value, bitField);\n this._fulfillPromises(len, value);\n }\n this._setLength(0);\n }\n this._clearCancellationData();\n};\n\nPromise.prototype._settledValue = function() {\n var bitField = this._bitField;\n if (((bitField & 33554432) !== 0)) {\n return this._rejectionHandler0;\n } else if (((bitField & 16777216) !== 0)) {\n return this._fulfillmentHandler0;\n }\n};\n\nfunction deferResolve(v) {this.promise._resolveCallback(v);}\nfunction deferReject(v) {this.promise._rejectCallback(v, false);}\n\nPromise.defer = Promise.pending = function() {\n debug.deprecated(\"Promise.defer\", \"new Promise\");\n var promise = new Promise(INTERNAL);\n return {\n promise: promise,\n resolve: deferResolve,\n reject: deferReject\n };\n};\n\nutil.notEnumerableProp(Promise,\n \"_makeSelfResolutionError\",\n makeSelfResolutionError);\n\nrequire(\"./method\")(Promise, INTERNAL, 
tryConvertToPromise, apiRejection,\n debug);\nrequire(\"./bind\")(Promise, INTERNAL, tryConvertToPromise, debug);\nrequire(\"./cancel\")(Promise, PromiseArray, apiRejection, debug);\nrequire(\"./direct_resolve\")(Promise);\nrequire(\"./synchronous_inspection\")(Promise);\nrequire(\"./join\")(\n Promise, PromiseArray, tryConvertToPromise, INTERNAL, async, getDomain);\nPromise.Promise = Promise;\nPromise.version = \"3.4.7\";\nrequire('./map.js')(Promise, PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug);\nrequire('./call_get.js')(Promise);\nrequire('./using.js')(Promise, apiRejection, tryConvertToPromise, createContext, INTERNAL, debug);\nrequire('./timers.js')(Promise, INTERNAL, debug);\nrequire('./generators.js')(Promise, apiRejection, INTERNAL, tryConvertToPromise, Proxyable, debug);\nrequire('./nodeify.js')(Promise);\nrequire('./promisify.js')(Promise, INTERNAL);\nrequire('./props.js')(Promise, PromiseArray, tryConvertToPromise, apiRejection);\nrequire('./race.js')(Promise, INTERNAL, tryConvertToPromise, apiRejection);\nrequire('./reduce.js')(Promise, PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug);\nrequire('./settle.js')(Promise, PromiseArray, debug);\nrequire('./some.js')(Promise, PromiseArray, apiRejection);\nrequire('./filter.js')(Promise, INTERNAL);\nrequire('./each.js')(Promise, INTERNAL);\nrequire('./any.js')(Promise);\n \n util.toFastProperties(Promise); \n util.toFastProperties(Promise.prototype); \n function fillTypes(value) { \n var p = new Promise(INTERNAL); \n p._fulfillmentHandler0 = value; \n p._rejectionHandler0 = value; \n p._promise0 = value; \n p._receiver0 = value; \n } \n // Complete slack tracking, opt out of field-type tracking and \n // stabilize map \n fillTypes({a: 1}); \n fillTypes({b: 2}); \n fillTypes({c: 3}); \n fillTypes(1); \n fillTypes(function(){}); \n fillTypes(undefined); \n fillTypes(false); \n fillTypes(new Promise(INTERNAL)); \n debug.setBounds(Async.firstLineError, util.lastLineError); \n return Promise; \n\n};\n","\"use strict\";\nmodule.exports = function(Promise, INTERNAL, tryConvertToPromise,\n apiRejection, Proxyable) {\nvar util = require(\"./util\");\nvar isArray = util.isArray;\n\nfunction toResolutionValue(val) {\n switch(val) {\n case -2: return [];\n case -3: return {};\n }\n}\n\nfunction PromiseArray(values) {\n var promise = this._promise = new Promise(INTERNAL);\n if (values instanceof Promise) {\n promise._propagateFrom(values, 3);\n }\n promise._setOnCancel(this);\n this._values = values;\n this._length = 0;\n this._totalResolved = 0;\n this._init(undefined, -2);\n}\nutil.inherits(PromiseArray, Proxyable);\n\nPromiseArray.prototype.length = function () {\n return this._length;\n};\n\nPromiseArray.prototype.promise = function () {\n return this._promise;\n};\n\nPromiseArray.prototype._init = function init(_, resolveValueIfEmpty) {\n var values = tryConvertToPromise(this._values, this._promise);\n if (values instanceof Promise) {\n values = values._target();\n var bitField = values._bitField;\n ;\n this._values = values;\n\n if (((bitField & 50397184) === 0)) {\n this._promise._setAsyncGuaranteed();\n return values._then(\n init,\n this._reject,\n undefined,\n this,\n resolveValueIfEmpty\n );\n } else if (((bitField & 33554432) !== 0)) {\n values = values._value();\n } else if (((bitField & 16777216) !== 0)) {\n return this._reject(values._reason());\n } else {\n return this._cancel();\n }\n }\n values = util.asArray(values);\n if (values === null) {\n var err = apiRejection(\n \"expecting an array 
or an iterable object but got \" + util.classString(values)).reason();\n this._promise._rejectCallback(err, false);\n return;\n }\n\n if (values.length === 0) {\n if (resolveValueIfEmpty === -5) {\n this._resolveEmptyArray();\n }\n else {\n this._resolve(toResolutionValue(resolveValueIfEmpty));\n }\n return;\n }\n this._iterate(values);\n};\n\nPromiseArray.prototype._iterate = function(values) {\n var len = this.getActualLength(values.length);\n this._length = len;\n this._values = this.shouldCopyValues() ? new Array(len) : this._values;\n var result = this._promise;\n var isResolved = false;\n var bitField = null;\n for (var i = 0; i < len; ++i) {\n var maybePromise = tryConvertToPromise(values[i], result);\n\n if (maybePromise instanceof Promise) {\n maybePromise = maybePromise._target();\n bitField = maybePromise._bitField;\n } else {\n bitField = null;\n }\n\n if (isResolved) {\n if (bitField !== null) {\n maybePromise.suppressUnhandledRejections();\n }\n } else if (bitField !== null) {\n if (((bitField & 50397184) === 0)) {\n maybePromise._proxy(this, i);\n this._values[i] = maybePromise;\n } else if (((bitField & 33554432) !== 0)) {\n isResolved = this._promiseFulfilled(maybePromise._value(), i);\n } else if (((bitField & 16777216) !== 0)) {\n isResolved = this._promiseRejected(maybePromise._reason(), i);\n } else {\n isResolved = this._promiseCancelled(i);\n }\n } else {\n isResolved = this._promiseFulfilled(maybePromise, i);\n }\n }\n if (!isResolved) result._setAsyncGuaranteed();\n};\n\nPromiseArray.prototype._isResolved = function () {\n return this._values === null;\n};\n\nPromiseArray.prototype._resolve = function (value) {\n this._values = null;\n this._promise._fulfill(value);\n};\n\nPromiseArray.prototype._cancel = function() {\n if (this._isResolved() || !this._promise._isCancellable()) return;\n this._values = null;\n this._promise._cancel();\n};\n\nPromiseArray.prototype._reject = function (reason) {\n this._values = null;\n this._promise._rejectCallback(reason, false);\n};\n\nPromiseArray.prototype._promiseFulfilled = function (value, index) {\n this._values[index] = value;\n var totalResolved = ++this._totalResolved;\n if (totalResolved >= this._length) {\n this._resolve(this._values);\n return true;\n }\n return false;\n};\n\nPromiseArray.prototype._promiseCancelled = function() {\n this._cancel();\n return true;\n};\n\nPromiseArray.prototype._promiseRejected = function (reason) {\n this._totalResolved++;\n this._reject(reason);\n return true;\n};\n\nPromiseArray.prototype._resultCancelled = function() {\n if (this._isResolved()) return;\n var values = this._values;\n this._cancel();\n if (values instanceof Promise) {\n values.cancel();\n } else {\n for (var i = 0; i < values.length; ++i) {\n if (values[i] instanceof Promise) {\n values[i].cancel();\n }\n }\n }\n};\n\nPromiseArray.prototype.shouldCopyValues = function () {\n return true;\n};\n\nPromiseArray.prototype.getActualLength = function (len) {\n return len;\n};\n\nreturn PromiseArray;\n};\n","\"use strict\";\nmodule.exports = function(Promise, INTERNAL) {\nvar THIS = {};\nvar util = require(\"./util\");\nvar nodebackForPromise = require(\"./nodeback\");\nvar withAppended = util.withAppended;\nvar maybeWrapAsError = util.maybeWrapAsError;\nvar canEvaluate = util.canEvaluate;\nvar TypeError = require(\"./errors\").TypeError;\nvar defaultSuffix = \"Async\";\nvar defaultPromisified = {__isPromisified__: true};\nvar noCopyProps = [\n \"arity\", \"length\",\n \"name\",\n \"arguments\",\n \"caller\",\n \"callee\",\n 
\"prototype\",\n \"__isPromisified__\"\n];\nvar noCopyPropsPattern = new RegExp(\"^(?:\" + noCopyProps.join(\"|\") + \")$\");\n\nvar defaultFilter = function(name) {\n return util.isIdentifier(name) &&\n name.charAt(0) !== \"_\" &&\n name !== \"constructor\";\n};\n\nfunction propsFilter(key) {\n return !noCopyPropsPattern.test(key);\n}\n\nfunction isPromisified(fn) {\n try {\n return fn.__isPromisified__ === true;\n }\n catch (e) {\n return false;\n }\n}\n\nfunction hasPromisified(obj, key, suffix) {\n var val = util.getDataPropertyOrDefault(obj, key + suffix,\n defaultPromisified);\n return val ? isPromisified(val) : false;\n}\nfunction checkValid(ret, suffix, suffixRegexp) {\n for (var i = 0; i < ret.length; i += 2) {\n var key = ret[i];\n if (suffixRegexp.test(key)) {\n var keyWithoutAsyncSuffix = key.replace(suffixRegexp, \"\");\n for (var j = 0; j < ret.length; j += 2) {\n if (ret[j] === keyWithoutAsyncSuffix) {\n throw new TypeError(\"Cannot promisify an API that has normal methods with '%s'-suffix\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\"\n .replace(\"%s\", suffix));\n }\n }\n }\n }\n}\n\nfunction promisifiableMethods(obj, suffix, suffixRegexp, filter) {\n var keys = util.inheritedDataKeys(obj);\n var ret = [];\n for (var i = 0; i < keys.length; ++i) {\n var key = keys[i];\n var value = obj[key];\n var passesDefaultFilter = filter === defaultFilter\n ? true : defaultFilter(key, value, obj);\n if (typeof value === \"function\" &&\n !isPromisified(value) &&\n !hasPromisified(obj, key, suffix) &&\n filter(key, value, obj, passesDefaultFilter)) {\n ret.push(key, value);\n }\n }\n checkValid(ret, suffix, suffixRegexp);\n return ret;\n}\n\nvar escapeIdentRegex = function(str) {\n return str.replace(/([$])/, \"\\\\$\");\n};\n\nvar makeNodePromisifiedEval;\nif (!false) {\nvar switchCaseArgumentOrder = function(likelyArgumentCount) {\n var ret = [likelyArgumentCount];\n var min = Math.max(0, likelyArgumentCount - 1 - 3);\n for(var i = likelyArgumentCount - 1; i >= min; --i) {\n ret.push(i);\n }\n for(var i = likelyArgumentCount + 1; i <= 3; ++i) {\n ret.push(i);\n }\n return ret;\n};\n\nvar argumentSequence = function(argumentCount) {\n return util.filledRange(argumentCount, \"_arg\", \"\");\n};\n\nvar parameterDeclaration = function(parameterCount) {\n return util.filledRange(\n Math.max(parameterCount, 3), \"_arg\", \"\");\n};\n\nvar parameterCount = function(fn) {\n if (typeof fn.length === \"number\") {\n return Math.max(Math.min(fn.length, 1023 + 1), 0);\n }\n return 0;\n};\n\nmakeNodePromisifiedEval =\nfunction(callback, receiver, originalName, fn, _, multiArgs) {\n var newParameterCount = Math.max(0, parameterCount(fn) - 1);\n var argumentOrder = switchCaseArgumentOrder(newParameterCount);\n var shouldProxyThis = typeof callback === \"string\" || receiver === THIS;\n\n function generateCallForArgumentCount(count) {\n var args = argumentSequence(count).join(\", \");\n var comma = count > 0 ? \", \" : \"\";\n var ret;\n if (shouldProxyThis) {\n ret = \"ret = callback.call(this, {{args}}, nodeback); break;\\n\";\n } else {\n ret = receiver === undefined\n ? 
\"ret = callback({{args}}, nodeback); break;\\n\"\n : \"ret = callback.call(receiver, {{args}}, nodeback); break;\\n\";\n }\n return ret.replace(\"{{args}}\", args).replace(\", \", comma);\n }\n\n function generateArgumentSwitchCase() {\n var ret = \"\";\n for (var i = 0; i < argumentOrder.length; ++i) {\n ret += \"case \" + argumentOrder[i] +\":\" +\n generateCallForArgumentCount(argumentOrder[i]);\n }\n\n ret += \" \\n\\\n default: \\n\\\n var args = new Array(len + 1); \\n\\\n var i = 0; \\n\\\n for (var i = 0; i < len; ++i) { \\n\\\n args[i] = arguments[i]; \\n\\\n } \\n\\\n args[i] = nodeback; \\n\\\n [CodeForCall] \\n\\\n break; \\n\\\n \".replace(\"[CodeForCall]\", (shouldProxyThis\n ? \"ret = callback.apply(this, args);\\n\"\n : \"ret = callback.apply(receiver, args);\\n\"));\n return ret;\n }\n\n var getFunctionCode = typeof callback === \"string\"\n ? (\"this != null ? this['\"+callback+\"'] : fn\")\n : \"fn\";\n var body = \"'use strict'; \\n\\\n var ret = function (Parameters) { \\n\\\n 'use strict'; \\n\\\n var len = arguments.length; \\n\\\n var promise = new Promise(INTERNAL); \\n\\\n promise._captureStackTrace(); \\n\\\n var nodeback = nodebackForPromise(promise, \" + multiArgs + \"); \\n\\\n var ret; \\n\\\n var callback = tryCatch([GetFunctionCode]); \\n\\\n switch(len) { \\n\\\n [CodeForSwitchCase] \\n\\\n } \\n\\\n if (ret === errorObj) { \\n\\\n promise._rejectCallback(maybeWrapAsError(ret.e), true, true);\\n\\\n } \\n\\\n if (!promise._isFateSealed()) promise._setAsyncGuaranteed(); \\n\\\n return promise; \\n\\\n }; \\n\\\n notEnumerableProp(ret, '__isPromisified__', true); \\n\\\n return ret; \\n\\\n \".replace(\"[CodeForSwitchCase]\", generateArgumentSwitchCase())\n .replace(\"[GetFunctionCode]\", getFunctionCode);\n body = body.replace(\"Parameters\", parameterDeclaration(newParameterCount));\n return new Function(\"Promise\",\n \"fn\",\n \"receiver\",\n \"withAppended\",\n \"maybeWrapAsError\",\n \"nodebackForPromise\",\n \"tryCatch\",\n \"errorObj\",\n \"notEnumerableProp\",\n \"INTERNAL\",\n body)(\n Promise,\n fn,\n receiver,\n withAppended,\n maybeWrapAsError,\n nodebackForPromise,\n util.tryCatch,\n util.errorObj,\n util.notEnumerableProp,\n INTERNAL);\n};\n}\n\nfunction makeNodePromisifiedClosure(callback, receiver, _, fn, __, multiArgs) {\n var defaultThis = (function() {return this;})();\n var method = callback;\n if (typeof method === \"string\") {\n callback = fn;\n }\n function promisified() {\n var _receiver = receiver;\n if (receiver === THIS) _receiver = this;\n var promise = new Promise(INTERNAL);\n promise._captureStackTrace();\n var cb = typeof method === \"string\" && this !== defaultThis\n ? this[method] : callback;\n var fn = nodebackForPromise(promise, multiArgs);\n try {\n cb.apply(_receiver, withAppended(arguments, fn));\n } catch(e) {\n promise._rejectCallback(maybeWrapAsError(e), true, true);\n }\n if (!promise._isFateSealed()) promise._setAsyncGuaranteed();\n return promise;\n }\n util.notEnumerableProp(promisified, \"__isPromisified__\", true);\n return promisified;\n}\n\nvar makeNodePromisified = canEvaluate\n ? 
makeNodePromisifiedEval\n : makeNodePromisifiedClosure;\n\nfunction promisifyAll(obj, suffix, filter, promisifier, multiArgs) {\n var suffixRegexp = new RegExp(escapeIdentRegex(suffix) + \"$\");\n var methods =\n promisifiableMethods(obj, suffix, suffixRegexp, filter);\n\n for (var i = 0, len = methods.length; i < len; i+= 2) {\n var key = methods[i];\n var fn = methods[i+1];\n var promisifiedKey = key + suffix;\n if (promisifier === makeNodePromisified) {\n obj[promisifiedKey] =\n makeNodePromisified(key, THIS, key, fn, suffix, multiArgs);\n } else {\n var promisified = promisifier(fn, function() {\n return makeNodePromisified(key, THIS, key,\n fn, suffix, multiArgs);\n });\n util.notEnumerableProp(promisified, \"__isPromisified__\", true);\n obj[promisifiedKey] = promisified;\n }\n }\n util.toFastProperties(obj);\n return obj;\n}\n\nfunction promisify(callback, receiver, multiArgs) {\n return makeNodePromisified(callback, receiver, undefined,\n callback, null, multiArgs);\n}\n\nPromise.promisify = function (fn, options) {\n if (typeof fn !== \"function\") {\n throw new TypeError(\"expecting a function but got \" + util.classString(fn));\n }\n if (isPromisified(fn)) {\n return fn;\n }\n options = Object(options);\n var receiver = options.context === undefined ? THIS : options.context;\n var multiArgs = !!options.multiArgs;\n var ret = promisify(fn, receiver, multiArgs);\n util.copyDescriptors(fn, ret, propsFilter);\n return ret;\n};\n\nPromise.promisifyAll = function (target, options) {\n if (typeof target !== \"function\" && typeof target !== \"object\") {\n throw new TypeError(\"the target of promisifyAll must be an object or a function\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n options = Object(options);\n var multiArgs = !!options.multiArgs;\n var suffix = options.suffix;\n if (typeof suffix !== \"string\") suffix = defaultSuffix;\n var filter = options.filter;\n if (typeof filter !== \"function\") filter = defaultFilter;\n var promisifier = options.promisifier;\n if (typeof promisifier !== \"function\") promisifier = makeNodePromisified;\n\n if (!util.isIdentifier(suffix)) {\n throw new RangeError(\"suffix must be a valid identifier\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n\n var keys = util.inheritedDataKeys(target);\n for (var i = 0; i < keys.length; ++i) {\n var value = target[keys[i]];\n if (keys[i] !== \"constructor\" &&\n util.isClass(value)) {\n promisifyAll(value.prototype, suffix, filter, promisifier,\n multiArgs);\n promisifyAll(value, suffix, filter, promisifier, multiArgs);\n }\n }\n\n return promisifyAll(target, suffix, filter, promisifier, multiArgs);\n};\n};\n\n","\"use strict\";\nmodule.exports = function(\n Promise, PromiseArray, tryConvertToPromise, apiRejection) {\nvar util = require(\"./util\");\nvar isObject = util.isObject;\nvar es5 = require(\"./es5\");\nvar Es6Map;\nif (typeof Map === \"function\") Es6Map = Map;\n\nvar mapToEntries = (function() {\n var index = 0;\n var size = 0;\n\n function extractEntry(value, key) {\n this[index] = value;\n this[index + size] = key;\n index++;\n }\n\n return function mapToEntries(map) {\n size = map.size;\n index = 0;\n var ret = new Array(map.size * 2);\n map.forEach(extractEntry, ret);\n return ret;\n };\n})();\n\nvar entriesToMap = function(entries) {\n var ret = new Es6Map();\n var length = entries.length / 2 | 0;\n for (var i = 0; i < length; ++i) {\n var key = entries[length + i];\n var value = entries[i];\n ret.set(key, value);\n }\n return ret;\n};\n\nfunction PropertiesPromiseArray(obj) 
{\n var isMap = false;\n var entries;\n if (Es6Map !== undefined && obj instanceof Es6Map) {\n entries = mapToEntries(obj);\n isMap = true;\n } else {\n var keys = es5.keys(obj);\n var len = keys.length;\n entries = new Array(len * 2);\n for (var i = 0; i < len; ++i) {\n var key = keys[i];\n entries[i] = obj[key];\n entries[i + len] = key;\n }\n }\n this.constructor$(entries);\n this._isMap = isMap;\n this._init$(undefined, -3);\n}\nutil.inherits(PropertiesPromiseArray, PromiseArray);\n\nPropertiesPromiseArray.prototype._init = function () {};\n\nPropertiesPromiseArray.prototype._promiseFulfilled = function (value, index) {\n this._values[index] = value;\n var totalResolved = ++this._totalResolved;\n if (totalResolved >= this._length) {\n var val;\n if (this._isMap) {\n val = entriesToMap(this._values);\n } else {\n val = {};\n var keyOffset = this.length();\n for (var i = 0, len = this.length(); i < len; ++i) {\n val[this._values[i + keyOffset]] = this._values[i];\n }\n }\n this._resolve(val);\n return true;\n }\n return false;\n};\n\nPropertiesPromiseArray.prototype.shouldCopyValues = function () {\n return false;\n};\n\nPropertiesPromiseArray.prototype.getActualLength = function (len) {\n return len >> 1;\n};\n\nfunction props(promises) {\n var ret;\n var castValue = tryConvertToPromise(promises);\n\n if (!isObject(castValue)) {\n return apiRejection(\"cannot await properties of a non-object\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n } else if (castValue instanceof Promise) {\n ret = castValue._then(\n Promise.props, undefined, undefined, undefined, undefined);\n } else {\n ret = new PropertiesPromiseArray(castValue).promise();\n }\n\n if (castValue instanceof Promise) {\n ret._propagateFrom(castValue, 2);\n }\n return ret;\n}\n\nPromise.prototype.props = function () {\n return props(this);\n};\n\nPromise.props = function (promises) {\n return props(promises);\n};\n};\n","\"use strict\";\nfunction arrayMove(src, srcIndex, dst, dstIndex, len) {\n for (var j = 0; j < len; ++j) {\n dst[j + dstIndex] = src[j + srcIndex];\n src[j + srcIndex] = void 0;\n }\n}\n\nfunction Queue(capacity) {\n this._capacity = capacity;\n this._length = 0;\n this._front = 0;\n}\n\nQueue.prototype._willBeOverCapacity = function (size) {\n return this._capacity < size;\n};\n\nQueue.prototype._pushOne = function (arg) {\n var length = this.length();\n this._checkCapacity(length + 1);\n var i = (this._front + length) & (this._capacity - 1);\n this[i] = arg;\n this._length = length + 1;\n};\n\nQueue.prototype.push = function (fn, receiver, arg) {\n var length = this.length() + 3;\n if (this._willBeOverCapacity(length)) {\n this._pushOne(fn);\n this._pushOne(receiver);\n this._pushOne(arg);\n return;\n }\n var j = this._front + length - 3;\n this._checkCapacity(length);\n var wrapMask = this._capacity - 1;\n this[(j + 0) & wrapMask] = fn;\n this[(j + 1) & wrapMask] = receiver;\n this[(j + 2) & wrapMask] = arg;\n this._length = length;\n};\n\nQueue.prototype.shift = function () {\n var front = this._front,\n ret = this[front];\n\n this[front] = undefined;\n this._front = (front + 1) & (this._capacity - 1);\n this._length--;\n return ret;\n};\n\nQueue.prototype.length = function () {\n return this._length;\n};\n\nQueue.prototype._checkCapacity = function (size) {\n if (this._capacity < size) {\n this._resizeTo(this._capacity << 1);\n }\n};\n\nQueue.prototype._resizeTo = function (capacity) {\n var oldCapacity = this._capacity;\n this._capacity = capacity;\n var front = this._front;\n var length = 
this._length;\n var moveItemsCount = (front + length) & (oldCapacity - 1);\n arrayMove(this, 0, this, oldCapacity, moveItemsCount);\n};\n\nmodule.exports = Queue;\n","\"use strict\";\nmodule.exports = function(\n Promise, INTERNAL, tryConvertToPromise, apiRejection) {\nvar util = require(\"./util\");\n\nvar raceLater = function (promise) {\n return promise.then(function(array) {\n return race(array, promise);\n });\n};\n\nfunction race(promises, parent) {\n var maybePromise = tryConvertToPromise(promises);\n\n if (maybePromise instanceof Promise) {\n return raceLater(maybePromise);\n } else {\n promises = util.asArray(promises);\n if (promises === null)\n return apiRejection(\"expecting an array or an iterable object but got \" + util.classString(promises));\n }\n\n var ret = new Promise(INTERNAL);\n if (parent !== undefined) {\n ret._propagateFrom(parent, 3);\n }\n var fulfill = ret._fulfill;\n var reject = ret._reject;\n for (var i = 0, len = promises.length; i < len; ++i) {\n var val = promises[i];\n\n if (val === undefined && !(i in promises)) {\n continue;\n }\n\n Promise.cast(val)._then(fulfill, reject, undefined, ret, null);\n }\n return ret;\n}\n\nPromise.race = function (promises) {\n return race(promises, undefined);\n};\n\nPromise.prototype.race = function () {\n return race(this, undefined);\n};\n\n};\n","\"use strict\";\nmodule.exports = function(Promise,\n PromiseArray,\n apiRejection,\n tryConvertToPromise,\n INTERNAL,\n debug) {\nvar getDomain = Promise._getDomain;\nvar util = require(\"./util\");\nvar tryCatch = util.tryCatch;\n\nfunction ReductionPromiseArray(promises, fn, initialValue, _each) {\n this.constructor$(promises);\n var domain = getDomain();\n this._fn = domain === null ? fn : util.domainBind(domain, fn);\n if (initialValue !== undefined) {\n initialValue = Promise.resolve(initialValue);\n initialValue._attachCancellationCallback(this);\n }\n this._initialValue = initialValue;\n this._currentCancellable = null;\n if(_each === INTERNAL) {\n this._eachValues = Array(this._length);\n } else if (_each === 0) {\n this._eachValues = null;\n } else {\n this._eachValues = undefined;\n }\n this._promise._captureStackTrace();\n this._init$(undefined, -5);\n}\nutil.inherits(ReductionPromiseArray, PromiseArray);\n\nReductionPromiseArray.prototype._gotAccum = function(accum) {\n if (this._eachValues !== undefined && \n this._eachValues !== null && \n accum !== INTERNAL) {\n this._eachValues.push(accum);\n }\n};\n\nReductionPromiseArray.prototype._eachComplete = function(value) {\n if (this._eachValues !== null) {\n this._eachValues.push(value);\n }\n return this._eachValues;\n};\n\nReductionPromiseArray.prototype._init = function() {};\n\nReductionPromiseArray.prototype._resolveEmptyArray = function() {\n this._resolve(this._eachValues !== undefined ? 
this._eachValues\n : this._initialValue);\n};\n\nReductionPromiseArray.prototype.shouldCopyValues = function () {\n return false;\n};\n\nReductionPromiseArray.prototype._resolve = function(value) {\n this._promise._resolveCallback(value);\n this._values = null;\n};\n\nReductionPromiseArray.prototype._resultCancelled = function(sender) {\n if (sender === this._initialValue) return this._cancel();\n if (this._isResolved()) return;\n this._resultCancelled$();\n if (this._currentCancellable instanceof Promise) {\n this._currentCancellable.cancel();\n }\n if (this._initialValue instanceof Promise) {\n this._initialValue.cancel();\n }\n};\n\nReductionPromiseArray.prototype._iterate = function (values) {\n this._values = values;\n var value;\n var i;\n var length = values.length;\n if (this._initialValue !== undefined) {\n value = this._initialValue;\n i = 0;\n } else {\n value = Promise.resolve(values[0]);\n i = 1;\n }\n\n this._currentCancellable = value;\n\n if (!value.isRejected()) {\n for (; i < length; ++i) {\n var ctx = {\n accum: null,\n value: values[i],\n index: i,\n length: length,\n array: this\n };\n value = value._then(gotAccum, undefined, undefined, ctx, undefined);\n }\n }\n\n if (this._eachValues !== undefined) {\n value = value\n ._then(this._eachComplete, undefined, undefined, this, undefined);\n }\n value._then(completed, completed, undefined, value, this);\n};\n\nPromise.prototype.reduce = function (fn, initialValue) {\n return reduce(this, fn, initialValue, null);\n};\n\nPromise.reduce = function (promises, fn, initialValue, _each) {\n return reduce(promises, fn, initialValue, _each);\n};\n\nfunction completed(valueOrReason, array) {\n if (this.isFulfilled()) {\n array._resolve(valueOrReason);\n } else {\n array._reject(valueOrReason);\n }\n}\n\nfunction reduce(promises, fn, initialValue, _each) {\n if (typeof fn !== \"function\") {\n return apiRejection(\"expecting a function but got \" + util.classString(fn));\n }\n var array = new ReductionPromiseArray(promises, fn, initialValue, _each);\n return array.promise();\n}\n\nfunction gotAccum(accum) {\n this.accum = accum;\n this.array._gotAccum(accum);\n var value = tryConvertToPromise(this.value, this.array._promise);\n if (value instanceof Promise) {\n this.array._currentCancellable = value;\n return value._then(gotValue, undefined, undefined, this, undefined);\n } else {\n return gotValue.call(this, value);\n }\n}\n\nfunction gotValue(value) {\n var array = this.array;\n var promise = array._promise;\n var fn = tryCatch(array._fn);\n promise._pushContext();\n var ret;\n if (array._eachValues !== undefined) {\n ret = fn.call(promise._boundValue(), value, this.index, this.length);\n } else {\n ret = fn.call(promise._boundValue(),\n this.accum, value, this.index, this.length);\n }\n if (ret instanceof Promise) {\n array._currentCancellable = ret;\n }\n var promiseCreated = promise._popContext();\n debug.checkForgottenReturns(\n ret,\n promiseCreated,\n array._eachValues !== undefined ? \"Promise.each\" : \"Promise.reduce\",\n promise\n );\n return ret;\n}\n};\n","\"use strict\";\nvar util = require(\"./util\");\nvar schedule;\nvar noAsyncScheduler = function() {\n throw new Error(\"No async scheduler available\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n};\nvar NativePromise = util.getNativePromise();\nif (util.isNode && typeof MutationObserver === \"undefined\") {\n var GlobalSetImmediate = global.setImmediate;\n var ProcessNextTick = process.nextTick;\n schedule = util.isRecentNode\n ? 
function(fn) { GlobalSetImmediate.call(global, fn); }\n : function(fn) { ProcessNextTick.call(process, fn); };\n} else if (typeof NativePromise === \"function\" &&\n typeof NativePromise.resolve === \"function\") {\n var nativePromise = NativePromise.resolve();\n schedule = function(fn) {\n nativePromise.then(fn);\n };\n} else if ((typeof MutationObserver !== \"undefined\") &&\n !(typeof window !== \"undefined\" &&\n window.navigator &&\n (window.navigator.standalone || window.cordova))) {\n schedule = (function() {\n var div = document.createElement(\"div\");\n var opts = {attributes: true};\n var toggleScheduled = false;\n var div2 = document.createElement(\"div\");\n var o2 = new MutationObserver(function() {\n div.classList.toggle(\"foo\");\n toggleScheduled = false;\n });\n o2.observe(div2, opts);\n\n var scheduleToggle = function() {\n if (toggleScheduled) return;\n toggleScheduled = true;\n div2.classList.toggle(\"foo\");\n };\n\n return function schedule(fn) {\n var o = new MutationObserver(function() {\n o.disconnect();\n fn();\n });\n o.observe(div, opts);\n scheduleToggle();\n };\n })();\n} else if (typeof setImmediate !== \"undefined\") {\n schedule = function (fn) {\n setImmediate(fn);\n };\n} else if (typeof setTimeout !== \"undefined\") {\n schedule = function (fn) {\n setTimeout(fn, 0);\n };\n} else {\n schedule = noAsyncScheduler;\n}\nmodule.exports = schedule;\n","\"use strict\";\nmodule.exports =\n function(Promise, PromiseArray, debug) {\nvar PromiseInspection = Promise.PromiseInspection;\nvar util = require(\"./util\");\n\nfunction SettledPromiseArray(values) {\n this.constructor$(values);\n}\nutil.inherits(SettledPromiseArray, PromiseArray);\n\nSettledPromiseArray.prototype._promiseResolved = function (index, inspection) {\n this._values[index] = inspection;\n var totalResolved = ++this._totalResolved;\n if (totalResolved >= this._length) {\n this._resolve(this._values);\n return true;\n }\n return false;\n};\n\nSettledPromiseArray.prototype._promiseFulfilled = function (value, index) {\n var ret = new PromiseInspection();\n ret._bitField = 33554432;\n ret._settledValueField = value;\n return this._promiseResolved(index, ret);\n};\nSettledPromiseArray.prototype._promiseRejected = function (reason, index) {\n var ret = new PromiseInspection();\n ret._bitField = 16777216;\n ret._settledValueField = reason;\n return this._promiseResolved(index, ret);\n};\n\nPromise.settle = function (promises) {\n debug.deprecated(\".settle()\", \".reflect()\");\n return new SettledPromiseArray(promises).promise();\n};\n\nPromise.prototype.settle = function () {\n return Promise.settle(this);\n};\n};\n","\"use strict\";\nmodule.exports =\nfunction(Promise, PromiseArray, apiRejection) {\nvar util = require(\"./util\");\nvar RangeError = require(\"./errors\").RangeError;\nvar AggregateError = require(\"./errors\").AggregateError;\nvar isArray = util.isArray;\nvar CANCELLATION = {};\n\n\nfunction SomePromiseArray(values) {\n this.constructor$(values);\n this._howMany = 0;\n this._unwrap = false;\n this._initialized = false;\n}\nutil.inherits(SomePromiseArray, PromiseArray);\n\nSomePromiseArray.prototype._init = function () {\n if (!this._initialized) {\n return;\n }\n if (this._howMany === 0) {\n this._resolve([]);\n return;\n }\n this._init$(undefined, -5);\n var isArrayResolved = isArray(this._values);\n if (!this._isResolved() &&\n isArrayResolved &&\n this._howMany > this._canPossiblyFulfill()) {\n this._reject(this._getRangeError(this.length()));\n 
}\n};\n\nSomePromiseArray.prototype.init = function () {\n this._initialized = true;\n this._init();\n};\n\nSomePromiseArray.prototype.setUnwrap = function () {\n this._unwrap = true;\n};\n\nSomePromiseArray.prototype.howMany = function () {\n return this._howMany;\n};\n\nSomePromiseArray.prototype.setHowMany = function (count) {\n this._howMany = count;\n};\n\nSomePromiseArray.prototype._promiseFulfilled = function (value) {\n this._addFulfilled(value);\n if (this._fulfilled() === this.howMany()) {\n this._values.length = this.howMany();\n if (this.howMany() === 1 && this._unwrap) {\n this._resolve(this._values[0]);\n } else {\n this._resolve(this._values);\n }\n return true;\n }\n return false;\n\n};\nSomePromiseArray.prototype._promiseRejected = function (reason) {\n this._addRejected(reason);\n return this._checkOutcome();\n};\n\nSomePromiseArray.prototype._promiseCancelled = function () {\n if (this._values instanceof Promise || this._values == null) {\n return this._cancel();\n }\n this._addRejected(CANCELLATION);\n return this._checkOutcome();\n};\n\nSomePromiseArray.prototype._checkOutcome = function() {\n if (this.howMany() > this._canPossiblyFulfill()) {\n var e = new AggregateError();\n for (var i = this.length(); i < this._values.length; ++i) {\n if (this._values[i] !== CANCELLATION) {\n e.push(this._values[i]);\n }\n }\n if (e.length > 0) {\n this._reject(e);\n } else {\n this._cancel();\n }\n return true;\n }\n return false;\n};\n\nSomePromiseArray.prototype._fulfilled = function () {\n return this._totalResolved;\n};\n\nSomePromiseArray.prototype._rejected = function () {\n return this._values.length - this.length();\n};\n\nSomePromiseArray.prototype._addRejected = function (reason) {\n this._values.push(reason);\n};\n\nSomePromiseArray.prototype._addFulfilled = function (value) {\n this._values[this._totalResolved++] = value;\n};\n\nSomePromiseArray.prototype._canPossiblyFulfill = function () {\n return this.length() - this._rejected();\n};\n\nSomePromiseArray.prototype._getRangeError = function (count) {\n var message = \"Input array must contain at least \" +\n this._howMany + \" items but contains only \" + count + \" items\";\n return new RangeError(message);\n};\n\nSomePromiseArray.prototype._resolveEmptyArray = function () {\n this._reject(this._getRangeError(0));\n};\n\nfunction some(promises, howMany) {\n if ((howMany | 0) !== howMany || howMany < 0) {\n return apiRejection(\"expecting a positive integer\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n var ret = new SomePromiseArray(promises);\n var promise = ret.promise();\n ret.setHowMany(howMany);\n ret.init();\n return promise;\n}\n\nPromise.some = function (promises, howMany) {\n return some(promises, howMany);\n};\n\nPromise.prototype.some = function (howMany) {\n return some(this, howMany);\n};\n\nPromise._SomePromiseArray = SomePromiseArray;\n};\n","\"use strict\";\nmodule.exports = function(Promise) {\nfunction PromiseInspection(promise) {\n if (promise !== undefined) {\n promise = promise._target();\n this._bitField = promise._bitField;\n this._settledValueField = promise._isFateSealed()\n ? 
promise._settledValue() : undefined;\n }\n else {\n this._bitField = 0;\n this._settledValueField = undefined;\n }\n}\n\nPromiseInspection.prototype._settledValue = function() {\n return this._settledValueField;\n};\n\nvar value = PromiseInspection.prototype.value = function () {\n if (!this.isFulfilled()) {\n throw new TypeError(\"cannot get fulfillment value of a non-fulfilled promise\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n return this._settledValue();\n};\n\nvar reason = PromiseInspection.prototype.error =\nPromiseInspection.prototype.reason = function () {\n if (!this.isRejected()) {\n throw new TypeError(\"cannot get rejection reason of a non-rejected promise\\u000a\\u000a See http://goo.gl/MqrFmX\\u000a\");\n }\n return this._settledValue();\n};\n\nvar isFulfilled = PromiseInspection.prototype.isFulfilled = function() {\n return (this._bitField & 33554432) !== 0;\n};\n\nvar isRejected = PromiseInspection.prototype.isRejected = function () {\n return (this._bitField & 16777216) !== 0;\n};\n\nvar isPending = PromiseInspection.prototype.isPending = function () {\n return (this._bitField & 50397184) === 0;\n};\n\nvar isResolved = PromiseInspection.prototype.isResolved = function () {\n return (this._bitField & 50331648) !== 0;\n};\n\nPromiseInspection.prototype.isCancelled = function() {\n return (this._bitField & 8454144) !== 0;\n};\n\nPromise.prototype.__isCancelled = function() {\n return (this._bitField & 65536) === 65536;\n};\n\nPromise.prototype._isCancelled = function() {\n return this._target().__isCancelled();\n};\n\nPromise.prototype.isCancelled = function() {\n return (this._target()._bitField & 8454144) !== 0;\n};\n\nPromise.prototype.isPending = function() {\n return isPending.call(this._target());\n};\n\nPromise.prototype.isRejected = function() {\n return isRejected.call(this._target());\n};\n\nPromise.prototype.isFulfilled = function() {\n return isFulfilled.call(this._target());\n};\n\nPromise.prototype.isResolved = function() {\n return isResolved.call(this._target());\n};\n\nPromise.prototype.value = function() {\n return value.call(this._target());\n};\n\nPromise.prototype.reason = function() {\n var target = this._target();\n target._unsetRejectionIsUnhandled();\n return reason.call(target);\n};\n\nPromise.prototype._value = function() {\n return this._settledValue();\n};\n\nPromise.prototype._reason = function() {\n this._unsetRejectionIsUnhandled();\n return this._settledValue();\n};\n\nPromise.PromiseInspection = PromiseInspection;\n};\n","\"use strict\";\nmodule.exports = function(Promise, INTERNAL) {\nvar util = require(\"./util\");\nvar errorObj = util.errorObj;\nvar isObject = util.isObject;\n\nfunction tryConvertToPromise(obj, context) {\n if (isObject(obj)) {\n if (obj instanceof Promise) return obj;\n var then = getThen(obj);\n if (then === errorObj) {\n if (context) context._pushContext();\n var ret = Promise.reject(then.e);\n if (context) context._popContext();\n return ret;\n } else if (typeof then === \"function\") {\n if (isAnyBluebirdPromise(obj)) {\n var ret = new Promise(INTERNAL);\n obj._then(\n ret._fulfill,\n ret._reject,\n undefined,\n ret,\n null\n );\n return ret;\n }\n return doThenable(obj, then, context);\n }\n }\n return obj;\n}\n\nfunction doGetThen(obj) {\n return obj.then;\n}\n\nfunction getThen(obj) {\n try {\n return doGetThen(obj);\n } catch (e) {\n errorObj.e = e;\n return errorObj;\n }\n}\n\nvar hasProp = {}.hasOwnProperty;\nfunction isAnyBluebirdPromise(obj) {\n try {\n return hasProp.call(obj, \"_promise0\");\n } 
catch (e) {\n return false;\n }\n}\n\nfunction doThenable(x, then, context) {\n var promise = new Promise(INTERNAL);\n var ret = promise;\n if (context) context._pushContext();\n promise._captureStackTrace();\n if (context) context._popContext();\n var synchronous = true;\n var result = util.tryCatch(then).call(x, resolve, reject);\n synchronous = false;\n\n if (promise && result === errorObj) {\n promise._rejectCallback(result.e, true, true);\n promise = null;\n }\n\n function resolve(value) {\n if (!promise) return;\n promise._resolveCallback(value);\n promise = null;\n }\n\n function reject(reason) {\n if (!promise) return;\n promise._rejectCallback(reason, synchronous, true);\n promise = null;\n }\n return ret;\n}\n\nreturn tryConvertToPromise;\n};\n","\"use strict\";\nmodule.exports = function(Promise, INTERNAL, debug) {\nvar util = require(\"./util\");\nvar TimeoutError = Promise.TimeoutError;\n\nfunction HandleWrapper(handle) {\n this.handle = handle;\n}\n\nHandleWrapper.prototype._resultCancelled = function() {\n clearTimeout(this.handle);\n};\n\nvar afterValue = function(value) { return delay(+this).thenReturn(value); };\nvar delay = Promise.delay = function (ms, value) {\n var ret;\n var handle;\n if (value !== undefined) {\n ret = Promise.resolve(value)\n ._then(afterValue, null, null, ms, undefined);\n if (debug.cancellation() && value instanceof Promise) {\n ret._setOnCancel(value);\n }\n } else {\n ret = new Promise(INTERNAL);\n handle = setTimeout(function() { ret._fulfill(); }, +ms);\n if (debug.cancellation()) {\n ret._setOnCancel(new HandleWrapper(handle));\n }\n ret._captureStackTrace();\n }\n ret._setAsyncGuaranteed();\n return ret;\n};\n\nPromise.prototype.delay = function (ms) {\n return delay(ms, this);\n};\n\nvar afterTimeout = function (promise, message, parent) {\n var err;\n if (typeof message !== \"string\") {\n if (message instanceof Error) {\n err = message;\n } else {\n err = new TimeoutError(\"operation timed out\");\n }\n } else {\n err = new TimeoutError(message);\n }\n util.markAsOriginatingFromRejection(err);\n promise._attachExtraTrace(err);\n promise._reject(err);\n\n if (parent != null) {\n parent.cancel();\n }\n};\n\nfunction successClear(value) {\n clearTimeout(this.handle);\n return value;\n}\n\nfunction failureClear(reason) {\n clearTimeout(this.handle);\n throw reason;\n}\n\nPromise.prototype.timeout = function (ms, message) {\n ms = +ms;\n var ret, parent;\n\n var handleWrapper = new HandleWrapper(setTimeout(function timeoutTimeout() {\n if (ret.isPending()) {\n afterTimeout(ret, message, parent);\n }\n }, ms));\n\n if (debug.cancellation()) {\n parent = this.then();\n ret = parent._then(successClear, failureClear,\n undefined, handleWrapper, undefined);\n ret._setOnCancel(handleWrapper);\n } else {\n ret = this._then(successClear, failureClear,\n undefined, handleWrapper, undefined);\n }\n\n return ret;\n};\n\n};\n","\"use strict\";\nmodule.exports = function (Promise, apiRejection, tryConvertToPromise,\n createContext, INTERNAL, debug) {\n var util = require(\"./util\");\n var TypeError = require(\"./errors\").TypeError;\n var inherits = require(\"./util\").inherits;\n var errorObj = util.errorObj;\n var tryCatch = util.tryCatch;\n var NULL = {};\n\n function thrower(e) {\n setTimeout(function(){throw e;}, 0);\n }\n\n function castPreservingDisposable(thenable) {\n var maybePromise = tryConvertToPromise(thenable);\n if (maybePromise !== thenable &&\n typeof thenable._isDisposable === \"function\" &&\n typeof thenable._getDisposer === 
\"function\" &&\n thenable._isDisposable()) {\n maybePromise._setDisposable(thenable._getDisposer());\n }\n return maybePromise;\n }\n function dispose(resources, inspection) {\n var i = 0;\n var len = resources.length;\n var ret = new Promise(INTERNAL);\n function iterator() {\n if (i >= len) return ret._fulfill();\n var maybePromise = castPreservingDisposable(resources[i++]);\n if (maybePromise instanceof Promise &&\n maybePromise._isDisposable()) {\n try {\n maybePromise = tryConvertToPromise(\n maybePromise._getDisposer().tryDispose(inspection),\n resources.promise);\n } catch (e) {\n return thrower(e);\n }\n if (maybePromise instanceof Promise) {\n return maybePromise._then(iterator, thrower,\n null, null, null);\n }\n }\n iterator();\n }\n iterator();\n return ret;\n }\n\n function Disposer(data, promise, context) {\n this._data = data;\n this._promise = promise;\n this._context = context;\n }\n\n Disposer.prototype.data = function () {\n return this._data;\n };\n\n Disposer.prototype.promise = function () {\n return this._promise;\n };\n\n Disposer.prototype.resource = function () {\n if (this.promise().isFulfilled()) {\n return this.promise().value();\n }\n return NULL;\n };\n\n Disposer.prototype.tryDispose = function(inspection) {\n var resource = this.resource();\n var context = this._context;\n if (context !== undefined) context._pushContext();\n var ret = resource !== NULL\n ? this.doDispose(resource, inspection) : null;\n if (context !== undefined) context._popContext();\n this._promise._unsetDisposable();\n this._data = null;\n return ret;\n };\n\n Disposer.isDisposer = function (d) {\n return (d != null &&\n typeof d.resource === \"function\" &&\n typeof d.tryDispose === \"function\");\n };\n\n function FunctionDisposer(fn, promise, context) {\n this.constructor$(fn, promise, context);\n }\n inherits(FunctionDisposer, Disposer);\n\n FunctionDisposer.prototype.doDispose = function (resource, inspection) {\n var fn = this.data();\n return fn.call(resource, resource, inspection);\n };\n\n function maybeUnwrapDisposer(value) {\n if (Disposer.isDisposer(value)) {\n this.resources[this.index]._setDisposable(value);\n return value.promise();\n }\n return value;\n }\n\n function ResourceList(length) {\n this.length = length;\n this.promise = null;\n this[length-1] = null;\n }\n\n ResourceList.prototype._resultCancelled = function() {\n var len = this.length;\n for (var i = 0; i < len; ++i) {\n var item = this[i];\n if (item instanceof Promise) {\n item.cancel();\n }\n }\n };\n\n Promise.using = function () {\n var len = arguments.length;\n if (len < 2) return apiRejection(\n \"you must pass at least 2 arguments to Promise.using\");\n var fn = arguments[len - 1];\n if (typeof fn !== \"function\") {\n return apiRejection(\"expecting a function but got \" + util.classString(fn));\n }\n var input;\n var spreadArgs = true;\n if (len === 2 && Array.isArray(arguments[0])) {\n input = arguments[0];\n len = input.length;\n spreadArgs = false;\n } else {\n input = arguments;\n len--;\n }\n var resources = new ResourceList(len);\n for (var i = 0; i < len; ++i) {\n var resource = input[i];\n if (Disposer.isDisposer(resource)) {\n var disposer = resource;\n resource = resource.promise();\n resource._setDisposable(disposer);\n } else {\n var maybePromise = tryConvertToPromise(resource);\n if (maybePromise instanceof Promise) {\n resource =\n maybePromise._then(maybeUnwrapDisposer, null, null, {\n resources: resources,\n index: i\n }, undefined);\n }\n }\n resources[i] = resource;\n }\n\n var 
reflectedResources = new Array(resources.length);\n for (var i = 0; i < reflectedResources.length; ++i) {\n reflectedResources[i] = Promise.resolve(resources[i]).reflect();\n }\n\n var resultPromise = Promise.all(reflectedResources)\n .then(function(inspections) {\n for (var i = 0; i < inspections.length; ++i) {\n var inspection = inspections[i];\n if (inspection.isRejected()) {\n errorObj.e = inspection.error();\n return errorObj;\n } else if (!inspection.isFulfilled()) {\n resultPromise.cancel();\n return;\n }\n inspections[i] = inspection.value();\n }\n promise._pushContext();\n\n fn = tryCatch(fn);\n var ret = spreadArgs\n ? fn.apply(undefined, inspections) : fn(inspections);\n var promiseCreated = promise._popContext();\n debug.checkForgottenReturns(\n ret, promiseCreated, \"Promise.using\", promise);\n return ret;\n });\n\n var promise = resultPromise.lastly(function() {\n var inspection = new Promise.PromiseInspection(resultPromise);\n return dispose(resources, inspection);\n });\n resources.promise = promise;\n promise._setOnCancel(resources);\n return promise;\n };\n\n Promise.prototype._setDisposable = function (disposer) {\n this._bitField = this._bitField | 131072;\n this._disposer = disposer;\n };\n\n Promise.prototype._isDisposable = function () {\n return (this._bitField & 131072) > 0;\n };\n\n Promise.prototype._getDisposer = function () {\n return this._disposer;\n };\n\n Promise.prototype._unsetDisposable = function () {\n this._bitField = this._bitField & (~131072);\n this._disposer = undefined;\n };\n\n Promise.prototype.disposer = function (fn) {\n if (typeof fn === \"function\") {\n return new FunctionDisposer(fn, this, createContext());\n }\n throw new TypeError();\n };\n\n};\n","\"use strict\";\nvar es5 = require(\"./es5\");\nvar canEvaluate = typeof navigator == \"undefined\";\n\nvar errorObj = {e: {}};\nvar tryCatchTarget;\nvar globalObject = typeof self !== \"undefined\" ? self :\n typeof window !== \"undefined\" ? window :\n typeof global !== \"undefined\" ? global :\n this !== undefined ? 
this : null;\n\nfunction tryCatcher() {\n try {\n var target = tryCatchTarget;\n tryCatchTarget = null;\n return target.apply(this, arguments);\n } catch (e) {\n errorObj.e = e;\n return errorObj;\n }\n}\nfunction tryCatch(fn) {\n tryCatchTarget = fn;\n return tryCatcher;\n}\n\nvar inherits = function(Child, Parent) {\n var hasProp = {}.hasOwnProperty;\n\n function T() {\n this.constructor = Child;\n this.constructor$ = Parent;\n for (var propertyName in Parent.prototype) {\n if (hasProp.call(Parent.prototype, propertyName) &&\n propertyName.charAt(propertyName.length-1) !== \"$\"\n ) {\n this[propertyName + \"$\"] = Parent.prototype[propertyName];\n }\n }\n }\n T.prototype = Parent.prototype;\n Child.prototype = new T();\n return Child.prototype;\n};\n\n\nfunction isPrimitive(val) {\n return val == null || val === true || val === false ||\n typeof val === \"string\" || typeof val === \"number\";\n\n}\n\nfunction isObject(value) {\n return typeof value === \"function\" ||\n typeof value === \"object\" && value !== null;\n}\n\nfunction maybeWrapAsError(maybeError) {\n if (!isPrimitive(maybeError)) return maybeError;\n\n return new Error(safeToString(maybeError));\n}\n\nfunction withAppended(target, appendee) {\n var len = target.length;\n var ret = new Array(len + 1);\n var i;\n for (i = 0; i < len; ++i) {\n ret[i] = target[i];\n }\n ret[i] = appendee;\n return ret;\n}\n\nfunction getDataPropertyOrDefault(obj, key, defaultValue) {\n if (es5.isES5) {\n var desc = Object.getOwnPropertyDescriptor(obj, key);\n\n if (desc != null) {\n return desc.get == null && desc.set == null\n ? desc.value\n : defaultValue;\n }\n } else {\n return {}.hasOwnProperty.call(obj, key) ? obj[key] : undefined;\n }\n}\n\nfunction notEnumerableProp(obj, name, value) {\n if (isPrimitive(obj)) return obj;\n var descriptor = {\n value: value,\n configurable: true,\n enumerable: false,\n writable: true\n };\n es5.defineProperty(obj, name, descriptor);\n return obj;\n}\n\nfunction thrower(r) {\n throw r;\n}\n\nvar inheritedDataKeys = (function() {\n var excludedPrototypes = [\n Array.prototype,\n Object.prototype,\n Function.prototype\n ];\n\n var isExcludedProto = function(val) {\n for (var i = 0; i < excludedPrototypes.length; ++i) {\n if (excludedPrototypes[i] === val) {\n return true;\n }\n }\n return false;\n };\n\n if (es5.isES5) {\n var getKeys = Object.getOwnPropertyNames;\n return function(obj) {\n var ret = [];\n var visitedKeys = Object.create(null);\n while (obj != null && !isExcludedProto(obj)) {\n var keys;\n try {\n keys = getKeys(obj);\n } catch (e) {\n return ret;\n }\n for (var i = 0; i < keys.length; ++i) {\n var key = keys[i];\n if (visitedKeys[key]) continue;\n visitedKeys[key] = true;\n var desc = Object.getOwnPropertyDescriptor(obj, key);\n if (desc != null && desc.get == null && desc.set == null) {\n ret.push(key);\n }\n }\n obj = es5.getPrototypeOf(obj);\n }\n return ret;\n };\n } else {\n var hasProp = {}.hasOwnProperty;\n return function(obj) {\n if (isExcludedProto(obj)) return [];\n var ret = [];\n\n /*jshint forin:false */\n enumeration: for (var key in obj) {\n if (hasProp.call(obj, key)) {\n ret.push(key);\n } else {\n for (var i = 0; i < excludedPrototypes.length; ++i) {\n if (hasProp.call(excludedPrototypes[i], key)) {\n continue enumeration;\n }\n }\n ret.push(key);\n }\n }\n return ret;\n };\n }\n\n})();\n\nvar thisAssignmentPattern = /this\\s*\\.\\s*\\S+\\s*=/;\nfunction isClass(fn) {\n try {\n if (typeof fn === \"function\") {\n var keys = es5.names(fn.prototype);\n\n var hasMethods = 
es5.isES5 && keys.length > 1;\n var hasMethodsOtherThanConstructor = keys.length > 0 &&\n !(keys.length === 1 && keys[0] === \"constructor\");\n var hasThisAssignmentAndStaticMethods =\n thisAssignmentPattern.test(fn + \"\") && es5.names(fn).length > 0;\n\n if (hasMethods || hasMethodsOtherThanConstructor ||\n hasThisAssignmentAndStaticMethods) {\n return true;\n }\n }\n return false;\n } catch (e) {\n return false;\n }\n}\n\nfunction toFastProperties(obj) {\n /*jshint -W027,-W055,-W031*/\n function FakeConstructor() {}\n FakeConstructor.prototype = obj;\n var l = 8;\n while (l--) new FakeConstructor();\n return obj;\n eval(obj);\n}\n\nvar rident = /^[a-z$_][a-z$_0-9]*$/i;\nfunction isIdentifier(str) {\n return rident.test(str);\n}\n\nfunction filledRange(count, prefix, suffix) {\n var ret = new Array(count);\n for(var i = 0; i < count; ++i) {\n ret[i] = prefix + i + suffix;\n }\n return ret;\n}\n\nfunction safeToString(obj) {\n try {\n return obj + \"\";\n } catch (e) {\n return \"[no string representation]\";\n }\n}\n\nfunction isError(obj) {\n return obj !== null &&\n typeof obj === \"object\" &&\n typeof obj.message === \"string\" &&\n typeof obj.name === \"string\";\n}\n\nfunction markAsOriginatingFromRejection(e) {\n try {\n notEnumerableProp(e, \"isOperational\", true);\n }\n catch(ignore) {}\n}\n\nfunction originatesFromRejection(e) {\n if (e == null) return false;\n return ((e instanceof Error[\"__BluebirdErrorTypes__\"].OperationalError) ||\n e[\"isOperational\"] === true);\n}\n\nfunction canAttachTrace(obj) {\n return isError(obj) && es5.propertyIsWritable(obj, \"stack\");\n}\n\nvar ensureErrorObject = (function() {\n if (!(\"stack\" in new Error())) {\n return function(value) {\n if (canAttachTrace(value)) return value;\n try {throw new Error(safeToString(value));}\n catch(err) {return err;}\n };\n } else {\n return function(value) {\n if (canAttachTrace(value)) return value;\n return new Error(safeToString(value));\n };\n }\n})();\n\nfunction classString(obj) {\n return {}.toString.call(obj);\n}\n\nfunction copyDescriptors(from, to, filter) {\n var keys = es5.names(from);\n for (var i = 0; i < keys.length; ++i) {\n var key = keys[i];\n if (filter(key)) {\n try {\n es5.defineProperty(to, key, es5.getDescriptor(from, key));\n } catch (ignore) {}\n }\n }\n}\n\nvar asArray = function(v) {\n if (es5.isArray(v)) {\n return v;\n }\n return null;\n};\n\nif (typeof Symbol !== \"undefined\" && Symbol.iterator) {\n var ArrayFrom = typeof Array.from === \"function\" ? function(v) {\n return Array.from(v);\n } : function(v) {\n var ret = [];\n var it = v[Symbol.iterator]();\n var itResult;\n while (!((itResult = it.next()).done)) {\n ret.push(itResult.value);\n }\n return ret;\n };\n\n asArray = function(v) {\n if (es5.isArray(v)) {\n return v;\n } else if (v != null && typeof v[Symbol.iterator] === \"function\") {\n return ArrayFrom(v);\n }\n return null;\n };\n}\n\nvar isNode = typeof process !== \"undefined\" &&\n classString(process).toLowerCase() === \"[object process]\";\n\nvar hasEnvVariables = typeof process !== \"undefined\" &&\n typeof process.env !== \"undefined\";\n\nfunction env(key) {\n return hasEnvVariables ? 
process.env[key] : undefined;\n}\n\nfunction getNativePromise() {\n if (typeof Promise === \"function\") {\n try {\n var promise = new Promise(function(){});\n if ({}.toString.call(promise) === \"[object Promise]\") {\n return Promise;\n }\n } catch (e) {}\n }\n}\n\nfunction domainBind(self, cb) {\n return self.bind(cb);\n}\n\nvar ret = {\n isClass: isClass,\n isIdentifier: isIdentifier,\n inheritedDataKeys: inheritedDataKeys,\n getDataPropertyOrDefault: getDataPropertyOrDefault,\n thrower: thrower,\n isArray: es5.isArray,\n asArray: asArray,\n notEnumerableProp: notEnumerableProp,\n isPrimitive: isPrimitive,\n isObject: isObject,\n isError: isError,\n canEvaluate: canEvaluate,\n errorObj: errorObj,\n tryCatch: tryCatch,\n inherits: inherits,\n withAppended: withAppended,\n maybeWrapAsError: maybeWrapAsError,\n toFastProperties: toFastProperties,\n filledRange: filledRange,\n toString: safeToString,\n canAttachTrace: canAttachTrace,\n ensureErrorObject: ensureErrorObject,\n originatesFromRejection: originatesFromRejection,\n markAsOriginatingFromRejection: markAsOriginatingFromRejection,\n classString: classString,\n copyDescriptors: copyDescriptors,\n hasDevTools: typeof chrome !== \"undefined\" && chrome &&\n typeof chrome.loadTimes === \"function\",\n isNode: isNode,\n hasEnvVariables: hasEnvVariables,\n env: env,\n global: globalObject,\n getNativePromise: getNativePromise,\n domainBind: domainBind\n};\nret.isRecentNode = ret.isNode && (function() {\n var version = process.versions.node.split(\".\").map(Number);\n return (version[0] === 0 && version[1] > 10) || (version[0] > 0);\n})();\n\nif (ret.isNode) ret.toFastProperties(process);\n\ntry {throw new Error(); } catch (e) {ret.lastLineError = e;}\nmodule.exports = ret;\n","var concatMap = require('concat-map');\nvar balanced = require('balanced-match');\n\nmodule.exports = expandTop;\n\nvar escSlash = '\\0SLASH'+Math.random()+'\\0';\nvar escOpen = '\\0OPEN'+Math.random()+'\\0';\nvar escClose = '\\0CLOSE'+Math.random()+'\\0';\nvar escComma = '\\0COMMA'+Math.random()+'\\0';\nvar escPeriod = '\\0PERIOD'+Math.random()+'\\0';\n\nfunction numeric(str) {\n return parseInt(str, 10) == str\n ? 
parseInt(str, 10)\n : str.charCodeAt(0);\n}\n\nfunction escapeBraces(str) {\n return str.split('\\\\\\\\').join(escSlash)\n .split('\\\\{').join(escOpen)\n .split('\\\\}').join(escClose)\n .split('\\\\,').join(escComma)\n .split('\\\\.').join(escPeriod);\n}\n\nfunction unescapeBraces(str) {\n return str.split(escSlash).join('\\\\')\n .split(escOpen).join('{')\n .split(escClose).join('}')\n .split(escComma).join(',')\n .split(escPeriod).join('.');\n}\n\n\n// Basically just str.split(\",\"), but handling cases\n// where we have nested braced sections, which should be\n// treated as individual members, like {a,{b,c},d}\nfunction parseCommaParts(str) {\n if (!str)\n return [''];\n\n var parts = [];\n var m = balanced('{', '}', str);\n\n if (!m)\n return str.split(',');\n\n var pre = m.pre;\n var body = m.body;\n var post = m.post;\n var p = pre.split(',');\n\n p[p.length-1] += '{' + body + '}';\n var postParts = parseCommaParts(post);\n if (post.length) {\n p[p.length-1] += postParts.shift();\n p.push.apply(p, postParts);\n }\n\n parts.push.apply(parts, p);\n\n return parts;\n}\n\nfunction expandTop(str) {\n if (!str)\n return [];\n\n // I don't know why Bash 4.3 does this, but it does.\n // Anything starting with {} will have the first two bytes preserved\n // but *only* at the top level, so {},a}b will not expand to anything,\n // but a{},b}c will be expanded to [a}c,abc].\n // One could argue that this is a bug in Bash, but since the goal of\n // this module is to match Bash's rules, we escape a leading {}\n if (str.substr(0, 2) === '{}') {\n str = '\\\\{\\\\}' + str.substr(2);\n }\n\n return expand(escapeBraces(str), true).map(unescapeBraces);\n}\n\nfunction identity(e) {\n return e;\n}\n\nfunction embrace(str) {\n return '{' + str + '}';\n}\nfunction isPadded(el) {\n return /^-?0\\d/.test(el);\n}\n\nfunction lte(i, y) {\n return i <= y;\n}\nfunction gte(i, y) {\n return i >= y;\n}\n\nfunction expand(str, isTop) {\n var expansions = [];\n\n var m = balanced('{', '}', str);\n if (!m || /\\$$/.test(m.pre)) return [str];\n\n var isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body);\n var isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(m.body);\n var isSequence = isNumericSequence || isAlphaSequence;\n var isOptions = m.body.indexOf(',') >= 0;\n if (!isSequence && !isOptions) {\n // {a},b}\n if (m.post.match(/,.*\\}/)) {\n str = m.pre + '{' + m.body + escClose + m.post;\n return expand(str);\n }\n return [str];\n }\n\n var n;\n if (isSequence) {\n n = m.body.split(/\\.\\./);\n } else {\n n = parseCommaParts(m.body);\n if (n.length === 1) {\n // x{{a,b}}y ==> x{a}y x{b}y\n n = expand(n[0], false).map(embrace);\n if (n.length === 1) {\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n return post.map(function(p) {\n return m.pre + n[0] + p;\n });\n }\n }\n }\n\n // at this point, n is the parts, and we know it's not a comma set\n // with a single entry.\n\n // no need to expand pre, since it is guaranteed to be free of brace-sets\n var pre = m.pre;\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n\n var N;\n\n if (isSequence) {\n var x = numeric(n[0]);\n var y = numeric(n[1]);\n var width = Math.max(n[0].length, n[1].length)\n var incr = n.length == 3\n ? 
Math.abs(numeric(n[2]))\n : 1;\n var test = lte;\n var reverse = y < x;\n if (reverse) {\n incr *= -1;\n test = gte;\n }\n var pad = n.some(isPadded);\n\n N = [];\n\n for (var i = x; test(i, y); i += incr) {\n var c;\n if (isAlphaSequence) {\n c = String.fromCharCode(i);\n if (c === '\\\\')\n c = '';\n } else {\n c = String(i);\n if (pad) {\n var need = width - c.length;\n if (need > 0) {\n var z = new Array(need + 1).join('0');\n if (i < 0)\n c = '-' + z + c.slice(1);\n else\n c = z + c;\n }\n }\n }\n N.push(c);\n }\n } else {\n N = concatMap(n, function(el) { return expand(el, false) });\n }\n\n for (var j = 0; j < N.length; j++) {\n for (var k = 0; k < post.length; k++) {\n var expansion = pre + N[j] + post[k];\n if (!isTop || isSequence || expansion)\n expansions.push(expansion);\n }\n }\n\n return expansions;\n}\n\n","\"use strict\";\nvar initBuffer = require(\"./init-buffer\");\n\nif (!Buffer.prototype.indexOf) {\n Buffer.prototype.indexOf = function (value, offset) {\n offset = offset || 0;\n\n // Always wrap the input as a Buffer so that this method will support any\n // data type such as array octet, string or buffer.\n if (typeof value === \"string\" || value instanceof String) {\n value = initBuffer(value);\n } else if (typeof value === \"number\" || value instanceof Number) {\n value = initBuffer([ value ]);\n }\n\n var len = value.length;\n\n for (var i = offset; i <= this.length - len; i++) {\n var mismatch = false;\n for (var j = 0; j < len; j++) {\n if (this[i + j] != value[j]) {\n mismatch = true;\n break;\n }\n }\n\n if (!mismatch) {\n return i;\n }\n }\n\n return -1;\n };\n}\n\nfunction bufferLastIndexOf (value, offset) {\n\n // Always wrap the input as a Buffer so that this method will support any\n // data type such as array octet, string or buffer.\n if (typeof value === \"string\" || value instanceof String) {\n value = initBuffer(value);\n } else if (typeof value === \"number\" || value instanceof Number) {\n value = initBuffer([ value ]);\n }\n\n var len = value.length;\n offset = offset || this.length - len;\n\n for (var i = offset; i >= 0; i--) {\n var mismatch = false;\n for (var j = 0; j < len; j++) {\n if (this[i + j] != value[j]) {\n mismatch = true;\n break;\n }\n }\n\n if (!mismatch) {\n return i;\n }\n }\n\n return -1;\n}\n\n\nif (Buffer.prototype.lastIndexOf) {\n // check Buffer#lastIndexOf is usable: https://github.com/nodejs/node/issues/4604\n if (initBuffer(\"ABC\").lastIndexOf (\"ABC\") === -1)\n Buffer.prototype.lastIndexOf = bufferLastIndexOf;\n} else {\n Buffer.prototype.lastIndexOf = bufferLastIndexOf;\n}\n","module.exports = function initBuffer(val) {\n // assume old version\n var nodeVersion = process && process.version ? process.version : \"v5.0.0\";\n var major = nodeVersion.split(\".\")[0].replace(\"v\", \"\");\n return major < 6\n ? 
new Buffer(val)\n : Buffer.from(val);\n};","module.exports = Buffers;\n\nfunction Buffers (bufs) {\n if (!(this instanceof Buffers)) return new Buffers(bufs);\n this.buffers = bufs || [];\n this.length = this.buffers.reduce(function (size, buf) {\n return size + buf.length\n }, 0);\n}\n\nBuffers.prototype.push = function () {\n for (var i = 0; i < arguments.length; i++) {\n if (!Buffer.isBuffer(arguments[i])) {\n throw new TypeError('Tried to push a non-buffer');\n }\n }\n \n for (var i = 0; i < arguments.length; i++) {\n var buf = arguments[i];\n this.buffers.push(buf);\n this.length += buf.length;\n }\n return this.length;\n};\n\nBuffers.prototype.unshift = function () {\n for (var i = 0; i < arguments.length; i++) {\n if (!Buffer.isBuffer(arguments[i])) {\n throw new TypeError('Tried to unshift a non-buffer');\n }\n }\n \n for (var i = 0; i < arguments.length; i++) {\n var buf = arguments[i];\n this.buffers.unshift(buf);\n this.length += buf.length;\n }\n return this.length;\n};\n\nBuffers.prototype.copy = function (dst, dStart, start, end) {\n return this.slice(start, end).copy(dst, dStart, 0, end - start);\n};\n\nBuffers.prototype.splice = function (i, howMany) {\n var buffers = this.buffers;\n var index = i >= 0 ? i : this.length - i;\n var reps = [].slice.call(arguments, 2);\n \n if (howMany === undefined) {\n howMany = this.length - index;\n }\n else if (howMany > this.length - index) {\n howMany = this.length - index;\n }\n \n for (var i = 0; i < reps.length; i++) {\n this.length += reps[i].length;\n }\n \n var removed = new Buffers();\n var bytes = 0;\n \n var startBytes = 0;\n for (\n var ii = 0;\n ii < buffers.length && startBytes + buffers[ii].length < index;\n ii ++\n ) { startBytes += buffers[ii].length }\n \n if (index - startBytes > 0) {\n var start = index - startBytes;\n \n if (start + howMany < buffers[ii].length) {\n removed.push(buffers[ii].slice(start, start + howMany));\n \n var orig = buffers[ii];\n //var buf = new Buffer(orig.length - howMany);\n var buf0 = new Buffer(start);\n for (var i = 0; i < start; i++) {\n buf0[i] = orig[i];\n }\n \n var buf1 = new Buffer(orig.length - start - howMany);\n for (var i = start + howMany; i < orig.length; i++) {\n buf1[ i - howMany - start ] = orig[i]\n }\n \n if (reps.length > 0) {\n var reps_ = reps.slice();\n reps_.unshift(buf0);\n reps_.push(buf1);\n buffers.splice.apply(buffers, [ ii, 1 ].concat(reps_));\n ii += reps_.length;\n reps = [];\n }\n else {\n buffers.splice(ii, 1, buf0, buf1);\n //buffers[ii] = buf;\n ii += 2;\n }\n }\n else {\n removed.push(buffers[ii].slice(start));\n buffers[ii] = buffers[ii].slice(0, start);\n ii ++;\n }\n }\n \n if (reps.length > 0) {\n buffers.splice.apply(buffers, [ ii, 0 ].concat(reps));\n ii += reps.length;\n }\n \n while (removed.length < howMany) {\n var buf = buffers[ii];\n var len = buf.length;\n var take = Math.min(len, howMany - removed.length);\n \n if (take === len) {\n removed.push(buf);\n buffers.splice(ii, 1);\n }\n else {\n removed.push(buf.slice(0, take));\n buffers[ii] = buffers[ii].slice(take);\n }\n }\n \n this.length -= removed.length;\n \n return removed;\n};\n \nBuffers.prototype.slice = function (i, j) {\n var buffers = this.buffers;\n if (j === undefined) j = this.length;\n if (i === undefined) i = 0;\n \n if (j > this.length) j = this.length;\n \n var startBytes = 0;\n for (\n var si = 0;\n si < buffers.length && startBytes + buffers[si].length <= i;\n si ++\n ) { startBytes += buffers[si].length }\n \n var target = new Buffer(j - i);\n \n var ti = 0;\n for (var ii 
= si; ti < j - i && ii < buffers.length; ii++) {\n var len = buffers[ii].length;\n \n var start = ti === 0 ? i - startBytes : 0;\n var end = ti + len >= j - i\n ? Math.min(start + (j - i) - ti, len)\n : len\n ;\n \n buffers[ii].copy(target, ti, start, end);\n ti += end - start;\n }\n \n return target;\n};\n\nBuffers.prototype.pos = function (i) {\n if (i < 0 || i >= this.length) throw new Error('oob');\n var l = i, bi = 0, bu = null;\n for (;;) {\n bu = this.buffers[bi];\n if (l < bu.length) {\n return {buf: bi, offset: l};\n } else {\n l -= bu.length;\n }\n bi++;\n }\n};\n\nBuffers.prototype.get = function get (i) {\n var pos = this.pos(i);\n\n return this.buffers[pos.buf].get(pos.offset);\n};\n\nBuffers.prototype.set = function set (i, b) {\n var pos = this.pos(i);\n\n return this.buffers[pos.buf].set(pos.offset, b);\n};\n\nBuffers.prototype.indexOf = function (needle, offset) {\n if (\"string\" === typeof needle) {\n needle = new Buffer(needle);\n } else if (needle instanceof Buffer) {\n // already a buffer\n } else {\n throw new Error('Invalid type for a search string');\n }\n\n if (!needle.length) {\n return 0;\n }\n\n if (!this.length) {\n return -1;\n }\n\n var i = 0, j = 0, match = 0, mstart, pos = 0;\n\n // start search from a particular point in the virtual buffer\n if (offset) {\n var p = this.pos(offset);\n i = p.buf;\n j = p.offset;\n pos = offset;\n }\n\n // for each character in virtual buffer\n for (;;) {\n while (j >= this.buffers[i].length) {\n j = 0;\n i++;\n\n if (i >= this.buffers.length) {\n // search string not found\n return -1;\n }\n }\n\n var char = this.buffers[i][j];\n\n if (char == needle[match]) {\n // keep track where match started\n if (match == 0) {\n mstart = {\n i: i,\n j: j,\n pos: pos\n };\n }\n match++;\n if (match == needle.length) {\n // full match\n return mstart.pos;\n }\n } else if (match != 0) {\n // a partial match ended, go back to match starting position\n // this will continue the search at the next character\n i = mstart.i;\n j = mstart.j;\n pos = mstart.pos;\n match = 0;\n }\n\n j++;\n pos++;\n }\n};\n\nBuffers.prototype.toBuffer = function() {\n return this.slice();\n}\n\nBuffers.prototype.toString = function(encoding, start, end) {\n return this.slice(start, end).toString(encoding);\n}\n","var Traverse = require('traverse');\nvar EventEmitter = require('events').EventEmitter;\n\nmodule.exports = Chainsaw;\nfunction Chainsaw (builder) {\n var saw = Chainsaw.saw(builder, {});\n var r = builder.call(saw.handlers, saw);\n if (r !== undefined) saw.handlers = r;\n saw.record();\n return saw.chain();\n};\n\nChainsaw.light = function ChainsawLight (builder) {\n var saw = Chainsaw.saw(builder, {});\n var r = builder.call(saw.handlers, saw);\n if (r !== undefined) saw.handlers = r;\n return saw.chain();\n};\n\nChainsaw.saw = function (builder, handlers) {\n var saw = new EventEmitter;\n saw.handlers = handlers;\n saw.actions = [];\n\n saw.chain = function () {\n var ch = Traverse(saw.handlers).map(function (node) {\n if (this.isRoot) return node;\n var ps = this.path;\n\n if (typeof node === 'function') {\n this.update(function () {\n saw.actions.push({\n path : ps,\n args : [].slice.call(arguments)\n });\n return ch;\n });\n }\n });\n\n process.nextTick(function () {\n saw.emit('begin');\n saw.next();\n });\n\n return ch;\n };\n\n saw.pop = function () {\n return saw.actions.shift();\n };\n\n saw.next = function () {\n var action = saw.pop();\n\n if (!action) {\n saw.emit('end');\n }\n else if (!action.trap) {\n var node = saw.handlers;\n 
action.path.forEach(function (key) { node = node[key] });\n node.apply(saw.handlers, action.args);\n }\n };\n\n saw.nest = function (cb) {\n var args = [].slice.call(arguments, 1);\n var autonext = true;\n\n if (typeof cb === 'boolean') {\n var autonext = cb;\n cb = args.shift();\n }\n\n var s = Chainsaw.saw(builder, {});\n var r = builder.call(s.handlers, s);\n\n if (r !== undefined) s.handlers = r;\n\n // If we are recording...\n if (\"undefined\" !== typeof saw.step) {\n // ... our children should, too\n s.record();\n }\n\n cb.apply(s.chain(), args);\n if (autonext !== false) s.on('end', saw.next);\n };\n\n saw.record = function () {\n upgradeChainsaw(saw);\n };\n\n ['trap', 'down', 'jump'].forEach(function (method) {\n saw[method] = function () {\n throw new Error(\"To use the trap, down and jump features, please \"+\n \"call record() first to start recording actions.\");\n };\n });\n\n return saw;\n};\n\nfunction upgradeChainsaw(saw) {\n saw.step = 0;\n\n // override pop\n saw.pop = function () {\n return saw.actions[saw.step++];\n };\n\n saw.trap = function (name, cb) {\n var ps = Array.isArray(name) ? name : [name];\n saw.actions.push({\n path : ps,\n step : saw.step,\n cb : cb,\n trap : true\n });\n };\n\n saw.down = function (name) {\n var ps = (Array.isArray(name) ? name : [name]).join('/');\n var i = saw.actions.slice(saw.step).map(function (x) {\n if (x.trap && x.step <= saw.step) return false;\n return x.path.join('/') == ps;\n }).indexOf(true);\n\n if (i >= 0) saw.step += i;\n else saw.step = saw.actions.length;\n\n var act = saw.actions[saw.step - 1];\n if (act && act.trap) {\n // It's a trap!\n saw.step = act.step;\n act.cb();\n }\n else saw.next();\n };\n\n saw.jump = function (step) {\n saw.step = step;\n saw.next();\n };\n};\n","var util = require('util');\nvar Stream = require('stream').Stream;\nvar DelayedStream = require('delayed-stream');\n\nmodule.exports = CombinedStream;\nfunction CombinedStream() {\n this.writable = false;\n this.readable = true;\n this.dataSize = 0;\n this.maxDataSize = 2 * 1024 * 1024;\n this.pauseStreams = true;\n\n this._released = false;\n this._streams = [];\n this._currentStream = null;\n this._insideLoop = false;\n this._pendingNext = false;\n}\nutil.inherits(CombinedStream, Stream);\n\nCombinedStream.create = function(options) {\n var combinedStream = new this();\n\n options = options || {};\n for (var option in options) {\n combinedStream[option] = options[option];\n }\n\n return combinedStream;\n};\n\nCombinedStream.isStreamLike = function(stream) {\n return (typeof stream !== 'function')\n && (typeof stream !== 'string')\n && (typeof stream !== 'boolean')\n && (typeof stream !== 'number')\n && (!Buffer.isBuffer(stream));\n};\n\nCombinedStream.prototype.append = function(stream) {\n var isStreamLike = CombinedStream.isStreamLike(stream);\n\n if (isStreamLike) {\n if (!(stream instanceof DelayedStream)) {\n var newStream = DelayedStream.create(stream, {\n maxDataSize: Infinity,\n pauseStream: this.pauseStreams,\n });\n stream.on('data', this._checkDataSize.bind(this));\n stream = newStream;\n }\n\n this._handleErrors(stream);\n\n if (this.pauseStreams) {\n stream.pause();\n }\n }\n\n this._streams.push(stream);\n return this;\n};\n\nCombinedStream.prototype.pipe = function(dest, options) {\n Stream.prototype.pipe.call(this, dest, options);\n this.resume();\n return dest;\n};\n\nCombinedStream.prototype._getNext = function() {\n this._currentStream = null;\n\n if (this._insideLoop) {\n this._pendingNext = true;\n return; // defer call\n 
}\n\n this._insideLoop = true;\n try {\n do {\n this._pendingNext = false;\n this._realGetNext();\n } while (this._pendingNext);\n } finally {\n this._insideLoop = false;\n }\n};\n\nCombinedStream.prototype._realGetNext = function() {\n var stream = this._streams.shift();\n\n\n if (typeof stream == 'undefined') {\n this.end();\n return;\n }\n\n if (typeof stream !== 'function') {\n this._pipeNext(stream);\n return;\n }\n\n var getStream = stream;\n getStream(function(stream) {\n var isStreamLike = CombinedStream.isStreamLike(stream);\n if (isStreamLike) {\n stream.on('data', this._checkDataSize.bind(this));\n this._handleErrors(stream);\n }\n\n this._pipeNext(stream);\n }.bind(this));\n};\n\nCombinedStream.prototype._pipeNext = function(stream) {\n this._currentStream = stream;\n\n var isStreamLike = CombinedStream.isStreamLike(stream);\n if (isStreamLike) {\n stream.on('end', this._getNext.bind(this));\n stream.pipe(this, {end: false});\n return;\n }\n\n var value = stream;\n this.write(value);\n this._getNext();\n};\n\nCombinedStream.prototype._handleErrors = function(stream) {\n var self = this;\n stream.on('error', function(err) {\n self._emitError(err);\n });\n};\n\nCombinedStream.prototype.write = function(data) {\n this.emit('data', data);\n};\n\nCombinedStream.prototype.pause = function() {\n if (!this.pauseStreams) {\n return;\n }\n\n if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause();\n this.emit('pause');\n};\n\nCombinedStream.prototype.resume = function() {\n if (!this._released) {\n this._released = true;\n this.writable = true;\n this._getNext();\n }\n\n if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume();\n this.emit('resume');\n};\n\nCombinedStream.prototype.end = function() {\n this._reset();\n this.emit('end');\n};\n\nCombinedStream.prototype.destroy = function() {\n this._reset();\n this.emit('close');\n};\n\nCombinedStream.prototype._reset = function() {\n this.writable = false;\n this._streams = [];\n this._currentStream = null;\n};\n\nCombinedStream.prototype._checkDataSize = function() {\n this._updateDataSize();\n if (this.dataSize <= this.maxDataSize) {\n return;\n }\n\n var message =\n 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.';\n this._emitError(new Error(message));\n};\n\nCombinedStream.prototype._updateDataSize = function() {\n this.dataSize = 0;\n\n var self = this;\n this._streams.forEach(function(stream) {\n if (!stream.dataSize) {\n return;\n }\n\n self.dataSize += stream.dataSize;\n });\n\n if (this._currentStream && this._currentStream.dataSize) {\n this.dataSize += this._currentStream.dataSize;\n }\n};\n\nCombinedStream.prototype._emitError = function(err) {\n this._reset();\n this.emit('error', err);\n};\n","module.exports = function (xs, fn) {\n var res = [];\n for (var i = 0; i < xs.length; i++) {\n var x = fn(xs[i], i);\n if (isArray(x)) res.push.apply(res, x);\n else res.push(x);\n }\n return res;\n};\n\nvar isArray = Array.isArray || function (xs) {\n return Object.prototype.toString.call(xs) === '[object Array]';\n};\n","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// NOTE: These type checking functions intentionally don't use `instanceof`\n// because it is fragile and can be easily faked with `Object.create()`.\n\nfunction isArray(arg) {\n if (Array.isArray) {\n return Array.isArray(arg);\n }\n return objectToString(arg) === '[object Array]';\n}\nexports.isArray = isArray;\n\nfunction isBoolean(arg) {\n return typeof arg === 'boolean';\n}\nexports.isBoolean = isBoolean;\n\nfunction isNull(arg) {\n return arg === null;\n}\nexports.isNull = isNull;\n\nfunction isNullOrUndefined(arg) {\n return arg == null;\n}\nexports.isNullOrUndefined = isNullOrUndefined;\n\nfunction isNumber(arg) {\n return typeof arg === 'number';\n}\nexports.isNumber = isNumber;\n\nfunction isString(arg) {\n return typeof arg === 'string';\n}\nexports.isString = isString;\n\nfunction isSymbol(arg) {\n return typeof arg === 'symbol';\n}\nexports.isSymbol = isSymbol;\n\nfunction isUndefined(arg) {\n return arg === void 0;\n}\nexports.isUndefined = isUndefined;\n\nfunction isRegExp(re) {\n return objectToString(re) === '[object RegExp]';\n}\nexports.isRegExp = isRegExp;\n\nfunction isObject(arg) {\n return typeof arg === 'object' && arg !== null;\n}\nexports.isObject = isObject;\n\nfunction isDate(d) {\n return objectToString(d) === '[object Date]';\n}\nexports.isDate = isDate;\n\nfunction isError(e) {\n return (objectToString(e) === '[object Error]' || e instanceof Error);\n}\nexports.isError = isError;\n\nfunction isFunction(arg) {\n return typeof arg === 'function';\n}\nexports.isFunction = isFunction;\n\nfunction isPrimitive(arg) {\n return arg === null ||\n typeof arg === 'boolean' ||\n typeof arg === 'number' ||\n typeof arg === 'string' ||\n typeof arg === 'symbol' || // ES6 symbol\n typeof arg === 'undefined';\n}\nexports.isPrimitive = isPrimitive;\n\nexports.isBuffer = Buffer.isBuffer;\n\nfunction objectToString(o) {\n return Object.prototype.toString.call(o);\n}\n","var Stream = require('stream').Stream;\nvar util = require('util');\n\nmodule.exports = DelayedStream;\nfunction DelayedStream() {\n this.source = null;\n this.dataSize = 0;\n this.maxDataSize = 1024 * 1024;\n this.pauseStream = true;\n\n this._maxDataSizeExceeded = false;\n this._released = false;\n this._bufferedEvents = [];\n}\nutil.inherits(DelayedStream, Stream);\n\nDelayedStream.create = function(source, options) {\n var delayedStream = new this();\n\n options = options || {};\n for (var option 
in options) {\n delayedStream[option] = options[option];\n }\n\n delayedStream.source = source;\n\n var realEmit = source.emit;\n source.emit = function() {\n delayedStream._handleEmit(arguments);\n return realEmit.apply(source, arguments);\n };\n\n source.on('error', function() {});\n if (delayedStream.pauseStream) {\n source.pause();\n }\n\n return delayedStream;\n};\n\nObject.defineProperty(DelayedStream.prototype, 'readable', {\n configurable: true,\n enumerable: true,\n get: function() {\n return this.source.readable;\n }\n});\n\nDelayedStream.prototype.setEncoding = function() {\n return this.source.setEncoding.apply(this.source, arguments);\n};\n\nDelayedStream.prototype.resume = function() {\n if (!this._released) {\n this.release();\n }\n\n this.source.resume();\n};\n\nDelayedStream.prototype.pause = function() {\n this.source.pause();\n};\n\nDelayedStream.prototype.release = function() {\n this._released = true;\n\n this._bufferedEvents.forEach(function(args) {\n this.emit.apply(this, args);\n }.bind(this));\n this._bufferedEvents = [];\n};\n\nDelayedStream.prototype.pipe = function() {\n var r = Stream.prototype.pipe.apply(this, arguments);\n this.resume();\n return r;\n};\n\nDelayedStream.prototype._handleEmit = function(args) {\n if (this._released) {\n this.emit.apply(this, args);\n return;\n }\n\n if (args[0] === 'data') {\n this.dataSize += args[1].length;\n this._checkIfMaxDataSizeExceeded();\n }\n\n this._bufferedEvents.push(args);\n};\n\nDelayedStream.prototype._checkIfMaxDataSizeExceeded = function() {\n if (this._maxDataSizeExceeded) {\n return;\n }\n\n if (this.dataSize <= this.maxDataSize) {\n return;\n }\n\n this._maxDataSizeExceeded = true;\n var message =\n 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'\n this.emit('error', new Error(message));\n};\n","\"use strict\";\n\nvar stream = require(\"readable-stream\");\n\nfunction DuplexWrapper(options, writable, readable) {\n if (typeof readable === \"undefined\") {\n readable = writable;\n writable = options;\n options = null;\n }\n\n stream.Duplex.call(this, options);\n\n if (typeof readable.read !== \"function\") {\n readable = (new stream.Readable(options)).wrap(readable);\n }\n\n this._writable = writable;\n this._readable = readable;\n this._waiting = false;\n\n var self = this;\n\n writable.once(\"finish\", function() {\n self.end();\n });\n\n this.once(\"finish\", function() {\n writable.end();\n });\n\n readable.on(\"readable\", function() {\n if (self._waiting) {\n self._waiting = false;\n self._read();\n }\n });\n\n readable.once(\"end\", function() {\n self.push(null);\n });\n\n if (!options || typeof options.bubbleErrors === \"undefined\" || options.bubbleErrors) {\n writable.on(\"error\", function(err) {\n self.emit(\"error\", err);\n });\n\n readable.on(\"error\", function(err) {\n self.emit(\"error\", err);\n });\n }\n}\n\nDuplexWrapper.prototype = Object.create(stream.Duplex.prototype, {constructor: {value: DuplexWrapper}});\n\nDuplexWrapper.prototype._write = function _write(input, encoding, done) {\n this._writable.write(input, encoding, done);\n};\n\nDuplexWrapper.prototype._read = function _read() {\n var buf;\n var reads = 0;\n while ((buf = this._readable.read()) !== null) {\n this.push(buf);\n reads++;\n }\n if (reads === 0) {\n this._waiting = true;\n }\n};\n\nmodule.exports = function duplex2(options, writable, readable) {\n return new DuplexWrapper(options, writable, readable);\n};\n\nmodule.exports.DuplexWrapper = DuplexWrapper;\n","/**\n * @author Toru Nagashima \n 
* @copyright 2015 Toru Nagashima. All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n 
return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexports.defineEventAttribute = defineEventAttribute;\nexports.EventTarget = EventTarget;\nexports.default = EventTarget;\n\nmodule.exports = EventTarget\nmodule.exports.EventTarget = module.exports[\"default\"] = EventTarget\nmodule.exports.defineEventAttribute = defineEventAttribute\n//# sourceMappingURL=event-target-shim.js.map\n","module.exports = realpath\nrealpath.realpath = realpath\nrealpath.sync = realpathSync\nrealpath.realpathSync = realpathSync\nrealpath.monkeypatch = monkeypatch\nrealpath.unmonkeypatch = unmonkeypatch\n\nvar fs = require('fs')\nvar origRealpath = fs.realpath\nvar origRealpathSync = fs.realpathSync\n\nvar version = process.version\nvar ok = /^v[0-5]\\./.test(version)\nvar old = require('./old.js')\n\nfunction newError (er) {\n return er && er.syscall === 'realpath' && (\n er.code === 'ELOOP' ||\n er.code === 'ENOMEM' ||\n er.code === 'ENAMETOOLONG'\n )\n}\n\nfunction realpath (p, cache, cb) {\n if (ok) {\n return origRealpath(p, cache, cb)\n }\n\n if (typeof cache === 'function') {\n cb = cache\n cache = null\n }\n origRealpath(p, cache, function (er, result) {\n if (newError(er)) {\n old.realpath(p, cache, cb)\n } else {\n cb(er, result)\n }\n })\n}\n\nfunction realpathSync (p, cache) {\n if (ok) {\n return origRealpathSync(p, cache)\n }\n\n try {\n return origRealpathSync(p, cache)\n } catch (er) {\n if (newError(er)) {\n return old.realpathSync(p, cache)\n } else {\n throw er\n }\n }\n}\n\nfunction monkeypatch () {\n fs.realpath = realpath\n fs.realpathSync = realpathSync\n}\n\nfunction unmonkeypatch () {\n fs.realpath = origRealpath\n fs.realpathSync = origRealpathSync\n}\n","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nvar pathModule = require('path');\nvar isWindows = process.platform === 'win32';\nvar fs = require('fs');\n\n// JavaScript implementation of realpath, ported from node pre-v6\n\nvar DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);\n\nfunction rethrow() {\n // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and\n // is fairly slow to generate.\n var callback;\n if (DEBUG) {\n var backtrace = new Error;\n callback = debugCallback;\n } else\n callback = missingCallback;\n\n return callback;\n\n function debugCallback(err) {\n if (err) {\n backtrace.message = err.message;\n err = backtrace;\n missingCallback(err);\n }\n }\n\n function missingCallback(err) {\n if (err) {\n if (process.throwDeprecation)\n throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs\n else if (!process.noDeprecation) {\n var msg = 'fs: missing callback ' + (err.stack || err.message);\n if (process.traceDeprecation)\n console.trace(msg);\n else\n console.error(msg);\n }\n }\n }\n}\n\nfunction maybeCallback(cb) {\n return typeof cb === 'function' ? cb : rethrow();\n}\n\nvar normalize = pathModule.normalize;\n\n// Regexp that finds the next partion of a (partial) path\n// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']\nif (isWindows) {\n var nextPartRe = /(.*?)(?:[\\/\\\\]+|$)/g;\n} else {\n var nextPartRe = /(.*?)(?:[\\/]+|$)/g;\n}\n\n// Regex to find the device root, including trailing slash. E.g. 'c:\\\\'.\nif (isWindows) {\n var splitRootRe = /^(?:[a-zA-Z]:|[\\\\\\/]{2}[^\\\\\\/]+[\\\\\\/][^\\\\\\/]+)?[\\\\\\/]*/;\n} else {\n var splitRootRe = /^[\\/]*/;\n}\n\nexports.realpathSync = function realpathSync(p, cache) {\n // make p is absolute\n p = pathModule.resolve(p);\n\n if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {\n return cache[p];\n }\n\n var original = p,\n seenLinks = {},\n knownHard = {};\n\n // current character position in p\n var pos;\n // the partial path so far, including a trailing slash if any\n var current;\n // the partial path without a trailing slash (except when pointing at a root)\n var base;\n // the partial path scanned in the previous round, with slash\n var previous;\n\n start();\n\n function start() {\n // Skip over roots\n var m = splitRootRe.exec(p);\n pos = m[0].length;\n current = m[0];\n base = m[0];\n previous = '';\n\n // On windows, check that the root exists. 
On unix there is no need.\n if (isWindows && !knownHard[base]) {\n fs.lstatSync(base);\n knownHard[base] = true;\n }\n }\n\n // walk down the path, swapping out linked pathparts for their real\n // values\n // NB: p.length changes.\n while (pos < p.length) {\n // find the next part\n nextPartRe.lastIndex = pos;\n var result = nextPartRe.exec(p);\n previous = current;\n current += result[0];\n base = previous + result[1];\n pos = nextPartRe.lastIndex;\n\n // continue if not a symlink\n if (knownHard[base] || (cache && cache[base] === base)) {\n continue;\n }\n\n var resolvedLink;\n if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {\n // some known symbolic link. no need to stat again.\n resolvedLink = cache[base];\n } else {\n var stat = fs.lstatSync(base);\n if (!stat.isSymbolicLink()) {\n knownHard[base] = true;\n if (cache) cache[base] = base;\n continue;\n }\n\n // read the link if it wasn't read before\n // dev/ino always return 0 on windows, so skip the check.\n var linkTarget = null;\n if (!isWindows) {\n var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);\n if (seenLinks.hasOwnProperty(id)) {\n linkTarget = seenLinks[id];\n }\n }\n if (linkTarget === null) {\n fs.statSync(base);\n linkTarget = fs.readlinkSync(base);\n }\n resolvedLink = pathModule.resolve(previous, linkTarget);\n // track this, if given a cache.\n if (cache) cache[base] = resolvedLink;\n if (!isWindows) seenLinks[id] = linkTarget;\n }\n\n // resolve the link, then start over\n p = pathModule.resolve(resolvedLink, p.slice(pos));\n start();\n }\n\n if (cache) cache[original] = p;\n\n return p;\n};\n\n\nexports.realpath = function realpath(p, cache, cb) {\n if (typeof cb !== 'function') {\n cb = maybeCallback(cache);\n cache = null;\n }\n\n // make p is absolute\n p = pathModule.resolve(p);\n\n if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {\n return process.nextTick(cb.bind(null, null, cache[p]));\n }\n\n var original = p,\n seenLinks = {},\n knownHard = {};\n\n // current character position in p\n var pos;\n // the partial path so far, including a trailing slash if any\n var current;\n // the partial path without a trailing slash (except when pointing at a root)\n var base;\n // the partial path scanned in the previous round, with slash\n var previous;\n\n start();\n\n function start() {\n // Skip over roots\n var m = splitRootRe.exec(p);\n pos = m[0].length;\n current = m[0];\n base = m[0];\n previous = '';\n\n // On windows, check that the root exists. On unix there is no need.\n if (isWindows && !knownHard[base]) {\n fs.lstat(base, function(err) {\n if (err) return cb(err);\n knownHard[base] = true;\n LOOP();\n });\n } else {\n process.nextTick(LOOP);\n }\n }\n\n // walk down the path, swapping out linked pathparts for their real\n // values\n function LOOP() {\n // stop if scanned past end of path\n if (pos >= p.length) {\n if (cache) cache[original] = p;\n return cb(null, p);\n }\n\n // find the next part\n nextPartRe.lastIndex = pos;\n var result = nextPartRe.exec(p);\n previous = current;\n current += result[0];\n base = previous + result[1];\n pos = nextPartRe.lastIndex;\n\n // continue if not a symlink\n if (knownHard[base] || (cache && cache[base] === base)) {\n return process.nextTick(LOOP);\n }\n\n if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {\n // known symbolic link. 
no need to stat again.\n return gotResolvedLink(cache[base]);\n }\n\n return fs.lstat(base, gotStat);\n }\n\n function gotStat(err, stat) {\n if (err) return cb(err);\n\n // if not a symlink, skip to the next path part\n if (!stat.isSymbolicLink()) {\n knownHard[base] = true;\n if (cache) cache[base] = base;\n return process.nextTick(LOOP);\n }\n\n // stat & read the link if not read before\n // call gotTarget as soon as the link target is known\n // dev/ino always return 0 on windows, so skip the check.\n if (!isWindows) {\n var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);\n if (seenLinks.hasOwnProperty(id)) {\n return gotTarget(null, seenLinks[id], base);\n }\n }\n fs.stat(base, function(err) {\n if (err) return cb(err);\n\n fs.readlink(base, function(err, target) {\n if (!isWindows) seenLinks[id] = target;\n gotTarget(err, target);\n });\n });\n }\n\n function gotTarget(err, target, base) {\n if (err) return cb(err);\n\n var resolvedLink = pathModule.resolve(previous, target);\n if (cache) cache[base] = resolvedLink;\n gotResolvedLink(resolvedLink);\n }\n\n function gotResolvedLink(resolvedLink) {\n // resolve the link, then start over\n p = pathModule.resolve(resolvedLink, p.slice(pos));\n start();\n }\n};\n","exports.Abstract = require('./lib/abstract.js')\nexports.Reader = require('./lib/reader.js')\nexports.Writer = require('./lib/writer.js')\n\nexports.File = {\n Reader: require('./lib/file-reader.js'),\n Writer: require('./lib/file-writer.js')\n}\n\nexports.Dir = {\n Reader: require('./lib/dir-reader.js'),\n Writer: require('./lib/dir-writer.js')\n}\n\nexports.Link = {\n Reader: require('./lib/link-reader.js'),\n Writer: require('./lib/link-writer.js')\n}\n\nexports.Proxy = {\n Reader: require('./lib/proxy-reader.js'),\n Writer: require('./lib/proxy-writer.js')\n}\n\nexports.Reader.Dir = exports.DirReader = exports.Dir.Reader\nexports.Reader.File = exports.FileReader = exports.File.Reader\nexports.Reader.Link = exports.LinkReader = exports.Link.Reader\nexports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader\n\nexports.Writer.Dir = exports.DirWriter = exports.Dir.Writer\nexports.Writer.File = exports.FileWriter = exports.File.Writer\nexports.Writer.Link = exports.LinkWriter = exports.Link.Writer\nexports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer\n\nexports.collect = require('./lib/collect.js')\n","// the parent class for all fstreams.\n\nmodule.exports = Abstract\n\nvar Stream = require('stream').Stream\nvar inherits = require('inherits')\n\nfunction Abstract () {\n Stream.call(this)\n}\n\ninherits(Abstract, Stream)\n\nAbstract.prototype.on = function (ev, fn) {\n if (ev === 'ready' && this.ready) {\n process.nextTick(fn.bind(this))\n } else {\n Stream.prototype.on.call(this, ev, fn)\n }\n return this\n}\n\nAbstract.prototype.abort = function () {\n this._aborted = true\n this.emit('abort')\n}\n\nAbstract.prototype.destroy = function () {}\n\nAbstract.prototype.warn = function (msg, code) {\n var self = this\n var er = decorate(msg, code, self)\n if (!self.listeners('warn')) {\n console.error('%s %s\\n' +\n 'path = %s\\n' +\n 'syscall = %s\\n' +\n 'fstream_type = %s\\n' +\n 'fstream_path = %s\\n' +\n 'fstream_unc_path = %s\\n' +\n 'fstream_class = %s\\n' +\n 'fstream_stack =\\n%s\\n',\n code || 'UNKNOWN',\n er.stack,\n er.path,\n er.syscall,\n er.fstream_type,\n er.fstream_path,\n er.fstream_unc_path,\n er.fstream_class,\n er.fstream_stack.join('\\n'))\n } else {\n self.emit('warn', er)\n }\n}\n\nAbstract.prototype.info = function (msg, code) 
{\n this.emit('info', msg, code)\n}\n\nAbstract.prototype.error = function (msg, code, th) {\n var er = decorate(msg, code, this)\n if (th) throw er\n else this.emit('error', er)\n}\n\nfunction decorate (er, code, self) {\n if (!(er instanceof Error)) er = new Error(er)\n er.code = er.code || code\n er.path = er.path || self.path\n er.fstream_type = er.fstream_type || self.type\n er.fstream_path = er.fstream_path || self.path\n if (self._path !== self.path) {\n er.fstream_unc_path = er.fstream_unc_path || self._path\n }\n if (self.linkpath) {\n er.fstream_linkpath = er.fstream_linkpath || self.linkpath\n }\n er.fstream_class = er.fstream_class || self.constructor.name\n er.fstream_stack = er.fstream_stack ||\n new Error().stack.split(/\\n/).slice(3).map(function (s) {\n return s.replace(/^ {4}at /, '')\n })\n\n return er\n}\n","module.exports = collect\n\nfunction collect (stream) {\n if (stream._collected) return\n\n if (stream._paused) return stream.on('resume', collect.bind(null, stream))\n\n stream._collected = true\n stream.pause()\n\n stream.on('data', save)\n stream.on('end', save)\n var buf = []\n function save (b) {\n if (typeof b === 'string') b = new Buffer(b)\n if (Buffer.isBuffer(b) && !b.length) return\n buf.push(b)\n }\n\n stream.on('entry', saveEntry)\n var entryBuffer = []\n function saveEntry (e) {\n collect(e)\n entryBuffer.push(e)\n }\n\n stream.on('proxy', proxyPause)\n function proxyPause (p) {\n p.pause()\n }\n\n // replace the pipe method with a new version that will\n // unlock the buffered stuff. if you just call .pipe()\n // without a destination, then it'll re-play the events.\n stream.pipe = (function (orig) {\n return function (dest) {\n // console.error(' === open the pipes', dest && dest.path)\n\n // let the entries flow through one at a time.\n // Once they're all done, then we can resume completely.\n var e = 0\n ;(function unblockEntry () {\n var entry = entryBuffer[e++]\n // console.error(\" ==== unblock entry\", entry && entry.path)\n if (!entry) return resume()\n entry.on('end', unblockEntry)\n if (dest) dest.add(entry)\n else stream.emit('entry', entry)\n })()\n\n function resume () {\n stream.removeListener('entry', saveEntry)\n stream.removeListener('data', save)\n stream.removeListener('end', save)\n\n stream.pipe = orig\n if (dest) stream.pipe(dest)\n\n buf.forEach(function (b) {\n if (b) stream.emit('data', b)\n else stream.emit('end')\n })\n\n stream.resume()\n }\n\n return dest\n }\n })(stream.pipe)\n}\n","// A thing that emits \"entry\" events with Reader objects\n// Pausing it causes it to stop emitting entry events, and also\n// pauses the current entry if there is one.\n\nmodule.exports = DirReader\n\nvar fs = require('graceful-fs')\nvar inherits = require('inherits')\nvar path = require('path')\nvar Reader = require('./reader.js')\nvar assert = require('assert').ok\n\ninherits(DirReader, Reader)\n\nfunction DirReader (props) {\n var self = this\n if (!(self instanceof DirReader)) {\n throw new Error('DirReader must be called as constructor.')\n }\n\n // should already be established as a Directory type\n if (props.type !== 'Directory' || !props.Directory) {\n throw new Error('Non-directory type ' + props.type)\n }\n\n self.entries = null\n self._index = -1\n self._paused = false\n self._length = -1\n\n if (props.sort) {\n this.sort = props.sort\n }\n\n Reader.call(this, props)\n}\n\nDirReader.prototype._getEntries = function () {\n var self = this\n\n // race condition. 
might pause() before calling _getEntries,\n // and then resume, and try to get them a second time.\n if (self._gotEntries) return\n self._gotEntries = true\n\n fs.readdir(self._path, function (er, entries) {\n if (er) return self.error(er)\n\n self.entries = entries\n\n self.emit('entries', entries)\n if (self._paused) self.once('resume', processEntries)\n else processEntries()\n\n function processEntries () {\n self._length = self.entries.length\n if (typeof self.sort === 'function') {\n self.entries = self.entries.sort(self.sort.bind(self))\n }\n self._read()\n }\n })\n}\n\n// start walking the dir, and emit an \"entry\" event for each one.\nDirReader.prototype._read = function () {\n var self = this\n\n if (!self.entries) return self._getEntries()\n\n if (self._paused || self._currentEntry || self._aborted) {\n // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted)\n return\n }\n\n self._index++\n if (self._index >= self.entries.length) {\n if (!self._ended) {\n self._ended = true\n self.emit('end')\n self.emit('close')\n }\n return\n }\n\n // ok, handle this one, then.\n\n // save creating a proxy, by stat'ing the thing now.\n var p = path.resolve(self._path, self.entries[self._index])\n assert(p !== self._path)\n assert(self.entries[self._index])\n\n // set this to prevent trying to _read() again in the stat time.\n self._currentEntry = p\n fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) {\n if (er) return self.error(er)\n\n var who = self._proxy || self\n\n stat.path = p\n stat.basename = path.basename(p)\n stat.dirname = path.dirname(p)\n var childProps = self.getChildProps.call(who, stat)\n childProps.path = p\n childProps.basename = path.basename(p)\n childProps.dirname = path.dirname(p)\n\n var entry = Reader(childProps, stat)\n\n // console.error(\"DR Entry\", p, stat.size)\n\n self._currentEntry = entry\n\n // \"entry\" events are for direct entries in a specific dir.\n // \"child\" events are for any and all children at all levels.\n // This nomenclature is not completely final.\n\n entry.on('pause', function (who) {\n if (!self._paused && !entry._disowned) {\n self.pause(who)\n }\n })\n\n entry.on('resume', function (who) {\n if (self._paused && !entry._disowned) {\n self.resume(who)\n }\n })\n\n entry.on('stat', function (props) {\n self.emit('_entryStat', entry, props)\n if (entry._aborted) return\n if (entry._paused) {\n entry.once('resume', function () {\n self.emit('entryStat', entry, props)\n })\n } else self.emit('entryStat', entry, props)\n })\n\n entry.on('ready', function EMITCHILD () {\n // console.error(\"DR emit child\", entry._path)\n if (self._paused) {\n // console.error(\" DR emit child - try again later\")\n // pause the child, and emit the \"entry\" event once we drain.\n // console.error(\"DR pausing child entry\")\n entry.pause(self)\n return self.once('resume', EMITCHILD)\n }\n\n // skip over sockets. they can't be piped around properly,\n // so there's really no sense even acknowledging them.\n // if someone really wants to see them, they can listen to\n // the \"socket\" events.\n if (entry.type === 'Socket') {\n self.emit('socket', entry)\n } else {\n self.emitEntry(entry)\n }\n })\n\n var ended = false\n entry.on('close', onend)\n entry.on('disown', onend)\n function onend () {\n if (ended) return\n ended = true\n self.emit('childEnd', entry)\n self.emit('entryEnd', entry)\n self._currentEntry = null\n if (!self._paused) {\n self._read()\n }\n }\n\n // XXX Remove this. 
Works in node as of 0.6.2 or so.\n // Long filenames should not break stuff.\n entry.on('error', function (er) {\n if (entry._swallowErrors) {\n self.warn(er)\n entry.emit('end')\n entry.emit('close')\n } else {\n self.emit('error', er)\n }\n })\n\n // proxy up some events.\n ;[\n 'child',\n 'childEnd',\n 'warn'\n ].forEach(function (ev) {\n entry.on(ev, self.emit.bind(self, ev))\n })\n })\n}\n\nDirReader.prototype.disown = function (entry) {\n entry.emit('beforeDisown')\n entry._disowned = true\n entry.parent = entry.root = null\n if (entry === this._currentEntry) {\n this._currentEntry = null\n }\n entry.emit('disown')\n}\n\nDirReader.prototype.getChildProps = function () {\n return {\n depth: this.depth + 1,\n root: this.root || this,\n parent: this,\n follow: this.follow,\n filter: this.filter,\n sort: this.props.sort,\n hardlinks: this.props.hardlinks\n }\n}\n\nDirReader.prototype.pause = function (who) {\n var self = this\n if (self._paused) return\n who = who || self\n self._paused = true\n if (self._currentEntry && self._currentEntry.pause) {\n self._currentEntry.pause(who)\n }\n self.emit('pause', who)\n}\n\nDirReader.prototype.resume = function (who) {\n var self = this\n if (!self._paused) return\n who = who || self\n\n self._paused = false\n // console.error('DR Emit Resume', self._path)\n self.emit('resume', who)\n if (self._paused) {\n // console.error('DR Re-paused', self._path)\n return\n }\n\n if (self._currentEntry) {\n if (self._currentEntry.resume) self._currentEntry.resume(who)\n } else self._read()\n}\n\nDirReader.prototype.emitEntry = function (entry) {\n this.emit('entry', entry)\n this.emit('child', entry)\n}\n","// It is expected that, when .add() returns false, the consumer\n// of the DirWriter will pause until a \"drain\" event occurs. Note\n// that this is *almost always going to be the case*, unless the\n// thing being written is some sort of unsupported type, and thus\n// skipped over.\n\nmodule.exports = DirWriter\n\nvar Writer = require('./writer.js')\nvar inherits = require('inherits')\nvar mkdir = require('mkdirp')\nvar path = require('path')\nvar collect = require('./collect.js')\n\ninherits(DirWriter, Writer)\n\nfunction DirWriter (props) {\n var self = this\n if (!(self instanceof DirWriter)) {\n self.error('DirWriter must be called as constructor.', null, true)\n }\n\n // should already be established as a Directory type\n if (props.type !== 'Directory' || !props.Directory) {\n self.error('Non-directory type ' + props.type + ' ' +\n JSON.stringify(props), null, true)\n }\n\n Writer.call(this, props)\n}\n\nDirWriter.prototype._create = function () {\n var self = this\n mkdir(self._path, Writer.dirmode, function (er) {\n if (er) return self.error(er)\n // ready to start getting entries!\n self.ready = true\n self.emit('ready')\n self._process()\n })\n}\n\n// a DirWriter has an add(entry) method, but its .write() doesn't\n// do anything. Why a no-op rather than a throw? 
Because this\n// leaves open the door for writing directory metadata for\n// gnu/solaris style dumpdirs.\nDirWriter.prototype.write = function () {\n return true\n}\n\nDirWriter.prototype.end = function () {\n this._ended = true\n this._process()\n}\n\nDirWriter.prototype.add = function (entry) {\n var self = this\n\n // console.error('\\tadd', entry._path, '->', self._path)\n collect(entry)\n if (!self.ready || self._currentEntry) {\n self._buffer.push(entry)\n return false\n }\n\n // create a new writer, and pipe the incoming entry into it.\n if (self._ended) {\n return self.error('add after end')\n }\n\n self._buffer.push(entry)\n self._process()\n\n return this._buffer.length === 0\n}\n\nDirWriter.prototype._process = function () {\n var self = this\n\n // console.error('DW Process p=%j', self._processing, self.basename)\n\n if (self._processing) return\n\n var entry = self._buffer.shift()\n if (!entry) {\n // console.error(\"DW Drain\")\n self.emit('drain')\n if (self._ended) self._finish()\n return\n }\n\n self._processing = true\n // console.error(\"DW Entry\", entry._path)\n\n self.emit('entry', entry)\n\n // ok, add this entry\n //\n // don't allow recursive copying\n var p = entry\n var pp\n do {\n pp = p._path || p.path\n if (pp === self.root._path || pp === self._path ||\n (pp && pp.indexOf(self._path) === 0)) {\n // console.error('DW Exit (recursive)', entry.basename, self._path)\n self._processing = false\n if (entry._collected) entry.pipe()\n return self._process()\n }\n p = p.parent\n } while (p)\n\n // console.error(\"DW not recursive\")\n\n // chop off the entry's root dir, replace with ours\n var props = {\n parent: self,\n root: self.root || self,\n type: entry.type,\n depth: self.depth + 1\n }\n\n pp = entry._path || entry.path || entry.props.path\n if (entry.parent) {\n pp = pp.substr(entry.parent._path.length + 1)\n }\n // get rid of any ../../ shenanigans\n props.path = path.join(self.path, path.join('/', pp))\n\n // if i have a filter, the child should inherit it.\n props.filter = self.filter\n\n // all the rest of the stuff, copy over from the source.\n Object.keys(entry.props).forEach(function (k) {\n if (!props.hasOwnProperty(k)) {\n props[k] = entry.props[k]\n }\n })\n\n // not sure at this point what kind of writer this is.\n var child = self._currentChild = new Writer(props)\n child.on('ready', function () {\n // console.error(\"DW Child Ready\", child.type, child._path)\n // console.error(\" resuming\", entry._path)\n entry.pipe(child)\n entry.resume()\n })\n\n // XXX Make this work in node.\n // Long filenames should not break stuff.\n child.on('error', function (er) {\n if (child._swallowErrors) {\n self.warn(er)\n child.emit('end')\n child.emit('close')\n } else {\n self.emit('error', er)\n }\n })\n\n // we fire _end internally *after* end, so that we don't move on\n // until any \"end\" listeners have had their chance to do stuff.\n child.on('close', onend)\n var ended = false\n function onend () {\n if (ended) return\n ended = true\n // console.error(\"* DW Child end\", child.basename)\n self._currentChild = null\n self._processing = false\n self._process()\n }\n}\n","// Basically just a wrapper around an fs.ReadStream\n\nmodule.exports = FileReader\n\nvar fs = require('graceful-fs')\nvar inherits = require('inherits')\nvar Reader = require('./reader.js')\nvar EOF = {EOF: true}\nvar CLOSE = {CLOSE: true}\n\ninherits(FileReader, Reader)\n\nfunction FileReader (props) {\n // console.error(\" FR create\", props.path, props.size, new Error().stack)\n var 
self = this\n if (!(self instanceof FileReader)) {\n throw new Error('FileReader must be called as constructor.')\n }\n\n // should already be established as a File type\n // XXX Todo: preserve hardlinks by tracking dev+inode+nlink,\n // with a HardLinkReader class.\n if (!((props.type === 'Link' && props.Link) ||\n (props.type === 'File' && props.File))) {\n throw new Error('Non-file type ' + props.type)\n }\n\n self._buffer = []\n self._bytesEmitted = 0\n Reader.call(self, props)\n}\n\nFileReader.prototype._getStream = function () {\n var self = this\n var stream = self._stream = fs.createReadStream(self._path, self.props)\n\n if (self.props.blksize) {\n stream.bufferSize = self.props.blksize\n }\n\n stream.on('open', self.emit.bind(self, 'open'))\n\n stream.on('data', function (c) {\n // console.error('\\t\\t%d %s', c.length, self.basename)\n self._bytesEmitted += c.length\n // no point saving empty chunks\n if (!c.length) {\n return\n } else if (self._paused || self._buffer.length) {\n self._buffer.push(c)\n self._read()\n } else self.emit('data', c)\n })\n\n stream.on('end', function () {\n if (self._paused || self._buffer.length) {\n // console.error('FR Buffering End', self._path)\n self._buffer.push(EOF)\n self._read()\n } else {\n self.emit('end')\n }\n\n if (self._bytesEmitted !== self.props.size) {\n self.error(\"Didn't get expected byte count\\n\" +\n 'expect: ' + self.props.size + '\\n' +\n 'actual: ' + self._bytesEmitted)\n }\n })\n\n stream.on('close', function () {\n if (self._paused || self._buffer.length) {\n // console.error('FR Buffering Close', self._path)\n self._buffer.push(CLOSE)\n self._read()\n } else {\n // console.error('FR close 1', self._path)\n self.emit('close')\n }\n })\n\n stream.on('error', function (e) {\n self.emit('error', e)\n })\n\n self._read()\n}\n\nFileReader.prototype._read = function () {\n var self = this\n // console.error('FR _read', self._path)\n if (self._paused) {\n // console.error('FR _read paused', self._path)\n return\n }\n\n if (!self._stream) {\n // console.error('FR _getStream calling', self._path)\n return self._getStream()\n }\n\n // clear out the buffer, if there is one.\n if (self._buffer.length) {\n // console.error('FR _read has buffer', self._buffer.length, self._path)\n var buf = self._buffer\n for (var i = 0, l = buf.length; i < l; i++) {\n var c = buf[i]\n if (c === EOF) {\n // console.error('FR Read emitting buffered end', self._path)\n self.emit('end')\n } else if (c === CLOSE) {\n // console.error('FR Read emitting buffered close', self._path)\n self.emit('close')\n } else {\n // console.error('FR Read emitting buffered data', self._path)\n self.emit('data', c)\n }\n\n if (self._paused) {\n // console.error('FR Read Re-pausing at '+i, self._path)\n self._buffer = buf.slice(i)\n return\n }\n }\n self._buffer.length = 0\n }\n// console.error(\"FR _read done\")\n// that's about all there is to it.\n}\n\nFileReader.prototype.pause = function (who) {\n var self = this\n // console.error('FR Pause', self._path)\n if (self._paused) return\n who = who || self\n self._paused = true\n if (self._stream) self._stream.pause()\n self.emit('pause', who)\n}\n\nFileReader.prototype.resume = function (who) {\n var self = this\n // console.error('FR Resume', self._path)\n if (!self._paused) return\n who = who || self\n self.emit('resume', who)\n self._paused = false\n if (self._stream) self._stream.resume()\n self._read()\n}\n","module.exports = FileWriter\n\nvar fs = require('graceful-fs')\nvar Writer = require('./writer.js')\nvar 
inherits = require('inherits')\nvar EOF = {}\n\ninherits(FileWriter, Writer)\n\nfunction FileWriter (props) {\n var self = this\n if (!(self instanceof FileWriter)) {\n throw new Error('FileWriter must be called as constructor.')\n }\n\n // should already be established as a File type\n if (props.type !== 'File' || !props.File) {\n throw new Error('Non-file type ' + props.type)\n }\n\n self._buffer = []\n self._bytesWritten = 0\n\n Writer.call(this, props)\n}\n\nFileWriter.prototype._create = function () {\n var self = this\n if (self._stream) return\n\n var so = {}\n if (self.props.flags) so.flags = self.props.flags\n so.mode = Writer.filemode\n if (self._old && self._old.blksize) so.bufferSize = self._old.blksize\n\n self._stream = fs.createWriteStream(self._path, so)\n\n self._stream.on('open', function () {\n // console.error(\"FW open\", self._buffer, self._path)\n self.ready = true\n self._buffer.forEach(function (c) {\n if (c === EOF) self._stream.end()\n else self._stream.write(c)\n })\n self.emit('ready')\n // give this a kick just in case it needs it.\n self.emit('drain')\n })\n\n self._stream.on('error', function (er) { self.emit('error', er) })\n\n self._stream.on('drain', function () { self.emit('drain') })\n\n self._stream.on('close', function () {\n // console.error('\\n\\nFW Stream Close', self._path, self.size)\n self._finish()\n })\n}\n\nFileWriter.prototype.write = function (c) {\n var self = this\n\n self._bytesWritten += c.length\n\n if (!self.ready) {\n if (!Buffer.isBuffer(c) && typeof c !== 'string') {\n throw new Error('invalid write data')\n }\n self._buffer.push(c)\n return false\n }\n\n var ret = self._stream.write(c)\n // console.error('\\t-- fw wrote, _stream says', ret, self._stream._queue.length)\n\n // allow 2 buffered writes, because otherwise there's just too\n // much stop and go bs.\n if (ret === false && self._stream._queue) {\n return self._stream._queue.length <= 2\n } else {\n return ret\n }\n}\n\nFileWriter.prototype.end = function (c) {\n var self = this\n\n if (c) self.write(c)\n\n if (!self.ready) {\n self._buffer.push(EOF)\n return false\n }\n\n return self._stream.end()\n}\n\nFileWriter.prototype._finish = function () {\n var self = this\n if (typeof self.size === 'number' && self._bytesWritten !== self.size) {\n self.error(\n 'Did not get expected byte count.\\n' +\n 'expect: ' + self.size + '\\n' +\n 'actual: ' + self._bytesWritten)\n }\n Writer.prototype._finish.call(self)\n}\n","module.exports = getType\n\nfunction getType (st) {\n var types = [\n 'Directory',\n 'File',\n 'SymbolicLink',\n 'Link', // special for hardlinks from tarballs\n 'BlockDevice',\n 'CharacterDevice',\n 'FIFO',\n 'Socket'\n ]\n var type\n\n if (st.type && types.indexOf(st.type) !== -1) {\n st[st.type] = true\n return st.type\n }\n\n for (var i = 0, l = types.length; i < l; i++) {\n type = types[i]\n var is = st[type] || st['is' + type]\n if (typeof is === 'function') is = is.call(st)\n if (is) {\n st[type] = true\n st.type = type\n return type\n }\n }\n\n return null\n}\n","// Basically just a wrapper around an fs.readlink\n//\n// XXX: Enhance this to support the Link type, by keeping\n// a lookup table of {:}, so that hardlinks\n// can be preserved in tarballs.\n\nmodule.exports = LinkReader\n\nvar fs = require('graceful-fs')\nvar inherits = require('inherits')\nvar Reader = require('./reader.js')\n\ninherits(LinkReader, Reader)\n\nfunction LinkReader (props) {\n var self = this\n if (!(self instanceof LinkReader)) {\n throw new Error('LinkReader must be called as 
constructor.')\n }\n\n if (!((props.type === 'Link' && props.Link) ||\n (props.type === 'SymbolicLink' && props.SymbolicLink))) {\n throw new Error('Non-link type ' + props.type)\n }\n\n Reader.call(self, props)\n}\n\n// When piping a LinkReader into a LinkWriter, we have to\n// already have the linkpath property set, so that has to\n// happen *before* the \"ready\" event, which means we need to\n// override the _stat method.\nLinkReader.prototype._stat = function (currentStat) {\n var self = this\n fs.readlink(self._path, function (er, linkpath) {\n if (er) return self.error(er)\n self.linkpath = self.props.linkpath = linkpath\n self.emit('linkpath', linkpath)\n Reader.prototype._stat.call(self, currentStat)\n })\n}\n\nLinkReader.prototype._read = function () {\n var self = this\n if (self._paused) return\n // basically just a no-op, since we got all the info we need\n // from the _stat method\n if (!self._ended) {\n self.emit('end')\n self.emit('close')\n self._ended = true\n }\n}\n","module.exports = LinkWriter\n\nvar fs = require('graceful-fs')\nvar Writer = require('./writer.js')\nvar inherits = require('inherits')\nvar path = require('path')\nvar rimraf = require('rimraf')\n\ninherits(LinkWriter, Writer)\n\nfunction LinkWriter (props) {\n var self = this\n if (!(self instanceof LinkWriter)) {\n throw new Error('LinkWriter must be called as constructor.')\n }\n\n // should already be established as a Link type\n if (!((props.type === 'Link' && props.Link) ||\n (props.type === 'SymbolicLink' && props.SymbolicLink))) {\n throw new Error('Non-link type ' + props.type)\n }\n\n if (props.linkpath === '') props.linkpath = '.'\n if (!props.linkpath) {\n self.error('Need linkpath property to create ' + props.type)\n }\n\n Writer.call(this, props)\n}\n\nLinkWriter.prototype._create = function () {\n // console.error(\" LW _create\")\n var self = this\n var hard = self.type === 'Link' || process.platform === 'win32'\n var link = hard ? 'link' : 'symlink'\n var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath\n\n // can only change the link path by clobbering\n // For hard links, let's just assume that's always the case, since\n // there's no good way to read them if we don't already know.\n if (hard) return clobber(self, lp, link)\n\n fs.readlink(self._path, function (er, p) {\n // only skip creation if it's exactly the same link\n if (p && p === lp) return finish(self)\n clobber(self, lp, link)\n })\n}\n\nfunction clobber (self, lp, link) {\n rimraf(self._path, function (er) {\n if (er) return self.error(er)\n create(self, lp, link)\n })\n}\n\nfunction create (self, lp, link) {\n fs[link](lp, self._path, function (er) {\n // if this is a hard link, and we're in the process of writing out a\n // directory, it's very possible that the thing we're linking to\n // doesn't exist yet (especially if it was intended as a symlink),\n // so swallow ENOENT errors here and just soldier in.\n // Additionally, an EPERM or EACCES can happen on win32 if it's trying\n // to make a link to a directory. 
Again, just skip it.\n // A better solution would be to have fs.symlink be supported on\n // windows in some nice fashion.\n if (er) {\n if ((er.code === 'ENOENT' ||\n er.code === 'EACCES' ||\n er.code === 'EPERM') && process.platform === 'win32') {\n self.ready = true\n self.emit('ready')\n self.emit('end')\n self.emit('close')\n self.end = self._finish = function () {}\n } else return self.error(er)\n }\n finish(self)\n })\n}\n\nfunction finish (self) {\n self.ready = true\n self.emit('ready')\n if (self._ended && !self._finished) self._finish()\n}\n\nLinkWriter.prototype.end = function () {\n // console.error(\"LW finish in end\")\n this._ended = true\n if (this.ready) {\n this._finished = true\n this._finish()\n }\n}\n","// A reader for when we don't yet know what kind of thing\n// the thing is.\n\nmodule.exports = ProxyReader\n\nvar Reader = require('./reader.js')\nvar getType = require('./get-type.js')\nvar inherits = require('inherits')\nvar fs = require('graceful-fs')\n\ninherits(ProxyReader, Reader)\n\nfunction ProxyReader (props) {\n var self = this\n if (!(self instanceof ProxyReader)) {\n throw new Error('ProxyReader must be called as constructor.')\n }\n\n self.props = props\n self._buffer = []\n self.ready = false\n\n Reader.call(self, props)\n}\n\nProxyReader.prototype._stat = function () {\n var self = this\n var props = self.props\n // stat the thing to see what the proxy should be.\n var stat = props.follow ? 'stat' : 'lstat'\n\n fs[stat](props.path, function (er, current) {\n var type\n if (er || !current) {\n type = 'File'\n } else {\n type = getType(current)\n }\n\n props[type] = true\n props.type = self.type = type\n\n self._old = current\n self._addProxy(Reader(props, current))\n })\n}\n\nProxyReader.prototype._addProxy = function (proxy) {\n var self = this\n if (self._proxyTarget) {\n return self.error('proxy already set')\n }\n\n self._proxyTarget = proxy\n proxy._proxy = self\n\n ;[\n 'error',\n 'data',\n 'end',\n 'close',\n 'linkpath',\n 'entry',\n 'entryEnd',\n 'child',\n 'childEnd',\n 'warn',\n 'stat'\n ].forEach(function (ev) {\n // console.error('~~ proxy event', ev, self.path)\n proxy.on(ev, self.emit.bind(self, ev))\n })\n\n self.emit('proxy', proxy)\n\n proxy.on('ready', function () {\n // console.error(\"~~ proxy is ready!\", self.path)\n self.ready = true\n self.emit('ready')\n })\n\n var calls = self._buffer\n self._buffer.length = 0\n calls.forEach(function (c) {\n proxy[c[0]].apply(proxy, c[1])\n })\n}\n\nProxyReader.prototype.pause = function () {\n return this._proxyTarget ? this._proxyTarget.pause() : false\n}\n\nProxyReader.prototype.resume = function () {\n return this._proxyTarget ? this._proxyTarget.resume() : false\n}\n","// A writer for when we don't know what kind of thing\n// the thing is. 
That is, it's not explicitly set,\n// so we're going to make it whatever the thing already\n// is, or \"File\"\n//\n// Until then, collect all events.\n\nmodule.exports = ProxyWriter\n\nvar Writer = require('./writer.js')\nvar getType = require('./get-type.js')\nvar inherits = require('inherits')\nvar collect = require('./collect.js')\nvar fs = require('fs')\n\ninherits(ProxyWriter, Writer)\n\nfunction ProxyWriter (props) {\n var self = this\n if (!(self instanceof ProxyWriter)) {\n throw new Error('ProxyWriter must be called as constructor.')\n }\n\n self.props = props\n self._needDrain = false\n\n Writer.call(self, props)\n}\n\nProxyWriter.prototype._stat = function () {\n var self = this\n var props = self.props\n // stat the thing to see what the proxy should be.\n var stat = props.follow ? 'stat' : 'lstat'\n\n fs[stat](props.path, function (er, current) {\n var type\n if (er || !current) {\n type = 'File'\n } else {\n type = getType(current)\n }\n\n props[type] = true\n props.type = self.type = type\n\n self._old = current\n self._addProxy(Writer(props, current))\n })\n}\n\nProxyWriter.prototype._addProxy = function (proxy) {\n // console.error(\"~~ set proxy\", this.path)\n var self = this\n if (self._proxy) {\n return self.error('proxy already set')\n }\n\n self._proxy = proxy\n ;[\n 'ready',\n 'error',\n 'close',\n 'pipe',\n 'drain',\n 'warn'\n ].forEach(function (ev) {\n proxy.on(ev, self.emit.bind(self, ev))\n })\n\n self.emit('proxy', proxy)\n\n var calls = self._buffer\n calls.forEach(function (c) {\n // console.error(\"~~ ~~ proxy buffered call\", c[0], c[1])\n proxy[c[0]].apply(proxy, c[1])\n })\n self._buffer.length = 0\n if (self._needsDrain) self.emit('drain')\n}\n\nProxyWriter.prototype.add = function (entry) {\n // console.error(\"~~ proxy add\")\n collect(entry)\n\n if (!this._proxy) {\n this._buffer.push(['add', [entry]])\n this._needDrain = true\n return false\n }\n return this._proxy.add(entry)\n}\n\nProxyWriter.prototype.write = function (c) {\n // console.error('~~ proxy write')\n if (!this._proxy) {\n this._buffer.push(['write', [c]])\n this._needDrain = true\n return false\n }\n return this._proxy.write(c)\n}\n\nProxyWriter.prototype.end = function (c) {\n // console.error('~~ proxy end')\n if (!this._proxy) {\n this._buffer.push(['end', [c]])\n return false\n }\n return this._proxy.end(c)\n}\n","module.exports = Reader\n\nvar fs = require('graceful-fs')\nvar Stream = require('stream').Stream\nvar inherits = require('inherits')\nvar path = require('path')\nvar getType = require('./get-type.js')\nvar hardLinks = Reader.hardLinks = {}\nvar Abstract = require('./abstract.js')\n\n// Must do this *before* loading the child classes\ninherits(Reader, Abstract)\n\nvar LinkReader = require('./link-reader.js')\n\nfunction Reader (props, currentStat) {\n var self = this\n if (!(self instanceof Reader)) return new Reader(props, currentStat)\n\n if (typeof props === 'string') {\n props = { path: props }\n }\n\n // polymorphism.\n // call fstream.Reader(dir) to get a DirReader object, etc.\n // Note that, unlike in the Writer case, ProxyReader is going\n // to be the *normal* state of affairs, since we rarely know\n // the type of a file prior to reading it.\n\n var type\n var ClassType\n\n if (props.type && typeof props.type === 'function') {\n type = props.type\n ClassType = type\n } else {\n type = getType(props)\n ClassType = Reader\n }\n\n if (currentStat && !type) {\n type = getType(currentStat)\n props[type] = true\n props.type = type\n }\n\n switch (type) {\n case 
'Directory':\n ClassType = require('./dir-reader.js')\n break\n\n case 'Link':\n // XXX hard links are just files.\n // However, it would be good to keep track of files' dev+inode\n // and nlink values, and create a HardLinkReader that emits\n // a linkpath value of the original copy, so that the tar\n // writer can preserve them.\n // ClassType = HardLinkReader\n // break\n\n case 'File':\n ClassType = require('./file-reader.js')\n break\n\n case 'SymbolicLink':\n ClassType = LinkReader\n break\n\n case 'Socket':\n ClassType = require('./socket-reader.js')\n break\n\n case null:\n ClassType = require('./proxy-reader.js')\n break\n }\n\n if (!(self instanceof ClassType)) {\n return new ClassType(props)\n }\n\n Abstract.call(self)\n\n if (!props.path) {\n self.error('Must provide a path', null, true)\n }\n\n self.readable = true\n self.writable = false\n\n self.type = type\n self.props = props\n self.depth = props.depth = props.depth || 0\n self.parent = props.parent || null\n self.root = props.root || (props.parent && props.parent.root) || self\n\n self._path = self.path = path.resolve(props.path)\n if (process.platform === 'win32') {\n self.path = self._path = self.path.replace(/\\?/g, '_')\n if (self._path.length >= 260) {\n // how DOES one create files on the moon?\n // if the path has spaces in it, then UNC will fail.\n self._swallowErrors = true\n // if (self._path.indexOf(\" \") === -1) {\n self._path = '\\\\\\\\?\\\\' + self.path.replace(/\\//g, '\\\\')\n // }\n }\n }\n self.basename = props.basename = path.basename(self.path)\n self.dirname = props.dirname = path.dirname(self.path)\n\n // these have served their purpose, and are now just noisy clutter\n props.parent = props.root = null\n\n // console.error(\"\\n\\n\\n%s setting size to\", props.path, props.size)\n self.size = props.size\n self.filter = typeof props.filter === 'function' ? props.filter : null\n if (props.sort === 'alpha') props.sort = alphasort\n\n // start the ball rolling.\n // this will stat the thing, and then call self._read()\n // to start reading whatever it is.\n // console.error(\"calling stat\", props.path, currentStat)\n self._stat(currentStat)\n}\n\nfunction alphasort (a, b) {\n return a === b ? 0\n : a.toLowerCase() > b.toLowerCase() ? 1\n : a.toLowerCase() < b.toLowerCase() ? -1\n : a > b ? 1\n : -1\n}\n\nReader.prototype._stat = function (currentStat) {\n var self = this\n var props = self.props\n var stat = props.follow ? 
'stat' : 'lstat'\n // console.error(\"Reader._stat\", self._path, currentStat)\n if (currentStat) process.nextTick(statCb.bind(null, null, currentStat))\n else fs[stat](self._path, statCb)\n\n function statCb (er, props_) {\n // console.error(\"Reader._stat, statCb\", self._path, props_, props_.nlink)\n if (er) return self.error(er)\n\n Object.keys(props_).forEach(function (k) {\n props[k] = props_[k]\n })\n\n // if it's not the expected size, then abort here.\n if (undefined !== self.size && props.size !== self.size) {\n return self.error('incorrect size')\n }\n self.size = props.size\n\n var type = getType(props)\n var handleHardlinks = props.hardlinks !== false\n\n // special little thing for handling hardlinks.\n if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) {\n var k = props.dev + ':' + props.ino\n // console.error(\"Reader has nlink\", self._path, k)\n if (hardLinks[k] === self._path || !hardLinks[k]) {\n hardLinks[k] = self._path\n } else {\n // switch into hardlink mode.\n type = self.type = self.props.type = 'Link'\n self.Link = self.props.Link = true\n self.linkpath = self.props.linkpath = hardLinks[k]\n // console.error(\"Hardlink detected, switching mode\", self._path, self.linkpath)\n // Setting __proto__ would arguably be the \"correct\"\n // approach here, but that just seems too wrong.\n self._stat = self._read = LinkReader.prototype._read\n }\n }\n\n if (self.type && self.type !== type) {\n self.error('Unexpected type: ' + type)\n }\n\n // if the filter doesn't pass, then just skip over this one.\n // still have to emit end so that dir-walking can move on.\n if (self.filter) {\n var who = self._proxy || self\n // special handling for ProxyReaders\n if (!self.filter.call(who, who, props)) {\n if (!self._disowned) {\n self.abort()\n self.emit('end')\n self.emit('close')\n }\n return\n }\n }\n\n // last chance to abort or disown before the flow starts!\n var events = ['_stat', 'stat', 'ready']\n var e = 0\n ;(function go () {\n if (self._aborted) {\n self.emit('end')\n self.emit('close')\n return\n }\n\n if (self._paused && self.type !== 'Directory') {\n self.once('resume', go)\n return\n }\n\n var ev = events[e++]\n if (!ev) {\n return self._read()\n }\n self.emit(ev, props)\n go()\n })()\n }\n}\n\nReader.prototype.pipe = function (dest) {\n var self = this\n if (typeof dest.add === 'function') {\n // piping to a multi-compatible, and we've got directory entries.\n self.on('entry', function (entry) {\n var ret = dest.add(entry)\n if (ret === false) {\n self.pause()\n }\n })\n }\n\n // console.error(\"R Pipe apply Stream Pipe\")\n return Stream.prototype.pipe.apply(this, arguments)\n}\n\nReader.prototype.pause = function (who) {\n this._paused = true\n who = who || this\n this.emit('pause', who)\n if (this._stream) this._stream.pause(who)\n}\n\nReader.prototype.resume = function (who) {\n this._paused = false\n who = who || this\n this.emit('resume', who)\n if (this._stream) this._stream.resume(who)\n this._read()\n}\n\nReader.prototype._read = function () {\n this.error('Cannot read unknown type: ' + this.type)\n}\n","// Just get the stats, and then don't do anything.\n// You can't really \"read\" from a socket. 
You \"connect\" to it.\n// Mostly, this is here so that reading a dir with a socket in it\n// doesn't blow up.\n\nmodule.exports = SocketReader\n\nvar inherits = require('inherits')\nvar Reader = require('./reader.js')\n\ninherits(SocketReader, Reader)\n\nfunction SocketReader (props) {\n var self = this\n if (!(self instanceof SocketReader)) {\n throw new Error('SocketReader must be called as constructor.')\n }\n\n if (!(props.type === 'Socket' && props.Socket)) {\n throw new Error('Non-socket type ' + props.type)\n }\n\n Reader.call(self, props)\n}\n\nSocketReader.prototype._read = function () {\n var self = this\n if (self._paused) return\n // basically just a no-op, since we got all the info we have\n // from the _stat method\n if (!self._ended) {\n self.emit('end')\n self.emit('close')\n self._ended = true\n }\n}\n","module.exports = Writer\n\nvar fs = require('graceful-fs')\nvar inherits = require('inherits')\nvar rimraf = require('rimraf')\nvar mkdir = require('mkdirp')\nvar path = require('path')\nvar umask = process.platform === 'win32' ? 0 : process.umask()\nvar getType = require('./get-type.js')\nvar Abstract = require('./abstract.js')\n\n// Must do this *before* loading the child classes\ninherits(Writer, Abstract)\n\nWriter.dirmode = parseInt('0777', 8) & (~umask)\nWriter.filemode = parseInt('0666', 8) & (~umask)\n\nvar DirWriter = require('./dir-writer.js')\nvar LinkWriter = require('./link-writer.js')\nvar FileWriter = require('./file-writer.js')\nvar ProxyWriter = require('./proxy-writer.js')\n\n// props is the desired state. current is optionally the current stat,\n// provided here so that subclasses can avoid statting the target\n// more than necessary.\nfunction Writer (props, current) {\n var self = this\n\n if (typeof props === 'string') {\n props = { path: props }\n }\n\n // polymorphism.\n // call fstream.Writer(dir) to get a DirWriter object, etc.\n var type = getType(props)\n var ClassType = Writer\n\n switch (type) {\n case 'Directory':\n ClassType = DirWriter\n break\n case 'File':\n ClassType = FileWriter\n break\n case 'Link':\n case 'SymbolicLink':\n ClassType = LinkWriter\n break\n case null:\n default:\n // Don't know yet what type to create, so we wrap in a proxy.\n ClassType = ProxyWriter\n break\n }\n\n if (!(self instanceof ClassType)) return new ClassType(props)\n\n // now get down to business.\n\n Abstract.call(self)\n\n if (!props.path) self.error('Must provide a path', null, true)\n\n // props is what we want to set.\n // set some convenience properties as well.\n self.type = props.type\n self.props = props\n self.depth = props.depth || 0\n self.clobber = props.clobber === false ? 
props.clobber : true\n self.parent = props.parent || null\n self.root = props.root || (props.parent && props.parent.root) || self\n\n self._path = self.path = path.resolve(props.path)\n if (process.platform === 'win32') {\n self.path = self._path = self.path.replace(/\\?/g, '_')\n if (self._path.length >= 260) {\n self._swallowErrors = true\n self._path = '\\\\\\\\?\\\\' + self.path.replace(/\\//g, '\\\\')\n }\n }\n self.basename = path.basename(props.path)\n self.dirname = path.dirname(props.path)\n self.linkpath = props.linkpath || null\n\n props.parent = props.root = null\n\n // console.error(\"\\n\\n\\n%s setting size to\", props.path, props.size)\n self.size = props.size\n\n if (typeof props.mode === 'string') {\n props.mode = parseInt(props.mode, 8)\n }\n\n self.readable = false\n self.writable = true\n\n // buffer until ready, or while handling another entry\n self._buffer = []\n self.ready = false\n\n self.filter = typeof props.filter === 'function' ? props.filter : null\n\n // start the ball rolling.\n // this checks what's there already, and then calls\n // self._create() to call the impl-specific creation stuff.\n self._stat(current)\n}\n\n// Calling this means that it's something we can't create.\n// Just assert that it's already there, otherwise raise a warning.\nWriter.prototype._create = function () {\n var self = this\n fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) {\n if (er) {\n return self.warn('Cannot create ' + self._path + '\\n' +\n 'Unsupported type: ' + self.type, 'ENOTSUP')\n }\n self._finish()\n })\n}\n\nWriter.prototype._stat = function (current) {\n var self = this\n var props = self.props\n var stat = props.follow ? 'stat' : 'lstat'\n var who = self._proxy || self\n\n if (current) statCb(null, current)\n else fs[stat](self._path, statCb)\n\n function statCb (er, current) {\n if (self.filter && !self.filter.call(who, who, current)) {\n self._aborted = true\n self.emit('end')\n self.emit('close')\n return\n }\n\n // if it's not there, great. We'll just create it.\n // if it is there, then we'll need to change whatever differs\n if (er || !current) {\n return create(self)\n }\n\n self._old = current\n var currentType = getType(current)\n\n // if it's a type change, then we need to clobber or error.\n // if it's not a type change, then let the impl take care of it.\n if (currentType !== self.type || self.type === 'File' && current.nlink > 1) {\n return rimraf(self._path, function (er) {\n if (er) return self.error(er)\n self._old = null\n create(self)\n })\n }\n\n // otherwise, just handle in the app-specific way\n // this creates a fs.WriteStream, or mkdir's, or whatever\n create(self)\n }\n}\n\nfunction create (self) {\n // console.error(\"W create\", self._path, Writer.dirmode)\n\n // XXX Need to clobber non-dirs that are in the way,\n // unless { clobber: false } in the props.\n mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) {\n // console.error(\"W created\", path.dirname(self._path), er)\n if (er) return self.error(er)\n\n // later on, we have to set the mode and owner for these\n self._madeDir = made\n return self._create()\n })\n}\n\nfunction endChmod (self, want, current, path, cb) {\n var wantMode = want.mode\n var chmod = want.follow || self.type !== 'SymbolicLink'\n ? 
'chmod' : 'lchmod'\n\n if (!fs[chmod]) return cb()\n if (typeof wantMode !== 'number') return cb()\n\n var curMode = current.mode & parseInt('0777', 8)\n wantMode = wantMode & parseInt('0777', 8)\n if (wantMode === curMode) return cb()\n\n fs[chmod](path, wantMode, cb)\n}\n\nfunction endChown (self, want, current, path, cb) {\n // Don't even try it unless root. Too easy to EPERM.\n if (process.platform === 'win32') return cb()\n if (!process.getuid || process.getuid() !== 0) return cb()\n if (typeof want.uid !== 'number' &&\n typeof want.gid !== 'number') return cb()\n\n if (current.uid === want.uid &&\n current.gid === want.gid) return cb()\n\n var chown = (self.props.follow || self.type !== 'SymbolicLink')\n ? 'chown' : 'lchown'\n if (!fs[chown]) return cb()\n\n if (typeof want.uid !== 'number') want.uid = current.uid\n if (typeof want.gid !== 'number') want.gid = current.gid\n\n fs[chown](path, want.uid, want.gid, cb)\n}\n\nfunction endUtimes (self, want, current, path, cb) {\n if (!fs.utimes || process.platform === 'win32') return cb()\n\n var utimes = (want.follow || self.type !== 'SymbolicLink')\n ? 'utimes' : 'lutimes'\n\n if (utimes === 'lutimes' && !fs[utimes]) {\n utimes = 'utimes'\n }\n\n if (!fs[utimes]) return cb()\n\n var curA = current.atime\n var curM = current.mtime\n var meA = want.atime\n var meM = want.mtime\n\n if (meA === undefined) meA = curA\n if (meM === undefined) meM = curM\n\n if (!isDate(meA)) meA = new Date(meA)\n if (!isDate(meM)) meA = new Date(meM)\n\n if (meA.getTime() === curA.getTime() &&\n meM.getTime() === curM.getTime()) return cb()\n\n fs[utimes](path, meA, meM, cb)\n}\n\n// XXX This function is beastly. Break it up!\nWriter.prototype._finish = function () {\n var self = this\n\n if (self._finishing) return\n self._finishing = true\n\n // console.error(\" W Finish\", self._path, self.size)\n\n // set up all the things.\n // At this point, we're already done writing whatever we've gotta write,\n // adding files to the dir, etc.\n var todo = 0\n var errState = null\n var done = false\n\n if (self._old) {\n // the times will almost *certainly* have changed.\n // adds the utimes syscall, but remove another stat.\n self._old.atime = new Date(0)\n self._old.mtime = new Date(0)\n // console.error(\" W Finish Stale Stat\", self._path, self.size)\n setProps(self._old)\n } else {\n var stat = self.props.follow ? 
'stat' : 'lstat'\n // console.error(\" W Finish Stating\", self._path, self.size)\n fs[stat](self._path, function (er, current) {\n // console.error(\" W Finish Stated\", self._path, self.size, current)\n if (er) {\n // if we're in the process of writing out a\n // directory, it's very possible that the thing we're linking to\n // doesn't exist yet (especially if it was intended as a symlink),\n // so swallow ENOENT errors here and just soldier on.\n if (er.code === 'ENOENT' &&\n (self.type === 'Link' || self.type === 'SymbolicLink') &&\n process.platform === 'win32') {\n self.ready = true\n self.emit('ready')\n self.emit('end')\n self.emit('close')\n self.end = self._finish = function () {}\n return\n } else return self.error(er)\n }\n setProps(self._old = current)\n })\n }\n\n return\n\n function setProps (current) {\n todo += 3\n endChmod(self, self.props, current, self._path, next('chmod'))\n endChown(self, self.props, current, self._path, next('chown'))\n endUtimes(self, self.props, current, self._path, next('utimes'))\n }\n\n function next (what) {\n return function (er) {\n // console.error(\" W Finish\", what, todo)\n if (errState) return\n if (er) {\n er.fstream_finish_call = what\n return self.error(errState = er)\n }\n if (--todo > 0) return\n if (done) return\n done = true\n\n // we may still need to set the mode/etc. on some parent dirs\n // that were created previously. delay end/close until then.\n if (!self._madeDir) return end()\n else endMadeDir(self, self._path, end)\n\n function end (er) {\n if (er) {\n er.fstream_finish_call = 'setupMadeDir'\n return self.error(er)\n }\n // all the props have been set, so we're completely done.\n self.emit('end')\n self.emit('close')\n }\n }\n }\n}\n\nfunction endMadeDir (self, p, cb) {\n var made = self._madeDir\n // everything *between* made and path.dirname(self._path)\n // needs to be set up. 
Note that this may just be one dir.\n var d = path.dirname(p)\n\n endMadeDir_(self, d, function (er) {\n if (er) return cb(er)\n if (d === made) {\n return cb()\n }\n endMadeDir(self, d, cb)\n })\n}\n\nfunction endMadeDir_ (self, p, cb) {\n var dirProps = {}\n Object.keys(self.props).forEach(function (k) {\n dirProps[k] = self.props[k]\n\n // only make non-readable dirs if explicitly requested.\n if (k === 'mode' && self.type !== 'Directory') {\n dirProps[k] = dirProps[k] | parseInt('0111', 8)\n }\n })\n\n var todo = 3\n var errState = null\n fs.stat(p, function (er, current) {\n if (er) return cb(errState = er)\n endChmod(self, dirProps, current, p, next)\n endChown(self, dirProps, current, p, next)\n endUtimes(self, dirProps, current, p, next)\n })\n\n function next (er) {\n if (errState) return\n if (er) return cb(errState = er)\n if (--todo === 0) return cb()\n }\n}\n\nWriter.prototype.pipe = function () {\n this.error(\"Can't pipe from writable stream\")\n}\n\nWriter.prototype.add = function () {\n this.error(\"Can't add to non-Directory type\")\n}\n\nWriter.prototype.write = function () {\n return true\n}\n\nfunction objectToString (d) {\n return Object.prototype.toString.call(d)\n}\n\nfunction isDate (d) {\n return typeof d === 'object' && objectToString(d) === '[object Date]'\n}\n","var path = require('path');\nvar fs = require('fs');\nvar _0777 = parseInt('0777', 8);\n\nmodule.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;\n\nfunction mkdirP (p, opts, f, made) {\n if (typeof opts === 'function') {\n f = opts;\n opts = {};\n }\n else if (!opts || typeof opts !== 'object') {\n opts = { mode: opts };\n }\n \n var mode = opts.mode;\n var xfs = opts.fs || fs;\n \n if (mode === undefined) {\n mode = _0777\n }\n if (!made) made = null;\n \n var cb = f || function () {};\n p = path.resolve(p);\n \n xfs.mkdir(p, mode, function (er) {\n if (!er) {\n made = made || p;\n return cb(null, made);\n }\n switch (er.code) {\n case 'ENOENT':\n if (path.dirname(p) === p) return cb(er);\n mkdirP(path.dirname(p), opts, function (er, made) {\n if (er) cb(er, made);\n else mkdirP(p, opts, cb, made);\n });\n break;\n\n // In the case of any other error, just see if there's a dir\n // there already. If so, then hooray! If not, then something\n // is borked.\n default:\n xfs.stat(p, function (er2, stat) {\n // if the stat fails, then that's super weird.\n // let the original error be the failure reason.\n if (er2 || !stat.isDirectory()) cb(er, made)\n else cb(null, made);\n });\n break;\n }\n });\n}\n\nmkdirP.sync = function sync (p, opts, made) {\n if (!opts || typeof opts !== 'object') {\n opts = { mode: opts };\n }\n \n var mode = opts.mode;\n var xfs = opts.fs || fs;\n \n if (mode === undefined) {\n mode = _0777\n }\n if (!made) made = null;\n\n p = path.resolve(p);\n\n try {\n xfs.mkdirSync(p, mode);\n made = made || p;\n }\n catch (err0) {\n switch (err0.code) {\n case 'ENOENT' :\n made = sync(path.dirname(p), opts, made);\n sync(p, opts, made);\n break;\n\n // In the case of any other error, just see if there's a dir\n // there already. If so, then hooray! 
If not, then something\n // is borked.\n default:\n var stat;\n try {\n stat = xfs.statSync(p);\n }\n catch (err1) {\n throw err0;\n }\n if (!stat.isDirectory()) throw err0;\n break;\n }\n }\n\n return made;\n};\n","module.exports = rimraf\nrimraf.sync = rimrafSync\n\nvar assert = require(\"assert\")\nvar path = require(\"path\")\nvar fs = require(\"fs\")\nvar glob = undefined\ntry {\n glob = require(\"glob\")\n} catch (_err) {\n // treat glob as optional.\n}\nvar _0666 = parseInt('666', 8)\n\nvar defaultGlobOpts = {\n nosort: true,\n silent: true\n}\n\n// for EMFILE handling\nvar timeout = 0\n\nvar isWindows = (process.platform === \"win32\")\n\nfunction defaults (options) {\n var methods = [\n 'unlink',\n 'chmod',\n 'stat',\n 'lstat',\n 'rmdir',\n 'readdir'\n ]\n methods.forEach(function(m) {\n options[m] = options[m] || fs[m]\n m = m + 'Sync'\n options[m] = options[m] || fs[m]\n })\n\n options.maxBusyTries = options.maxBusyTries || 3\n options.emfileWait = options.emfileWait || 1000\n if (options.glob === false) {\n options.disableGlob = true\n }\n if (options.disableGlob !== true && glob === undefined) {\n throw Error('glob dependency not found, set `options.disableGlob = true` if intentional')\n }\n options.disableGlob = options.disableGlob || false\n options.glob = options.glob || defaultGlobOpts\n}\n\nfunction rimraf (p, options, cb) {\n if (typeof options === 'function') {\n cb = options\n options = {}\n }\n\n assert(p, 'rimraf: missing path')\n assert.equal(typeof p, 'string', 'rimraf: path should be a string')\n assert.equal(typeof cb, 'function', 'rimraf: callback function required')\n assert(options, 'rimraf: invalid options argument provided')\n assert.equal(typeof options, 'object', 'rimraf: options should be object')\n\n defaults(options)\n\n var busyTries = 0\n var errState = null\n var n = 0\n\n if (options.disableGlob || !glob.hasMagic(p))\n return afterGlob(null, [p])\n\n options.lstat(p, function (er, stat) {\n if (!er)\n return afterGlob(null, [p])\n\n glob(p, options.glob, afterGlob)\n })\n\n function next (er) {\n errState = errState || er\n if (--n === 0)\n cb(errState)\n }\n\n function afterGlob (er, results) {\n if (er)\n return cb(er)\n\n n = results.length\n if (n === 0)\n return cb()\n\n results.forEach(function (p) {\n rimraf_(p, options, function CB (er) {\n if (er) {\n if ((er.code === \"EBUSY\" || er.code === \"ENOTEMPTY\" || er.code === \"EPERM\") &&\n busyTries < options.maxBusyTries) {\n busyTries ++\n var time = busyTries * 100\n // try again, with the same exact callback as this one.\n return setTimeout(function () {\n rimraf_(p, options, CB)\n }, time)\n }\n\n // this one won't happen if graceful-fs is used.\n if (er.code === \"EMFILE\" && timeout < options.emfileWait) {\n return setTimeout(function () {\n rimraf_(p, options, CB)\n }, timeout ++)\n }\n\n // already gone\n if (er.code === \"ENOENT\") er = null\n }\n\n timeout = 0\n next(er)\n })\n })\n }\n}\n\n// Two possible strategies.\n// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR\n// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR\n//\n// Both result in an extra syscall when you guess wrong. However, there\n// are likely far more normal files in the world than directories. This\n// is based on the assumption that a the average number of files per\n// directory is >= 1.\n//\n// If anyone ever complains about this, then I guess the strategy could\n// be made configurable somehow. 
But until then, YAGNI.\nfunction rimraf_ (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n // sunos lets the root user unlink directories, which is... weird.\n // so we have to lstat here and make sure it's not a dir.\n options.lstat(p, function (er, st) {\n if (er && er.code === \"ENOENT\")\n return cb(null)\n\n // Windows can EPERM on stat. Life is suffering.\n if (er && er.code === \"EPERM\" && isWindows)\n fixWinEPERM(p, options, er, cb)\n\n if (st && st.isDirectory())\n return rmdir(p, options, er, cb)\n\n options.unlink(p, function (er) {\n if (er) {\n if (er.code === \"ENOENT\")\n return cb(null)\n if (er.code === \"EPERM\")\n return (isWindows)\n ? fixWinEPERM(p, options, er, cb)\n : rmdir(p, options, er, cb)\n if (er.code === \"EISDIR\")\n return rmdir(p, options, er, cb)\n }\n return cb(er)\n })\n })\n}\n\nfunction fixWinEPERM (p, options, er, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n if (er)\n assert(er instanceof Error)\n\n options.chmod(p, _0666, function (er2) {\n if (er2)\n cb(er2.code === \"ENOENT\" ? null : er)\n else\n options.stat(p, function(er3, stats) {\n if (er3)\n cb(er3.code === \"ENOENT\" ? null : er)\n else if (stats.isDirectory())\n rmdir(p, options, er, cb)\n else\n options.unlink(p, cb)\n })\n })\n}\n\nfunction fixWinEPERMSync (p, options, er) {\n assert(p)\n assert(options)\n if (er)\n assert(er instanceof Error)\n\n try {\n options.chmodSync(p, _0666)\n } catch (er2) {\n if (er2.code === \"ENOENT\")\n return\n else\n throw er\n }\n\n try {\n var stats = options.statSync(p)\n } catch (er3) {\n if (er3.code === \"ENOENT\")\n return\n else\n throw er\n }\n\n if (stats.isDirectory())\n rmdirSync(p, options, er)\n else\n options.unlinkSync(p)\n}\n\nfunction rmdir (p, options, originalEr, cb) {\n assert(p)\n assert(options)\n if (originalEr)\n assert(originalEr instanceof Error)\n assert(typeof cb === 'function')\n\n // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)\n // if we guessed wrong, and it's not a directory, then\n // raise the original error.\n options.rmdir(p, function (er) {\n if (er && (er.code === \"ENOTEMPTY\" || er.code === \"EEXIST\" || er.code === \"EPERM\"))\n rmkids(p, options, cb)\n else if (er && er.code === \"ENOTDIR\")\n cb(originalEr)\n else\n cb(er)\n })\n}\n\nfunction rmkids(p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n options.readdir(p, function (er, files) {\n if (er)\n return cb(er)\n var n = files.length\n if (n === 0)\n return options.rmdir(p, cb)\n var errState\n files.forEach(function (f) {\n rimraf(path.join(p, f), options, function (er) {\n if (errState)\n return\n if (er)\n return cb(errState = er)\n if (--n === 0)\n options.rmdir(p, cb)\n })\n })\n })\n}\n\n// this looks simpler, and is strictly *faster*, but will\n// tie up the JavaScript thread and fail on excessively\n// deep directory trees.\nfunction rimrafSync (p, options) {\n options = options || {}\n defaults(options)\n\n assert(p, 'rimraf: missing path')\n assert.equal(typeof p, 'string', 'rimraf: path should be a string')\n assert(options, 'rimraf: missing options')\n assert.equal(typeof options, 'object', 'rimraf: options should be object')\n\n var results\n\n if (options.disableGlob || !glob.hasMagic(p)) {\n results = [p]\n } else {\n try {\n options.lstatSync(p)\n results = [p]\n } catch (er) {\n results = glob.sync(p, options.glob)\n }\n }\n\n if (!results.length)\n return\n\n for (var i = 0; i < results.length; i++) {\n var p = 
results[i]\n\n try {\n var st = options.lstatSync(p)\n } catch (er) {\n if (er.code === \"ENOENT\")\n return\n\n // Windows can EPERM on stat. Life is suffering.\n if (er.code === \"EPERM\" && isWindows)\n fixWinEPERMSync(p, options, er)\n }\n\n try {\n // sunos lets the root user unlink directories, which is... weird.\n if (st && st.isDirectory())\n rmdirSync(p, options, null)\n else\n options.unlinkSync(p)\n } catch (er) {\n if (er.code === \"ENOENT\")\n return\n if (er.code === \"EPERM\")\n return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)\n if (er.code !== \"EISDIR\")\n throw er\n\n rmdirSync(p, options, er)\n }\n }\n}\n\nfunction rmdirSync (p, options, originalEr) {\n assert(p)\n assert(options)\n if (originalEr)\n assert(originalEr instanceof Error)\n\n try {\n options.rmdirSync(p)\n } catch (er) {\n if (er.code === \"ENOENT\")\n return\n if (er.code === \"ENOTDIR\")\n throw originalEr\n if (er.code === \"ENOTEMPTY\" || er.code === \"EEXIST\" || er.code === \"EPERM\")\n rmkidsSync(p, options)\n }\n}\n\nfunction rmkidsSync (p, options) {\n assert(p)\n assert(options)\n options.readdirSync(p).forEach(function (f) {\n rimrafSync(path.join(p, f), options)\n })\n\n // We only end up here once we got ENOTEMPTY at least once, and\n // at this point, we are guaranteed to have removed all the kids.\n // So, we know that it won't be ENOENT or ENOTDIR or anything else.\n // try really hard to delete stuff on windows, because it has a\n // PROFOUNDLY annoying habit of not closing handles promptly when\n // files are deleted, resulting in spurious ENOTEMPTY errors.\n var retries = isWindows ? 100 : 1\n var i = 0\n do {\n var threw = true\n try {\n var ret = options.rmdirSync(p, options)\n threw = false\n return ret\n } finally {\n if (++i < retries && threw)\n continue\n }\n } while (true)\n}\n","exports.alphasort = alphasort\nexports.alphasorti = alphasorti\nexports.setopts = setopts\nexports.ownProp = ownProp\nexports.makeAbs = makeAbs\nexports.finish = finish\nexports.mark = mark\nexports.isIgnored = isIgnored\nexports.childrenIgnored = childrenIgnored\n\nfunction ownProp (obj, field) {\n return Object.prototype.hasOwnProperty.call(obj, field)\n}\n\nvar path = require(\"path\")\nvar minimatch = require(\"minimatch\")\nvar isAbsolute = require(\"path-is-absolute\")\nvar Minimatch = minimatch.Minimatch\n\nfunction alphasorti (a, b) {\n return a.toLowerCase().localeCompare(b.toLowerCase())\n}\n\nfunction alphasort (a, b) {\n return a.localeCompare(b)\n}\n\nfunction setupIgnores (self, options) {\n self.ignore = options.ignore || []\n\n if (!Array.isArray(self.ignore))\n self.ignore = [self.ignore]\n\n if (self.ignore.length) {\n self.ignore = self.ignore.map(ignoreMap)\n }\n}\n\n// ignore patterns are always in dot:true mode.\nfunction ignoreMap (pattern) {\n var gmatcher = null\n if (pattern.slice(-3) === '/**') {\n var gpattern = pattern.replace(/(\\/\\*\\*)+$/, '')\n gmatcher = new Minimatch(gpattern, { dot: true })\n }\n\n return {\n matcher: new Minimatch(pattern, { dot: true }),\n gmatcher: gmatcher\n }\n}\n\nfunction setopts (self, pattern, options) {\n if (!options)\n options = {}\n\n // base-matching: just use globstar for that.\n if (options.matchBase && -1 === pattern.indexOf(\"/\")) {\n if (options.noglobstar) {\n throw new Error(\"base matching requires globstar\")\n }\n pattern = \"**/\" + pattern\n }\n\n self.silent = !!options.silent\n self.pattern = pattern\n self.strict = options.strict !== false\n self.realpath = !!options.realpath\n 
self.realpathCache = options.realpathCache || Object.create(null)\n self.follow = !!options.follow\n self.dot = !!options.dot\n self.mark = !!options.mark\n self.nodir = !!options.nodir\n if (self.nodir)\n self.mark = true\n self.sync = !!options.sync\n self.nounique = !!options.nounique\n self.nonull = !!options.nonull\n self.nosort = !!options.nosort\n self.nocase = !!options.nocase\n self.stat = !!options.stat\n self.noprocess = !!options.noprocess\n self.absolute = !!options.absolute\n\n self.maxLength = options.maxLength || Infinity\n self.cache = options.cache || Object.create(null)\n self.statCache = options.statCache || Object.create(null)\n self.symlinks = options.symlinks || Object.create(null)\n\n setupIgnores(self, options)\n\n self.changedCwd = false\n var cwd = process.cwd()\n if (!ownProp(options, \"cwd\"))\n self.cwd = cwd\n else {\n self.cwd = path.resolve(options.cwd)\n self.changedCwd = self.cwd !== cwd\n }\n\n self.root = options.root || path.resolve(self.cwd, \"/\")\n self.root = path.resolve(self.root)\n if (process.platform === \"win32\")\n self.root = self.root.replace(/\\\\/g, \"/\")\n\n // TODO: is an absolute `cwd` supposed to be resolved against `root`?\n // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')\n self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)\n if (process.platform === \"win32\")\n self.cwdAbs = self.cwdAbs.replace(/\\\\/g, \"/\")\n self.nomount = !!options.nomount\n\n // disable comments and negation in Minimatch.\n // Note that they are not supported in Glob itself anyway.\n options.nonegate = true\n options.nocomment = true\n\n self.minimatch = new Minimatch(pattern, options)\n self.options = self.minimatch.options\n}\n\nfunction finish (self) {\n var nou = self.nounique\n var all = nou ? [] : Object.create(null)\n\n for (var i = 0, l = self.matches.length; i < l; i ++) {\n var matches = self.matches[i]\n if (!matches || Object.keys(matches).length === 0) {\n if (self.nonull) {\n // do like the shell, and spit out the literal glob\n var literal = self.minimatch.globSet[i]\n if (nou)\n all.push(literal)\n else\n all[literal] = true\n }\n } else {\n // had matches\n var m = Object.keys(matches)\n if (nou)\n all.push.apply(all, m)\n else\n m.forEach(function (m) {\n all[m] = true\n })\n }\n }\n\n if (!nou)\n all = Object.keys(all)\n\n if (!self.nosort)\n all = all.sort(self.nocase ? 
alphasorti : alphasort)\n\n // at *some* point we statted all of these\n if (self.mark) {\n for (var i = 0; i < all.length; i++) {\n all[i] = self._mark(all[i])\n }\n if (self.nodir) {\n all = all.filter(function (e) {\n var notDir = !(/\\/$/.test(e))\n var c = self.cache[e] || self.cache[makeAbs(self, e)]\n if (notDir && c)\n notDir = c !== 'DIR' && !Array.isArray(c)\n return notDir\n })\n }\n }\n\n if (self.ignore.length)\n all = all.filter(function(m) {\n return !isIgnored(self, m)\n })\n\n self.found = all\n}\n\nfunction mark (self, p) {\n var abs = makeAbs(self, p)\n var c = self.cache[abs]\n var m = p\n if (c) {\n var isDir = c === 'DIR' || Array.isArray(c)\n var slash = p.slice(-1) === '/'\n\n if (isDir && !slash)\n m += '/'\n else if (!isDir && slash)\n m = m.slice(0, -1)\n\n if (m !== p) {\n var mabs = makeAbs(self, m)\n self.statCache[mabs] = self.statCache[abs]\n self.cache[mabs] = self.cache[abs]\n }\n }\n\n return m\n}\n\n// lotta situps...\nfunction makeAbs (self, f) {\n var abs = f\n if (f.charAt(0) === '/') {\n abs = path.join(self.root, f)\n } else if (isAbsolute(f) || f === '') {\n abs = f\n } else if (self.changedCwd) {\n abs = path.resolve(self.cwd, f)\n } else {\n abs = path.resolve(f)\n }\n\n if (process.platform === 'win32')\n abs = abs.replace(/\\\\/g, '/')\n\n return abs\n}\n\n\n// Return true, if pattern ends with globstar '**', for the accompanying parent directory.\n// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents\nfunction isIgnored (self, path) {\n if (!self.ignore.length)\n return false\n\n return self.ignore.some(function(item) {\n return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))\n })\n}\n\nfunction childrenIgnored (self, path) {\n if (!self.ignore.length)\n return false\n\n return self.ignore.some(function(item) {\n return !!(item.gmatcher && item.gmatcher.match(path))\n })\n}\n","// Approach:\n//\n// 1. Get the minimatch set\n// 2. For each pattern in the set, PROCESS(pattern, false)\n// 3. Store matches per-set, then uniq them\n//\n// PROCESS(pattern, inGlobStar)\n// Get the first [n] items from pattern that are all strings\n// Join these together. This is PREFIX.\n// If there is no more remaining, then stat(PREFIX) and\n// add to matches if it succeeds. END.\n//\n// If inGlobStar and PREFIX is symlink and points to dir\n// set ENTRIES = []\n// else readdir(PREFIX) as ENTRIES\n// If fail, END\n//\n// with ENTRIES\n// If pattern[n] is GLOBSTAR\n// // handle the case where the globstar match is empty\n// // by pruning it out, and testing the resulting pattern\n// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)\n// // handle other cases.\n// for ENTRY in ENTRIES (not dotfiles)\n// // attach globstar + tail onto the entry\n// // Mark that this entry is a globstar match\n// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)\n//\n// else // not globstar\n// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)\n// Test ENTRY against pattern[n]\n// If fails, continue\n// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])\n//\n// Caveat:\n// Cache all stats and readdirs results to minimize syscall. Since all\n// we ever care about is existence and directory-ness, we can just keep\n// `true` for files, and [children,...] 
for directories, or `false` for\n// things that don't exist.\n\nmodule.exports = glob\n\nvar fs = require('fs')\nvar rp = require('fs.realpath')\nvar minimatch = require('minimatch')\nvar Minimatch = minimatch.Minimatch\nvar inherits = require('inherits')\nvar EE = require('events').EventEmitter\nvar path = require('path')\nvar assert = require('assert')\nvar isAbsolute = require('path-is-absolute')\nvar globSync = require('./sync.js')\nvar common = require('./common.js')\nvar alphasort = common.alphasort\nvar alphasorti = common.alphasorti\nvar setopts = common.setopts\nvar ownProp = common.ownProp\nvar inflight = require('inflight')\nvar util = require('util')\nvar childrenIgnored = common.childrenIgnored\nvar isIgnored = common.isIgnored\n\nvar once = require('once')\n\nfunction glob (pattern, options, cb) {\n if (typeof options === 'function') cb = options, options = {}\n if (!options) options = {}\n\n if (options.sync) {\n if (cb)\n throw new TypeError('callback provided to sync glob')\n return globSync(pattern, options)\n }\n\n return new Glob(pattern, options, cb)\n}\n\nglob.sync = globSync\nvar GlobSync = glob.GlobSync = globSync.GlobSync\n\n// old api surface\nglob.glob = glob\n\nfunction extend (origin, add) {\n if (add === null || typeof add !== 'object') {\n return origin\n }\n\n var keys = Object.keys(add)\n var i = keys.length\n while (i--) {\n origin[keys[i]] = add[keys[i]]\n }\n return origin\n}\n\nglob.hasMagic = function (pattern, options_) {\n var options = extend({}, options_)\n options.noprocess = true\n\n var g = new Glob(pattern, options)\n var set = g.minimatch.set\n\n if (!pattern)\n return false\n\n if (set.length > 1)\n return true\n\n for (var j = 0; j < set[0].length; j++) {\n if (typeof set[0][j] !== 'string')\n return true\n }\n\n return false\n}\n\nglob.Glob = Glob\ninherits(Glob, EE)\nfunction Glob (pattern, options, cb) {\n if (typeof options === 'function') {\n cb = options\n options = null\n }\n\n if (options && options.sync) {\n if (cb)\n throw new TypeError('callback provided to sync glob')\n return new GlobSync(pattern, options)\n }\n\n if (!(this instanceof Glob))\n return new Glob(pattern, options, cb)\n\n setopts(this, pattern, options)\n this._didRealPath = false\n\n // process each pattern in the minimatch set\n var n = this.minimatch.set.length\n\n // The matches are stored as {: true,...} so that\n // duplicates are automagically pruned.\n // Later, we do an Object.keys() on these.\n // Keep them as a list so we can fill in when nonull is set.\n this.matches = new Array(n)\n\n if (typeof cb === 'function') {\n cb = once(cb)\n this.on('error', cb)\n this.on('end', function (matches) {\n cb(null, matches)\n })\n }\n\n var self = this\n this._processing = 0\n\n this._emitQueue = []\n this._processQueue = []\n this.paused = false\n\n if (this.noprocess)\n return this\n\n if (n === 0)\n return done()\n\n var sync = true\n for (var i = 0; i < n; i ++) {\n this._process(this.minimatch.set[i], i, false, done)\n }\n sync = false\n\n function done () {\n --self._processing\n if (self._processing <= 0) {\n if (sync) {\n process.nextTick(function () {\n self._finish()\n })\n } else {\n self._finish()\n }\n }\n }\n}\n\nGlob.prototype._finish = function () {\n assert(this instanceof Glob)\n if (this.aborted)\n return\n\n if (this.realpath && !this._didRealpath)\n return this._realpath()\n\n common.finish(this)\n this.emit('end', this.found)\n}\n\nGlob.prototype._realpath = function () {\n if (this._didRealpath)\n return\n\n this._didRealpath = true\n\n var n 
= this.matches.length\n if (n === 0)\n return this._finish()\n\n var self = this\n for (var i = 0; i < this.matches.length; i++)\n this._realpathSet(i, next)\n\n function next () {\n if (--n === 0)\n self._finish()\n }\n}\n\nGlob.prototype._realpathSet = function (index, cb) {\n var matchset = this.matches[index]\n if (!matchset)\n return cb()\n\n var found = Object.keys(matchset)\n var self = this\n var n = found.length\n\n if (n === 0)\n return cb()\n\n var set = this.matches[index] = Object.create(null)\n found.forEach(function (p, i) {\n // If there's a problem with the stat, then it means that\n // one or more of the links in the realpath couldn't be\n // resolved. just return the abs value in that case.\n p = self._makeAbs(p)\n rp.realpath(p, self.realpathCache, function (er, real) {\n if (!er)\n set[real] = true\n else if (er.syscall === 'stat')\n set[p] = true\n else\n self.emit('error', er) // srsly wtf right here\n\n if (--n === 0) {\n self.matches[index] = set\n cb()\n }\n })\n })\n}\n\nGlob.prototype._mark = function (p) {\n return common.mark(this, p)\n}\n\nGlob.prototype._makeAbs = function (f) {\n return common.makeAbs(this, f)\n}\n\nGlob.prototype.abort = function () {\n this.aborted = true\n this.emit('abort')\n}\n\nGlob.prototype.pause = function () {\n if (!this.paused) {\n this.paused = true\n this.emit('pause')\n }\n}\n\nGlob.prototype.resume = function () {\n if (this.paused) {\n this.emit('resume')\n this.paused = false\n if (this._emitQueue.length) {\n var eq = this._emitQueue.slice(0)\n this._emitQueue.length = 0\n for (var i = 0; i < eq.length; i ++) {\n var e = eq[i]\n this._emitMatch(e[0], e[1])\n }\n }\n if (this._processQueue.length) {\n var pq = this._processQueue.slice(0)\n this._processQueue.length = 0\n for (var i = 0; i < pq.length; i ++) {\n var p = pq[i]\n this._processing--\n this._process(p[0], p[1], p[2], p[3])\n }\n }\n }\n}\n\nGlob.prototype._process = function (pattern, index, inGlobStar, cb) {\n assert(this instanceof Glob)\n assert(typeof cb === 'function')\n\n if (this.aborted)\n return\n\n this._processing++\n if (this.paused) {\n this._processQueue.push([pattern, index, inGlobStar, cb])\n return\n }\n\n //console.error('PROCESS %d', this._processing, pattern)\n\n // Get the first [n] parts of pattern that are all strings.\n var n = 0\n while (typeof pattern[n] === 'string') {\n n ++\n }\n // now n is the index of the first one that is *not* a string.\n\n // see if there's anything else\n var prefix\n switch (n) {\n // if not, then this is rather simple\n case pattern.length:\n this._processSimple(pattern.join('/'), index, cb)\n return\n\n case 0:\n // pattern *starts* with some non-trivial item.\n // going to readdir(cwd), but not include the prefix in matches.\n prefix = null\n break\n\n default:\n // pattern has some string bits in the front.\n // whatever it starts with, whether that's 'absolute' like /foo/bar,\n // or 'relative' like '../baz'\n prefix = pattern.slice(0, n).join('/')\n break\n }\n\n var remain = pattern.slice(n)\n\n // get the list of entries.\n var read\n if (prefix === null)\n read = '.'\n else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {\n if (!prefix || !isAbsolute(prefix))\n prefix = '/' + prefix\n read = prefix\n } else\n read = prefix\n\n var abs = this._makeAbs(read)\n\n //if ignored, skip _processing\n if (childrenIgnored(this, read))\n return cb()\n\n var isGlobStar = remain[0] === minimatch.GLOBSTAR\n if (isGlobStar)\n this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)\n else\n 
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)\n}\n\nGlob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {\n var self = this\n this._readdir(abs, inGlobStar, function (er, entries) {\n return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)\n })\n}\n\nGlob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {\n\n // if the abs isn't a dir, then nothing can match!\n if (!entries)\n return cb()\n\n // It will only match dot entries if it starts with a dot, or if\n // dot is set. Stuff like @(.foo|.bar) isn't allowed.\n var pn = remain[0]\n var negate = !!this.minimatch.negate\n var rawGlob = pn._glob\n var dotOk = this.dot || rawGlob.charAt(0) === '.'\n\n var matchedEntries = []\n for (var i = 0; i < entries.length; i++) {\n var e = entries[i]\n if (e.charAt(0) !== '.' || dotOk) {\n var m\n if (negate && !prefix) {\n m = !e.match(pn)\n } else {\n m = e.match(pn)\n }\n if (m)\n matchedEntries.push(e)\n }\n }\n\n //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)\n\n var len = matchedEntries.length\n // If there are no matched entries, then nothing matches.\n if (len === 0)\n return cb()\n\n // if this is the last remaining pattern bit, then no need for\n // an additional stat *unless* the user has specified mark or\n // stat explicitly. We know they exist, since readdir returned\n // them.\n\n if (remain.length === 1 && !this.mark && !this.stat) {\n if (!this.matches[index])\n this.matches[index] = Object.create(null)\n\n for (var i = 0; i < len; i ++) {\n var e = matchedEntries[i]\n if (prefix) {\n if (prefix !== '/')\n e = prefix + '/' + e\n else\n e = prefix + e\n }\n\n if (e.charAt(0) === '/' && !this.nomount) {\n e = path.join(this.root, e)\n }\n this._emitMatch(index, e)\n }\n // This was the last one, and no stats were needed\n return cb()\n }\n\n // now test all matched entries as stand-ins for that part\n // of the pattern.\n remain.shift()\n for (var i = 0; i < len; i ++) {\n var e = matchedEntries[i]\n var newPattern\n if (prefix) {\n if (prefix !== '/')\n e = prefix + '/' + e\n else\n e = prefix + e\n }\n this._process([e].concat(remain), index, inGlobStar, cb)\n }\n cb()\n}\n\nGlob.prototype._emitMatch = function (index, e) {\n if (this.aborted)\n return\n\n if (isIgnored(this, e))\n return\n\n if (this.paused) {\n this._emitQueue.push([index, e])\n return\n }\n\n var abs = isAbsolute(e) ? 
e : this._makeAbs(e)\n\n if (this.mark)\n e = this._mark(e)\n\n if (this.absolute)\n e = abs\n\n if (this.matches[index][e])\n return\n\n if (this.nodir) {\n var c = this.cache[abs]\n if (c === 'DIR' || Array.isArray(c))\n return\n }\n\n this.matches[index][e] = true\n\n var st = this.statCache[abs]\n if (st)\n this.emit('stat', e, st)\n\n this.emit('match', e)\n}\n\nGlob.prototype._readdirInGlobStar = function (abs, cb) {\n if (this.aborted)\n return\n\n // follow all symlinked directories forever\n // just proceed as if this is a non-globstar situation\n if (this.follow)\n return this._readdir(abs, false, cb)\n\n var lstatkey = 'lstat\\0' + abs\n var self = this\n var lstatcb = inflight(lstatkey, lstatcb_)\n\n if (lstatcb)\n fs.lstat(abs, lstatcb)\n\n function lstatcb_ (er, lstat) {\n if (er && er.code === 'ENOENT')\n return cb()\n\n var isSym = lstat && lstat.isSymbolicLink()\n self.symlinks[abs] = isSym\n\n // If it's not a symlink or a dir, then it's definitely a regular file.\n // don't bother doing a readdir in that case.\n if (!isSym && lstat && !lstat.isDirectory()) {\n self.cache[abs] = 'FILE'\n cb()\n } else\n self._readdir(abs, false, cb)\n }\n}\n\nGlob.prototype._readdir = function (abs, inGlobStar, cb) {\n if (this.aborted)\n return\n\n cb = inflight('readdir\\0'+abs+'\\0'+inGlobStar, cb)\n if (!cb)\n return\n\n //console.error('RD %j %j', +inGlobStar, abs)\n if (inGlobStar && !ownProp(this.symlinks, abs))\n return this._readdirInGlobStar(abs, cb)\n\n if (ownProp(this.cache, abs)) {\n var c = this.cache[abs]\n if (!c || c === 'FILE')\n return cb()\n\n if (Array.isArray(c))\n return cb(null, c)\n }\n\n var self = this\n fs.readdir(abs, readdirCb(this, abs, cb))\n}\n\nfunction readdirCb (self, abs, cb) {\n return function (er, entries) {\n if (er)\n self._readdirError(abs, er, cb)\n else\n self._readdirEntries(abs, entries, cb)\n }\n}\n\nGlob.prototype._readdirEntries = function (abs, entries, cb) {\n if (this.aborted)\n return\n\n // if we haven't asked to stat everything, then just\n // assume that everything in there exists, so we can avoid\n // having to stat it a second time.\n if (!this.mark && !this.stat) {\n for (var i = 0; i < entries.length; i ++) {\n var e = entries[i]\n if (abs === '/')\n e = abs + e\n else\n e = abs + '/' + e\n this.cache[e] = true\n }\n }\n\n this.cache[abs] = entries\n return cb(null, entries)\n}\n\nGlob.prototype._readdirError = function (f, er, cb) {\n if (this.aborted)\n return\n\n // handle errors, and cache the information\n switch (er.code) {\n case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205\n case 'ENOTDIR': // totally normal. means it *does* exist.\n var abs = this._makeAbs(f)\n this.cache[abs] = 'FILE'\n if (abs === this.cwdAbs) {\n var error = new Error(er.code + ' invalid cwd ' + this.cwd)\n error.path = this.cwd\n error.code = er.code\n this.emit('error', error)\n this.abort()\n }\n break\n\n case 'ENOENT': // not terribly unusual\n case 'ELOOP':\n case 'ENAMETOOLONG':\n case 'UNKNOWN':\n this.cache[this._makeAbs(f)] = false\n break\n\n default: // some unusual error. 
Treat as failure.\n this.cache[this._makeAbs(f)] = false\n if (this.strict) {\n this.emit('error', er)\n // If the error is handled, then we abort\n // if not, we threw out of here\n this.abort()\n }\n if (!this.silent)\n console.error('glob error', er)\n break\n }\n\n return cb()\n}\n\nGlob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {\n var self = this\n this._readdir(abs, inGlobStar, function (er, entries) {\n self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)\n })\n}\n\n\nGlob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {\n //console.error('pgs2', prefix, remain[0], entries)\n\n // no entries means not a dir, so it can never have matches\n // foo.txt/** doesn't match foo.txt\n if (!entries)\n return cb()\n\n // test without the globstar, and with every child both below\n // and replacing the globstar.\n var remainWithoutGlobStar = remain.slice(1)\n var gspref = prefix ? [ prefix ] : []\n var noGlobStar = gspref.concat(remainWithoutGlobStar)\n\n // the noGlobStar pattern exits the inGlobStar state\n this._process(noGlobStar, index, false, cb)\n\n var isSym = this.symlinks[abs]\n var len = entries.length\n\n // If it's a symlink, and we're in a globstar, then stop\n if (isSym && inGlobStar)\n return cb()\n\n for (var i = 0; i < len; i++) {\n var e = entries[i]\n if (e.charAt(0) === '.' && !this.dot)\n continue\n\n // these two cases enter the inGlobStar state\n var instead = gspref.concat(entries[i], remainWithoutGlobStar)\n this._process(instead, index, true, cb)\n\n var below = gspref.concat(entries[i], remain)\n this._process(below, index, true, cb)\n }\n\n cb()\n}\n\nGlob.prototype._processSimple = function (prefix, index, cb) {\n // XXX review this. Shouldn't it be doing the mounting etc\n // before doing stat? kinda weird?\n var self = this\n this._stat(prefix, function (er, exists) {\n self._processSimple2(prefix, index, er, exists, cb)\n })\n}\nGlob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {\n\n //console.error('ps2', prefix, exists)\n\n if (!this.matches[index])\n this.matches[index] = Object.create(null)\n\n // If it doesn't exist, then just mark the lack of results\n if (!exists)\n return cb()\n\n if (prefix && isAbsolute(prefix) && !this.nomount) {\n var trail = /[\\/\\\\]$/.test(prefix)\n if (prefix.charAt(0) === '/') {\n prefix = path.join(this.root, prefix)\n } else {\n prefix = path.resolve(this.root, prefix)\n if (trail)\n prefix += '/'\n }\n }\n\n if (process.platform === 'win32')\n prefix = prefix.replace(/\\\\/g, '/')\n\n // Mark this as a match\n this._emitMatch(index, prefix)\n cb()\n}\n\n// Returns either 'DIR', 'FILE', or false\nGlob.prototype._stat = function (f, cb) {\n var abs = this._makeAbs(f)\n var needDir = f.slice(-1) === '/'\n\n if (f.length > this.maxLength)\n return cb()\n\n if (!this.stat && ownProp(this.cache, abs)) {\n var c = this.cache[abs]\n\n if (Array.isArray(c))\n c = 'DIR'\n\n // It exists, but maybe not how we need it\n if (!needDir || c === 'DIR')\n return cb(null, c)\n\n if (needDir && c === 'FILE')\n return cb()\n\n // otherwise we have to stat, because maybe c=true\n // if we know it exists, but not what it is.\n }\n\n var exists\n var stat = this.statCache[abs]\n if (stat !== undefined) {\n if (stat === false)\n return cb(null, stat)\n else {\n var type = stat.isDirectory() ? 
'DIR' : 'FILE'\n if (needDir && type === 'FILE')\n return cb()\n else\n return cb(null, type, stat)\n }\n }\n\n var self = this\n var statcb = inflight('stat\\0' + abs, lstatcb_)\n if (statcb)\n fs.lstat(abs, statcb)\n\n function lstatcb_ (er, lstat) {\n if (lstat && lstat.isSymbolicLink()) {\n // If it's a symlink, then treat it as the target, unless\n // the target does not exist, then treat it as a file.\n return fs.stat(abs, function (er, stat) {\n if (er)\n self._stat2(f, abs, null, lstat, cb)\n else\n self._stat2(f, abs, er, stat, cb)\n })\n } else {\n self._stat2(f, abs, er, lstat, cb)\n }\n }\n}\n\nGlob.prototype._stat2 = function (f, abs, er, stat, cb) {\n if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {\n this.statCache[abs] = false\n return cb()\n }\n\n var needDir = f.slice(-1) === '/'\n this.statCache[abs] = stat\n\n if (abs.slice(-1) === '/' && stat && !stat.isDirectory())\n return cb(null, false, stat)\n\n var c = true\n if (stat)\n c = stat.isDirectory() ? 'DIR' : 'FILE'\n this.cache[abs] = this.cache[abs] || c\n\n if (needDir && c === 'FILE')\n return cb()\n\n return cb(null, c, stat)\n}\n","module.exports = globSync\nglobSync.GlobSync = GlobSync\n\nvar fs = require('fs')\nvar rp = require('fs.realpath')\nvar minimatch = require('minimatch')\nvar Minimatch = minimatch.Minimatch\nvar Glob = require('./glob.js').Glob\nvar util = require('util')\nvar path = require('path')\nvar assert = require('assert')\nvar isAbsolute = require('path-is-absolute')\nvar common = require('./common.js')\nvar alphasort = common.alphasort\nvar alphasorti = common.alphasorti\nvar setopts = common.setopts\nvar ownProp = common.ownProp\nvar childrenIgnored = common.childrenIgnored\nvar isIgnored = common.isIgnored\n\nfunction globSync (pattern, options) {\n if (typeof options === 'function' || arguments.length === 3)\n throw new TypeError('callback provided to sync glob\\n'+\n 'See: https://github.com/isaacs/node-glob/issues/167')\n\n return new GlobSync(pattern, options).found\n}\n\nfunction GlobSync (pattern, options) {\n if (!pattern)\n throw new Error('must provide pattern')\n\n if (typeof options === 'function' || arguments.length === 3)\n throw new TypeError('callback provided to sync glob\\n'+\n 'See: https://github.com/isaacs/node-glob/issues/167')\n\n if (!(this instanceof GlobSync))\n return new GlobSync(pattern, options)\n\n setopts(this, pattern, options)\n\n if (this.noprocess)\n return this\n\n var n = this.minimatch.set.length\n this.matches = new Array(n)\n for (var i = 0; i < n; i ++) {\n this._process(this.minimatch.set[i], i, false)\n }\n this._finish()\n}\n\nGlobSync.prototype._finish = function () {\n assert(this instanceof GlobSync)\n if (this.realpath) {\n var self = this\n this.matches.forEach(function (matchset, index) {\n var set = self.matches[index] = Object.create(null)\n for (var p in matchset) {\n try {\n p = self._makeAbs(p)\n var real = rp.realpathSync(p, self.realpathCache)\n set[real] = true\n } catch (er) {\n if (er.syscall === 'stat')\n set[self._makeAbs(p)] = true\n else\n throw er\n }\n }\n })\n }\n common.finish(this)\n}\n\n\nGlobSync.prototype._process = function (pattern, index, inGlobStar) {\n assert(this instanceof GlobSync)\n\n // Get the first [n] parts of pattern that are all strings.\n var n = 0\n while (typeof pattern[n] === 'string') {\n n ++\n }\n // now n is the index of the first one that is *not* a string.\n\n // See if there's anything else\n var prefix\n switch (n) {\n // if not, then this is rather simple\n case pattern.length:\n 
this._processSimple(pattern.join('/'), index)\n return\n\n case 0:\n // pattern *starts* with some non-trivial item.\n // going to readdir(cwd), but not include the prefix in matches.\n prefix = null\n break\n\n default:\n // pattern has some string bits in the front.\n // whatever it starts with, whether that's 'absolute' like /foo/bar,\n // or 'relative' like '../baz'\n prefix = pattern.slice(0, n).join('/')\n break\n }\n\n var remain = pattern.slice(n)\n\n // get the list of entries.\n var read\n if (prefix === null)\n read = '.'\n else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {\n if (!prefix || !isAbsolute(prefix))\n prefix = '/' + prefix\n read = prefix\n } else\n read = prefix\n\n var abs = this._makeAbs(read)\n\n //if ignored, skip processing\n if (childrenIgnored(this, read))\n return\n\n var isGlobStar = remain[0] === minimatch.GLOBSTAR\n if (isGlobStar)\n this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)\n else\n this._processReaddir(prefix, read, abs, remain, index, inGlobStar)\n}\n\n\nGlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {\n var entries = this._readdir(abs, inGlobStar)\n\n // if the abs isn't a dir, then nothing can match!\n if (!entries)\n return\n\n // It will only match dot entries if it starts with a dot, or if\n // dot is set. Stuff like @(.foo|.bar) isn't allowed.\n var pn = remain[0]\n var negate = !!this.minimatch.negate\n var rawGlob = pn._glob\n var dotOk = this.dot || rawGlob.charAt(0) === '.'\n\n var matchedEntries = []\n for (var i = 0; i < entries.length; i++) {\n var e = entries[i]\n if (e.charAt(0) !== '.' || dotOk) {\n var m\n if (negate && !prefix) {\n m = !e.match(pn)\n } else {\n m = e.match(pn)\n }\n if (m)\n matchedEntries.push(e)\n }\n }\n\n var len = matchedEntries.length\n // If there are no matched entries, then nothing matches.\n if (len === 0)\n return\n\n // if this is the last remaining pattern bit, then no need for\n // an additional stat *unless* the user has specified mark or\n // stat explicitly. 
We know they exist, since readdir returned\n // them.\n\n if (remain.length === 1 && !this.mark && !this.stat) {\n if (!this.matches[index])\n this.matches[index] = Object.create(null)\n\n for (var i = 0; i < len; i ++) {\n var e = matchedEntries[i]\n if (prefix) {\n if (prefix.slice(-1) !== '/')\n e = prefix + '/' + e\n else\n e = prefix + e\n }\n\n if (e.charAt(0) === '/' && !this.nomount) {\n e = path.join(this.root, e)\n }\n this._emitMatch(index, e)\n }\n // This was the last one, and no stats were needed\n return\n }\n\n // now test all matched entries as stand-ins for that part\n // of the pattern.\n remain.shift()\n for (var i = 0; i < len; i ++) {\n var e = matchedEntries[i]\n var newPattern\n if (prefix)\n newPattern = [prefix, e]\n else\n newPattern = [e]\n this._process(newPattern.concat(remain), index, inGlobStar)\n }\n}\n\n\nGlobSync.prototype._emitMatch = function (index, e) {\n if (isIgnored(this, e))\n return\n\n var abs = this._makeAbs(e)\n\n if (this.mark)\n e = this._mark(e)\n\n if (this.absolute) {\n e = abs\n }\n\n if (this.matches[index][e])\n return\n\n if (this.nodir) {\n var c = this.cache[abs]\n if (c === 'DIR' || Array.isArray(c))\n return\n }\n\n this.matches[index][e] = true\n\n if (this.stat)\n this._stat(e)\n}\n\n\nGlobSync.prototype._readdirInGlobStar = function (abs) {\n // follow all symlinked directories forever\n // just proceed as if this is a non-globstar situation\n if (this.follow)\n return this._readdir(abs, false)\n\n var entries\n var lstat\n var stat\n try {\n lstat = fs.lstatSync(abs)\n } catch (er) {\n if (er.code === 'ENOENT') {\n // lstat failed, doesn't exist\n return null\n }\n }\n\n var isSym = lstat && lstat.isSymbolicLink()\n this.symlinks[abs] = isSym\n\n // If it's not a symlink or a dir, then it's definitely a regular file.\n // don't bother doing a readdir in that case.\n if (!isSym && lstat && !lstat.isDirectory())\n this.cache[abs] = 'FILE'\n else\n entries = this._readdir(abs, false)\n\n return entries\n}\n\nGlobSync.prototype._readdir = function (abs, inGlobStar) {\n var entries\n\n if (inGlobStar && !ownProp(this.symlinks, abs))\n return this._readdirInGlobStar(abs)\n\n if (ownProp(this.cache, abs)) {\n var c = this.cache[abs]\n if (!c || c === 'FILE')\n return null\n\n if (Array.isArray(c))\n return c\n }\n\n try {\n return this._readdirEntries(abs, fs.readdirSync(abs))\n } catch (er) {\n this._readdirError(abs, er)\n return null\n }\n}\n\nGlobSync.prototype._readdirEntries = function (abs, entries) {\n // if we haven't asked to stat everything, then just\n // assume that everything in there exists, so we can avoid\n // having to stat it a second time.\n if (!this.mark && !this.stat) {\n for (var i = 0; i < entries.length; i ++) {\n var e = entries[i]\n if (abs === '/')\n e = abs + e\n else\n e = abs + '/' + e\n this.cache[e] = true\n }\n }\n\n this.cache[abs] = entries\n\n // mark and cache dir-ness\n return entries\n}\n\nGlobSync.prototype._readdirError = function (f, er) {\n // handle errors, and cache the information\n switch (er.code) {\n case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205\n case 'ENOTDIR': // totally normal. 
means it *does* exist.\n var abs = this._makeAbs(f)\n this.cache[abs] = 'FILE'\n if (abs === this.cwdAbs) {\n var error = new Error(er.code + ' invalid cwd ' + this.cwd)\n error.path = this.cwd\n error.code = er.code\n throw error\n }\n break\n\n case 'ENOENT': // not terribly unusual\n case 'ELOOP':\n case 'ENAMETOOLONG':\n case 'UNKNOWN':\n this.cache[this._makeAbs(f)] = false\n break\n\n default: // some unusual error. Treat as failure.\n this.cache[this._makeAbs(f)] = false\n if (this.strict)\n throw er\n if (!this.silent)\n console.error('glob error', er)\n break\n }\n}\n\nGlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {\n\n var entries = this._readdir(abs, inGlobStar)\n\n // no entries means not a dir, so it can never have matches\n // foo.txt/** doesn't match foo.txt\n if (!entries)\n return\n\n // test without the globstar, and with every child both below\n // and replacing the globstar.\n var remainWithoutGlobStar = remain.slice(1)\n var gspref = prefix ? [ prefix ] : []\n var noGlobStar = gspref.concat(remainWithoutGlobStar)\n\n // the noGlobStar pattern exits the inGlobStar state\n this._process(noGlobStar, index, false)\n\n var len = entries.length\n var isSym = this.symlinks[abs]\n\n // If it's a symlink, and we're in a globstar, then stop\n if (isSym && inGlobStar)\n return\n\n for (var i = 0; i < len; i++) {\n var e = entries[i]\n if (e.charAt(0) === '.' && !this.dot)\n continue\n\n // these two cases enter the inGlobStar state\n var instead = gspref.concat(entries[i], remainWithoutGlobStar)\n this._process(instead, index, true)\n\n var below = gspref.concat(entries[i], remain)\n this._process(below, index, true)\n }\n}\n\nGlobSync.prototype._processSimple = function (prefix, index) {\n // XXX review this. Shouldn't it be doing the mounting etc\n // before doing stat? kinda weird?\n var exists = this._stat(prefix)\n\n if (!this.matches[index])\n this.matches[index] = Object.create(null)\n\n // If it doesn't exist, then just mark the lack of results\n if (!exists)\n return\n\n if (prefix && isAbsolute(prefix) && !this.nomount) {\n var trail = /[\\/\\\\]$/.test(prefix)\n if (prefix.charAt(0) === '/') {\n prefix = path.join(this.root, prefix)\n } else {\n prefix = path.resolve(this.root, prefix)\n if (trail)\n prefix += '/'\n }\n }\n\n if (process.platform === 'win32')\n prefix = prefix.replace(/\\\\/g, '/')\n\n // Mark this as a match\n this._emitMatch(index, prefix)\n}\n\n// Returns either 'DIR', 'FILE', or false\nGlobSync.prototype._stat = function (f) {\n var abs = this._makeAbs(f)\n var needDir = f.slice(-1) === '/'\n\n if (f.length > this.maxLength)\n return false\n\n if (!this.stat && ownProp(this.cache, abs)) {\n var c = this.cache[abs]\n\n if (Array.isArray(c))\n c = 'DIR'\n\n // It exists, but maybe not how we need it\n if (!needDir || c === 'DIR')\n return c\n\n if (needDir && c === 'FILE')\n return false\n\n // otherwise we have to stat, because maybe c=true\n // if we know it exists, but not what it is.\n }\n\n var exists\n var stat = this.statCache[abs]\n if (!stat) {\n var lstat\n try {\n lstat = fs.lstatSync(abs)\n } catch (er) {\n if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {\n this.statCache[abs] = false\n return false\n }\n }\n\n if (lstat && lstat.isSymbolicLink()) {\n try {\n stat = fs.statSync(abs)\n } catch (er) {\n stat = lstat\n }\n } else {\n stat = lstat\n }\n }\n\n this.statCache[abs] = stat\n\n var c = true\n if (stat)\n c = stat.isDirectory() ? 
'DIR' : 'FILE'\n\n this.cache[abs] = this.cache[abs] || c\n\n if (needDir && c === 'FILE')\n return false\n\n return c\n}\n\nGlobSync.prototype._mark = function (p) {\n return common.mark(this, p)\n}\n\nGlobSync.prototype._makeAbs = function (f) {\n return common.makeAbs(this, f)\n}\n","'use strict'\n\nmodule.exports = clone\n\nfunction clone (obj) {\n if (obj === null || typeof obj !== 'object')\n return obj\n\n if (obj instanceof Object)\n var copy = { __proto__: obj.__proto__ }\n else\n var copy = Object.create(null)\n\n Object.getOwnPropertyNames(obj).forEach(function (key) {\n Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))\n })\n\n return copy\n}\n","var fs = require('fs')\nvar polyfills = require('./polyfills.js')\nvar legacy = require('./legacy-streams.js')\nvar clone = require('./clone.js')\n\nvar util = require('util')\n\n/* istanbul ignore next - node 0.x polyfill */\nvar gracefulQueue\nvar previousSymbol\n\n/* istanbul ignore else - node 0.x polyfill */\nif (typeof Symbol === 'function' && typeof Symbol.for === 'function') {\n gracefulQueue = Symbol.for('graceful-fs.queue')\n // This is used in testing by future versions\n previousSymbol = Symbol.for('graceful-fs.previous')\n} else {\n gracefulQueue = '___graceful-fs.queue'\n previousSymbol = '___graceful-fs.previous'\n}\n\nfunction noop () {}\n\nvar debug = noop\nif (util.debuglog)\n debug = util.debuglog('gfs4')\nelse if (/\\bgfs4\\b/i.test(process.env.NODE_DEBUG || ''))\n debug = function() {\n var m = util.format.apply(util, arguments)\n m = 'GFS4: ' + m.split(/\\n/).join('\\nGFS4: ')\n console.error(m)\n }\n\n// Once time initialization\nif (!global[gracefulQueue]) {\n // This queue can be shared by multiple loaded instances\n var queue = []\n Object.defineProperty(global, gracefulQueue, {\n get: function() {\n return queue\n }\n })\n\n // Patch fs.close/closeSync to shared queue version, because we need\n // to retry() whenever a close happens *anywhere* in the program.\n // This is essential when multiple graceful-fs instances are\n // in play at the same time.\n fs.close = (function (fs$close) {\n function close (fd, cb) {\n return fs$close.call(fs, fd, function (err) {\n // This function uses the graceful-fs shared queue\n if (!err) {\n retry()\n }\n\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n })\n }\n\n Object.defineProperty(close, previousSymbol, {\n value: fs$close\n })\n return close\n })(fs.close)\n\n fs.closeSync = (function (fs$closeSync) {\n function closeSync (fd) {\n // This function uses the graceful-fs shared queue\n fs$closeSync.apply(fs, arguments)\n retry()\n }\n\n Object.defineProperty(closeSync, previousSymbol, {\n value: fs$closeSync\n })\n return closeSync\n })(fs.closeSync)\n\n if (/\\bgfs4\\b/i.test(process.env.NODE_DEBUG || '')) {\n process.on('exit', function() {\n debug(global[gracefulQueue])\n require('assert').equal(global[gracefulQueue].length, 0)\n })\n }\n}\n\nmodule.exports = patch(clone(fs))\nif (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {\n module.exports = patch(fs)\n fs.__patched = true;\n}\n\nfunction patch (fs) {\n // Everything that references the open() function needs to be in here\n polyfills(fs)\n fs.gracefulify = patch\n\n fs.createReadStream = createReadStream\n fs.createWriteStream = createWriteStream\n var fs$readFile = fs.readFile\n fs.readFile = readFile\n function readFile (path, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$readFile(path, options, cb)\n\n 
function go$readFile (path, options, cb) {\n return fs$readFile(path, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$readFile, [path, options, cb]])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n retry()\n }\n })\n }\n }\n\n var fs$writeFile = fs.writeFile\n fs.writeFile = writeFile\n function writeFile (path, data, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$writeFile(path, data, options, cb)\n\n function go$writeFile (path, data, options, cb) {\n return fs$writeFile(path, data, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$writeFile, [path, data, options, cb]])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n retry()\n }\n })\n }\n }\n\n var fs$appendFile = fs.appendFile\n if (fs$appendFile)\n fs.appendFile = appendFile\n function appendFile (path, data, options, cb) {\n if (typeof options === 'function')\n cb = options, options = null\n\n return go$appendFile(path, data, options, cb)\n\n function go$appendFile (path, data, options, cb) {\n return fs$appendFile(path, data, options, function (err) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$appendFile, [path, data, options, cb]])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n retry()\n }\n })\n }\n }\n\n var fs$readdir = fs.readdir\n fs.readdir = readdir\n function readdir (path, options, cb) {\n var args = [path]\n if (typeof options !== 'function') {\n args.push(options)\n } else {\n cb = options\n }\n args.push(go$readdir$cb)\n\n return go$readdir(args)\n\n function go$readdir$cb (err, files) {\n if (files && files.sort)\n files.sort()\n\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$readdir, [args]])\n\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n retry()\n }\n }\n }\n\n function go$readdir (args) {\n return fs$readdir.apply(fs, args)\n }\n\n if (process.version.substr(0, 4) === 'v0.8') {\n var legStreams = legacy(fs)\n ReadStream = legStreams.ReadStream\n WriteStream = legStreams.WriteStream\n }\n\n var fs$ReadStream = fs.ReadStream\n if (fs$ReadStream) {\n ReadStream.prototype = Object.create(fs$ReadStream.prototype)\n ReadStream.prototype.open = ReadStream$open\n }\n\n var fs$WriteStream = fs.WriteStream\n if (fs$WriteStream) {\n WriteStream.prototype = Object.create(fs$WriteStream.prototype)\n WriteStream.prototype.open = WriteStream$open\n }\n\n Object.defineProperty(fs, 'ReadStream', {\n get: function () {\n return ReadStream\n },\n set: function (val) {\n ReadStream = val\n },\n enumerable: true,\n configurable: true\n })\n Object.defineProperty(fs, 'WriteStream', {\n get: function () {\n return WriteStream\n },\n set: function (val) {\n WriteStream = val\n },\n enumerable: true,\n configurable: true\n })\n\n // legacy names\n var FileReadStream = ReadStream\n Object.defineProperty(fs, 'FileReadStream', {\n get: function () {\n return FileReadStream\n },\n set: function (val) {\n FileReadStream = val\n },\n enumerable: true,\n configurable: true\n })\n var FileWriteStream = WriteStream\n Object.defineProperty(fs, 'FileWriteStream', {\n get: function () {\n return FileWriteStream\n },\n set: function (val) {\n FileWriteStream = val\n },\n enumerable: true,\n configurable: true\n })\n\n function ReadStream (path, options) {\n if (this instanceof ReadStream)\n return fs$ReadStream.apply(this, arguments), this\n 
else\n return ReadStream.apply(Object.create(ReadStream.prototype), arguments)\n }\n\n function ReadStream$open () {\n var that = this\n open(that.path, that.flags, that.mode, function (err, fd) {\n if (err) {\n if (that.autoClose)\n that.destroy()\n\n that.emit('error', err)\n } else {\n that.fd = fd\n that.emit('open', fd)\n that.read()\n }\n })\n }\n\n function WriteStream (path, options) {\n if (this instanceof WriteStream)\n return fs$WriteStream.apply(this, arguments), this\n else\n return WriteStream.apply(Object.create(WriteStream.prototype), arguments)\n }\n\n function WriteStream$open () {\n var that = this\n open(that.path, that.flags, that.mode, function (err, fd) {\n if (err) {\n that.destroy()\n that.emit('error', err)\n } else {\n that.fd = fd\n that.emit('open', fd)\n }\n })\n }\n\n function createReadStream (path, options) {\n return new fs.ReadStream(path, options)\n }\n\n function createWriteStream (path, options) {\n return new fs.WriteStream(path, options)\n }\n\n var fs$open = fs.open\n fs.open = open\n function open (path, flags, mode, cb) {\n if (typeof mode === 'function')\n cb = mode, mode = null\n\n return go$open(path, flags, mode, cb)\n\n function go$open (path, flags, mode, cb) {\n return fs$open(path, flags, mode, function (err, fd) {\n if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))\n enqueue([go$open, [path, flags, mode, cb]])\n else {\n if (typeof cb === 'function')\n cb.apply(this, arguments)\n retry()\n }\n })\n }\n }\n\n return fs\n}\n\nfunction enqueue (elem) {\n debug('ENQUEUE', elem[0].name, elem[1])\n global[gracefulQueue].push(elem)\n}\n\nfunction retry () {\n var elem = global[gracefulQueue].shift()\n if (elem) {\n debug('RETRY', elem[0].name, elem[1])\n elem[0].apply(null, elem[1])\n }\n}\n","var Stream = require('stream').Stream\n\nmodule.exports = legacy\n\nfunction legacy (fs) {\n return {\n ReadStream: ReadStream,\n WriteStream: WriteStream\n }\n\n function ReadStream (path, options) {\n if (!(this instanceof ReadStream)) return new ReadStream(path, options);\n\n Stream.call(this);\n\n var self = this;\n\n this.path = path;\n this.fd = null;\n this.readable = true;\n this.paused = false;\n\n this.flags = 'r';\n this.mode = 438; /*=0666*/\n this.bufferSize = 64 * 1024;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.encoding) this.setEncoding(this.encoding);\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.end === undefined) {\n this.end = Infinity;\n } else if ('number' !== typeof this.end) {\n throw TypeError('end must be a Number');\n }\n\n if (this.start > this.end) {\n throw new Error('start must be <= end');\n }\n\n this.pos = this.start;\n }\n\n if (this.fd !== null) {\n process.nextTick(function() {\n self._read();\n });\n return;\n }\n\n fs.open(this.path, this.flags, this.mode, function (err, fd) {\n if (err) {\n self.emit('error', err);\n self.readable = false;\n return;\n }\n\n self.fd = fd;\n self.emit('open', fd);\n self._read();\n })\n }\n\n function WriteStream (path, options) {\n if (!(this instanceof WriteStream)) return new WriteStream(path, options);\n\n Stream.call(this);\n\n this.path = path;\n this.fd = null;\n this.writable = true;\n\n this.flags = 'w';\n this.encoding = 'binary';\n this.mode = 438; /*=0666*/\n this.bytesWritten = 0;\n\n 
options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.start < 0) {\n throw new Error('start must be >= zero');\n }\n\n this.pos = this.start;\n }\n\n this.busy = false;\n this._queue = [];\n\n if (this.fd === null) {\n this._open = fs.open;\n this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);\n this.flush();\n }\n }\n}\n","var constants = require('constants')\n\nvar origCwd = process.cwd\nvar cwd = null\n\nvar platform = process.env.GRACEFUL_FS_PLATFORM || process.platform\n\nprocess.cwd = function() {\n if (!cwd)\n cwd = origCwd.call(process)\n return cwd\n}\ntry {\n process.cwd()\n} catch (er) {}\n\nvar chdir = process.chdir\nprocess.chdir = function(d) {\n cwd = null\n chdir.call(process, d)\n}\n\nmodule.exports = patch\n\nfunction patch (fs) {\n // (re-)implement some things that are known busted or missing.\n\n // lchmod, broken prior to 0.6.2\n // back-port the fix here.\n if (constants.hasOwnProperty('O_SYMLINK') &&\n process.version.match(/^v0\\.6\\.[0-2]|^v0\\.5\\./)) {\n patchLchmod(fs)\n }\n\n // lutimes implementation, or no-op\n if (!fs.lutimes) {\n patchLutimes(fs)\n }\n\n // https://github.com/isaacs/node-graceful-fs/issues/4\n // Chown should not fail on einval or eperm if non-root.\n // It should not fail on enosys ever, as this just indicates\n // that a fs doesn't support the intended operation.\n\n fs.chown = chownFix(fs.chown)\n fs.fchown = chownFix(fs.fchown)\n fs.lchown = chownFix(fs.lchown)\n\n fs.chmod = chmodFix(fs.chmod)\n fs.fchmod = chmodFix(fs.fchmod)\n fs.lchmod = chmodFix(fs.lchmod)\n\n fs.chownSync = chownFixSync(fs.chownSync)\n fs.fchownSync = chownFixSync(fs.fchownSync)\n fs.lchownSync = chownFixSync(fs.lchownSync)\n\n fs.chmodSync = chmodFixSync(fs.chmodSync)\n fs.fchmodSync = chmodFixSync(fs.fchmodSync)\n fs.lchmodSync = chmodFixSync(fs.lchmodSync)\n\n fs.stat = statFix(fs.stat)\n fs.fstat = statFix(fs.fstat)\n fs.lstat = statFix(fs.lstat)\n\n fs.statSync = statFixSync(fs.statSync)\n fs.fstatSync = statFixSync(fs.fstatSync)\n fs.lstatSync = statFixSync(fs.lstatSync)\n\n // if lchmod/lchown do not exist, then make them no-ops\n if (!fs.lchmod) {\n fs.lchmod = function (path, mode, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchmodSync = function () {}\n }\n if (!fs.lchown) {\n fs.lchown = function (path, uid, gid, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchownSync = function () {}\n }\n\n // on Windows, A/V software can lock the directory, causing this\n // to fail with an EACCES or EPERM if the directory contains newly\n // created files. Try again on failure, for up to 60 seconds.\n\n // Set the timeout this long because some Windows Anti-Virus, such as Parity\n // bit9, may lock files for up to a minute, causing npm package install\n // failures. Also, take care to yield the scheduler. 
Windows scheduling gives\n // CPU to a busy looping process, which can cause the program causing the lock\n // contention to be starved of CPU by node, so the contention doesn't resolve.\n if (platform === \"win32\") {\n fs.rename = (function (fs$rename) { return function (from, to, cb) {\n var start = Date.now()\n var backoff = 0;\n fs$rename(from, to, function CB (er) {\n if (er\n && (er.code === \"EACCES\" || er.code === \"EPERM\")\n && Date.now() - start < 60000) {\n setTimeout(function() {\n fs.stat(to, function (stater, st) {\n if (stater && stater.code === \"ENOENT\")\n fs$rename(from, to, CB);\n else\n cb(er)\n })\n }, backoff)\n if (backoff < 100)\n backoff += 10;\n return;\n }\n if (cb) cb(er)\n })\n }})(fs.rename)\n }\n\n // if read() returns EAGAIN, then just try it again.\n fs.read = (function (fs$read) {\n function read (fd, buffer, offset, length, position, callback_) {\n var callback\n if (callback_ && typeof callback_ === 'function') {\n var eagCounter = 0\n callback = function (er, _, __) {\n if (er && er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n callback_.apply(this, arguments)\n }\n }\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n\n // This ensures `util.promisify` works as it does for native `fs.read`.\n read.__proto__ = fs$read\n return read\n })(fs.read)\n\n fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {\n var eagCounter = 0\n while (true) {\n try {\n return fs$readSync.call(fs, fd, buffer, offset, length, position)\n } catch (er) {\n if (er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n continue\n }\n throw er\n }\n }\n }})(fs.readSync)\n\n function patchLchmod (fs) {\n fs.lchmod = function (path, mode, callback) {\n fs.open( path\n , constants.O_WRONLY | constants.O_SYMLINK\n , mode\n , function (err, fd) {\n if (err) {\n if (callback) callback(err)\n return\n }\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n fs.fchmod(fd, mode, function (err) {\n fs.close(fd, function(err2) {\n if (callback) callback(err || err2)\n })\n })\n })\n }\n\n fs.lchmodSync = function (path, mode) {\n var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)\n\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n var threw = true\n var ret\n try {\n ret = fs.fchmodSync(fd, mode)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n }\n\n function patchLutimes (fs) {\n if (constants.hasOwnProperty(\"O_SYMLINK\")) {\n fs.lutimes = function (path, at, mt, cb) {\n fs.open(path, constants.O_SYMLINK, function (er, fd) {\n if (er) {\n if (cb) cb(er)\n return\n }\n fs.futimes(fd, at, mt, function (er) {\n fs.close(fd, function (er2) {\n if (cb) cb(er || er2)\n })\n })\n })\n }\n\n fs.lutimesSync = function (path, at, mt) {\n var fd = fs.openSync(path, constants.O_SYMLINK)\n var ret\n var threw = true\n try {\n ret = fs.futimesSync(fd, at, mt)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n\n } else {\n fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }\n fs.lutimesSync = function () {}\n }\n }\n\n function chmodFix (orig) {\n if (!orig) 
return orig\n return function (target, mode, cb) {\n return orig.call(fs, target, mode, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n }\n\n function chmodFixSync (orig) {\n if (!orig) return orig\n return function (target, mode) {\n try {\n return orig.call(fs, target, mode)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n }\n\n\n function chownFix (orig) {\n if (!orig) return orig\n return function (target, uid, gid, cb) {\n return orig.call(fs, target, uid, gid, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n }\n\n function chownFixSync (orig) {\n if (!orig) return orig\n return function (target, uid, gid) {\n try {\n return orig.call(fs, target, uid, gid)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n }\n\n function statFix (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, options, cb) {\n if (typeof options === 'function') {\n cb = options\n options = null\n }\n function callback (er, stats) {\n if (stats) {\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n }\n if (cb) cb.apply(this, arguments)\n }\n return options ? orig.call(fs, target, options, callback)\n : orig.call(fs, target, callback)\n }\n }\n\n function statFixSync (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, options) {\n var stats = options ? orig.call(fs, target, options)\n : orig.call(fs, target)\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n return stats;\n }\n }\n\n // ENOSYS means that the fs doesn't support the op. 
Just ignore\n // that, because it doesn't matter.\n //\n // if there's no getuid, or if getuid() is something other\n // than 0, and the error is EINVAL or EPERM, then just ignore\n // it.\n //\n // This specific case is a silent failure in cp, install, tar,\n // and most other unix tools that manage permissions.\n //\n // When running as root, or if other types of errors are\n // encountered, then it's strict.\n function chownErOk (er) {\n if (!er)\n return true\n\n if (er.code === \"ENOSYS\")\n return true\n\n var nonroot = !process.getuid || process.getuid() !== 0\n if (nonroot) {\n if (er.code === \"EINVAL\" || er.code === \"EPERM\")\n return true\n }\n\n return false\n }\n}\n","var wrappy = require('wrappy')\nvar reqs = Object.create(null)\nvar once = require('once')\n\nmodule.exports = wrappy(inflight)\n\nfunction inflight (key, cb) {\n if (reqs[key]) {\n reqs[key].push(cb)\n return null\n } else {\n reqs[key] = [cb]\n return makeres(key)\n }\n}\n\nfunction makeres (key) {\n return once(function RES () {\n var cbs = reqs[key]\n var len = cbs.length\n var args = slice(arguments)\n\n // XXX It's somewhat ambiguous whether a new callback added in this\n // pass should be queued for later execution if something in the\n // list of callbacks throws, or if it should just be discarded.\n // However, it's such an edge case that it hardly matters, and either\n // choice is likely as surprising as the other.\n // As it happens, we do go ahead and schedule it for later execution.\n try {\n for (var i = 0; i < len; i++) {\n cbs[i].apply(null, args)\n }\n } finally {\n if (cbs.length > len) {\n // added more in the interim.\n // de-zalgo, just in case, but don't call again.\n cbs.splice(0, len)\n process.nextTick(function () {\n RES.apply(null, args)\n })\n } else {\n delete reqs[key]\n }\n }\n })\n}\n\nfunction slice (args) {\n var length = args.length\n var array = []\n\n for (var i = 0; i < length; i++) array[i] = args[i]\n return array\n}\n","try {\n var util = require('util');\n /* istanbul ignore next */\n if (typeof util.inherits !== 'function') throw '';\n module.exports = util.inherits;\n} catch (e) {\n /* istanbul ignore next */\n module.exports = require('./inherits_browser.js');\n}\n","if (typeof Object.create === 'function') {\n // implementation from standard node.js 'util' module\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n ctor.prototype = Object.create(superCtor.prototype, {\n constructor: {\n value: ctor,\n enumerable: false,\n writable: true,\n configurable: true\n }\n })\n }\n };\n} else {\n // old school shim for old browsers\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n var TempCtor = function () {}\n TempCtor.prototype = superCtor.prototype\n ctor.prototype = new TempCtor()\n ctor.prototype.constructor = ctor\n }\n }\n}\n","var toString = {}.toString;\n\nmodule.exports = Array.isArray || function (arr) {\n return toString.call(arr) == '[object Array]';\n};\n","'use strict'\n\nvar listenerCount = require('events').listenerCount\n// listenerCount isn't in node 0.10, so here's a basic polyfill\nlistenerCount = listenerCount || function (ee, event) {\n var listeners = ee && ee._events && ee._events[event]\n if (Array.isArray(listeners)) {\n return listeners.length\n } else if (typeof listeners === 'function') {\n return 1\n } else {\n return 0\n }\n}\n\nmodule.exports = listenerCount\n","/*!\n * mime-db\n * Copyright(c) 2014 Jonathan Ong\n * MIT Licensed\n 
*/\n\n/**\n * Module exports.\n */\n\nmodule.exports = require('./db.json')\n","/*!\n * mime-types\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\nvar extname = require('path').extname\n\n/**\n * Module variables.\n * @private\n */\n\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\nvar TEXT_TYPE_REGEXP = /^text\\//i\n\n/**\n * Module exports.\n * @public\n */\n\nexports.charset = charset\nexports.charsets = { lookup: charset }\nexports.contentType = contentType\nexports.extension = extension\nexports.extensions = Object.create(null)\nexports.lookup = lookup\nexports.types = Object.create(null)\n\n// Populate the extensions/types maps\npopulateMaps(exports.extensions, exports.types)\n\n/**\n * Get the default charset for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction charset (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && db[match[1].toLowerCase()]\n\n if (mime && mime.charset) {\n return mime.charset\n }\n\n // default text/* to utf-8\n if (match && TEXT_TYPE_REGEXP.test(match[1])) {\n return 'UTF-8'\n }\n\n return false\n}\n\n/**\n * Create a full Content-Type header given a MIME type or extension.\n *\n * @param {string} str\n * @return {boolean|string}\n */\n\nfunction contentType (str) {\n // TODO: should this even be in this module?\n if (!str || typeof str !== 'string') {\n return false\n }\n\n var mime = str.indexOf('/') === -1\n ? exports.lookup(str)\n : str\n\n if (!mime) {\n return false\n }\n\n // TODO: use content-type or other module\n if (mime.indexOf('charset') === -1) {\n var charset = exports.charset(mime)\n if (charset) mime += '; charset=' + charset.toLowerCase()\n }\n\n return mime\n}\n\n/**\n * Get the default extension for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction extension (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n\n // get extensions\n var exts = match && exports.extensions[match[1].toLowerCase()]\n\n if (!exts || !exts.length) {\n return false\n }\n\n return exts[0]\n}\n\n/**\n * Lookup the MIME type for a file path/extension.\n *\n * @param {string} path\n * @return {boolean|string}\n */\n\nfunction lookup (path) {\n if (!path || typeof path !== 'string') {\n return false\n }\n\n // get the extension (\"ext\" or \".ext\" or full path)\n var extension = extname('x.' 
+ path)\n .toLowerCase()\n .substr(1)\n\n if (!extension) {\n return false\n }\n\n return exports.types[extension] || false\n}\n\n/**\n * Populate the extensions and types maps.\n * @private\n */\n\nfunction populateMaps (extensions, types) {\n // source preference (least -> most)\n var preference = ['nginx', 'apache', undefined, 'iana']\n\n Object.keys(db).forEach(function forEachMimeType (type) {\n var mime = db[type]\n var exts = mime.extensions\n\n if (!exts || !exts.length) {\n return\n }\n\n // mime -> extensions\n extensions[type] = exts\n\n // extension -> mime\n for (var i = 0; i < exts.length; i++) {\n var extension = exts[i]\n\n if (types[extension]) {\n var from = preference.indexOf(db[types[extension]].source)\n var to = preference.indexOf(mime.source)\n\n if (types[extension] !== 'application/octet-stream' &&\n (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) {\n // skip the remapping\n continue\n }\n }\n\n // set the extension -> mime\n types[extension] = type\n }\n })\n}\n","module.exports = minimatch\nminimatch.Minimatch = Minimatch\n\nvar path = { sep: '/' }\ntry {\n path = require('path')\n} catch (er) {}\n\nvar GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}\nvar expand = require('brace-expansion')\n\nvar plTypes = {\n '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},\n '?': { open: '(?:', close: ')?' },\n '+': { open: '(?:', close: ')+' },\n '*': { open: '(?:', close: ')*' },\n '@': { open: '(?:', close: ')' }\n}\n\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nvar qmark = '[^/]'\n\n// * => any number of characters\nvar star = qmark + '*?'\n\n// ** when dots are allowed. Anything goes, except .. and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nvar twoStarDot = '(?:(?!(?:\\\\\\/|^)(?:\\\\.{1,2})($|\\\\\\/)).)*?'\n\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nvar twoStarNoDot = '(?:(?!(?:\\\\\\/|^)\\\\.).)*?'\n\n// characters that need to be escaped in RegExp.\nvar reSpecials = charSet('().*{}+?[]^$\\\\!')\n\n// \"abc\" -> { a:true, b:true, c:true }\nfunction charSet (s) {\n return s.split('').reduce(function (set, c) {\n set[c] = true\n return set\n }, {})\n}\n\n// normalizes slashes.\nvar slashSplit = /\\/+/\n\nminimatch.filter = filter\nfunction filter (pattern, options) {\n options = options || {}\n return function (p, i, list) {\n return minimatch(p, pattern, options)\n }\n}\n\nfunction ext (a, b) {\n a = a || {}\n b = b || {}\n var t = {}\n Object.keys(b).forEach(function (k) {\n t[k] = b[k]\n })\n Object.keys(a).forEach(function (k) {\n t[k] = a[k]\n })\n return t\n}\n\nminimatch.defaults = function (def) {\n if (!def || !Object.keys(def).length) return minimatch\n\n var orig = minimatch\n\n var m = function minimatch (p, pattern, options) {\n return orig.minimatch(p, pattern, ext(def, options))\n }\n\n m.Minimatch = function Minimatch (pattern, options) {\n return new orig.Minimatch(pattern, ext(def, options))\n }\n\n return m\n}\n\nMinimatch.defaults = function (def) {\n if (!def || !Object.keys(def).length) return Minimatch\n return minimatch.defaults(def).Minimatch\n}\n\nfunction minimatch (p, pattern, options) {\n if (typeof pattern !== 'string') {\n throw new TypeError('glob pattern string required')\n }\n\n if (!options) options = {}\n\n // shortcut: comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n return false\n }\n\n // \"\" only matches 
\"\"\n if (pattern.trim() === '') return p === ''\n\n return new Minimatch(pattern, options).match(p)\n}\n\nfunction Minimatch (pattern, options) {\n if (!(this instanceof Minimatch)) {\n return new Minimatch(pattern, options)\n }\n\n if (typeof pattern !== 'string') {\n throw new TypeError('glob pattern string required')\n }\n\n if (!options) options = {}\n pattern = pattern.trim()\n\n // windows support: need to use /, not \\\n if (path.sep !== '/') {\n pattern = pattern.split(path.sep).join('/')\n }\n\n this.options = options\n this.set = []\n this.pattern = pattern\n this.regexp = null\n this.negate = false\n this.comment = false\n this.empty = false\n\n // make the set of regexps etc.\n this.make()\n}\n\nMinimatch.prototype.debug = function () {}\n\nMinimatch.prototype.make = make\nfunction make () {\n // don't do it more than once.\n if (this._made) return\n\n var pattern = this.pattern\n var options = this.options\n\n // empty patterns and comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n this.comment = true\n return\n }\n if (!pattern) {\n this.empty = true\n return\n }\n\n // step 1: figure out negation, etc.\n this.parseNegate()\n\n // step 2: expand braces\n var set = this.globSet = this.braceExpand()\n\n if (options.debug) this.debug = console.error\n\n this.debug(this.pattern, set)\n\n // step 3: now we have a set, so turn each one into a series of path-portion\n // matching patterns.\n // These will be regexps, except in the case of \"**\", which is\n // set to the GLOBSTAR object for globstar behavior,\n // and will not contain any / characters\n set = this.globParts = set.map(function (s) {\n return s.split(slashSplit)\n })\n\n this.debug(this.pattern, set)\n\n // glob --> regexps\n set = set.map(function (s, si, set) {\n return s.map(this.parse, this)\n }, this)\n\n this.debug(this.pattern, set)\n\n // filter out everything that didn't compile properly.\n set = set.filter(function (s) {\n return s.indexOf(false) === -1\n })\n\n this.debug(this.pattern, set)\n\n this.set = set\n}\n\nMinimatch.prototype.parseNegate = parseNegate\nfunction parseNegate () {\n var pattern = this.pattern\n var negate = false\n var options = this.options\n var negateOffset = 0\n\n if (options.nonegate) return\n\n for (var i = 0, l = pattern.length\n ; i < l && pattern.charAt(i) === '!'\n ; i++) {\n negate = !negate\n negateOffset++\n }\n\n if (negateOffset) this.pattern = pattern.substr(negateOffset)\n this.negate = negate\n}\n\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nminimatch.braceExpand = function (pattern, options) {\n return braceExpand(pattern, options)\n}\n\nMinimatch.prototype.braceExpand = braceExpand\n\nfunction braceExpand (pattern, options) {\n if (!options) {\n if (this instanceof Minimatch) {\n options = this.options\n } else {\n options = {}\n }\n }\n\n pattern = typeof pattern === 'undefined'\n ? this.pattern : pattern\n\n if (typeof pattern === 'undefined') {\n throw new TypeError('undefined pattern')\n }\n\n if (options.nobrace ||\n !pattern.match(/\\{.*\\}/)) {\n // shortcut. 
no need to expand.\n return [pattern]\n }\n\n return expand(pattern)\n}\n\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion. Otherwise, any series\n// of * is equivalent to a single *. Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\nMinimatch.prototype.parse = parse\nvar SUBPARSE = {}\nfunction parse (pattern, isSub) {\n if (pattern.length > 1024 * 64) {\n throw new TypeError('pattern is too long')\n }\n\n var options = this.options\n\n // shortcuts\n if (!options.noglobstar && pattern === '**') return GLOBSTAR\n if (pattern === '') return ''\n\n var re = ''\n var hasMagic = !!options.nocase\n var escaping = false\n // ? => one single character\n var patternListStack = []\n var negativeLists = []\n var stateChar\n var inClass = false\n var reClassStart = -1\n var classStart = -1\n // . and .. never match anything that doesn't start with .,\n // even when options.dot is set.\n var patternStart = pattern.charAt(0) === '.' ? '' // anything\n // not (start or / followed by . or .. followed by / or end)\n : options.dot ? '(?!(?:^|\\\\\\/)\\\\.{1,2}(?:$|\\\\\\/))'\n : '(?!\\\\.)'\n var self = this\n\n function clearStateChar () {\n if (stateChar) {\n // we had some state-tracking character\n // that wasn't consumed by this pass.\n switch (stateChar) {\n case '*':\n re += star\n hasMagic = true\n break\n case '?':\n re += qmark\n hasMagic = true\n break\n default:\n re += '\\\\' + stateChar\n break\n }\n self.debug('clearStateChar %j %j', stateChar, re)\n stateChar = false\n }\n }\n\n for (var i = 0, len = pattern.length, c\n ; (i < len) && (c = pattern.charAt(i))\n ; i++) {\n this.debug('%s\\t%s %s %j', pattern, i, re, c)\n\n // skip over any that are escaped.\n if (escaping && reSpecials[c]) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n switch (c) {\n case '/':\n // completely not allowed, even escaped.\n // Should already be path-split by now.\n return false\n\n case '\\\\':\n clearStateChar()\n escaping = true\n continue\n\n // the various stateChar values\n // for the \"extglob\" stuff.\n case '?':\n case '*':\n case '+':\n case '@':\n case '!':\n this.debug('%s\\t%s %s %j <-- stateChar', pattern, i, re, c)\n\n // all of those are literals inside a class, except that\n // the glob [!a] means [^a] in regexp\n if (inClass) {\n this.debug(' in class')\n if (c === '!' && i === classStart + 1) c = '^'\n re += c\n continue\n }\n\n // if we already have a stateChar, then it means\n // that there was something like ** or +? 
in there.\n // Handle the stateChar, then proceed with this one.\n self.debug('call clearStateChar %j', stateChar)\n clearStateChar()\n stateChar = c\n // if extglob is disabled, then +(asdf|foo) isn't a thing.\n // just clear the statechar *now*, rather than even diving into\n // the patternList stuff.\n if (options.noext) clearStateChar()\n continue\n\n case '(':\n if (inClass) {\n re += '('\n continue\n }\n\n if (!stateChar) {\n re += '\\\\('\n continue\n }\n\n patternListStack.push({\n type: stateChar,\n start: i - 1,\n reStart: re.length,\n open: plTypes[stateChar].open,\n close: plTypes[stateChar].close\n })\n // negation is (?:(?!js)[^/]*)\n re += stateChar === '!' ? '(?:(?!(?:' : '(?:'\n this.debug('plType %j %j', stateChar, re)\n stateChar = false\n continue\n\n case ')':\n if (inClass || !patternListStack.length) {\n re += '\\\\)'\n continue\n }\n\n clearStateChar()\n hasMagic = true\n var pl = patternListStack.pop()\n // negation is (?:(?!js)[^/]*)\n // The others are (?:)\n re += pl.close\n if (pl.type === '!') {\n negativeLists.push(pl)\n }\n pl.reEnd = re.length\n continue\n\n case '|':\n if (inClass || !patternListStack.length || escaping) {\n re += '\\\\|'\n escaping = false\n continue\n }\n\n clearStateChar()\n re += '|'\n continue\n\n // these are mostly the same in regexp and glob\n case '[':\n // swallow any state-tracking char before the [\n clearStateChar()\n\n if (inClass) {\n re += '\\\\' + c\n continue\n }\n\n inClass = true\n classStart = i\n reClassStart = re.length\n re += c\n continue\n\n case ']':\n // a right bracket shall lose its special\n // meaning and represent itself in\n // a bracket expression if it occurs\n // first in the list. -- POSIX.2 2.8.3.2\n if (i === classStart + 1 || !inClass) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n // handle the case where we left a class open.\n // \"[z-a]\" is valid, equivalent to \"\\[z-a\\]\"\n if (inClass) {\n // split where the last [ was, make sure we don't have\n // an invalid re. if so, re-walk the contents of the\n // would-be class to re-translate any characters that\n // were passed through as-is\n // TODO: It would probably be faster to determine this\n // without a try/catch and a new RegExp, but it's tricky\n // to do safely. For now, this is safe and works.\n var cs = pattern.substring(classStart + 1, i)\n try {\n RegExp('[' + cs + ']')\n } catch (er) {\n // not a valid class!\n var sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0] + '\\\\]'\n hasMagic = hasMagic || sp[1]\n inClass = false\n continue\n }\n }\n\n // finish up the class.\n hasMagic = true\n inClass = false\n re += c\n continue\n\n default:\n // swallow any state char that wasn't consumed\n clearStateChar()\n\n if (escaping) {\n // no need\n escaping = false\n } else if (reSpecials[c]\n && !(c === '^' && inClass)) {\n re += '\\\\'\n }\n\n re += c\n\n } // switch\n } // for\n\n // handle the case where we left a class open.\n // \"[abc\" is valid, equivalent to \"\\[abc\"\n if (inClass) {\n // split where the last [ was, and escape it\n // this is a huge pita. 
We now have to re-walk\n // the contents of the would-be class to re-translate\n // any characters that were passed through as-is\n cs = pattern.substr(classStart + 1)\n sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0]\n hasMagic = hasMagic || sp[1]\n }\n\n // handle the case where we had a +( thing at the *end*\n // of the pattern.\n // each pattern list stack adds 3 chars, and we need to go through\n // and escape any | chars that were passed through as-is for the regexp.\n // Go through and escape them, taking care not to double-escape any\n // | chars that were already escaped.\n for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {\n var tail = re.slice(pl.reStart + pl.open.length)\n this.debug('setting tail', re, pl)\n // maybe some even number of \\, then maybe 1 \\, followed by a |\n tail = tail.replace(/((?:\\\\{2}){0,64})(\\\\?)\\|/g, function (_, $1, $2) {\n if (!$2) {\n // the | isn't already escaped, so escape it.\n $2 = '\\\\'\n }\n\n // need to escape all those slashes *again*, without escaping the\n // one that we need for escaping the | character. As it works out,\n // escaping an even number of slashes can be done by simply repeating\n // it exactly after itself. That's why this trick works.\n //\n // I am sorry that you have to see this.\n return $1 + $1 + $2 + '|'\n })\n\n this.debug('tail=%j\\n %s', tail, tail, pl, re)\n var t = pl.type === '*' ? star\n : pl.type === '?' ? qmark\n : '\\\\' + pl.type\n\n hasMagic = true\n re = re.slice(0, pl.reStart) + t + '\\\\(' + tail\n }\n\n // handle trailing things that only matter at the very end.\n clearStateChar()\n if (escaping) {\n // trailing \\\\\n re += '\\\\\\\\'\n }\n\n // only need to apply the nodot start if the re starts with\n // something that could conceivably capture a dot\n var addPatternStart = false\n switch (re.charAt(0)) {\n case '.':\n case '[':\n case '(': addPatternStart = true\n }\n\n // Hack to work around lack of negative lookbehind in JS\n // A pattern like: *.!(x).!(y|z) needs to ensure that a name\n // like 'a.xyz.yz' doesn't match. 
So, the first negative\n // lookahead, has to look ALL the way ahead, to the end of\n // the pattern.\n for (var n = negativeLists.length - 1; n > -1; n--) {\n var nl = negativeLists[n]\n\n var nlBefore = re.slice(0, nl.reStart)\n var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)\n var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)\n var nlAfter = re.slice(nl.reEnd)\n\n nlLast += nlAfter\n\n // Handle nested stuff like *(*.js|!(*.json)), where open parens\n // mean that we should *not* include the ) in the bit that is considered\n // \"after\" the negated section.\n var openParensBefore = nlBefore.split('(').length - 1\n var cleanAfter = nlAfter\n for (i = 0; i < openParensBefore; i++) {\n cleanAfter = cleanAfter.replace(/\\)[+*?]?/, '')\n }\n nlAfter = cleanAfter\n\n var dollar = ''\n if (nlAfter === '' && isSub !== SUBPARSE) {\n dollar = '$'\n }\n var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast\n re = newRe\n }\n\n // if the re is not \"\" at this point, then we need to make sure\n // it doesn't match against an empty path part.\n // Otherwise a/* will match a/, which it should not.\n if (re !== '' && hasMagic) {\n re = '(?=.)' + re\n }\n\n if (addPatternStart) {\n re = patternStart + re\n }\n\n // parsing just a piece of a larger pattern.\n if (isSub === SUBPARSE) {\n return [re, hasMagic]\n }\n\n // skip the regexp for non-magical patterns\n // unescape anything in it, though, so that it'll be\n // an exact match against a file etc.\n if (!hasMagic) {\n return globUnescape(pattern)\n }\n\n var flags = options.nocase ? 'i' : ''\n try {\n var regExp = new RegExp('^' + re + '$', flags)\n } catch (er) {\n // If it was an invalid regular expression, then it can't match\n // anything. This trick looks for a character after the end of\n // the string, which is of course impossible, except in multi-line\n // mode, but it's not a /m regex.\n return new RegExp('$.')\n }\n\n regExp._glob = pattern\n regExp._src = re\n\n return regExp\n}\n\nminimatch.makeRe = function (pattern, options) {\n return new Minimatch(pattern, options || {}).makeRe()\n}\n\nMinimatch.prototype.makeRe = makeRe\nfunction makeRe () {\n if (this.regexp || this.regexp === false) return this.regexp\n\n // at this point, this.set is a 2d array of partial\n // pattern strings, or \"**\".\n //\n // It's better to use .match(). This function shouldn't\n // be used, really, but it's pretty convenient sometimes,\n // when you just want to work with a regex.\n var set = this.set\n\n if (!set.length) {\n this.regexp = false\n return this.regexp\n }\n var options = this.options\n\n var twoStar = options.noglobstar ? star\n : options.dot ? twoStarDot\n : twoStarNoDot\n var flags = options.nocase ? 'i' : ''\n\n var re = set.map(function (pattern) {\n return pattern.map(function (p) {\n return (p === GLOBSTAR) ? twoStar\n : (typeof p === 'string') ? regExpEscape(p)\n : p._src\n }).join('\\\\\\/')\n }).join('|')\n\n // must match entire pattern\n // ending in a * or ** will make it less strict.\n re = '^(?:' + re + ')$'\n\n // can match anything, as long as it's not this.\n if (this.negate) re = '^(?!' 
+ re + ').*$'\n\n try {\n this.regexp = new RegExp(re, flags)\n } catch (ex) {\n this.regexp = false\n }\n return this.regexp\n}\n\nminimatch.match = function (list, pattern, options) {\n options = options || {}\n var mm = new Minimatch(pattern, options)\n list = list.filter(function (f) {\n return mm.match(f)\n })\n if (mm.options.nonull && !list.length) {\n list.push(pattern)\n }\n return list\n}\n\nMinimatch.prototype.match = match\nfunction match (f, partial) {\n this.debug('match', f, this.pattern)\n // short-circuit in the case of busted things.\n // comments, etc.\n if (this.comment) return false\n if (this.empty) return f === ''\n\n if (f === '/' && partial) return true\n\n var options = this.options\n\n // windows: need to use /, not \\\n if (path.sep !== '/') {\n f = f.split(path.sep).join('/')\n }\n\n // treat the test path as a set of pathparts.\n f = f.split(slashSplit)\n this.debug(this.pattern, 'split', f)\n\n // just ONE of the pattern sets in this.set needs to match\n // in order for it to be valid. If negating, then just one\n // match means that we have failed.\n // Either way, return on the first hit.\n\n var set = this.set\n this.debug(this.pattern, 'set', set)\n\n // Find the basename of the path by looking for the last non-empty segment\n var filename\n var i\n for (i = f.length - 1; i >= 0; i--) {\n filename = f[i]\n if (filename) break\n }\n\n for (i = 0; i < set.length; i++) {\n var pattern = set[i]\n var file = f\n if (options.matchBase && pattern.length === 1) {\n file = [filename]\n }\n var hit = this.matchOne(file, pattern, partial)\n if (hit) {\n if (options.flipNegate) return true\n return !this.negate\n }\n }\n\n // didn't get any hits. this is success if it's a negative\n // pattern, failure otherwise.\n if (options.flipNegate) return false\n return this.negate\n}\n\n// set partial to true to test if, for example,\n// \"/a/b\" matches the start of \"/*/b/*/d\"\n// Partial means, if you run out of file before you run\n// out of pattern, then that's fine, as long as all\n// the parts match.\nMinimatch.prototype.matchOne = function (file, pattern, partial) {\n var options = this.options\n\n this.debug('matchOne',\n { 'this': this, file: file, pattern: pattern })\n\n this.debug('matchOne', file.length, pattern.length)\n\n for (var fi = 0,\n pi = 0,\n fl = file.length,\n pl = pattern.length\n ; (fi < fl) && (pi < pl)\n ; fi++, pi++) {\n this.debug('matchOne loop')\n var p = pattern[pi]\n var f = file[fi]\n\n this.debug(pattern, p, f)\n\n // should be impossible.\n // some invalid regexp stuff in the set.\n if (p === false) return false\n\n if (p === GLOBSTAR) {\n this.debug('GLOBSTAR', [pattern, p, f])\n\n // \"**\"\n // a/**/b/**/c would match the following:\n // a/b/x/y/z/c\n // a/x/y/z/b/c\n // a/b/x/b/x/c\n // a/b/c\n // To do this, take the rest of the pattern after\n // the **, and see if it would match the file remainder.\n // If so, return success.\n // If not, the ** \"swallows\" a segment, and try again.\n // This is recursively awful.\n //\n // a/**/b/**/c matching a/b/x/y/z/c\n // - a matches a\n // - doublestar\n // - matchOne(b/x/y/z/c, b/**/c)\n // - b matches b\n // - doublestar\n // - matchOne(x/y/z/c, c) -> no\n // - matchOne(y/z/c, c) -> no\n // - matchOne(z/c, c) -> no\n // - matchOne(c, c) yes, hit\n var fr = fi\n var pr = pi + 1\n if (pr === pl) {\n this.debug('** at the end')\n // a ** at the end will just swallow the rest.\n // We have found a match.\n // however, it will not swallow /.x, unless\n // options.dot is set.\n // . and .. 
are *never* matched by **, for explosively\n // exponential reasons.\n for (; fi < fl; fi++) {\n if (file[fi] === '.' || file[fi] === '..' ||\n (!options.dot && file[fi].charAt(0) === '.')) return false\n }\n return true\n }\n\n // ok, let's see if we can swallow whatever we can.\n while (fr < fl) {\n var swallowee = file[fr]\n\n this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee)\n\n // XXX remove this slice. Just pass the start index.\n if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n this.debug('globstar found match!', fr, fl, swallowee)\n // found a match.\n return true\n } else {\n // can't swallow \".\" or \"..\" ever.\n // can only swallow \".foo\" when explicitly asked.\n if (swallowee === '.' || swallowee === '..' ||\n (!options.dot && swallowee.charAt(0) === '.')) {\n this.debug('dot detected!', file, fr, pattern, pr)\n break\n }\n\n // ** swallows a segment, and continue.\n this.debug('globstar swallow a segment, and continue')\n fr++\n }\n }\n\n // no match was found.\n // However, in partial mode, we can't say this is necessarily over.\n // If there's more *pattern* left, then\n if (partial) {\n // ran out of file\n this.debug('\\n>>> no match, partial?', file, fr, pattern, pr)\n if (fr === fl) return true\n }\n return false\n }\n\n // something other than **\n // non-magic patterns just have to match exactly\n // patterns with magic have been turned into regexps.\n var hit\n if (typeof p === 'string') {\n if (options.nocase) {\n hit = f.toLowerCase() === p.toLowerCase()\n } else {\n hit = f === p\n }\n this.debug('string match', p, f, hit)\n } else {\n hit = f.match(p)\n this.debug('pattern match', p, f, hit)\n }\n\n if (!hit) return false\n }\n\n // Note: ending in / means that we'll get a final \"\"\n // at the end of the pattern. This can only match a\n // corresponding \"\" at the end of the file.\n // If the file ends in /, then it can only match a\n // a pattern that ends in /, unless the pattern just\n // doesn't have any more for it. But, a/b/ should *not*\n // match \"a/b/*\", even though \"\" matches against the\n // [^/]*? pattern, except in partial mode, where it might\n // simply not be reached yet.\n // However, a/b/ should still satisfy a/*\n\n // now either we fell off the end of the pattern, or we're done.\n if (fi === fl && pi === pl) {\n // ran out of pattern and filename at the same time.\n // an exact hit!\n return true\n } else if (fi === fl) {\n // ran out of file, but still had pattern left.\n // this is ok if we're doing the match as part of\n // a glob fs traversal.\n return partial\n } else if (pi === pl) {\n // ran out of pattern, still have file left.\n // this is only acceptable if we're on the very last\n // empty segment of a file with a trailing slash.\n // a/* should match a/b/\n var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')\n return emptyFileEnd\n }\n\n // should be unreachable.\n throw new Error('wtf?')\n}\n\n// replace stuff like \\* with *\nfunction globUnescape (s) {\n return s.replace(/\\\\(.)/g, '$1')\n}\n\nfunction regExpEscape (s) {\n return s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String 
message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use strict';\n\nfunction 
posix(path) {\n\treturn path.charAt(0) === '/';\n}\n\nfunction win32(path) {\n\t// https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56\n\tvar splitDeviceRe = /^([a-zA-Z]:|[\\\\\\/]{2}[^\\\\\\/]+[\\\\\\/]+[^\\\\\\/]+)?([\\\\\\/])?([\\s\\S]*?)$/;\n\tvar result = splitDeviceRe.exec(path);\n\tvar device = result[1] || '';\n\tvar isUnc = Boolean(device && device.charAt(1) !== ':');\n\n\t// UNC paths are always absolute\n\treturn Boolean(result[2] || isUnc);\n}\n\nmodule.exports = process.platform === 'win32' ? win32 : posix;\nmodule.exports.posix = posix;\nmodule.exports.win32 = win32;\n","'use strict';\n\nif (typeof process === 'undefined' ||\n !process.version ||\n process.version.indexOf('v0.') === 0 ||\n process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {\n module.exports = { nextTick: nextTick };\n} else {\n module.exports = process\n}\n\nfunction nextTick(fn, arg1, arg2, arg3) {\n if (typeof fn !== 'function') {\n throw new TypeError('\"callback\" argument must be a function');\n }\n var len = arguments.length;\n var args, i;\n switch (len) {\n case 0:\n case 1:\n return process.nextTick(fn);\n case 2:\n return process.nextTick(function afterTickOne() {\n fn.call(null, arg1);\n });\n case 3:\n return process.nextTick(function afterTickTwo() {\n fn.call(null, arg1, arg2);\n });\n case 4:\n return process.nextTick(function afterTickThree() {\n fn.call(null, arg1, arg2, arg3);\n });\n default:\n args = new Array(len - 1);\n i = 0;\n while (i < args.length) {\n args[i++] = arguments[i];\n }\n return process.nextTick(function afterTick() {\n fn.apply(null, args);\n });\n }\n}\n\n","/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */\n'use strict';\n\n\nvar Punycode = require('punycode');\n\n\nvar internals = {};\n\n\n//\n// Read rules from file.\n//\ninternals.rules = require('./data/rules.json').map(function (rule) {\n\n return {\n rule: rule,\n suffix: rule.replace(/^(\\*\\.|\\!)/, ''),\n punySuffix: -1,\n wildcard: rule.charAt(0) === '*',\n exception: rule.charAt(0) === '!'\n };\n});\n\n\n//\n// Check is given string ends with `suffix`.\n//\ninternals.endsWith = function (str, suffix) {\n\n return str.indexOf(suffix, str.length - suffix.length) !== -1;\n};\n\n\n//\n// Find rule for a given domain.\n//\ninternals.findRule = function (domain) {\n\n var punyDomain = Punycode.toASCII(domain);\n return internals.rules.reduce(function (memo, rule) {\n\n if (rule.punySuffix === -1){\n rule.punySuffix = Punycode.toASCII(rule.suffix);\n }\n if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) {\n return memo;\n }\n // This has been commented out as it never seems to run. This is because\n // sub tlds always appear after their parents and we never find a shorter\n // match.\n //if (memo) {\n // var memoSuffix = Punycode.toASCII(memo.suffix);\n // if (memoSuffix.length >= punySuffix.length) {\n // return memo;\n // }\n //}\n return rule;\n }, null);\n};\n\n\n//\n// Error codes and messages.\n//\nexports.errorCodes = {\n DOMAIN_TOO_SHORT: 'Domain name too short.',\n DOMAIN_TOO_LONG: 'Domain name too long. 
It should be no more than 255 chars.',\n LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.',\n LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.',\n LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.',\n LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.',\n LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.'\n};\n\n\n//\n// Validate domain name and throw if not valid.\n//\n// From wikipedia:\n//\n// Hostnames are composed of series of labels concatenated with dots, as are all\n// domain names. Each label must be between 1 and 63 characters long, and the\n// entire hostname (including the delimiting dots) has a maximum of 255 chars.\n//\n// Allowed chars:\n//\n// * `a-z`\n// * `0-9`\n// * `-` but not as a starting or ending character\n// * `.` as a separator for the textual portions of a domain name\n//\n// * http://en.wikipedia.org/wiki/Domain_name\n// * http://en.wikipedia.org/wiki/Hostname\n//\ninternals.validate = function (input) {\n\n // Before we can validate we need to take care of IDNs with unicode chars.\n var ascii = Punycode.toASCII(input);\n\n if (ascii.length < 1) {\n return 'DOMAIN_TOO_SHORT';\n }\n if (ascii.length > 255) {\n return 'DOMAIN_TOO_LONG';\n }\n\n // Check each part's length and allowed chars.\n var labels = ascii.split('.');\n var label;\n\n for (var i = 0; i < labels.length; ++i) {\n label = labels[i];\n if (!label.length) {\n return 'LABEL_TOO_SHORT';\n }\n if (label.length > 63) {\n return 'LABEL_TOO_LONG';\n }\n if (label.charAt(0) === '-') {\n return 'LABEL_STARTS_WITH_DASH';\n }\n if (label.charAt(label.length - 1) === '-') {\n return 'LABEL_ENDS_WITH_DASH';\n }\n if (!/^[a-z0-9\\-]+$/.test(label)) {\n return 'LABEL_INVALID_CHARS';\n }\n }\n};\n\n\n//\n// Public API\n//\n\n\n//\n// Parse domain.\n//\nexports.parse = function (input) {\n\n if (typeof input !== 'string') {\n throw new TypeError('Domain name must be a string.');\n }\n\n // Force domain to lowercase.\n var domain = input.slice(0).toLowerCase();\n\n // Handle FQDN.\n // TODO: Simply remove trailing dot?\n if (domain.charAt(domain.length - 1) === '.') {\n domain = domain.slice(0, domain.length - 1);\n }\n\n // Validate and sanitise input.\n var error = internals.validate(domain);\n if (error) {\n return {\n input: input,\n error: {\n message: exports.errorCodes[error],\n code: error\n }\n };\n }\n\n var parsed = {\n input: input,\n tld: null,\n sld: null,\n domain: null,\n subdomain: null,\n listed: false\n };\n\n var domainParts = domain.split('.');\n\n // Non-Internet TLD\n if (domainParts[domainParts.length - 1] === 'local') {\n return parsed;\n }\n\n var handlePunycode = function () {\n\n if (!/xn--/.test(domain)) {\n return parsed;\n }\n if (parsed.domain) {\n parsed.domain = Punycode.toASCII(parsed.domain);\n }\n if (parsed.subdomain) {\n parsed.subdomain = Punycode.toASCII(parsed.subdomain);\n }\n return parsed;\n };\n\n var rule = internals.findRule(domain);\n\n // Unlisted tld.\n if (!rule) {\n if (domainParts.length < 2) {\n return parsed;\n }\n parsed.tld = domainParts.pop();\n parsed.sld = domainParts.pop();\n parsed.domain = [parsed.sld, parsed.tld].join('.');\n if (domainParts.length) {\n parsed.subdomain = domainParts.pop();\n }\n return handlePunycode();\n }\n\n // At this point we know the public suffix is listed.\n parsed.listed = true;\n\n var tldParts = rule.suffix.split('.');\n var privateParts = domainParts.slice(0, domainParts.length - tldParts.length);\n\n 
if (rule.exception) {\n privateParts.push(tldParts.shift());\n }\n\n parsed.tld = tldParts.join('.');\n\n if (!privateParts.length) {\n return handlePunycode();\n }\n\n if (rule.wildcard) {\n tldParts.unshift(privateParts.pop());\n parsed.tld = tldParts.join('.');\n }\n\n if (!privateParts.length) {\n return handlePunycode();\n }\n\n parsed.sld = privateParts.pop();\n parsed.domain = [parsed.sld, parsed.tld].join('.');\n\n if (privateParts.length) {\n parsed.subdomain = privateParts.join('.');\n }\n\n return handlePunycode();\n};\n\n\n//\n// Get domain.\n//\nexports.get = function (domain) {\n\n if (!domain) {\n return null;\n }\n return exports.parse(domain).domain || null;\n};\n\n\n//\n// Check whether domain belongs to a known public suffix.\n//\nexports.isValid = function (domain) {\n\n var parsed = exports.parse(domain);\n return Boolean(parsed.domain && parsed.listed);\n};\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a duplex stream is just a stream that is both readable and writable.\n// Since JS doesn't have multiple prototypal inheritance, this class\n// prototypally inherits from Readable, and then parasitically from\n// Writable.\n\n'use strict';\n\n/**/\n\nvar pna = require('process-nextick-args');\n/**/\n\n/**/\nvar objectKeys = Object.keys || function (obj) {\n var keys = [];\n for (var key in obj) {\n keys.push(key);\n }return keys;\n};\n/**/\n\nmodule.exports = Duplex;\n\n/**/\nvar util = Object.create(require('core-util-is'));\nutil.inherits = require('inherits');\n/**/\n\nvar Readable = require('./_stream_readable');\nvar Writable = require('./_stream_writable');\n\nutil.inherits(Duplex, Readable);\n\n{\n // avoid scope creep, the keys array can then be collected\n var keys = objectKeys(Writable.prototype);\n for (var v = 0; v < keys.length; v++) {\n var method = keys[v];\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];\n }\n}\n\nfunction Duplex(options) {\n if (!(this instanceof Duplex)) return new Duplex(options);\n\n Readable.call(this, options);\n Writable.call(this, options);\n\n if (options && options.readable === false) this.readable = false;\n\n if (options && options.writable === false) this.writable = false;\n\n this.allowHalfOpen = true;\n if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;\n\n this.once('end', onend);\n}\n\nObject.defineProperty(Duplex.prototype, 'writableHighWaterMark', {\n // 
making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function () {\n return this._writableState.highWaterMark;\n }\n});\n\n// the no-half-open enforcer\nfunction onend() {\n // if we allow half-open state, or if the writable side ended,\n // then we're ok.\n if (this.allowHalfOpen || this._writableState.ended) return;\n\n // no more data can be written.\n // But allow more writes to happen in this tick.\n pna.nextTick(onEndNT, this);\n}\n\nfunction onEndNT(self) {\n self.end();\n}\n\nObject.defineProperty(Duplex.prototype, 'destroyed', {\n get: function () {\n if (this._readableState === undefined || this._writableState === undefined) {\n return false;\n }\n return this._readableState.destroyed && this._writableState.destroyed;\n },\n set: function (value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (this._readableState === undefined || this._writableState === undefined) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n this._writableState.destroyed = value;\n }\n});\n\nDuplex.prototype._destroy = function (err, cb) {\n this.push(null);\n this.end();\n\n pna.nextTick(cb, err);\n};","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a passthrough stream.\n// basically just the most minimal sort of Transform stream.\n// Every written chunk gets output as-is.\n\n'use strict';\n\nmodule.exports = PassThrough;\n\nvar Transform = require('./_stream_transform');\n\n/**/\nvar util = Object.create(require('core-util-is'));\nutil.inherits = require('inherits');\n/**/\n\nutil.inherits(PassThrough, Transform);\n\nfunction PassThrough(options) {\n if (!(this instanceof PassThrough)) return new PassThrough(options);\n\n Transform.call(this, options);\n}\n\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\n cb(null, chunk);\n};","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar pna = require('process-nextick-args');\n/**/\n\nmodule.exports = Readable;\n\n/**/\nvar isArray = require('isarray');\n/**/\n\n/**/\nvar Duplex;\n/**/\n\nReadable.ReadableState = ReadableState;\n\n/**/\nvar EE = require('events').EventEmitter;\n\nvar EElistenerCount = function (emitter, type) {\n return emitter.listeners(type).length;\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\nvar OurUint8Array = global.Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\n/**/\n\n/**/\nvar util = Object.create(require('core-util-is'));\nutil.inherits = require('inherits');\n/**/\n\n/**/\nvar debugUtil = require('util');\nvar debug = void 0;\nif (debugUtil && debugUtil.debuglog) {\n debug = debugUtil.debuglog('stream');\n} else {\n debug = function () {};\n}\n/**/\n\nvar BufferList = require('./internal/streams/BufferList');\nvar destroyImpl = require('./internal/streams/destroy');\nvar StringDecoder;\n\nutil.inherits(Readable, Stream);\n\nvar kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];\n\nfunction prependListener(emitter, event, fn) {\n // Sadly this is not cacheable as some libraries bundle their own\n // event emitter implementation with them.\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);\n\n // This is a hack to make sure that our error handler is attached before any\n // userland ones. NEVER DO THIS. This is here only because this code needs\n // to continue to work with older versions of Node.js that do not include\n // the prependListener() method. 
The goal is to eventually remove this hack.\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];\n}\n\nfunction ReadableState(options, stream) {\n Duplex = Duplex || require('./_stream_duplex');\n\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n var isDuplex = stream instanceof Duplex;\n\n // object stream flag. Used to make read(n) ignore n and to\n // make all the buffer merging and length checks go away\n this.objectMode = !!options.objectMode;\n\n if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;\n\n // the point at which it stops calling _read() to fill the buffer\n // Note: 0 is a valid value, means \"don't call _read preemptively ever\"\n var hwm = options.highWaterMark;\n var readableHwm = options.readableHighWaterMark;\n var defaultHwm = this.objectMode ? 16 : 16 * 1024;\n\n if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;\n\n // cast to ints.\n this.highWaterMark = Math.floor(this.highWaterMark);\n\n // A linked list is used to store data chunks instead of an array because the\n // linked list can remove elements from the beginning faster than\n // array.shift()\n this.buffer = new BufferList();\n this.length = 0;\n this.pipes = null;\n this.pipesCount = 0;\n this.flowing = null;\n this.ended = false;\n this.endEmitted = false;\n this.reading = false;\n\n // a flag to be able to tell if the event 'readable'/'data' is emitted\n // immediately, or on a later tick. We set this to true at first, because\n // any actions that shouldn't happen until \"later\" should generally also\n // not happen before the first read call.\n this.sync = true;\n\n // whenever we return null, then we set a flag to say\n // that we're awaiting a 'readable' event emission.\n this.needReadable = false;\n this.emittedReadable = false;\n this.readableListening = false;\n this.resumeScheduled = false;\n\n // has it been destroyed\n this.destroyed = false;\n\n // Crypto is kind of old and crusty. 
Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // the number of writers that are awaiting a drain event in .pipe()s\n this.awaitDrain = 0;\n\n // if true, a maybeReadMore has been scheduled\n this.readingMore = false;\n\n this.decoder = null;\n this.encoding = null;\n if (options.encoding) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n this.decoder = new StringDecoder(options.encoding);\n this.encoding = options.encoding;\n }\n}\n\nfunction Readable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n\n if (!(this instanceof Readable)) return new Readable(options);\n\n this._readableState = new ReadableState(options, this);\n\n // legacy\n this.readable = true;\n\n if (options) {\n if (typeof options.read === 'function') this._read = options.read;\n\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n }\n\n Stream.call(this);\n}\n\nObject.defineProperty(Readable.prototype, 'destroyed', {\n get: function () {\n if (this._readableState === undefined) {\n return false;\n }\n return this._readableState.destroyed;\n },\n set: function (value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._readableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n }\n});\n\nReadable.prototype.destroy = destroyImpl.destroy;\nReadable.prototype._undestroy = destroyImpl.undestroy;\nReadable.prototype._destroy = function (err, cb) {\n this.push(null);\n cb(err);\n};\n\n// Manually shove something into the read() buffer.\n// This returns true if the highWaterMark has not been hit yet,\n// similar to how Writable.write() returns true if you should\n// write() some more.\nReadable.prototype.push = function (chunk, encoding) {\n var state = this._readableState;\n var skipChunkCheck;\n\n if (!state.objectMode) {\n if (typeof chunk === 'string') {\n encoding = encoding || state.defaultEncoding;\n if (encoding !== state.encoding) {\n chunk = Buffer.from(chunk, encoding);\n encoding = '';\n }\n skipChunkCheck = true;\n }\n } else {\n skipChunkCheck = true;\n }\n\n return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);\n};\n\n// Unshift should *always* be something directly out of read()\nReadable.prototype.unshift = function (chunk) {\n return readableAddChunk(this, chunk, null, true, false);\n};\n\nfunction readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {\n var state = stream._readableState;\n if (chunk === null) {\n state.reading = false;\n onEofChunk(stream, state);\n } else {\n var er;\n if (!skipChunkCheck) er = chunkInvalid(state, chunk);\n if (er) {\n stream.emit('error', er);\n } else if (state.objectMode || chunk && chunk.length > 0) {\n if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n\n if (addToFront) {\n if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);\n } else if (state.ended) {\n stream.emit('error', new Error('stream.push() after EOF'));\n } else {\n state.reading = false;\n if (state.decoder && !encoding) {\n chunk = state.decoder.write(chunk);\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, 
chunk, false);else maybeReadMore(stream, state);\n } else {\n addChunk(stream, state, chunk, false);\n }\n }\n } else if (!addToFront) {\n state.reading = false;\n }\n }\n\n return needMoreData(state);\n}\n\nfunction addChunk(stream, state, chunk, addToFront) {\n if (state.flowing && state.length === 0 && !state.sync) {\n stream.emit('data', chunk);\n stream.read(0);\n } else {\n // update the buffer info.\n state.length += state.objectMode ? 1 : chunk.length;\n if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);\n\n if (state.needReadable) emitReadable(stream);\n }\n maybeReadMore(stream, state);\n}\n\nfunction chunkInvalid(state, chunk) {\n var er;\n if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new TypeError('Invalid non-string/buffer chunk');\n }\n return er;\n}\n\n// if it's past the high water mark, we can push in some more.\n// Also, if we have no data yet, we can stand some\n// more bytes. This is to work around cases where hwm=0,\n// such as the repl. Also, if the push() triggered a\n// readable event, and the user called read(largeNumber) such that\n// needReadable was set, then we ought to push more, so that another\n// 'readable' event will be triggered.\nfunction needMoreData(state) {\n return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);\n}\n\nReadable.prototype.isPaused = function () {\n return this._readableState.flowing === false;\n};\n\n// backwards compatibility.\nReadable.prototype.setEncoding = function (enc) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n this._readableState.decoder = new StringDecoder(enc);\n this._readableState.encoding = enc;\n return this;\n};\n\n// Don't raise the hwm > 8MB\nvar MAX_HWM = 0x800000;\nfunction computeNewHighWaterMark(n) {\n if (n >= MAX_HWM) {\n n = MAX_HWM;\n } else {\n // Get the next highest power of 2 to prevent increasing hwm excessively in\n // tiny amounts\n n--;\n n |= n >>> 1;\n n |= n >>> 2;\n n |= n >>> 4;\n n |= n >>> 8;\n n |= n >>> 16;\n n++;\n }\n return n;\n}\n\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction howMuchToRead(n, state) {\n if (n <= 0 || state.length === 0 && state.ended) return 0;\n if (state.objectMode) return 1;\n if (n !== n) {\n // Only flow one buffer at a time\n if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;\n }\n // If we're asking for more than the current hwm, then raise the hwm.\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);\n if (n <= state.length) return n;\n // Don't have enough\n if (!state.ended) {\n state.needReadable = true;\n return 0;\n }\n return state.length;\n}\n\n// you can override either this method, or the async _read(n) below.\nReadable.prototype.read = function (n) {\n debug('read', n);\n n = parseInt(n, 10);\n var state = this._readableState;\n var nOrig = n;\n\n if (n !== 0) state.emittedReadable = false;\n\n // if we're doing read(0) to trigger a readable event, but we\n // already have a bunch of data in the buffer, then just trigger\n // the 'readable' event and move on.\n if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {\n debug('read: emitReadable', state.length, state.ended);\n if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);\n return null;\n }\n\n n = 
howMuchToRead(n, state);\n\n // if we've ended, and we're now clear, then finish it up.\n if (n === 0 && state.ended) {\n if (state.length === 0) endReadable(this);\n return null;\n }\n\n // All the actual chunk generation logic needs to be\n // *below* the call to _read. The reason is that in certain\n // synthetic stream cases, such as passthrough streams, _read\n // may be a completely synchronous operation which may change\n // the state of the read buffer, providing enough data when\n // before there was *not* enough.\n //\n // So, the steps are:\n // 1. Figure out what the state of things will be after we do\n // a read from the buffer.\n //\n // 2. If that resulting state will trigger a _read, then call _read.\n // Note that this may be asynchronous, or synchronous. Yes, it is\n // deeply ugly to write APIs this way, but that still doesn't mean\n // that the Readable class should behave improperly, as streams are\n // designed to be sync/async agnostic.\n // Take note if the _read call is sync or async (ie, if the read call\n // has returned yet), so that we know whether or not it's safe to emit\n // 'readable' etc.\n //\n // 3. Actually pull the requested chunks out of the buffer and return.\n\n // if we need a readable event, then we need to do some reading.\n var doRead = state.needReadable;\n debug('need readable', doRead);\n\n // if we currently have less than the highWaterMark, then also read some\n if (state.length === 0 || state.length - n < state.highWaterMark) {\n doRead = true;\n debug('length less than watermark', doRead);\n }\n\n // however, if we've ended, then there's no point, and if we're already\n // reading, then it's unnecessary.\n if (state.ended || state.reading) {\n doRead = false;\n debug('reading or ended', doRead);\n } else if (doRead) {\n debug('do read');\n state.reading = true;\n state.sync = true;\n // if the length is currently zero, then we *need* a readable event.\n if (state.length === 0) state.needReadable = true;\n // call internal read method\n this._read(state.highWaterMark);\n state.sync = false;\n // If _read pushed data synchronously, then `reading` will be false,\n // and we need to re-evaluate how much data we can return to the user.\n if (!state.reading) n = howMuchToRead(nOrig, state);\n }\n\n var ret;\n if (n > 0) ret = fromList(n, state);else ret = null;\n\n if (ret === null) {\n state.needReadable = true;\n n = 0;\n } else {\n state.length -= n;\n }\n\n if (state.length === 0) {\n // If we have nothing in the buffer, then we want to know\n // as soon as we *do* get something into the buffer.\n if (!state.ended) state.needReadable = true;\n\n // If we tried to read() past the EOF, then emit end on the next tick.\n if (nOrig !== n && state.ended) endReadable(this);\n }\n\n if (ret !== null) this.emit('data', ret);\n\n return ret;\n};\n\nfunction onEofChunk(stream, state) {\n if (state.ended) return;\n if (state.decoder) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) {\n state.buffer.push(chunk);\n state.length += state.objectMode ? 1 : chunk.length;\n }\n }\n state.ended = true;\n\n // emit 'readable' now to make sure it gets picked up.\n emitReadable(stream);\n}\n\n// Don't emit readable right away in sync mode, because this can trigger\n// another read() call => stack overflow. 
This way, it might trigger\n// a nextTick recursion warning, but that's not so bad.\nfunction emitReadable(stream) {\n var state = stream._readableState;\n state.needReadable = false;\n if (!state.emittedReadable) {\n debug('emitReadable', state.flowing);\n state.emittedReadable = true;\n if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);\n }\n}\n\nfunction emitReadable_(stream) {\n debug('emit readable');\n stream.emit('readable');\n flow(stream);\n}\n\n// at this point, the user has presumably seen the 'readable' event,\n// and called read() to consume some data. that may have triggered\n// in turn another _read(n) call, in which case reading = true if\n// it's in progress.\n// However, if we're not ended, or reading, and the length < hwm,\n// then go ahead and try to read some more preemptively.\nfunction maybeReadMore(stream, state) {\n if (!state.readingMore) {\n state.readingMore = true;\n pna.nextTick(maybeReadMore_, stream, state);\n }\n}\n\nfunction maybeReadMore_(stream, state) {\n var len = state.length;\n while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {\n debug('maybeReadMore read 0');\n stream.read(0);\n if (len === state.length)\n // didn't get any data, stop spinning.\n break;else len = state.length;\n }\n state.readingMore = false;\n}\n\n// abstract method. to be overridden in specific implementation classes.\n// call cb(er, data) where data is <= n in length.\n// for virtual (non-string, non-buffer) streams, \"length\" is somewhat\n// arbitrary, and perhaps not very meaningful.\nReadable.prototype._read = function (n) {\n this.emit('error', new Error('_read() is not implemented'));\n};\n\nReadable.prototype.pipe = function (dest, pipeOpts) {\n var src = this;\n var state = this._readableState;\n\n switch (state.pipesCount) {\n case 0:\n state.pipes = dest;\n break;\n case 1:\n state.pipes = [state.pipes, dest];\n break;\n default:\n state.pipes.push(dest);\n break;\n }\n state.pipesCount += 1;\n debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);\n\n var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;\n\n var endFn = doEnd ? onend : unpipe;\n if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);\n\n dest.on('unpipe', onunpipe);\n function onunpipe(readable, unpipeInfo) {\n debug('onunpipe');\n if (readable === src) {\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\n unpipeInfo.hasUnpiped = true;\n cleanup();\n }\n }\n }\n\n function onend() {\n debug('onend');\n dest.end();\n }\n\n // when the dest drains, it reduces the awaitDrain counter\n // on the source. 
This would be more elegant with a .once()\n // handler in flow(), but adding and removing repeatedly is\n // too slow.\n var ondrain = pipeOnDrain(src);\n dest.on('drain', ondrain);\n\n var cleanedUp = false;\n function cleanup() {\n debug('cleanup');\n // cleanup event handlers once the pipe is broken\n dest.removeListener('close', onclose);\n dest.removeListener('finish', onfinish);\n dest.removeListener('drain', ondrain);\n dest.removeListener('error', onerror);\n dest.removeListener('unpipe', onunpipe);\n src.removeListener('end', onend);\n src.removeListener('end', unpipe);\n src.removeListener('data', ondata);\n\n cleanedUp = true;\n\n // if the reader is waiting for a drain event from this\n // specific writer, then it would cause it to never start\n // flowing again.\n // So, if this is awaiting a drain, then we just call it now.\n // If we don't know, then assume that we are waiting for one.\n if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();\n }\n\n // If the user pushes more data while we're writing to dest then we'll end up\n // in ondata again. However, we only want to increase awaitDrain once because\n // dest will only emit one 'drain' event for the multiple writes.\n // => Introduce a guard on increasing awaitDrain.\n var increasedAwaitDrain = false;\n src.on('data', ondata);\n function ondata(chunk) {\n debug('ondata');\n increasedAwaitDrain = false;\n var ret = dest.write(chunk);\n if (false === ret && !increasedAwaitDrain) {\n // If the user unpiped during `dest.write()`, it is possible\n // to get stuck in a permanently paused state if that write\n // also returned false.\n // => Check whether `dest` is still a piping destination.\n if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {\n debug('false write response, pause', src._readableState.awaitDrain);\n src._readableState.awaitDrain++;\n increasedAwaitDrain = true;\n }\n src.pause();\n }\n }\n\n // if the dest has an error, then stop piping into it.\n // however, don't suppress the throwing behavior for this.\n function onerror(er) {\n debug('onerror', er);\n unpipe();\n dest.removeListener('error', onerror);\n if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);\n }\n\n // Make sure our error handler is attached before userland ones.\n prependListener(dest, 'error', onerror);\n\n // Both close and finish should trigger unpipe, but only once.\n function onclose() {\n dest.removeListener('finish', onfinish);\n unpipe();\n }\n dest.once('close', onclose);\n function onfinish() {\n debug('onfinish');\n dest.removeListener('close', onclose);\n unpipe();\n }\n dest.once('finish', onfinish);\n\n function unpipe() {\n debug('unpipe');\n src.unpipe(dest);\n }\n\n // tell the dest that it's being piped to\n dest.emit('pipe', src);\n\n // start the flow if it hasn't been started already.\n if (!state.flowing) {\n debug('pipe resume');\n src.resume();\n }\n\n return dest;\n};\n\nfunction pipeOnDrain(src) {\n return function () {\n var state = src._readableState;\n debug('pipeOnDrain', state.awaitDrain);\n if (state.awaitDrain) state.awaitDrain--;\n if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {\n state.flowing = true;\n flow(src);\n }\n };\n}\n\nReadable.prototype.unpipe = function (dest) {\n var state = this._readableState;\n var unpipeInfo = { hasUnpiped: false };\n\n // if we're not piping anywhere, then do nothing.\n if (state.pipesCount === 0) return this;\n\n // just one 
destination. most common case.\n if (state.pipesCount === 1) {\n // passed in one, but it's not the right one.\n if (dest && dest !== state.pipes) return this;\n\n if (!dest) dest = state.pipes;\n\n // got a match.\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n if (dest) dest.emit('unpipe', this, unpipeInfo);\n return this;\n }\n\n // slow case. multiple pipe destinations.\n\n if (!dest) {\n // remove all.\n var dests = state.pipes;\n var len = state.pipesCount;\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n\n for (var i = 0; i < len; i++) {\n dests[i].emit('unpipe', this, unpipeInfo);\n }return this;\n }\n\n // try to find the right one.\n var index = indexOf(state.pipes, dest);\n if (index === -1) return this;\n\n state.pipes.splice(index, 1);\n state.pipesCount -= 1;\n if (state.pipesCount === 1) state.pipes = state.pipes[0];\n\n dest.emit('unpipe', this, unpipeInfo);\n\n return this;\n};\n\n// set up data events if they are asked for\n// Ensure readable listeners eventually get something\nReadable.prototype.on = function (ev, fn) {\n var res = Stream.prototype.on.call(this, ev, fn);\n\n if (ev === 'data') {\n // Start flowing on next tick if stream isn't explicitly paused\n if (this._readableState.flowing !== false) this.resume();\n } else if (ev === 'readable') {\n var state = this._readableState;\n if (!state.endEmitted && !state.readableListening) {\n state.readableListening = state.needReadable = true;\n state.emittedReadable = false;\n if (!state.reading) {\n pna.nextTick(nReadingNextTick, this);\n } else if (state.length) {\n emitReadable(this);\n }\n }\n }\n\n return res;\n};\nReadable.prototype.addListener = Readable.prototype.on;\n\nfunction nReadingNextTick(self) {\n debug('readable nexttick read 0');\n self.read(0);\n}\n\n// pause() and resume() are remnants of the legacy readable stream API\n// If the user uses them, then switch into old mode.\nReadable.prototype.resume = function () {\n var state = this._readableState;\n if (!state.flowing) {\n debug('resume');\n state.flowing = true;\n resume(this, state);\n }\n return this;\n};\n\nfunction resume(stream, state) {\n if (!state.resumeScheduled) {\n state.resumeScheduled = true;\n pna.nextTick(resume_, stream, state);\n }\n}\n\nfunction resume_(stream, state) {\n if (!state.reading) {\n debug('resume read 0');\n stream.read(0);\n }\n\n state.resumeScheduled = false;\n state.awaitDrain = 0;\n stream.emit('resume');\n flow(stream);\n if (state.flowing && !state.reading) stream.read(0);\n}\n\nReadable.prototype.pause = function () {\n debug('call pause flowing=%j', this._readableState.flowing);\n if (false !== this._readableState.flowing) {\n debug('pause');\n this._readableState.flowing = false;\n this.emit('pause');\n }\n return this;\n};\n\nfunction flow(stream) {\n var state = stream._readableState;\n debug('flow', state.flowing);\n while (state.flowing && stream.read() !== null) {}\n}\n\n// wrap an old-style stream as the async data source.\n// This is *not* part of the readable stream interface.\n// It is an ugly unfortunate mess of history.\nReadable.prototype.wrap = function (stream) {\n var _this = this;\n\n var state = this._readableState;\n var paused = false;\n\n stream.on('end', function () {\n debug('wrapped end');\n if (state.decoder && !state.ended) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) _this.push(chunk);\n }\n\n _this.push(null);\n });\n\n stream.on('data', function (chunk) {\n debug('wrapped data');\n if (state.decoder) chunk = 
state.decoder.write(chunk);\n\n // don't skip over falsy values in objectMode\n if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;\n\n var ret = _this.push(chunk);\n if (!ret) {\n paused = true;\n stream.pause();\n }\n });\n\n // proxy all the other methods.\n // important when wrapping filters and duplexes.\n for (var i in stream) {\n if (this[i] === undefined && typeof stream[i] === 'function') {\n this[i] = function (method) {\n return function () {\n return stream[method].apply(stream, arguments);\n };\n }(i);\n }\n }\n\n // proxy certain important events.\n for (var n = 0; n < kProxyEvents.length; n++) {\n stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));\n }\n\n // when we try to consume some more bytes, simply unpause the\n // underlying stream.\n this._read = function (n) {\n debug('wrapped _read', n);\n if (paused) {\n paused = false;\n stream.resume();\n }\n };\n\n return this;\n};\n\nObject.defineProperty(Readable.prototype, 'readableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function () {\n return this._readableState.highWaterMark;\n }\n});\n\n// exposed for testing purposes only.\nReadable._fromList = fromList;\n\n// Pluck off n bytes from an array of buffers.\n// Length is the combined lengths of all the buffers in the list.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromList(n, state) {\n // nothing buffered\n if (state.length === 0) return null;\n\n var ret;\n if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {\n // read it all, truncate the list\n if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);\n state.buffer.clear();\n } else {\n // read part of list\n ret = fromListPartial(n, state.buffer, state.decoder);\n }\n\n return ret;\n}\n\n// Extracts only enough buffered data to satisfy the amount requested.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromListPartial(n, list, hasStrings) {\n var ret;\n if (n < list.head.data.length) {\n // slice is the same for buffers and strings\n ret = list.head.data.slice(0, n);\n list.head.data = list.head.data.slice(n);\n } else if (n === list.head.data.length) {\n // first chunk is a perfect match\n ret = list.shift();\n } else {\n // result spans more than one buffer\n ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);\n }\n return ret;\n}\n\n// Copies a specified amount of characters from the list of buffered data\n// chunks.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction copyFromBufferString(n, list) {\n var p = list.head;\n var c = 1;\n var ret = p.data;\n n -= ret.length;\n while (p = p.next) {\n var str = p.data;\n var nb = n > str.length ? 
str.length : n;\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\n n -= nb;\n if (n === 0) {\n if (nb === str.length) {\n ++c;\n if (p.next) list.head = p.next;else list.head = list.tail = null;\n } else {\n list.head = p;\n p.data = str.slice(nb);\n }\n break;\n }\n ++c;\n }\n list.length -= c;\n return ret;\n}\n\n// Copies a specified amount of bytes from the list of buffered data chunks.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction copyFromBuffer(n, list) {\n var ret = Buffer.allocUnsafe(n);\n var p = list.head;\n var c = 1;\n p.data.copy(ret);\n n -= p.data.length;\n while (p = p.next) {\n var buf = p.data;\n var nb = n > buf.length ? buf.length : n;\n buf.copy(ret, ret.length - n, 0, nb);\n n -= nb;\n if (n === 0) {\n if (nb === buf.length) {\n ++c;\n if (p.next) list.head = p.next;else list.head = list.tail = null;\n } else {\n list.head = p;\n p.data = buf.slice(nb);\n }\n break;\n }\n ++c;\n }\n list.length -= c;\n return ret;\n}\n\nfunction endReadable(stream) {\n var state = stream._readableState;\n\n // If we get here before consuming all the bytes, then that is a\n // bug in node. Should never happen.\n if (state.length > 0) throw new Error('\"endReadable()\" called on non-empty stream');\n\n if (!state.endEmitted) {\n state.ended = true;\n pna.nextTick(endReadableNT, state, stream);\n }\n}\n\nfunction endReadableNT(state, stream) {\n // Check that we didn't get one last unshift.\n if (!state.endEmitted && state.length === 0) {\n state.endEmitted = true;\n stream.readable = false;\n stream.emit('end');\n }\n}\n\nfunction indexOf(xs, x) {\n for (var i = 0, l = xs.length; i < l; i++) {\n if (xs[i] === x) return i;\n }\n return -1;\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a transform stream is a readable/writable stream where you do\n// something with the data. Sometimes it's called a \"filter\",\n// but that's not a great name for it, since that implies a thing where\n// some bits pass through, and others are simply ignored. (That would\n// be a valid example of a transform, of course.)\n//\n// While the output is causally related to the input, it's not a\n// necessarily symmetric or synchronous transformation. 
For example,\n// a zlib stream might take multiple plain-text writes(), and then\n// emit a single compressed chunk some time in the future.\n//\n// Here's how this works:\n//\n// The Transform stream has all the aspects of the readable and writable\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\n// internally, and returns false if there's a lot of pending writes\n// buffered up. When you call read(), that calls _read(n) until\n// there's enough pending readable data buffered up.\n//\n// In a transform stream, the written data is placed in a buffer. When\n// _read(n) is called, it transforms the queued up data, calling the\n// buffered _write cb's as it consumes chunks. If consuming a single\n// written chunk would result in multiple output chunks, then the first\n// outputted bit calls the readcb, and subsequent chunks just go into\n// the read buffer, and will cause it to emit 'readable' if necessary.\n//\n// This way, back-pressure is actually determined by the reading side,\n// since _read has to be called to start processing a new chunk. However,\n// a pathological inflate type of transform can cause excessive buffering\n// here. For example, imagine a stream where every byte of input is\n// interpreted as an integer from 0-255, and then results in that many\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\n// 1kb of data being output. In this case, you could write a very small\n// amount of input, and end up with a very large amount of output. In\n// such a pathological inflating mechanism, there'd be no way to tell\n// the system to stop doing the transform. A single 4MB write could\n// cause the system to run out of memory.\n//\n// However, even in such a pathological case, only a single written chunk\n// would be consumed, and then the rest would wait (un-transformed) until\n// the results of the previous transformed chunk were consumed.\n\n'use strict';\n\nmodule.exports = Transform;\n\nvar Duplex = require('./_stream_duplex');\n\n/**/\nvar util = Object.create(require('core-util-is'));\nutil.inherits = require('inherits');\n/**/\n\nutil.inherits(Transform, Duplex);\n\nfunction afterTransform(er, data) {\n var ts = this._transformState;\n ts.transforming = false;\n\n var cb = ts.writecb;\n\n if (!cb) {\n return this.emit('error', new Error('write callback called multiple times'));\n }\n\n ts.writechunk = null;\n ts.writecb = null;\n\n if (data != null) // single equals check for both `null` and `undefined`\n this.push(data);\n\n cb(er);\n\n var rs = this._readableState;\n rs.reading = false;\n if (rs.needReadable || rs.length < rs.highWaterMark) {\n this._read(rs.highWaterMark);\n }\n}\n\nfunction Transform(options) {\n if (!(this instanceof Transform)) return new Transform(options);\n\n Duplex.call(this, options);\n\n this._transformState = {\n afterTransform: afterTransform.bind(this),\n needTransform: false,\n transforming: false,\n writecb: null,\n writechunk: null,\n writeencoding: null\n };\n\n // start out asking for a readable event once data is transformed.\n this._readableState.needReadable = true;\n\n // we have implemented the _read method, and done the other things\n // that Readable wants before the first _read call, so unset the\n // sync guard flag.\n this._readableState.sync = false;\n\n if (options) {\n if (typeof options.transform === 'function') this._transform = options.transform;\n\n if (typeof options.flush === 'function') this._flush = options.flush;\n }\n\n // When the writable side finishes, then flush out 
anything remaining.\n this.on('prefinish', prefinish);\n}\n\nfunction prefinish() {\n var _this = this;\n\n if (typeof this._flush === 'function') {\n this._flush(function (er, data) {\n done(_this, er, data);\n });\n } else {\n done(this, null, null);\n }\n}\n\nTransform.prototype.push = function (chunk, encoding) {\n this._transformState.needTransform = false;\n return Duplex.prototype.push.call(this, chunk, encoding);\n};\n\n// This is the part where you do stuff!\n// override this function in implementation classes.\n// 'chunk' is an input chunk.\n//\n// Call `push(newChunk)` to pass along transformed output\n// to the readable side. You may call 'push' zero or more times.\n//\n// Call `cb(err)` when you are done with this chunk. If you pass\n// an error, then that'll put the hurt on the whole operation. If you\n// never call cb(), then you'll never get another chunk.\nTransform.prototype._transform = function (chunk, encoding, cb) {\n throw new Error('_transform() is not implemented');\n};\n\nTransform.prototype._write = function (chunk, encoding, cb) {\n var ts = this._transformState;\n ts.writecb = cb;\n ts.writechunk = chunk;\n ts.writeencoding = encoding;\n if (!ts.transforming) {\n var rs = this._readableState;\n if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);\n }\n};\n\n// Doesn't matter what the args are here.\n// _transform does all the work.\n// That we got here means that the readable side wants more data.\nTransform.prototype._read = function (n) {\n var ts = this._transformState;\n\n if (ts.writechunk !== null && ts.writecb && !ts.transforming) {\n ts.transforming = true;\n this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);\n } else {\n // mark that we need a transform, so that any data that comes in\n // will get processed, now that we've asked for it.\n ts.needTransform = true;\n }\n};\n\nTransform.prototype._destroy = function (err, cb) {\n var _this2 = this;\n\n Duplex.prototype._destroy.call(this, err, function (err2) {\n cb(err2);\n _this2.emit('close');\n });\n};\n\nfunction done(stream, er, data) {\n if (er) return stream.emit('error', er);\n\n if (data != null) // single equals check for both `null` and `undefined`\n stream.push(data);\n\n // if there's nothing in the write buffer, then that means\n // that nothing more will ever be provided\n if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');\n\n if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');\n\n return stream.push(null);\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// A bit simpler than readable streams.\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\n// the drain event emission and buffering.\n\n'use strict';\n\n/**/\n\nvar pna = require('process-nextick-args');\n/**/\n\nmodule.exports = Writable;\n\n/* */\nfunction WriteReq(chunk, encoding, cb) {\n this.chunk = chunk;\n this.encoding = encoding;\n this.callback = cb;\n this.next = null;\n}\n\n// It seems a linked list but it is not\n// there will be only 2 of these for each stream\nfunction CorkedRequest(state) {\n var _this = this;\n\n this.next = null;\n this.entry = null;\n this.finish = function () {\n onCorkedFinish(_this, state);\n };\n}\n/* */\n\n/**/\nvar asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;\n/**/\n\n/**/\nvar Duplex;\n/**/\n\nWritable.WritableState = WritableState;\n\n/**/\nvar util = Object.create(require('core-util-is'));\nutil.inherits = require('inherits');\n/**/\n\n/**/\nvar internalUtil = {\n deprecate: require('util-deprecate')\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\nvar OurUint8Array = global.Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\n/**/\n\nvar destroyImpl = require('./internal/streams/destroy');\n\nutil.inherits(Writable, Stream);\n\nfunction nop() {}\n\nfunction WritableState(options, stream) {\n Duplex = Duplex || require('./_stream_duplex');\n\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n var isDuplex = stream instanceof Duplex;\n\n // object stream flag to indicate whether or not this stream\n // contains buffers or objects.\n this.objectMode = !!options.objectMode;\n\n if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;\n\n // the point at which write() starts returning false\n // Note: 0 is a valid value, means that we always return false if\n // the entire buffer is not flushed immediately on write()\n var hwm = options.highWaterMark;\n var writableHwm = options.writableHighWaterMark;\n var defaultHwm = this.objectMode ? 
16 : 16 * 1024;\n\n if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;\n\n // cast to ints.\n this.highWaterMark = Math.floor(this.highWaterMark);\n\n // if _final has been called\n this.finalCalled = false;\n\n // drain event flag.\n this.needDrain = false;\n // at the start of calling end()\n this.ending = false;\n // when end() has been called, and returned\n this.ended = false;\n // when 'finish' is emitted\n this.finished = false;\n\n // has it been destroyed\n this.destroyed = false;\n\n // should we decode strings into buffers before passing to _write?\n // this is here so that some node-core streams can optimize string\n // handling at a lower level.\n var noDecode = options.decodeStrings === false;\n this.decodeStrings = !noDecode;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // not an actual buffer we keep track of, but a measurement\n // of how much we're waiting to get pushed to some underlying\n // socket or file.\n this.length = 0;\n\n // a flag to see when we're in the middle of a write.\n this.writing = false;\n\n // when true all writes will be buffered until .uncork() call\n this.corked = 0;\n\n // a flag to be able to tell if the onwrite cb is called immediately,\n // or on a later tick. We set this to true at first, because any\n // actions that shouldn't happen until \"later\" should generally also\n // not happen before the first write call.\n this.sync = true;\n\n // a flag to know if we're processing previously buffered items, which\n // may call the _write() callback in the same tick, so that we don't\n // end up in an overlapped onwrite situation.\n this.bufferProcessing = false;\n\n // the callback that's passed to _write(chunk,cb)\n this.onwrite = function (er) {\n onwrite(stream, er);\n };\n\n // the callback that the user supplies to write(chunk,encoding,cb)\n this.writecb = null;\n\n // the amount that is being written when _write is called.\n this.writelen = 0;\n\n this.bufferedRequest = null;\n this.lastBufferedRequest = null;\n\n // number of pending user-supplied write callbacks\n // this must be 0 before 'finish' can be emitted\n this.pendingcb = 0;\n\n // emit prefinish if the only thing we're waiting for is _write cbs\n // This is relevant for synchronous Transform streams\n this.prefinished = false;\n\n // True if the error was already emitted and should not be thrown again\n this.errorEmitted = false;\n\n // count buffered requests\n this.bufferedRequestCount = 0;\n\n // allocate the first CorkedRequest, there is always\n // one allocated and free to use, and we maintain at most two\n this.corkedRequestsFree = new CorkedRequest(this);\n}\n\nWritableState.prototype.getBuffer = function getBuffer() {\n var current = this.bufferedRequest;\n var out = [];\n while (current) {\n out.push(current);\n current = current.next;\n }\n return out;\n};\n\n(function () {\n try {\n Object.defineProperty(WritableState.prototype, 'buffer', {\n get: internalUtil.deprecate(function () {\n return this.getBuffer();\n }, '_writableState.buffer is deprecated. 
Use _writableState.getBuffer ' + 'instead.', 'DEP0003')\n });\n } catch (_) {}\n})();\n\n// Test _writableState for inheritance to account for Duplex streams,\n// whose prototype chain only points to Readable.\nvar realHasInstance;\nif (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {\n realHasInstance = Function.prototype[Symbol.hasInstance];\n Object.defineProperty(Writable, Symbol.hasInstance, {\n value: function (object) {\n if (realHasInstance.call(this, object)) return true;\n if (this !== Writable) return false;\n\n return object && object._writableState instanceof WritableState;\n }\n });\n} else {\n realHasInstance = function (object) {\n return object instanceof this;\n };\n}\n\nfunction Writable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n\n // Writable ctor is applied to Duplexes, too.\n // `realHasInstance` is necessary because using plain `instanceof`\n // would return false, as no `_writableState` property is attached.\n\n // Trying to use the custom `instanceof` for Writable here will also break the\n // Node.js LazyTransform implementation, which has a non-trivial getter for\n // `_writableState` that would lead to infinite recursion.\n if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {\n return new Writable(options);\n }\n\n this._writableState = new WritableState(options, this);\n\n // legacy.\n this.writable = true;\n\n if (options) {\n if (typeof options.write === 'function') this._write = options.write;\n\n if (typeof options.writev === 'function') this._writev = options.writev;\n\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n\n if (typeof options.final === 'function') this._final = options.final;\n }\n\n Stream.call(this);\n}\n\n// Otherwise people can pipe Writable streams, which is just wrong.\nWritable.prototype.pipe = function () {\n this.emit('error', new Error('Cannot pipe, not readable'));\n};\n\nfunction writeAfterEnd(stream, cb) {\n var er = new Error('write after end');\n // TODO: defer error events consistently everywhere, not just the cb\n stream.emit('error', er);\n pna.nextTick(cb, er);\n}\n\n// Checks that a user-supplied chunk is valid, especially for the particular\n// mode the stream is in. 
Currently this means that `null` is never accepted\n// and undefined/non-string values are only allowed in object mode.\nfunction validChunk(stream, state, chunk, cb) {\n var valid = true;\n var er = false;\n\n if (chunk === null) {\n er = new TypeError('May not write null values to stream');\n } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new TypeError('Invalid non-string/buffer chunk');\n }\n if (er) {\n stream.emit('error', er);\n pna.nextTick(cb, er);\n valid = false;\n }\n return valid;\n}\n\nWritable.prototype.write = function (chunk, encoding, cb) {\n var state = this._writableState;\n var ret = false;\n var isBuf = !state.objectMode && _isUint8Array(chunk);\n\n if (isBuf && !Buffer.isBuffer(chunk)) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n\n if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n\n if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;\n\n if (typeof cb !== 'function') cb = nop;\n\n if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {\n state.pendingcb++;\n ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);\n }\n\n return ret;\n};\n\nWritable.prototype.cork = function () {\n var state = this._writableState;\n\n state.corked++;\n};\n\nWritable.prototype.uncork = function () {\n var state = this._writableState;\n\n if (state.corked) {\n state.corked--;\n\n if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);\n }\n};\n\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {\n // node::ParseEncoding() requires lower case.\n if (typeof encoding === 'string') encoding = encoding.toLowerCase();\n if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);\n this._writableState.defaultEncoding = encoding;\n return this;\n};\n\nfunction decodeChunk(state, chunk, encoding) {\n if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {\n chunk = Buffer.from(chunk, encoding);\n }\n return chunk;\n}\n\nObject.defineProperty(Writable.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function () {\n return this._writableState.highWaterMark;\n }\n});\n\n// if we're already writing something, then just put this\n// in the queue, and wait our turn. Otherwise, call _write\n// If we return false, then we need a drain event, so set that flag.\nfunction writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {\n if (!isBuf) {\n var newChunk = decodeChunk(state, chunk, encoding);\n if (chunk !== newChunk) {\n isBuf = true;\n encoding = 'buffer';\n chunk = newChunk;\n }\n }\n var len = state.objectMode ? 
1 : chunk.length;\n\n state.length += len;\n\n var ret = state.length < state.highWaterMark;\n // we must ensure that previous needDrain will not be reset to false.\n if (!ret) state.needDrain = true;\n\n if (state.writing || state.corked) {\n var last = state.lastBufferedRequest;\n state.lastBufferedRequest = {\n chunk: chunk,\n encoding: encoding,\n isBuf: isBuf,\n callback: cb,\n next: null\n };\n if (last) {\n last.next = state.lastBufferedRequest;\n } else {\n state.bufferedRequest = state.lastBufferedRequest;\n }\n state.bufferedRequestCount += 1;\n } else {\n doWrite(stream, state, false, len, chunk, encoding, cb);\n }\n\n return ret;\n}\n\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\n state.writelen = len;\n state.writecb = cb;\n state.writing = true;\n state.sync = true;\n if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);\n state.sync = false;\n}\n\nfunction onwriteError(stream, state, sync, er, cb) {\n --state.pendingcb;\n\n if (sync) {\n // defer the callback if we are being called synchronously\n // to avoid piling up things on the stack\n pna.nextTick(cb, er);\n // this can emit finish, and it will always happen\n // after error\n pna.nextTick(finishMaybe, stream, state);\n stream._writableState.errorEmitted = true;\n stream.emit('error', er);\n } else {\n // the caller expect this to happen before if\n // it is async\n cb(er);\n stream._writableState.errorEmitted = true;\n stream.emit('error', er);\n // this can emit finish, but finish must\n // always follow error\n finishMaybe(stream, state);\n }\n}\n\nfunction onwriteStateUpdate(state) {\n state.writing = false;\n state.writecb = null;\n state.length -= state.writelen;\n state.writelen = 0;\n}\n\nfunction onwrite(stream, er) {\n var state = stream._writableState;\n var sync = state.sync;\n var cb = state.writecb;\n\n onwriteStateUpdate(state);\n\n if (er) onwriteError(stream, state, sync, er, cb);else {\n // Check if we're actually ready to finish, but don't emit yet\n var finished = needFinish(state);\n\n if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {\n clearBuffer(stream, state);\n }\n\n if (sync) {\n /**/\n asyncWrite(afterWrite, stream, state, finished, cb);\n /**/\n } else {\n afterWrite(stream, state, finished, cb);\n }\n }\n}\n\nfunction afterWrite(stream, state, finished, cb) {\n if (!finished) onwriteDrain(stream, state);\n state.pendingcb--;\n cb();\n finishMaybe(stream, state);\n}\n\n// Must force callback to be called on nextTick, so that we don't\n// emit 'drain' before the write() consumer gets the 'false' return\n// value, and has a chance to attach a 'drain' listener.\nfunction onwriteDrain(stream, state) {\n if (state.length === 0 && state.needDrain) {\n state.needDrain = false;\n stream.emit('drain');\n }\n}\n\n// if there's something in the buffer waiting, then process it\nfunction clearBuffer(stream, state) {\n state.bufferProcessing = true;\n var entry = state.bufferedRequest;\n\n if (stream._writev && entry && entry.next) {\n // Fast case, write everything using _writev()\n var l = state.bufferedRequestCount;\n var buffer = new Array(l);\n var holder = state.corkedRequestsFree;\n holder.entry = entry;\n\n var count = 0;\n var allBuffers = true;\n while (entry) {\n buffer[count] = entry;\n if (!entry.isBuf) allBuffers = false;\n entry = entry.next;\n count += 1;\n }\n buffer.allBuffers = allBuffers;\n\n doWrite(stream, state, true, state.length, buffer, '', holder.finish);\n\n // doWrite is 
almost always async, defer these to save a bit of time\n // as the hot path ends with doWrite\n state.pendingcb++;\n state.lastBufferedRequest = null;\n if (holder.next) {\n state.corkedRequestsFree = holder.next;\n holder.next = null;\n } else {\n state.corkedRequestsFree = new CorkedRequest(state);\n }\n state.bufferedRequestCount = 0;\n } else {\n // Slow case, write chunks one-by-one\n while (entry) {\n var chunk = entry.chunk;\n var encoding = entry.encoding;\n var cb = entry.callback;\n var len = state.objectMode ? 1 : chunk.length;\n\n doWrite(stream, state, false, len, chunk, encoding, cb);\n entry = entry.next;\n state.bufferedRequestCount--;\n // if we didn't call the onwrite immediately, then\n // it means that we need to wait until it does.\n // also, that means that the chunk and cb are currently\n // being processed, so move the buffer counter past them.\n if (state.writing) {\n break;\n }\n }\n\n if (entry === null) state.lastBufferedRequest = null;\n }\n\n state.bufferedRequest = entry;\n state.bufferProcessing = false;\n}\n\nWritable.prototype._write = function (chunk, encoding, cb) {\n cb(new Error('_write() is not implemented'));\n};\n\nWritable.prototype._writev = null;\n\nWritable.prototype.end = function (chunk, encoding, cb) {\n var state = this._writableState;\n\n if (typeof chunk === 'function') {\n cb = chunk;\n chunk = null;\n encoding = null;\n } else if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n\n if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);\n\n // .end() fully uncorks\n if (state.corked) {\n state.corked = 1;\n this.uncork();\n }\n\n // ignore unnecessary end() calls.\n if (!state.ending && !state.finished) endWritable(this, state, cb);\n};\n\nfunction needFinish(state) {\n return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;\n}\nfunction callFinal(stream, state) {\n stream._final(function (err) {\n state.pendingcb--;\n if (err) {\n stream.emit('error', err);\n }\n state.prefinished = true;\n stream.emit('prefinish');\n finishMaybe(stream, state);\n });\n}\nfunction prefinish(stream, state) {\n if (!state.prefinished && !state.finalCalled) {\n if (typeof stream._final === 'function') {\n state.pendingcb++;\n state.finalCalled = true;\n pna.nextTick(callFinal, stream, state);\n } else {\n state.prefinished = true;\n stream.emit('prefinish');\n }\n }\n}\n\nfunction finishMaybe(stream, state) {\n var need = needFinish(state);\n if (need) {\n prefinish(stream, state);\n if (state.pendingcb === 0) {\n state.finished = true;\n stream.emit('finish');\n }\n }\n return need;\n}\n\nfunction endWritable(stream, state, cb) {\n state.ending = true;\n finishMaybe(stream, state);\n if (cb) {\n if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);\n }\n state.ended = true;\n stream.writable = false;\n}\n\nfunction onCorkedFinish(corkReq, state, err) {\n var entry = corkReq.entry;\n corkReq.entry = null;\n while (entry) {\n var cb = entry.callback;\n state.pendingcb--;\n cb(err);\n entry = entry.next;\n }\n if (state.corkedRequestsFree) {\n state.corkedRequestsFree.next = corkReq;\n } else {\n state.corkedRequestsFree = corkReq;\n }\n}\n\nObject.defineProperty(Writable.prototype, 'destroyed', {\n get: function () {\n if (this._writableState === undefined) {\n return false;\n }\n return this._writableState.destroyed;\n },\n set: function (value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if 
(!this._writableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._writableState.destroyed = value;\n }\n});\n\nWritable.prototype.destroy = destroyImpl.destroy;\nWritable.prototype._undestroy = destroyImpl.undestroy;\nWritable.prototype._destroy = function (err, cb) {\n this.end();\n cb(err);\n};","'use strict';\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nvar Buffer = require('safe-buffer').Buffer;\nvar util = require('util');\n\nfunction copyBuffer(src, target, offset) {\n src.copy(target, offset);\n}\n\nmodule.exports = function () {\n function BufferList() {\n _classCallCheck(this, BufferList);\n\n this.head = null;\n this.tail = null;\n this.length = 0;\n }\n\n BufferList.prototype.push = function push(v) {\n var entry = { data: v, next: null };\n if (this.length > 0) this.tail.next = entry;else this.head = entry;\n this.tail = entry;\n ++this.length;\n };\n\n BufferList.prototype.unshift = function unshift(v) {\n var entry = { data: v, next: this.head };\n if (this.length === 0) this.tail = entry;\n this.head = entry;\n ++this.length;\n };\n\n BufferList.prototype.shift = function shift() {\n if (this.length === 0) return;\n var ret = this.head.data;\n if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;\n --this.length;\n return ret;\n };\n\n BufferList.prototype.clear = function clear() {\n this.head = this.tail = null;\n this.length = 0;\n };\n\n BufferList.prototype.join = function join(s) {\n if (this.length === 0) return '';\n var p = this.head;\n var ret = '' + p.data;\n while (p = p.next) {\n ret += s + p.data;\n }return ret;\n };\n\n BufferList.prototype.concat = function concat(n) {\n if (this.length === 0) return Buffer.alloc(0);\n if (this.length === 1) return this.head.data;\n var ret = Buffer.allocUnsafe(n >>> 0);\n var p = this.head;\n var i = 0;\n while (p) {\n copyBuffer(p.data, ret, i);\n i += p.data.length;\n p = p.next;\n }\n return ret;\n };\n\n return BufferList;\n}();\n\nif (util && util.inspect && util.inspect.custom) {\n module.exports.prototype[util.inspect.custom] = function () {\n var obj = util.inspect({ length: this.length });\n return this.constructor.name + ' ' + obj;\n };\n}","'use strict';\n\n/**/\n\nvar pna = require('process-nextick-args');\n/**/\n\n// undocumented cb() API, needed for core, not for public API\nfunction destroy(err, cb) {\n var _this = this;\n\n var readableDestroyed = this._readableState && this._readableState.destroyed;\n var writableDestroyed = this._writableState && this._writableState.destroyed;\n\n if (readableDestroyed || writableDestroyed) {\n if (cb) {\n cb(err);\n } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {\n pna.nextTick(emitErrorNT, this, err);\n }\n return this;\n }\n\n // we set destroyed to true before firing error callbacks in order\n // to make it re-entrance safe in case destroy() is called within callbacks\n\n if (this._readableState) {\n this._readableState.destroyed = true;\n }\n\n // if this is a duplex stream mark the writable part as destroyed as well\n if (this._writableState) {\n this._writableState.destroyed = true;\n }\n\n this._destroy(err || null, function (err) {\n if (!cb && err) {\n pna.nextTick(emitErrorNT, _this, err);\n if (_this._writableState) {\n _this._writableState.errorEmitted = true;\n }\n } else if (cb) {\n cb(err);\n }\n });\n\n return 
this;\n}\n\nfunction undestroy() {\n if (this._readableState) {\n this._readableState.destroyed = false;\n this._readableState.reading = false;\n this._readableState.ended = false;\n this._readableState.endEmitted = false;\n }\n\n if (this._writableState) {\n this._writableState.destroyed = false;\n this._writableState.ended = false;\n this._writableState.ending = false;\n this._writableState.finished = false;\n this._writableState.errorEmitted = false;\n }\n}\n\nfunction emitErrorNT(self, err) {\n self.emit('error', err);\n}\n\nmodule.exports = {\n destroy: destroy,\n undestroy: undestroy\n};","module.exports = require('stream');\n","var Stream = require('stream');\nif (process.env.READABLE_STREAM === 'disable' && Stream) {\n module.exports = Stream;\n exports = module.exports = Stream.Readable;\n exports.Readable = Stream.Readable;\n exports.Writable = Stream.Writable;\n exports.Duplex = Stream.Duplex;\n exports.Transform = Stream.Transform;\n exports.PassThrough = Stream.PassThrough;\n exports.Stream = Stream;\n} else {\n exports = module.exports = require('./lib/_stream_readable.js');\n exports.Stream = Stream || exports;\n exports.Readable = exports;\n exports.Writable = require('./lib/_stream_writable.js');\n exports.Duplex = require('./lib/_stream_duplex.js');\n exports.Transform = require('./lib/_stream_transform.js');\n exports.PassThrough = require('./lib/_stream_passthrough.js');\n}\n","/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n",";(function (sax) { // wrapper for non-node envs\n sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }\n sax.SAXParser = SAXParser\n sax.SAXStream = SAXStream\n sax.createStream = createStream\n\n // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.\n // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),\n // since that's the earliest that a buffer overrun could occur. 
This way, checks are\n // as rare as required, but as often as necessary to ensure never crossing this bound.\n // Furthermore, buffers are only tested at most once per write(), so passing a very\n // large string into write() might have undesirable effects, but this is manageable by\n // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme\n // edge case, result in creating at most one complete copy of the string passed in.\n // Set to Infinity to have unlimited buffers.\n sax.MAX_BUFFER_LENGTH = 64 * 1024\n\n var buffers = [\n 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',\n 'procInstName', 'procInstBody', 'entity', 'attribName',\n 'attribValue', 'cdata', 'script'\n ]\n\n sax.EVENTS = [\n 'text',\n 'processinginstruction',\n 'sgmldeclaration',\n 'doctype',\n 'comment',\n 'opentagstart',\n 'attribute',\n 'opentag',\n 'closetag',\n 'opencdata',\n 'cdata',\n 'closecdata',\n 'error',\n 'end',\n 'ready',\n 'script',\n 'opennamespace',\n 'closenamespace'\n ]\n\n function SAXParser (strict, opt) {\n if (!(this instanceof SAXParser)) {\n return new SAXParser(strict, opt)\n }\n\n var parser = this\n clearBuffers(parser)\n parser.q = parser.c = ''\n parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH\n parser.opt = opt || {}\n parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags\n parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'\n parser.tags = []\n parser.closed = parser.closedRoot = parser.sawRoot = false\n parser.tag = parser.error = null\n parser.strict = !!strict\n parser.noscript = !!(strict || parser.opt.noscript)\n parser.state = S.BEGIN\n parser.strictEntities = parser.opt.strictEntities\n parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)\n parser.attribList = []\n\n // namespaces form a prototype chain.\n // it always points at the current tag,\n // which protos to its parent tag.\n if (parser.opt.xmlns) {\n parser.ns = Object.create(rootNS)\n }\n\n // mostly just for error reporting\n parser.trackPosition = parser.opt.position !== false\n if (parser.trackPosition) {\n parser.position = parser.line = parser.column = 0\n }\n emit(parser, 'onready')\n }\n\n if (!Object.create) {\n Object.create = function (o) {\n function F () {}\n F.prototype = o\n var newf = new F()\n return newf\n }\n }\n\n if (!Object.keys) {\n Object.keys = function (o) {\n var a = []\n for (var i in o) if (o.hasOwnProperty(i)) a.push(i)\n return a\n }\n }\n\n function checkBufferLength (parser) {\n var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)\n var maxActual = 0\n for (var i = 0, l = buffers.length; i < l; i++) {\n var len = parser[buffers[i]].length\n if (len > maxAllowed) {\n // Text/cdata nodes can get big, and since they're buffered,\n // we can get here under normal conditions.\n // Avoid issues by emitting the text node now,\n // so at least it won't get any bigger.\n switch (buffers[i]) {\n case 'textNode':\n closeText(parser)\n break\n\n case 'cdata':\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n break\n\n case 'script':\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n break\n\n default:\n error(parser, 'Max buffer length exceeded: ' + buffers[i])\n }\n }\n maxActual = Math.max(maxActual, len)\n }\n // schedule the next check for the earliest possible buffer overrun.\n var m = sax.MAX_BUFFER_LENGTH - maxActual\n parser.bufferCheckPosition = m + parser.position\n }\n\n function clearBuffers (parser) {\n for (var i = 0, l = 
buffers.length; i < l; i++) {\n parser[buffers[i]] = ''\n }\n }\n\n function flushBuffers (parser) {\n closeText(parser)\n if (parser.cdata !== '') {\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n }\n if (parser.script !== '') {\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n }\n }\n\n SAXParser.prototype = {\n end: function () { end(this) },\n write: write,\n resume: function () { this.error = null; return this },\n close: function () { return this.write(null) },\n flush: function () { flushBuffers(this) }\n }\n\n var Stream\n try {\n Stream = require('stream').Stream\n } catch (ex) {\n Stream = function () {}\n }\n\n var streamWraps = sax.EVENTS.filter(function (ev) {\n return ev !== 'error' && ev !== 'end'\n })\n\n function createStream (strict, opt) {\n return new SAXStream(strict, opt)\n }\n\n function SAXStream (strict, opt) {\n if (!(this instanceof SAXStream)) {\n return new SAXStream(strict, opt)\n }\n\n Stream.apply(this)\n\n this._parser = new SAXParser(strict, opt)\n this.writable = true\n this.readable = true\n\n var me = this\n\n this._parser.onend = function () {\n me.emit('end')\n }\n\n this._parser.onerror = function (er) {\n me.emit('error', er)\n\n // if didn't throw, then means error was handled.\n // go ahead and clear error, so we can write again.\n me._parser.error = null\n }\n\n this._decoder = null\n\n streamWraps.forEach(function (ev) {\n Object.defineProperty(me, 'on' + ev, {\n get: function () {\n return me._parser['on' + ev]\n },\n set: function (h) {\n if (!h) {\n me.removeAllListeners(ev)\n me._parser['on' + ev] = h\n return h\n }\n me.on(ev, h)\n },\n enumerable: true,\n configurable: false\n })\n })\n }\n\n SAXStream.prototype = Object.create(Stream.prototype, {\n constructor: {\n value: SAXStream\n }\n })\n\n SAXStream.prototype.write = function (data) {\n if (typeof Buffer === 'function' &&\n typeof Buffer.isBuffer === 'function' &&\n Buffer.isBuffer(data)) {\n if (!this._decoder) {\n var SD = require('string_decoder').StringDecoder\n this._decoder = new SD('utf8')\n }\n data = this._decoder.write(data)\n }\n\n this._parser.write(data.toString())\n this.emit('data', data)\n return true\n }\n\n SAXStream.prototype.end = function (chunk) {\n if (chunk && chunk.length) {\n this.write(chunk)\n }\n this._parser.end()\n return true\n }\n\n SAXStream.prototype.on = function (ev, handler) {\n var me = this\n if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {\n me._parser['on' + ev] = function () {\n var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)\n args.splice(0, 0, ev)\n me.emit.apply(me, args)\n }\n }\n\n return Stream.prototype.on.call(me, ev, handler)\n }\n\n // this really needs to be replaced with character classes.\n // XML allows all manner of ridiculous numbers and digits.\n var CDATA = '[CDATA['\n var DOCTYPE = 'DOCTYPE'\n var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'\n var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'\n var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }\n\n // http://www.w3.org/TR/REC-xml/#NT-NameStartChar\n // This implementation works on strings, a single character at a time\n // as such, it cannot ever support astral-plane characters (10000-EFFFF)\n // without a significant breaking change to either this parser, or the\n // JavaScript language. 
Implementation of an emoji-capable xml parser\n // is left as an exercise for the reader.\n var nameStart = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n\n var nameBody = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n var entityStart = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n var entityBody = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n function isWhitespace (c) {\n return c === ' ' || c === '\\n' || c === '\\r' || c === '\\t'\n }\n\n function isQuote (c) {\n return c === '\"' || c === '\\''\n }\n\n function isAttribEnd (c) {\n return c === '>' || isWhitespace(c)\n }\n\n function isMatch (regex, c) {\n return regex.test(c)\n }\n\n function notMatch (regex, c) {\n return !isMatch(regex, c)\n }\n\n var S = 0\n sax.STATE = {\n BEGIN: S++, // leading byte order mark or whitespace\n BEGIN_WHITESPACE: S++, // leading whitespace\n TEXT: S++, // general stuff\n TEXT_ENTITY: S++, // & and such.\n OPEN_WAKA: S++, // <\n SGML_DECL: S++, // \n SCRIPT: S++, //