Update node_modules

crazy-max 2020-01-23 08:51:44 +00:00
parent ff30bfb8f9
commit 8ef82d6aea
9910 changed files with 638 additions and 947336 deletions

1
node_modules/.bin/acorn generated vendored
@@ -1 +0,0 @@
../acorn/bin/acorn

1
node_modules/.bin/atob generated vendored
@@ -1 +0,0 @@
../atob/bin/atob.js

1
node_modules/.bin/escodegen generated vendored
@@ -1 +0,0 @@
../escodegen/bin/escodegen.js

1
node_modules/.bin/esgenerate generated vendored
@@ -1 +0,0 @@
../escodegen/bin/esgenerate.js

1
node_modules/.bin/esparse generated vendored
@@ -1 +0,0 @@
../esprima/bin/esparse.js

1
node_modules/.bin/esvalidate generated vendored
@@ -1 +0,0 @@
../esprima/bin/esvalidate.js

1
node_modules/.bin/is-ci generated vendored
@@ -1 +0,0 @@
../is-ci/bin.js

1
node_modules/.bin/jest-runtime generated vendored
@@ -1 +0,0 @@
../jest-runtime/bin/jest-runtime.js

1
node_modules/.bin/js-yaml generated vendored
@@ -1 +0,0 @@
../js-yaml/bin/js-yaml.js

1
node_modules/.bin/jsesc generated vendored
@@ -1 +0,0 @@
../jsesc/bin/jsesc

1
node_modules/.bin/json5 generated vendored
@@ -1 +0,0 @@
../json5/lib/cli.js

1
node_modules/.bin/mkdirp generated vendored
@@ -1 +0,0 @@
../mkdirp/bin/cmd.js

1
node_modules/.bin/parser generated vendored
@@ -1 +0,0 @@
../@babel/parser/bin/babel-parser.js

1
node_modules/.bin/sane generated vendored
@@ -1 +0,0 @@
../sane/src/cli.js

1
node_modules/.bin/sshpk-conv generated vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-conv

1
node_modules/.bin/sshpk-sign generated vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-sign

1
node_modules/.bin/sshpk-verify generated vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-verify

1
node_modules/.bin/watch generated vendored
@@ -1 +0,0 @@
../@cnakazawa/watch/cli.js

1
node_modules/.bin/which generated vendored
@@ -1 +0,0 @@
../which/bin/which

@@ -1,22 +0,0 @@
MIT License
Copyright (c) 2014-present Sebastian McKenzie and other contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@@ -1,19 +0,0 @@
# @babel/code-frame
> Generate errors that contain a code frame that point to source locations.
See our website [@babel/code-frame](https://babeljs.io/docs/en/next/babel-code-frame.html) for more information.
## Install
Using npm:
```sh
npm install --save-dev @babel/code-frame
```
or using yarn:
```sh
yarn add @babel/code-frame --dev
```

@@ -1,167 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.codeFrameColumns = codeFrameColumns;
exports.default = _default;
var _highlight = _interopRequireWildcard(require("@babel/highlight"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
let deprecationWarningShown = false;
function getDefs(chalk) {
return {
gutter: chalk.grey,
marker: chalk.red.bold,
message: chalk.red.bold
};
}
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
function getMarkerLines(loc, source, opts) {
const startLoc = Object.assign({
column: 0,
line: -1
}, loc.start);
const endLoc = Object.assign({}, startLoc, {}, loc.end);
const {
linesAbove = 2,
linesBelow = 3
} = opts || {};
const startLine = startLoc.line;
const startColumn = startLoc.column;
const endLine = endLoc.line;
const endColumn = endLoc.column;
let start = Math.max(startLine - (linesAbove + 1), 0);
let end = Math.min(source.length, endLine + linesBelow);
if (startLine === -1) {
start = 0;
}
if (endLine === -1) {
end = source.length;
}
const lineDiff = endLine - startLine;
const markerLines = {};
if (lineDiff) {
for (let i = 0; i <= lineDiff; i++) {
const lineNumber = i + startLine;
if (!startColumn) {
markerLines[lineNumber] = true;
} else if (i === 0) {
const sourceLength = source[lineNumber - 1].length;
markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
} else if (i === lineDiff) {
markerLines[lineNumber] = [0, endColumn];
} else {
const sourceLength = source[lineNumber - i].length;
markerLines[lineNumber] = [0, sourceLength];
}
}
} else {
if (startColumn === endColumn) {
if (startColumn) {
markerLines[startLine] = [startColumn, 0];
} else {
markerLines[startLine] = true;
}
} else {
markerLines[startLine] = [startColumn, endColumn - startColumn];
}
}
return {
start,
end,
markerLines
};
}
function codeFrameColumns(rawLines, loc, opts = {}) {
const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight.shouldHighlight)(opts);
const chalk = (0, _highlight.getChalk)(opts);
const defs = getDefs(chalk);
const maybeHighlight = (chalkFn, string) => {
return highlighted ? chalkFn(string) : string;
};
const lines = rawLines.split(NEWLINE);
const {
start,
end,
markerLines
} = getMarkerLines(loc, lines, opts);
const hasColumns = loc.start && typeof loc.start.column === "number";
const numberMaxWidth = String(end).length;
const highlightedLines = highlighted ? (0, _highlight.default)(rawLines, opts) : rawLines;
let frame = highlightedLines.split(NEWLINE).slice(start, end).map((line, index) => {
const number = start + 1 + index;
const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
const gutter = ` ${paddedNumber} | `;
const hasMarker = markerLines[number];
const lastMarkerLine = !markerLines[number + 1];
if (hasMarker) {
let markerLine = "";
if (Array.isArray(hasMarker)) {
const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
const numberOfMarkers = hasMarker[1] || 1;
markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join("");
if (lastMarkerLine && opts.message) {
markerLine += " " + maybeHighlight(defs.message, opts.message);
}
}
return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line, markerLine].join("");
} else {
return ` ${maybeHighlight(defs.gutter, gutter)}${line}`;
}
}).join("\n");
if (opts.message && !hasColumns) {
frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
}
if (highlighted) {
return chalk.reset(frame);
} else {
return frame;
}
}
function _default(rawLines, lineNumber, colNumber, opts = {}) {
if (!deprecationWarningShown) {
deprecationWarningShown = true;
const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
if (process.emitWarning) {
process.emitWarning(message, "DeprecationWarning");
} else {
const deprecationError = new Error(message);
deprecationError.name = "DeprecationWarning";
console.warn(new Error(message));
}
}
colNumber = Math.max(colNumber, 0);
const location = {
start: {
column: colNumber,
line: lineNumber
}
};
return codeFrameColumns(rawLines, location, opts);
}

@@ -1,56 +0,0 @@
{
"_from": "@babel/code-frame@^7.0.0",
"_id": "@babel/code-frame@7.8.3",
"_inBundle": false,
"_integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==",
"_location": "/@babel/code-frame",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@babel/code-frame@^7.0.0",
"name": "@babel/code-frame",
"escapedName": "@babel%2fcode-frame",
"scope": "@babel",
"rawSpec": "^7.0.0",
"saveSpec": null,
"fetchSpec": "^7.0.0"
},
"_requiredBy": [
"/@babel/core",
"/@babel/template",
"/@babel/traverse",
"/jest-message-util"
],
"_resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz",
"_shasum": "33e25903d7481181534e12ec0a25f16b6fcf419e",
"_spec": "@babel/code-frame@^7.0.0",
"_where": "/home/runner/work/ghaction-upx/ghaction-upx/node_modules/jest-message-util",
"author": {
"name": "Sebastian McKenzie",
"email": "sebmck@gmail.com"
},
"bundleDependencies": false,
"dependencies": {
"@babel/highlight": "^7.8.3"
},
"deprecated": false,
"description": "Generate errors that contain a code frame that point to source locations.",
"devDependencies": {
"chalk": "^2.0.0",
"strip-ansi": "^4.0.0"
},
"gitHead": "a7620bd266ae1345975767bbc7abf09034437017",
"homepage": "https://babeljs.io/",
"license": "MIT",
"main": "lib/index.js",
"name": "@babel/code-frame",
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "https://github.com/babel/babel/tree/master/packages/babel-code-frame"
},
"version": "7.8.3"
}

22
node_modules/@babel/core/LICENSE generated vendored
@@ -1,22 +0,0 @@
MIT License
Copyright (c) 2014-present Sebastian McKenzie and other contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

19
node_modules/@babel/core/README.md generated vendored
@@ -1,19 +0,0 @@
# @babel/core
> Babel compiler core.
See our website [@babel/core](https://babeljs.io/docs/en/next/babel-core.html) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.
## Install
Using npm:
```sh
npm install --save-dev @babel/core
```
or using yarn:
```sh
yarn add @babel/core --dev
```

@@ -1,324 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.makeWeakCache = makeWeakCache;
exports.makeWeakCacheSync = makeWeakCacheSync;
exports.makeStrongCache = makeStrongCache;
exports.makeStrongCacheSync = makeStrongCacheSync;
exports.assertSimpleType = assertSimpleType;
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _async = require("../gensync-utils/async");
var _util = require("./util");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const synchronize = gen => {
return (0, _gensync().default)(gen).sync;
};
function* genTrue(data) {
return true;
}
function makeWeakCache(handler) {
return makeCachedFunction(WeakMap, handler);
}
function makeWeakCacheSync(handler) {
return synchronize(makeWeakCache(handler));
}
function makeStrongCache(handler) {
return makeCachedFunction(Map, handler);
}
function makeStrongCacheSync(handler) {
return synchronize(makeStrongCache(handler));
}
function makeCachedFunction(CallCache, handler) {
const callCacheSync = new CallCache();
const callCacheAsync = new CallCache();
const futureCache = new CallCache();
return function* cachedFunction(arg, data) {
const asyncContext = yield* (0, _async.isAsync)();
const callCache = asyncContext ? callCacheAsync : callCacheSync;
const cached = yield* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data);
if (cached.valid) return cached.value;
const cache = new CacheConfigurator(data);
const handlerResult = handler(arg, cache);
let finishLock;
let value;
if ((0, _util.isIterableIterator)(handlerResult)) {
const gen = handlerResult;
value = yield* (0, _async.onFirstPause)(gen, () => {
finishLock = setupAsyncLocks(cache, futureCache, arg);
});
} else {
value = handlerResult;
}
updateFunctionCache(callCache, cache, arg, value);
if (finishLock) {
futureCache.delete(arg);
finishLock.release(value);
}
return value;
};
}
function* getCachedValue(cache, arg, data) {
const cachedValue = cache.get(arg);
if (cachedValue) {
for (const {
value,
valid
} of cachedValue) {
if (yield* valid(data)) return {
valid: true,
value
};
}
}
return {
valid: false,
value: null
};
}
function* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data) {
const cached = yield* getCachedValue(callCache, arg, data);
if (cached.valid) {
return cached;
}
if (asyncContext) {
const cached = yield* getCachedValue(futureCache, arg, data);
if (cached.valid) {
const value = yield* (0, _async.waitFor)(cached.value.promise);
return {
valid: true,
value
};
}
}
return {
valid: false,
value: null
};
}
function setupAsyncLocks(config, futureCache, arg) {
const finishLock = new Lock();
updateFunctionCache(futureCache, config, arg, finishLock);
return finishLock;
}
function updateFunctionCache(cache, config, arg, value) {
if (!config.configured()) config.forever();
let cachedValue = cache.get(arg);
config.deactivate();
switch (config.mode()) {
case "forever":
cachedValue = [{
value,
valid: genTrue
}];
cache.set(arg, cachedValue);
break;
case "invalidate":
cachedValue = [{
value,
valid: config.validator()
}];
cache.set(arg, cachedValue);
break;
case "valid":
if (cachedValue) {
cachedValue.push({
value,
valid: config.validator()
});
} else {
cachedValue = [{
value,
valid: config.validator()
}];
cache.set(arg, cachedValue);
}
}
}
class CacheConfigurator {
constructor(data) {
this._active = true;
this._never = false;
this._forever = false;
this._invalidate = false;
this._configured = false;
this._pairs = [];
this._data = data;
}
simple() {
return makeSimpleConfigurator(this);
}
mode() {
if (this._never) return "never";
if (this._forever) return "forever";
if (this._invalidate) return "invalidate";
return "valid";
}
forever() {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._never) {
throw new Error("Caching has already been configured with .never()");
}
this._forever = true;
this._configured = true;
}
never() {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._forever) {
throw new Error("Caching has already been configured with .forever()");
}
this._never = true;
this._configured = true;
}
using(handler) {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._never || this._forever) {
throw new Error("Caching has already been configured with .never or .forever()");
}
this._configured = true;
const key = handler(this._data);
const fn = (0, _async.maybeAsync)(handler, `You appear to be using an async cache handler, but Babel has been called synchronously`);
if ((0, _async.isThenable)(key)) {
return key.then(key => {
this._pairs.push([key, fn]);
return key;
});
}
this._pairs.push([key, fn]);
return key;
}
invalidate(handler) {
this._invalidate = true;
return this.using(handler);
}
validator() {
const pairs = this._pairs;
return function* (data) {
for (const [key, fn] of pairs) {
if (key !== (yield* fn(data))) return false;
}
return true;
};
}
deactivate() {
this._active = false;
}
configured() {
return this._configured;
}
}
function makeSimpleConfigurator(cache) {
function cacheFn(val) {
if (typeof val === "boolean") {
if (val) cache.forever();else cache.never();
return;
}
return cache.using(() => assertSimpleType(val()));
}
cacheFn.forever = () => cache.forever();
cacheFn.never = () => cache.never();
cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
return cacheFn;
}
function assertSimpleType(value) {
if ((0, _async.isThenable)(value)) {
throw new Error(`You appear to be using an async cache handler, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously handle your caching logic.`);
}
if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
}
return value;
}
class Lock {
constructor() {
this.released = false;
this.promise = new Promise(resolve => {
this._resolve = resolve;
});
}
release(value) {
this.released = true;
this._resolve(value);
}
}

@@ -1,439 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.buildPresetChain = buildPresetChain;
exports.buildRootChain = buildRootChain;
exports.buildPresetChainWalker = void 0;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _debug() {
const data = _interopRequireDefault(require("debug"));
_debug = function () {
return data;
};
return data;
}
var _options = require("./validation/options");
var _patternToRegex = _interopRequireDefault(require("./pattern-to-regex"));
var _files = require("./files");
var _caching = require("./caching");
var _configDescriptors = require("./config-descriptors");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const debug = (0, _debug().default)("babel:config:config-chain");
function* buildPresetChain(arg, context) {
const chain = yield* buildPresetChainWalker(arg, context);
if (!chain) return null;
return {
plugins: dedupDescriptors(chain.plugins),
presets: dedupDescriptors(chain.presets),
options: chain.options.map(o => normalizeOptions(o))
};
}
const buildPresetChainWalker = makeChainWalker({
init: arg => arg,
root: preset => loadPresetDescriptors(preset),
env: (preset, envName) => loadPresetEnvDescriptors(preset)(envName),
overrides: (preset, index) => loadPresetOverridesDescriptors(preset)(index),
overridesEnv: (preset, index, envName) => loadPresetOverridesEnvDescriptors(preset)(index)(envName)
});
exports.buildPresetChainWalker = buildPresetChainWalker;
const loadPresetDescriptors = (0, _caching.makeWeakCacheSync)(preset => buildRootDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors));
const loadPresetEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, envName)));
const loadPresetOverridesDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index)));
const loadPresetOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index, envName))));
function* buildRootChain(opts, context) {
const programmaticChain = yield* loadProgrammaticChain({
options: opts,
dirname: context.cwd
}, context);
if (!programmaticChain) return null;
let configFile;
if (typeof opts.configFile === "string") {
configFile = yield* (0, _files.loadConfig)(opts.configFile, context.cwd, context.envName, context.caller);
} else if (opts.configFile !== false) {
configFile = yield* (0, _files.findRootConfig)(context.root, context.envName, context.caller);
}
let {
babelrc,
babelrcRoots
} = opts;
let babelrcRootsDirectory = context.cwd;
const configFileChain = emptyChain();
if (configFile) {
const validatedFile = validateConfigFile(configFile);
const result = yield* loadFileChain(validatedFile, context);
if (!result) return null;
if (babelrc === undefined) {
babelrc = validatedFile.options.babelrc;
}
if (babelrcRoots === undefined) {
babelrcRootsDirectory = validatedFile.dirname;
babelrcRoots = validatedFile.options.babelrcRoots;
}
mergeChain(configFileChain, result);
}
const pkgData = typeof context.filename === "string" ? yield* (0, _files.findPackageData)(context.filename) : null;
let ignoreFile, babelrcFile;
const fileChain = emptyChain();
if ((babelrc === true || babelrc === undefined) && pkgData && babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)) {
({
ignore: ignoreFile,
config: babelrcFile
} = yield* (0, _files.findRelativeConfig)(pkgData, context.envName, context.caller));
if (ignoreFile && shouldIgnore(context, ignoreFile.ignore, null, ignoreFile.dirname)) {
return null;
}
if (babelrcFile) {
const result = yield* loadFileChain(validateBabelrcFile(babelrcFile), context);
if (!result) return null;
mergeChain(fileChain, result);
}
}
const chain = mergeChain(mergeChain(mergeChain(emptyChain(), configFileChain), fileChain), programmaticChain);
return {
plugins: dedupDescriptors(chain.plugins),
presets: dedupDescriptors(chain.presets),
options: chain.options.map(o => normalizeOptions(o)),
ignore: ignoreFile || undefined,
babelrc: babelrcFile || undefined,
config: configFile || undefined
};
}
function babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory) {
if (typeof babelrcRoots === "boolean") return babelrcRoots;
const absoluteRoot = context.root;
if (babelrcRoots === undefined) {
return pkgData.directories.indexOf(absoluteRoot) !== -1;
}
let babelrcPatterns = babelrcRoots;
if (!Array.isArray(babelrcPatterns)) babelrcPatterns = [babelrcPatterns];
babelrcPatterns = babelrcPatterns.map(pat => {
return typeof pat === "string" ? _path().default.resolve(babelrcRootsDirectory, pat) : pat;
});
if (babelrcPatterns.length === 1 && babelrcPatterns[0] === absoluteRoot) {
return pkgData.directories.indexOf(absoluteRoot) !== -1;
}
return babelrcPatterns.some(pat => {
if (typeof pat === "string") {
pat = (0, _patternToRegex.default)(pat, babelrcRootsDirectory);
}
return pkgData.directories.some(directory => {
return matchPattern(pat, babelrcRootsDirectory, directory, context);
});
});
}
const validateConfigFile = (0, _caching.makeWeakCacheSync)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("configfile", file.options)
}));
const validateBabelrcFile = (0, _caching.makeWeakCacheSync)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("babelrcfile", file.options)
}));
const validateExtendFile = (0, _caching.makeWeakCacheSync)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("extendsfile", file.options)
}));
const loadProgrammaticChain = makeChainWalker({
root: input => buildRootDescriptors(input, "base", _configDescriptors.createCachedDescriptors),
env: (input, envName) => buildEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, envName),
overrides: (input, index) => buildOverrideDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index),
overridesEnv: (input, index, envName) => buildOverrideEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index, envName)
});
const loadFileChain = makeChainWalker({
root: file => loadFileDescriptors(file),
env: (file, envName) => loadFileEnvDescriptors(file)(envName),
overrides: (file, index) => loadFileOverridesDescriptors(file)(index),
overridesEnv: (file, index, envName) => loadFileOverridesEnvDescriptors(file)(index)(envName)
});
const loadFileDescriptors = (0, _caching.makeWeakCacheSync)(file => buildRootDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors));
const loadFileEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, envName)));
const loadFileOverridesDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index)));
const loadFileOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index, envName))));
function buildRootDescriptors({
dirname,
options
}, alias, descriptors) {
return descriptors(dirname, options, alias);
}
function buildEnvDescriptors({
dirname,
options
}, alias, descriptors, envName) {
const opts = options.env && options.env[envName];
return opts ? descriptors(dirname, opts, `${alias}.env["${envName}"]`) : null;
}
function buildOverrideDescriptors({
dirname,
options
}, alias, descriptors, index) {
const opts = options.overrides && options.overrides[index];
if (!opts) throw new Error("Assertion failure - missing override");
return descriptors(dirname, opts, `${alias}.overrides[${index}]`);
}
function buildOverrideEnvDescriptors({
dirname,
options
}, alias, descriptors, index, envName) {
const override = options.overrides && options.overrides[index];
if (!override) throw new Error("Assertion failure - missing override");
const opts = override.env && override.env[envName];
return opts ? descriptors(dirname, opts, `${alias}.overrides[${index}].env["${envName}"]`) : null;
}
function makeChainWalker({
root,
env,
overrides,
overridesEnv
}) {
return function* (input, context, files = new Set()) {
const {
dirname
} = input;
const flattenedConfigs = [];
const rootOpts = root(input);
if (configIsApplicable(rootOpts, dirname, context)) {
flattenedConfigs.push(rootOpts);
const envOpts = env(input, context.envName);
if (envOpts && configIsApplicable(envOpts, dirname, context)) {
flattenedConfigs.push(envOpts);
}
(rootOpts.options.overrides || []).forEach((_, index) => {
const overrideOps = overrides(input, index);
if (configIsApplicable(overrideOps, dirname, context)) {
flattenedConfigs.push(overrideOps);
const overrideEnvOpts = overridesEnv(input, index, context.envName);
if (overrideEnvOpts && configIsApplicable(overrideEnvOpts, dirname, context)) {
flattenedConfigs.push(overrideEnvOpts);
}
}
});
}
if (flattenedConfigs.some(({
options: {
ignore,
only
}
}) => shouldIgnore(context, ignore, only, dirname))) {
return null;
}
const chain = emptyChain();
for (const op of flattenedConfigs) {
if (!(yield* mergeExtendsChain(chain, op.options, dirname, context, files))) {
return null;
}
mergeChainOpts(chain, op);
}
return chain;
};
}
function* mergeExtendsChain(chain, opts, dirname, context, files) {
if (opts.extends === undefined) return true;
const file = yield* (0, _files.loadConfig)(opts.extends, dirname, context.envName, context.caller);
if (files.has(file)) {
throw new Error(`Configuration cycle detected loading ${file.filepath}.\n` + `File already loaded following the config chain:\n` + Array.from(files, file => ` - ${file.filepath}`).join("\n"));
}
files.add(file);
const fileChain = yield* loadFileChain(validateExtendFile(file), context, files);
files.delete(file);
if (!fileChain) return false;
mergeChain(chain, fileChain);
return true;
}
function mergeChain(target, source) {
target.options.push(...source.options);
target.plugins.push(...source.plugins);
target.presets.push(...source.presets);
return target;
}
function mergeChainOpts(target, {
options,
plugins,
presets
}) {
target.options.push(options);
target.plugins.push(...plugins());
target.presets.push(...presets());
return target;
}
function emptyChain() {
return {
options: [],
presets: [],
plugins: []
};
}
function normalizeOptions(opts) {
const options = Object.assign({}, opts);
delete options.extends;
delete options.env;
delete options.overrides;
delete options.plugins;
delete options.presets;
delete options.passPerPreset;
delete options.ignore;
delete options.only;
delete options.test;
delete options.include;
delete options.exclude;
if (Object.prototype.hasOwnProperty.call(options, "sourceMap")) {
options.sourceMaps = options.sourceMap;
delete options.sourceMap;
}
return options;
}
function dedupDescriptors(items) {
const map = new Map();
const descriptors = [];
for (const item of items) {
if (typeof item.value === "function") {
const fnKey = item.value;
let nameMap = map.get(fnKey);
if (!nameMap) {
nameMap = new Map();
map.set(fnKey, nameMap);
}
let desc = nameMap.get(item.name);
if (!desc) {
desc = {
value: item
};
descriptors.push(desc);
if (!item.ownPass) nameMap.set(item.name, desc);
} else {
desc.value = item;
}
} else {
descriptors.push({
value: item
});
}
}
return descriptors.reduce((acc, desc) => {
acc.push(desc.value);
return acc;
}, []);
}
function configIsApplicable({
options
}, dirname, context) {
return (options.test === undefined || configFieldIsApplicable(context, options.test, dirname)) && (options.include === undefined || configFieldIsApplicable(context, options.include, dirname)) && (options.exclude === undefined || !configFieldIsApplicable(context, options.exclude, dirname));
}
function configFieldIsApplicable(context, test, dirname) {
const patterns = Array.isArray(test) ? test : [test];
return matchesPatterns(context, patterns, dirname);
}
function shouldIgnore(context, ignore, only, dirname) {
if (ignore && matchesPatterns(context, ignore, dirname)) {
debug("Ignored %o because it matched one of %O from %o", context.filename, ignore, dirname);
return true;
}
if (only && !matchesPatterns(context, only, dirname)) {
debug("Ignored %o because it failed to match one of %O from %o", context.filename, only, dirname);
return true;
}
return false;
}
function matchesPatterns(context, patterns, dirname) {
return patterns.some(pattern => matchPattern(pattern, dirname, context.filename, context));
}
function matchPattern(pattern, dirname, pathToTest, context) {
if (typeof pattern === "function") {
return !!pattern(pathToTest, {
dirname,
envName: context.envName,
caller: context.caller
});
}
if (typeof pathToTest !== "string") {
throw new Error(`Configuration contains string/RegExp pattern, but no filename was passed to Babel`);
}
if (typeof pattern === "string") {
pattern = (0, _patternToRegex.default)(pattern, dirname);
}
return pattern.test(pathToTest);
}

@@ -1,211 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createCachedDescriptors = createCachedDescriptors;
exports.createUncachedDescriptors = createUncachedDescriptors;
exports.createDescriptor = createDescriptor;
var _files = require("./files");
var _item = require("./item");
var _caching = require("./caching");
function isEqualDescriptor(a, b) {
return a.name === b.name && a.value === b.value && a.options === b.options && a.dirname === b.dirname && a.alias === b.alias && a.ownPass === b.ownPass && (a.file && a.file.request) === (b.file && b.file.request) && (a.file && a.file.resolved) === (b.file && b.file.resolved);
}
function createCachedDescriptors(dirname, options, alias) {
const {
plugins,
presets,
passPerPreset
} = options;
return {
options,
plugins: plugins ? () => createCachedPluginDescriptors(plugins, dirname)(alias) : () => [],
presets: presets ? () => createCachedPresetDescriptors(presets, dirname)(alias)(!!passPerPreset) : () => []
};
}
function createUncachedDescriptors(dirname, options, alias) {
let plugins;
let presets;
return {
options,
plugins: () => {
if (!plugins) {
plugins = createPluginDescriptors(options.plugins || [], dirname, alias);
}
return plugins;
},
presets: () => {
if (!presets) {
presets = createPresetDescriptors(options.presets || [], dirname, alias, !!options.passPerPreset);
}
return presets;
}
};
}
const PRESET_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPresetDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
const dirname = cache.using(dir => dir);
return (0, _caching.makeStrongCacheSync)(alias => (0, _caching.makeStrongCacheSync)(passPerPreset => createPresetDescriptors(items, dirname, alias, passPerPreset).map(desc => loadCachedDescriptor(PRESET_DESCRIPTOR_CACHE, desc))));
});
const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPluginDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
const dirname = cache.using(dir => dir);
return (0, _caching.makeStrongCacheSync)(alias => createPluginDescriptors(items, dirname, alias).map(desc => loadCachedDescriptor(PLUGIN_DESCRIPTOR_CACHE, desc)));
});
const DEFAULT_OPTIONS = {};
function loadCachedDescriptor(cache, desc) {
const {
value,
options = DEFAULT_OPTIONS
} = desc;
if (options === false) return desc;
let cacheByOptions = cache.get(value);
if (!cacheByOptions) {
cacheByOptions = new WeakMap();
cache.set(value, cacheByOptions);
}
let possibilities = cacheByOptions.get(options);
if (!possibilities) {
possibilities = [];
cacheByOptions.set(options, possibilities);
}
if (possibilities.indexOf(desc) === -1) {
const matches = possibilities.filter(possibility => isEqualDescriptor(possibility, desc));
if (matches.length > 0) {
return matches[0];
}
possibilities.push(desc);
}
return desc;
}
function createPresetDescriptors(items, dirname, alias, passPerPreset) {
return createDescriptors("preset", items, dirname, alias, passPerPreset);
}
function createPluginDescriptors(items, dirname, alias) {
return createDescriptors("plugin", items, dirname, alias);
}
function createDescriptors(type, items, dirname, alias, ownPass) {
const descriptors = items.map((item, index) => createDescriptor(item, dirname, {
type,
alias: `${alias}$${index}`,
ownPass: !!ownPass
}));
assertNoDuplicates(descriptors);
return descriptors;
}
function createDescriptor(pair, dirname, {
type,
alias,
ownPass
}) {
const desc = (0, _item.getItemDescriptor)(pair);
if (desc) {
return desc;
}
let name;
let options;
let value = pair;
if (Array.isArray(value)) {
if (value.length === 3) {
[value, options, name] = value;
} else {
[value, options] = value;
}
}
let file = undefined;
let filepath = null;
if (typeof value === "string") {
if (typeof type !== "string") {
throw new Error("To resolve a string-based item, the type of item must be given");
}
const resolver = type === "plugin" ? _files.loadPlugin : _files.loadPreset;
const request = value;
({
filepath,
value
} = resolver(value, dirname));
file = {
request,
resolved: filepath
};
}
if (!value) {
throw new Error(`Unexpected falsy value: ${String(value)}`);
}
if (typeof value === "object" && value.__esModule) {
if (value.default) {
value = value.default;
} else {
throw new Error("Must export a default export when using ES6 modules.");
}
}
if (typeof value !== "object" && typeof value !== "function") {
throw new Error(`Unsupported format: ${typeof value}. Expected an object or a function.`);
}
if (filepath !== null && typeof value === "object" && value) {
throw new Error(`Plugin/Preset files are not allowed to export objects, only functions. In ${filepath}`);
}
return {
name,
alias: filepath || alias,
value,
options,
dirname,
ownPass,
file
};
}
function assertNoDuplicates(items) {
const map = new Map();
for (const item of items) {
if (typeof item.value !== "function") continue;
let nameMap = map.get(item.value);
if (!nameMap) {
nameMap = new Set();
map.set(item.value, nameMap);
}
if (nameMap.has(item.name)) {
const conflicts = items.filter(i => i.value === item.value);
throw new Error([`Duplicate plugin/preset detected.`, `If you'd like to use two separate instances of a plugin,`, `they need separate names, e.g.`, ``, ` plugins: [`, ` ['some-plugin', {}],`, ` ['some-plugin', {}, 'some unique name'],`, ` ]`, ``, `Duplicates detected are:`, `${JSON.stringify(conflicts, null, 2)}`].join("\n"));
}
nameMap.add(item.name);
}
}

@@ -1,314 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.findConfigUpwards = findConfigUpwards;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;
exports.ROOT_CONFIG_FILENAMES = void 0;
function _debug() {
const data = _interopRequireDefault(require("debug"));
_debug = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _json() {
const data = _interopRequireDefault(require("json5"));
_json = function () {
return data;
};
return data;
}
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _caching = require("../caching");
var _configApi = _interopRequireDefault(require("../helpers/config-api"));
var _utils = require("./utils");
var _moduleTypes = _interopRequireDefault(require("./module-types"));
var _patternToRegex = _interopRequireDefault(require("../pattern-to-regex"));
var fs = _interopRequireWildcard(require("../../gensync-utils/fs"));
var _resolve = _interopRequireDefault(require("../../gensync-utils/resolve"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const debug = (0, _debug().default)("babel:config:loading:files:configuration");
const ROOT_CONFIG_FILENAMES = ["babel.config.js", "babel.config.cjs", "babel.config.mjs", "babel.config.json"];
exports.ROOT_CONFIG_FILENAMES = ROOT_CONFIG_FILENAMES;
const RELATIVE_CONFIG_FILENAMES = [".babelrc", ".babelrc.js", ".babelrc.cjs", ".babelrc.mjs", ".babelrc.json"];
const BABELIGNORE_FILENAME = ".babelignore";
function* findConfigUpwards(rootDir) {
let dirname = rootDir;
while (true) {
for (const filename of ROOT_CONFIG_FILENAMES) {
if (yield* fs.exists(_path().default.join(dirname, filename))) {
return dirname;
}
}
const nextDir = _path().default.dirname(dirname);
if (dirname === nextDir) break;
dirname = nextDir;
}
return null;
}
function* findRelativeConfig(packageData, envName, caller) {
let config = null;
let ignore = null;
const dirname = _path().default.dirname(packageData.filepath);
for (const loc of packageData.directories) {
if (!config) {
config = yield* loadOneConfig(RELATIVE_CONFIG_FILENAMES, loc, envName, caller, packageData.pkg && packageData.pkg.dirname === loc ? packageToBabelConfig(packageData.pkg) : null);
}
if (!ignore) {
const ignoreLoc = _path().default.join(loc, BABELIGNORE_FILENAME);
ignore = yield* readIgnoreConfig(ignoreLoc);
if (ignore) {
debug("Found ignore %o from %o.", ignore.filepath, dirname);
}
}
}
return {
config,
ignore
};
}
function findRootConfig(dirname, envName, caller) {
return loadOneConfig(ROOT_CONFIG_FILENAMES, dirname, envName, caller);
}
function* loadOneConfig(names, dirname, envName, caller, previousConfig = null) {
const configs = yield* _gensync().default.all(names.map(filename => readConfig(_path().default.join(dirname, filename), envName, caller)));
const config = configs.reduce((previousConfig, config) => {
if (config && previousConfig) {
throw new Error(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().default.basename(previousConfig.filepath)}\n` + ` - ${config.filepath}\n` + `from ${dirname}`);
}
return config || previousConfig;
}, previousConfig);
if (config) {
debug("Found configuration %o from %o.", config.filepath, dirname);
}
return config;
}
function* loadConfig(name, dirname, envName, caller) {
const filepath = yield* (0, _resolve.default)(name, {
basedir: dirname
});
const conf = yield* readConfig(filepath, envName, caller);
if (!conf) {
throw new Error(`Config file ${filepath} contains no configuration data`);
}
debug("Loaded config %o from %o.", name, dirname);
return conf;
}
function readConfig(filepath, envName, caller) {
const ext = _path().default.extname(filepath);
return ext === ".js" || ext === ".cjs" || ext === ".mjs" ? readConfigJS(filepath, {
envName,
caller
}) : readConfigJSON5(filepath);
}
const LOADING_CONFIGS = new Set();
const readConfigJS = (0, _caching.makeStrongCache)(function* readConfigJS(filepath, cache) {
if (!fs.exists.sync(filepath)) {
cache.forever();
return null;
}
if (LOADING_CONFIGS.has(filepath)) {
cache.never();
debug("Auto-ignoring usage of config %o.", filepath);
return {
filepath,
dirname: _path().default.dirname(filepath),
options: {}
};
}
let options;
try {
LOADING_CONFIGS.add(filepath);
options = yield* (0, _moduleTypes.default)(filepath, "You appear to be using a native ECMAScript module configuration " + "file, which is only supported when running Babel asynchronously.");
} catch (err) {
err.message = `${filepath}: Error while loading config - ${err.message}`;
throw err;
} finally {
LOADING_CONFIGS.delete(filepath);
}
let assertCache = false;
if (typeof options === "function") {
yield* [];
options = options((0, _configApi.default)(cache));
assertCache = true;
}
if (!options || typeof options !== "object" || Array.isArray(options)) {
throw new Error(`${filepath}: Configuration should be an exported JavaScript object.`);
}
if (typeof options.then === "function") {
throw new Error(`You appear to be using an async configuration, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously return your config.`);
}
if (assertCache && !cache.configured()) throwConfigError();
return {
filepath,
dirname: _path().default.dirname(filepath),
options
};
});
const packageToBabelConfig = (0, _caching.makeWeakCacheSync)(file => {
const babel = file.options["babel"];
if (typeof babel === "undefined") return null;
if (typeof babel !== "object" || Array.isArray(babel) || babel === null) {
throw new Error(`${file.filepath}: .babel property must be an object`);
}
return {
filepath: file.filepath,
dirname: file.dirname,
options: babel
};
});
const readConfigJSON5 = (0, _utils.makeStaticFileCache)((filepath, content) => {
let options;
try {
options = _json().default.parse(content);
} catch (err) {
err.message = `${filepath}: Error while parsing config - ${err.message}`;
throw err;
}
if (!options) throw new Error(`${filepath}: No config detected`);
if (typeof options !== "object") {
throw new Error(`${filepath}: Config returned typeof ${typeof options}`);
}
if (Array.isArray(options)) {
throw new Error(`${filepath}: Expected config object but found array`);
}
return {
filepath,
dirname: _path().default.dirname(filepath),
options
};
});
const readIgnoreConfig = (0, _utils.makeStaticFileCache)((filepath, content) => {
const ignoreDir = _path().default.dirname(filepath);
const ignorePatterns = content.split("\n").map(line => line.replace(/#(.*?)$/, "").trim()).filter(line => !!line);
for (const pattern of ignorePatterns) {
if (pattern[0] === "!") {
throw new Error(`Negation of file paths is not supported.`);
}
}
return {
filepath,
dirname: _path().default.dirname(filepath),
ignore: ignorePatterns.map(pattern => (0, _patternToRegex.default)(pattern, ignoreDir))
};
});
function throwConfigError() {
throw new Error(`\
Caching was left unconfigured. Babel's plugins, presets, and .babelrc.js files can be configured
for various types of caching, using the first param of their handler functions:
module.exports = function(api) {
// The API exposes the following:
// Cache the returned value forever and don't call this function again.
api.cache(true);
// Don't cache at all. Not recommended because it will be very slow.
api.cache(false);
// Cached based on the value of some function. If this function returns a value different from
// a previously-encountered value, the plugins will re-evaluate.
var env = api.cache(() => process.env.NODE_ENV);
// If testing for a specific env, we recommend specifics to avoid instantiating a plugin for
// any possible NODE_ENV value that might come up during plugin execution.
var isProd = api.cache(() => process.env.NODE_ENV === "production");
// .cache(fn) will perform a linear search though instances to find the matching plugin based
// based on previous instantiated plugins. If you want to recreate the plugin and discard the
// previous instance whenever something changes, you may use:
var isProd = api.cache.invalidate(() => process.env.NODE_ENV === "production");
// Note, we also expose the following more-verbose versions of the above examples:
api.cache.forever(); // api.cache(true)
api.cache.never(); // api.cache(false)
api.cache.using(fn); // api.cache(fn)
// Return the value that will be cached.
return { };
};`);
}

@@ -1,10 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = import_;
function import_(filepath) {
return import(filepath);
}

@@ -1,63 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.findConfigUpwards = findConfigUpwards;
exports.findPackageData = findPackageData;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;
exports.resolvePlugin = resolvePlugin;
exports.resolvePreset = resolvePreset;
exports.loadPlugin = loadPlugin;
exports.loadPreset = loadPreset;
exports.ROOT_CONFIG_FILENAMES = void 0;
function* findConfigUpwards(rootDir) {
return null;
}
function* findPackageData(filepath) {
return {
filepath,
directories: [],
pkg: null,
isPackage: false
};
}
function* findRelativeConfig(pkgData, envName, caller) {
return {
pkg: null,
config: null,
ignore: null
};
}
function* findRootConfig(dirname, envName, caller) {
return null;
}
function* loadConfig(name, dirname, envName, caller) {
throw new Error(`Cannot load ${name} relative to ${dirname} in a browser`);
}
const ROOT_CONFIG_FILENAMES = [];
exports.ROOT_CONFIG_FILENAMES = ROOT_CONFIG_FILENAMES;
function resolvePlugin(name, dirname) {
return null;
}
function resolvePreset(name, dirname) {
return null;
}
function loadPlugin(name, dirname) {
throw new Error(`Cannot load plugin ${name} relative to ${dirname} in a browser`);
}
function loadPreset(name, dirname) {
throw new Error(`Cannot load preset ${name} relative to ${dirname} in a browser`);
}

@@ -1,73 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "findPackageData", {
enumerable: true,
get: function () {
return _package.findPackageData;
}
});
Object.defineProperty(exports, "findConfigUpwards", {
enumerable: true,
get: function () {
return _configuration.findConfigUpwards;
}
});
Object.defineProperty(exports, "findRelativeConfig", {
enumerable: true,
get: function () {
return _configuration.findRelativeConfig;
}
});
Object.defineProperty(exports, "findRootConfig", {
enumerable: true,
get: function () {
return _configuration.findRootConfig;
}
});
Object.defineProperty(exports, "loadConfig", {
enumerable: true,
get: function () {
return _configuration.loadConfig;
}
});
Object.defineProperty(exports, "ROOT_CONFIG_FILENAMES", {
enumerable: true,
get: function () {
return _configuration.ROOT_CONFIG_FILENAMES;
}
});
Object.defineProperty(exports, "resolvePlugin", {
enumerable: true,
get: function () {
return _plugins.resolvePlugin;
}
});
Object.defineProperty(exports, "resolvePreset", {
enumerable: true,
get: function () {
return _plugins.resolvePreset;
}
});
Object.defineProperty(exports, "loadPlugin", {
enumerable: true,
get: function () {
return _plugins.loadPlugin;
}
});
Object.defineProperty(exports, "loadPreset", {
enumerable: true,
get: function () {
return _plugins.loadPreset;
}
});
var _package = require("./package");
var _configuration = require("./configuration");
var _plugins = require("./plugins");
({});

@@ -1,86 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = loadCjsOrMjsDefault;
var _async = require("../../gensync-utils/async");
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
let import_;
try {
import_ = require("./import").default;
} catch (_unused) {}
function* loadCjsOrMjsDefault(filepath, asyncError) {
switch (guessJSModuleType(filepath)) {
case "cjs":
return loadCjsDefault(filepath);
case "unknown":
try {
return loadCjsDefault(filepath);
} catch (e) {
if (e.code !== "ERR_REQUIRE_ESM") throw e;
}
case "mjs":
if (yield* (0, _async.isAsync)()) {
return yield* (0, _async.waitFor)(loadMjsDefault(filepath));
}
throw new Error(asyncError);
}
}
function guessJSModuleType(filename) {
switch (_path().default.extname(filename)) {
case ".cjs":
return "cjs";
case ".mjs":
return "mjs";
default:
return "unknown";
}
}
function loadCjsDefault(filepath) {
const module = require(filepath);
return (module == null ? void 0 : module.__esModule) ? module.default || undefined : module;
}
function loadMjsDefault(_x) {
return _loadMjsDefault.apply(this, arguments);
}
function _loadMjsDefault() {
_loadMjsDefault = _asyncToGenerator(function* (filepath) {
if (!import_) {
throw new Error("Internal error: Native ECMAScript modules aren't supported" + " by this platform.\n");
}
const module = yield import_(filepath);
return module.default;
});
return _loadMjsDefault.apply(this, arguments);
}

@@ -1,76 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.findPackageData = findPackageData;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
var _utils = require("./utils");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const PACKAGE_FILENAME = "package.json";
function* findPackageData(filepath) {
let pkg = null;
const directories = [];
let isPackage = true;
let dirname = _path().default.dirname(filepath);
while (!pkg && _path().default.basename(dirname) !== "node_modules") {
directories.push(dirname);
pkg = yield* readConfigPackage(_path().default.join(dirname, PACKAGE_FILENAME));
const nextLoc = _path().default.dirname(dirname);
if (dirname === nextLoc) {
isPackage = false;
break;
}
dirname = nextLoc;
}
return {
filepath,
directories,
pkg,
isPackage
};
}
const readConfigPackage = (0, _utils.makeStaticFileCache)((filepath, content) => {
let options;
try {
options = JSON.parse(content);
} catch (err) {
err.message = `${filepath}: Error while parsing JSON - ${err.message}`;
throw err;
}
if (typeof options !== "object") {
throw new Error(`${filepath}: Config returned typeof ${typeof options}`);
}
if (Array.isArray(options)) {
throw new Error(`${filepath}: Expected config object but found array`);
}
return {
filepath,
dirname: _path().default.dirname(filepath),
options
};
});

@@ -1,169 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.resolvePlugin = resolvePlugin;
exports.resolvePreset = resolvePreset;
exports.loadPlugin = loadPlugin;
exports.loadPreset = loadPreset;
function _debug() {
const data = _interopRequireDefault(require("debug"));
_debug = function () {
return data;
};
return data;
}
function _resolve() {
const data = _interopRequireDefault(require("resolve"));
_resolve = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const debug = (0, _debug().default)("babel:config:loading:files:plugins");
const EXACT_RE = /^module:/;
const BABEL_PLUGIN_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-plugin-)/;
const BABEL_PRESET_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-preset-)/;
const BABEL_PLUGIN_ORG_RE = /^(@babel\/)(?!plugin-|[^/]+\/)/;
const BABEL_PRESET_ORG_RE = /^(@babel\/)(?!preset-|[^/]+\/)/;
const OTHER_PLUGIN_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-plugin(?:-|\/|$)|[^/]+\/)/;
const OTHER_PRESET_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-preset(?:-|\/|$)|[^/]+\/)/;
const OTHER_ORG_DEFAULT_RE = /^(@(?!babel$)[^/]+)$/;
function resolvePlugin(name, dirname) {
return resolveStandardizedName("plugin", name, dirname);
}
function resolvePreset(name, dirname) {
return resolveStandardizedName("preset", name, dirname);
}
function loadPlugin(name, dirname) {
const filepath = resolvePlugin(name, dirname);
if (!filepath) {
throw new Error(`Plugin ${name} not found relative to ${dirname}`);
}
const value = requireModule("plugin", filepath);
debug("Loaded plugin %o from %o.", name, dirname);
return {
filepath,
value
};
}
function loadPreset(name, dirname) {
const filepath = resolvePreset(name, dirname);
if (!filepath) {
throw new Error(`Preset ${name} not found relative to ${dirname}`);
}
const value = requireModule("preset", filepath);
debug("Loaded preset %o from %o.", name, dirname);
return {
filepath,
value
};
}
function standardizeName(type, name) {
if (_path().default.isAbsolute(name)) return name;
const isPreset = type === "preset";
return name.replace(isPreset ? BABEL_PRESET_PREFIX_RE : BABEL_PLUGIN_PREFIX_RE, `babel-${type}-`).replace(isPreset ? BABEL_PRESET_ORG_RE : BABEL_PLUGIN_ORG_RE, `$1${type}-`).replace(isPreset ? OTHER_PRESET_ORG_RE : OTHER_PLUGIN_ORG_RE, `$1babel-${type}-`).replace(OTHER_ORG_DEFAULT_RE, `$1/babel-${type}`).replace(EXACT_RE, "");
}
function resolveStandardizedName(type, name, dirname = process.cwd()) {
const standardizedName = standardizeName(type, name);
try {
return _resolve().default.sync(standardizedName, {
basedir: dirname
});
} catch (e) {
if (e.code !== "MODULE_NOT_FOUND") throw e;
if (standardizedName !== name) {
let resolvedOriginal = false;
try {
_resolve().default.sync(name, {
basedir: dirname
});
resolvedOriginal = true;
} catch (e2) {}
if (resolvedOriginal) {
e.message += `\n- If you want to resolve "${name}", use "module:${name}"`;
}
}
let resolvedBabel = false;
try {
_resolve().default.sync(standardizeName(type, "@babel/" + name), {
basedir: dirname
});
resolvedBabel = true;
} catch (e2) {}
if (resolvedBabel) {
e.message += `\n- Did you mean "@babel/${name}"?`;
}
let resolvedOppositeType = false;
const oppositeType = type === "preset" ? "plugin" : "preset";
try {
_resolve().default.sync(standardizeName(oppositeType, name), {
basedir: dirname
});
resolvedOppositeType = true;
} catch (e2) {}
if (resolvedOppositeType) {
e.message += `\n- Did you accidentally pass a ${oppositeType} as a ${type}?`;
}
throw e;
}
}
const LOADING_MODULES = new Set();
function requireModule(type, name) {
if (LOADING_MODULES.has(name)) {
throw new Error(`Reentrant ${type} detected trying to load "${name}". This module is not ignored ` + "and is trying to load itself while compiling itself, leading to a dependency cycle. " + 'We recommend adding it to your "ignore" list in your babelrc, or to a .babelignore.');
}
try {
LOADING_MODULES.add(name);
return require(name);
} finally {
LOADING_MODULES.delete(name);
}
}
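
The prefix and org regexes above drive Babel's plugin/preset name shorthand. A standalone re-statement of the expansion rules for illustration (absolute paths, which the real resolver returns unchanged, are omitted here):

```js
// Re-implementation of the shorthand expansion, for illustration only.
const EXACT_RE = /^module:/;
const PLUGIN_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-plugin-)/;
const PRESET_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-preset-)/;
const BABEL_PLUGIN_ORG_RE = /^(@babel\/)(?!plugin-|[^/]+\/)/;
const BABEL_PRESET_ORG_RE = /^(@babel\/)(?!preset-|[^/]+\/)/;
const OTHER_PLUGIN_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-plugin(?:-|\/|$)|[^/]+\/)/;
const OTHER_PRESET_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-preset(?:-|\/|$)|[^/]+\/)/;
const OTHER_ORG_DEFAULT_RE = /^(@(?!babel$)[^/]+)$/;

function standardizeName(type, name) {
  const isPreset = type === "preset";
  return name
    .replace(isPreset ? PRESET_PREFIX_RE : PLUGIN_PREFIX_RE, `babel-${type}-`)
    .replace(isPreset ? BABEL_PRESET_ORG_RE : BABEL_PLUGIN_ORG_RE, `$1${type}-`)
    .replace(isPreset ? OTHER_PRESET_ORG_RE : OTHER_PLUGIN_ORG_RE, `$1babel-${type}-`)
    .replace(OTHER_ORG_DEFAULT_RE, `$1/babel-${type}`)
    .replace(EXACT_RE, "");
}

console.log(standardizeName("plugin", "transform-runtime")); // babel-plugin-transform-runtime
console.log(standardizeName("preset", "@babel/env"));        // @babel/preset-env
console.log(standardizeName("plugin", "@org/nice"));         // @org/babel-plugin-nice
console.log(standardizeName("plugin", "module:my-plugin"));  // my-plugin (used verbatim)
```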

View File

@ -1,49 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.makeStaticFileCache = makeStaticFileCache;
var _caching = require("../caching");
var fs = _interopRequireWildcard(require("../../gensync-utils/fs"));
function _fs2() {
const data = _interopRequireDefault(require("fs"));
_fs2 = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function makeStaticFileCache(fn) {
return (0, _caching.makeStrongCache)(function* (filepath, cache) {
const cached = cache.invalidate(() => fileMtime(filepath));
if (cached === null) {
cache.forever();
return null;
}
return fn(filepath, (yield* fs.readFile(filepath, "utf8")));
});
}
function fileMtime(filepath) {
try {
return +_fs2().default.statSync(filepath).mtime;
} catch (e) {
if (e.code !== "ENOENT" && e.code !== "ENOTDIR") throw e;
}
return null;
}
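
makeStaticFileCache keys each entry on the file's mtime, so an edit on disk invalidates the cached parse, while missing files simply yield null. A simple synchronous analogue of that idea (a hypothetical helper; the real implementation is generator-based and uses the caching layer referenced above):

```js
const fs = require("fs");

const cache = new Map(); // filepath -> { mtime, result }

// Re-read and re-parse a file only when its mtime changes; missing files yield null.
function cachedRead(filepath, parse) {
  let mtime = null;
  try {
    mtime = +fs.statSync(filepath).mtime;
  } catch (e) {
    if (e.code !== "ENOENT" && e.code !== "ENOTDIR") throw e;
  }
  if (mtime === null) return null;

  const hit = cache.get(filepath);
  if (hit && hit.mtime === mtime) return hit.result;

  const result = parse(filepath, fs.readFileSync(filepath, "utf8"));
  cache.set(filepath, { mtime, result });
  return result;
}

// cachedRead("package.json", (_, content) => JSON.parse(content));
```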

View File

@ -1,297 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _async = require("../gensync-utils/async");
var _util = require("./util");
var context = _interopRequireWildcard(require("../index"));
var _plugin = _interopRequireDefault(require("./plugin"));
var _item = require("./item");
var _configChain = require("./config-chain");
function _traverse() {
const data = _interopRequireDefault(require("@babel/traverse"));
_traverse = function () {
return data;
};
return data;
}
var _caching = require("./caching");
var _options = require("./validation/options");
var _plugins = require("./validation/plugins");
var _configApi = _interopRequireDefault(require("./helpers/config-api"));
var _partial = _interopRequireDefault(require("./partial"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var _default = (0, _gensync().default)(function* loadFullConfig(inputOpts) {
const result = yield* (0, _partial.default)(inputOpts);
if (!result) {
return null;
}
const {
options,
context
} = result;
const optionDefaults = {};
const passes = [[]];
try {
const {
plugins,
presets
} = options;
if (!plugins || !presets) {
throw new Error("Assertion failure - plugins and presets exist");
}
const ignored = yield* function* recurseDescriptors(config, pass) {
const plugins = [];
for (const descriptor of config.plugins) {
if (descriptor.options !== false) {
plugins.push((yield* loadPluginDescriptor(descriptor, context)));
}
}
const presets = [];
for (const descriptor of config.presets) {
if (descriptor.options !== false) {
presets.push({
preset: yield* loadPresetDescriptor(descriptor, context),
pass: descriptor.ownPass ? [] : pass
});
}
}
if (presets.length > 0) {
passes.splice(1, 0, ...presets.map(o => o.pass).filter(p => p !== pass));
for (const {
preset,
pass
} of presets) {
if (!preset) return true;
const ignored = yield* recurseDescriptors({
plugins: preset.plugins,
presets: preset.presets
}, pass);
if (ignored) return true;
preset.options.forEach(opts => {
(0, _util.mergeOptions)(optionDefaults, opts);
});
}
}
if (plugins.length > 0) {
pass.unshift(...plugins);
}
}({
plugins: plugins.map(item => {
const desc = (0, _item.getItemDescriptor)(item);
if (!desc) {
throw new Error("Assertion failure - must be config item");
}
return desc;
}),
presets: presets.map(item => {
const desc = (0, _item.getItemDescriptor)(item);
if (!desc) {
throw new Error("Assertion failure - must be config item");
}
return desc;
})
}, passes[0]);
if (ignored) return null;
} catch (e) {
if (!/^\[BABEL\]/.test(e.message)) {
e.message = `[BABEL] ${context.filename || "unknown"}: ${e.message}`;
}
throw e;
}
const opts = optionDefaults;
(0, _util.mergeOptions)(opts, options);
opts.plugins = passes[0];
opts.presets = passes.slice(1).filter(plugins => plugins.length > 0).map(plugins => ({
plugins
}));
opts.passPerPreset = opts.presets.length > 0;
return {
options: opts,
passes: passes
};
});
exports.default = _default;
const loadDescriptor = (0, _caching.makeWeakCache)(function* ({
value,
options,
dirname,
alias
}, cache) {
if (options === false) throw new Error("Assertion failure");
options = options || {};
let item = value;
if (typeof value === "function") {
const api = Object.assign({}, context, {}, (0, _configApi.default)(cache));
try {
item = value(api, options, dirname);
} catch (e) {
if (alias) {
e.message += ` (While processing: ${JSON.stringify(alias)})`;
}
throw e;
}
}
if (!item || typeof item !== "object") {
throw new Error("Plugin/Preset did not return an object.");
}
if (typeof item.then === "function") {
yield* [];
throw new Error(`You appear to be using an async plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, ` + `you may need to upgrade your @babel/core version.`);
}
return {
value: item,
options,
dirname,
alias
};
});
function* loadPluginDescriptor(descriptor, context) {
if (descriptor.value instanceof _plugin.default) {
if (descriptor.options) {
throw new Error("Passed options to an existing Plugin instance will not work.");
}
return descriptor.value;
}
return yield* instantiatePlugin((yield* loadDescriptor(descriptor, context)), context);
}
const instantiatePlugin = (0, _caching.makeWeakCache)(function* ({
value,
options,
dirname,
alias
}, cache) {
const pluginObj = (0, _plugins.validatePluginObject)(value);
const plugin = Object.assign({}, pluginObj);
if (plugin.visitor) {
plugin.visitor = _traverse().default.explode(Object.assign({}, plugin.visitor));
}
if (plugin.inherits) {
const inheritsDescriptor = {
name: undefined,
alias: `${alias}$inherits`,
value: plugin.inherits,
options,
dirname
};
const inherits = yield* (0, _async.forwardAsync)(loadPluginDescriptor, run => {
return cache.invalidate(data => run(inheritsDescriptor, data));
});
plugin.pre = chain(inherits.pre, plugin.pre);
plugin.post = chain(inherits.post, plugin.post);
plugin.manipulateOptions = chain(inherits.manipulateOptions, plugin.manipulateOptions);
plugin.visitor = _traverse().default.visitors.merge([inherits.visitor || {}, plugin.visitor || {}]);
}
return new _plugin.default(plugin, options, alias);
});
const validateIfOptionNeedsFilename = (options, descriptor) => {
if (options.test || options.include || options.exclude) {
const formattedPresetName = descriptor.name ? `"${descriptor.name}"` : "/* your preset */";
throw new Error([`Preset ${formattedPresetName} requires a filename to be set when babel is called directly,`, `\`\`\``, `babel.transform(code, { filename: 'file.ts', presets: [${formattedPresetName}] });`, `\`\`\``, `See https://babeljs.io/docs/en/options#filename for more information.`].join("\n"));
}
};
const validatePreset = (preset, context, descriptor) => {
if (!context.filename) {
const {
options
} = preset;
validateIfOptionNeedsFilename(options, descriptor);
if (options.overrides) {
options.overrides.forEach(overrideOptions => validateIfOptionNeedsFilename(overrideOptions, descriptor));
}
}
};
function* loadPresetDescriptor(descriptor, context) {
const preset = instantiatePreset((yield* loadDescriptor(descriptor, context)));
validatePreset(preset, context, descriptor);
return yield* (0, _configChain.buildPresetChain)(preset, context);
}
const instantiatePreset = (0, _caching.makeWeakCacheSync)(({
value,
dirname,
alias
}) => {
return {
options: (0, _options.validate)("preset", value),
alias,
dirname
};
});
function chain(a, b) {
const fns = [a, b].filter(Boolean);
if (fns.length <= 1) return fns[0];
return function (...args) {
for (const fn of fns) {
fn.apply(this, args);
}
};
}
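
The chain() helper above merges an inherited plugin's pre/post/manipulateOptions hooks with the plugin's own, calling both in order. A quick sketch of its behavior:

```js
function chain(a, b) {
  const fns = [a, b].filter(Boolean);
  if (fns.length <= 1) return fns[0];
  return function (...args) {
    for (const fn of fns) {
      fn.apply(this, args);
    }
  };
}

const pre = chain(
  () => console.log("inherited pre"),
  () => console.log("own pre")
);
pre(); // "inherited pre" then "own pre"

console.log(chain(null, undefined)); // undefined when neither hook is defined
```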

View File

@ -1,86 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = makeAPI;
function _semver() {
const data = _interopRequireDefault(require("semver"));
_semver = function () {
return data;
};
return data;
}
var _ = require("../../");
var _caching = require("../caching");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function makeAPI(cache) {
const env = value => cache.using(data => {
if (typeof value === "undefined") return data.envName;
if (typeof value === "function") {
return (0, _caching.assertSimpleType)(value(data.envName));
}
if (!Array.isArray(value)) value = [value];
return value.some(entry => {
if (typeof entry !== "string") {
throw new Error("Unexpected non-string value");
}
return entry === data.envName;
});
});
const caller = cb => cache.using(data => (0, _caching.assertSimpleType)(cb(data.caller)));
return {
version: _.version,
cache: cache.simple(),
env,
async: () => false,
caller,
assertVersion,
tokTypes: undefined
};
}
function assertVersion(range) {
if (typeof range === "number") {
if (!Number.isInteger(range)) {
throw new Error("Expected string or integer value.");
}
range = `^${range}.0.0-0`;
}
if (typeof range !== "string") {
throw new Error("Expected string or integer value.");
}
if (_semver().default.satisfies(_.version, range)) return;
const limit = Error.stackTraceLimit;
if (typeof limit === "number" && limit < 25) {
Error.stackTraceLimit = 25;
}
const err = new Error(`Requires Babel "${range}", but was loaded with "${_.version}". ` + `If you are sure you have a compatible version of @babel/core, ` + `it is likely that something in your build process is loading the ` + `wrong version. Inspect the stack trace of this error to look for ` + `the first entry that doesn't mention "@babel/core" or "babel-core" ` + `to see what is calling Babel.`);
if (typeof limit === "number") {
Error.stackTraceLimit = limit;
}
throw Object.assign(err, {
code: "BABEL_VERSION_UNSUPPORTED",
version: _.version,
range
});
}
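
assertVersion accepts either a semver range or a major-version integer; an integer N is widened to the range `^N.0.0-0` before the check. A small sketch of that normalization, assuming the semver package is installed:

```js
const semver = require("semver");

// Integer input is widened to a caret range that also accepts prereleases.
function normalizeRange(range) {
  if (typeof range === "number") {
    if (!Number.isInteger(range)) throw new Error("Expected string or integer value.");
    return `^${range}.0.0-0`;
  }
  if (typeof range !== "string") throw new Error("Expected string or integer value.");
  return range;
}

console.log(normalizeRange(7));                            // "^7.0.0-0"
console.log(semver.satisfies("7.8.3", normalizeRange(7))); // true
console.log(semver.satisfies("6.26.3", "^7.0.0"));         // false
```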

View File

@ -1,10 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getEnv = getEnv;
function getEnv(defaultValue = "development") {
return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue;
}
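
getEnv gives BABEL_ENV precedence over NODE_ENV and falls back to the supplied default ("development"). For example:

```js
function getEnv(defaultValue = "development") {
  return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue;
}

delete process.env.BABEL_ENV;
process.env.NODE_ENV = "test";
console.log(getEnv()); // "test"

process.env.BABEL_ENV = "production";
console.log(getEnv()); // "production" — BABEL_ENV wins over NODE_ENV

delete process.env.BABEL_ENV;
delete process.env.NODE_ENV;
console.log(getEnv()); // "development"
```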

View File

@ -1,55 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "default", {
enumerable: true,
get: function () {
return _full.default;
}
});
exports.loadOptionsAsync = exports.loadOptionsSync = exports.loadOptions = exports.loadPartialConfigAsync = exports.loadPartialConfigSync = exports.loadPartialConfig = void 0;
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _full = _interopRequireDefault(require("./full"));
var _partial = require("./partial");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const loadOptionsRunner = (0, _gensync().default)(function* (opts) {
const config = yield* (0, _full.default)(opts);
return config ? config.options : null;
});
const maybeErrback = runner => (opts, callback) => {
if (callback === undefined && typeof opts === "function") {
callback = opts;
opts = undefined;
}
return callback ? runner.errback(opts, callback) : runner.sync(opts);
};
const loadPartialConfig = maybeErrback(_partial.loadPartialConfig);
exports.loadPartialConfig = loadPartialConfig;
const loadPartialConfigSync = _partial.loadPartialConfig.sync;
exports.loadPartialConfigSync = loadPartialConfigSync;
const loadPartialConfigAsync = _partial.loadPartialConfig.async;
exports.loadPartialConfigAsync = loadPartialConfigAsync;
const loadOptions = maybeErrback(loadOptionsRunner);
exports.loadOptions = loadOptions;
const loadOptionsSync = loadOptionsRunner.sync;
exports.loadOptionsSync = loadOptionsSync;
const loadOptionsAsync = loadOptionsRunner.async;
exports.loadOptionsAsync = loadOptionsAsync;
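
maybeErrback lets each entry point be called synchronously or with a trailing Node-style callback, and the gensync runner also exposes explicit sync/async variants. A usage sketch, assuming @babel/core (this package) is installed:

```js
const babel = require("@babel/core");

// Synchronous: no callback argument.
const options = babel.loadOptions({ babelrc: false, configFile: false });
console.log(options.envName); // "development" unless BABEL_ENV/NODE_ENV is set

// Errback style: a trailing callback switches to asynchronous execution.
babel.loadPartialConfig({ babelrc: false, configFile: false }, (err, partial) => {
  if (err) throw err;
  console.log(partial.hasFilesystemConfig()); // false — nothing was read from disk
});

// Promise style via the *Async variants.
babel.loadOptionsAsync({ babelrc: false, configFile: false })
  .then(opts => console.log(opts.plugins.length));
```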

View File

@ -1,66 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createItemFromDescriptor = createItemFromDescriptor;
exports.createConfigItem = createConfigItem;
exports.getItemDescriptor = getItemDescriptor;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
var _configDescriptors = require("./config-descriptors");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function createItemFromDescriptor(desc) {
return new ConfigItem(desc);
}
function createConfigItem(value, {
dirname = ".",
type
} = {}) {
const descriptor = (0, _configDescriptors.createDescriptor)(value, _path().default.resolve(dirname), {
type,
alias: "programmatic item"
});
return createItemFromDescriptor(descriptor);
}
function getItemDescriptor(item) {
if (item instanceof ConfigItem) {
return item._descriptor;
}
return undefined;
}
class ConfigItem {
constructor(descriptor) {
this._descriptor = descriptor;
Object.defineProperty(this, "_descriptor", {
enumerable: false
});
this.value = this._descriptor.value;
this.options = this._descriptor.options;
this.dirname = this._descriptor.dirname;
this.name = this._descriptor.name;
this.file = this._descriptor.file ? {
request: this._descriptor.file.request,
resolved: this._descriptor.file.resolved
} : undefined;
Object.freeze(this);
}
}
Object.freeze(ConfigItem.prototype);

View File

@ -1,152 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = loadPrivatePartialConfig;
exports.loadPartialConfig = void 0;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _plugin = _interopRequireDefault(require("./plugin"));
var _util = require("./util");
var _item = require("./item");
var _configChain = require("./config-chain");
var _environment = require("./helpers/environment");
var _options = require("./validation/options");
var _files = require("./files");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function* resolveRootMode(rootDir, rootMode) {
switch (rootMode) {
case "root":
return rootDir;
case "upward-optional":
{
const upwardRootDir = yield* (0, _files.findConfigUpwards)(rootDir);
return upwardRootDir === null ? rootDir : upwardRootDir;
}
case "upward":
{
const upwardRootDir = yield* (0, _files.findConfigUpwards)(rootDir);
if (upwardRootDir !== null) return upwardRootDir;
throw Object.assign(new Error(`Babel was run with rootMode:"upward" but a root could not ` + `be found when searching upward from "${rootDir}".\n` + `One of the following config files must be in the directory tree: ` + `"${_files.ROOT_CONFIG_FILENAMES.join(", ")}".`), {
code: "BABEL_ROOT_NOT_FOUND",
dirname: rootDir
});
}
default:
throw new Error(`Assertion failure - unknown rootMode value.`);
}
}
function* loadPrivatePartialConfig(inputOpts) {
if (inputOpts != null && (typeof inputOpts !== "object" || Array.isArray(inputOpts))) {
throw new Error("Babel options must be an object, null, or undefined");
}
const args = inputOpts ? (0, _options.validate)("arguments", inputOpts) : {};
const {
envName = (0, _environment.getEnv)(),
cwd = ".",
root: rootDir = ".",
rootMode = "root",
caller
} = args;
const absoluteCwd = _path().default.resolve(cwd);
const absoluteRootDir = yield* resolveRootMode(_path().default.resolve(absoluteCwd, rootDir), rootMode);
const context = {
filename: typeof args.filename === "string" ? _path().default.resolve(cwd, args.filename) : undefined,
cwd: absoluteCwd,
root: absoluteRootDir,
envName,
caller
};
const configChain = yield* (0, _configChain.buildRootChain)(args, context);
if (!configChain) return null;
const options = {};
configChain.options.forEach(opts => {
(0, _util.mergeOptions)(options, opts);
});
options.babelrc = false;
options.configFile = false;
options.passPerPreset = false;
options.envName = context.envName;
options.cwd = context.cwd;
options.root = context.root;
options.filename = typeof context.filename === "string" ? context.filename : undefined;
options.plugins = configChain.plugins.map(descriptor => (0, _item.createItemFromDescriptor)(descriptor));
options.presets = configChain.presets.map(descriptor => (0, _item.createItemFromDescriptor)(descriptor));
return {
options,
context,
ignore: configChain.ignore,
babelrc: configChain.babelrc,
config: configChain.config
};
}
const loadPartialConfig = (0, _gensync().default)(function* (inputOpts) {
const result = yield* loadPrivatePartialConfig(inputOpts);
if (!result) return null;
const {
options,
babelrc,
ignore,
config
} = result;
(options.plugins || []).forEach(item => {
if (item.value instanceof _plugin.default) {
throw new Error("Passing cached plugin instances is not supported in " + "babel.loadPartialConfig()");
}
});
return new PartialConfig(options, babelrc ? babelrc.filepath : undefined, ignore ? ignore.filepath : undefined, config ? config.filepath : undefined);
});
exports.loadPartialConfig = loadPartialConfig;
class PartialConfig {
constructor(options, babelrc, ignore, config) {
this.options = options;
this.babelignore = ignore;
this.babelrc = babelrc;
this.config = config;
Object.freeze(this);
}
hasFilesystemConfig() {
return this.babelrc !== undefined || this.config !== undefined;
}
}
Object.freeze(PartialConfig.prototype);

View File

@ -1,52 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = pathToPattern;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _escapeRegExp() {
const data = _interopRequireDefault(require("lodash/escapeRegExp"));
_escapeRegExp = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const sep = `\\${_path().default.sep}`;
const endSep = `(?:${sep}|$)`;
const substitution = `[^${sep}]+`;
const starPat = `(?:${substitution}${sep})`;
const starPatLast = `(?:${substitution}${endSep})`;
const starStarPat = `${starPat}*?`;
const starStarPatLast = `${starPat}*?${starPatLast}?`;
function pathToPattern(pattern, dirname) {
const parts = _path().default.resolve(dirname, pattern).split(_path().default.sep);
return new RegExp(["^", ...parts.map((part, i) => {
const last = i === parts.length - 1;
if (part === "**") return last ? starStarPatLast : starStarPat;
if (part === "*") return last ? starPatLast : starPat;
if (part.indexOf("*.") === 0) {
return substitution + (0, _escapeRegExp().default)(part.slice(1)) + (last ? endSep : sep);
}
return (0, _escapeRegExp().default)(part) + (last ? endSep : sep);
})].join(""));
}
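
pathToPattern compiles an `ignore`/`only` string pattern into a RegExp rooted at dirname, where `*` matches a single path segment and `**` matches any depth. For instance, on a POSIX system, `pathToPattern("src/**", "/project")` would produce roughly the following (shown here as a literal for illustration):

```js
// Approximate result of pathToPattern("src/**", "/project") with POSIX separators.
const re = /^\/project\/src\/(?:[^\/]+\/)*?(?:[^\/]+(?:\/|$))?/;

console.log(re.test("/project/src/deep/file.js")); // true
console.log(re.test("/project/src"));              // false — no trailing separator
console.log(re.test("/project/lib/file.js"));      // false — different directory
```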

View File

@ -1,22 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
class Plugin {
constructor(plugin, options, key) {
this.key = plugin.name || key;
this.manipulateOptions = plugin.manipulateOptions;
this.post = plugin.post;
this.pre = plugin.pre;
this.visitor = plugin.visitor || {};
this.parserOverride = plugin.parserOverride;
this.generatorOverride = plugin.generatorOverride;
this.options = options;
}
}
exports.default = Plugin;

View File

@ -1,35 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.mergeOptions = mergeOptions;
exports.isIterableIterator = isIterableIterator;
function mergeOptions(target, source) {
for (const k of Object.keys(source)) {
if (k === "parserOpts" && source.parserOpts) {
const parserOpts = source.parserOpts;
const targetObj = target.parserOpts = target.parserOpts || {};
mergeDefaultFields(targetObj, parserOpts);
} else if (k === "generatorOpts" && source.generatorOpts) {
const generatorOpts = source.generatorOpts;
const targetObj = target.generatorOpts = target.generatorOpts || {};
mergeDefaultFields(targetObj, generatorOpts);
} else {
const val = source[k];
if (val !== undefined) target[k] = val;
}
}
}
function mergeDefaultFields(target, source) {
for (const k of Object.keys(source)) {
const val = source[k];
if (val !== undefined) target[k] = val;
}
}
function isIterableIterator(value) {
return !!value && typeof value.next === "function" && typeof value[Symbol.iterator] === "function";
}
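
mergeOptions overwrites ordinary keys but merges parserOpts and generatorOpts field by field, so nested parser settings from several config sources accumulate instead of clobbering each other. A condensed, behavior-equivalent sketch:

```js
function mergeDefaultFields(target, source) {
  for (const k of Object.keys(source)) {
    if (source[k] !== undefined) target[k] = source[k];
  }
}

function mergeOptions(target, source) {
  for (const k of Object.keys(source)) {
    if ((k === "parserOpts" || k === "generatorOpts") && source[k]) {
      // Nested opts objects are merged per-field rather than replaced.
      mergeDefaultFields(target[k] = target[k] || {}, source[k]);
    } else if (source[k] !== undefined) {
      target[k] = source[k];
    }
  }
}

const merged = { parserOpts: { plugins: ["jsx"] }, comments: true };
mergeOptions(merged, { parserOpts: { allowReturnOutsideFunction: true }, comments: false });
console.log(merged);
// { parserOpts: { plugins: ["jsx"], allowReturnOutsideFunction: true }, comments: false }
```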

View File

@ -1,268 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.msg = msg;
exports.access = access;
exports.assertRootMode = assertRootMode;
exports.assertSourceMaps = assertSourceMaps;
exports.assertCompact = assertCompact;
exports.assertSourceType = assertSourceType;
exports.assertCallerMetadata = assertCallerMetadata;
exports.assertInputSourceMap = assertInputSourceMap;
exports.assertString = assertString;
exports.assertFunction = assertFunction;
exports.assertBoolean = assertBoolean;
exports.assertObject = assertObject;
exports.assertArray = assertArray;
exports.assertIgnoreList = assertIgnoreList;
exports.assertConfigApplicableTest = assertConfigApplicableTest;
exports.assertConfigFileSearch = assertConfigFileSearch;
exports.assertBabelrcSearch = assertBabelrcSearch;
exports.assertPluginList = assertPluginList;
function msg(loc) {
switch (loc.type) {
case "root":
return ``;
case "env":
return `${msg(loc.parent)}.env["${loc.name}"]`;
case "overrides":
return `${msg(loc.parent)}.overrides[${loc.index}]`;
case "option":
return `${msg(loc.parent)}.${loc.name}`;
case "access":
return `${msg(loc.parent)}[${JSON.stringify(loc.name)}]`;
default:
throw new Error(`Assertion failure: Unknown type ${loc.type}`);
}
}
function access(loc, name) {
return {
type: "access",
name,
parent: loc
};
}
function assertRootMode(loc, value) {
if (value !== undefined && value !== "root" && value !== "upward" && value !== "upward-optional") {
throw new Error(`${msg(loc)} must be a "root", "upward", "upward-optional" or undefined`);
}
return value;
}
function assertSourceMaps(loc, value) {
if (value !== undefined && typeof value !== "boolean" && value !== "inline" && value !== "both") {
throw new Error(`${msg(loc)} must be a boolean, "inline", "both", or undefined`);
}
return value;
}
function assertCompact(loc, value) {
if (value !== undefined && typeof value !== "boolean" && value !== "auto") {
throw new Error(`${msg(loc)} must be a boolean, "auto", or undefined`);
}
return value;
}
function assertSourceType(loc, value) {
if (value !== undefined && value !== "module" && value !== "script" && value !== "unambiguous") {
throw new Error(`${msg(loc)} must be "module", "script", "unambiguous", or undefined`);
}
return value;
}
function assertCallerMetadata(loc, value) {
const obj = assertObject(loc, value);
if (obj) {
if (typeof obj["name"] !== "string") {
throw new Error(`${msg(loc)} set but does not contain "name" property string`);
}
for (const prop of Object.keys(obj)) {
const propLoc = access(loc, prop);
const value = obj[prop];
if (value != null && typeof value !== "boolean" && typeof value !== "string" && typeof value !== "number") {
throw new Error(`${msg(propLoc)} must be null, undefined, a boolean, a string, or a number.`);
}
}
}
return value;
}
function assertInputSourceMap(loc, value) {
if (value !== undefined && typeof value !== "boolean" && (typeof value !== "object" || !value)) {
throw new Error(`${msg(loc)} must be a boolean, object, or undefined`);
}
return value;
}
function assertString(loc, value) {
if (value !== undefined && typeof value !== "string") {
throw new Error(`${msg(loc)} must be a string, or undefined`);
}
return value;
}
function assertFunction(loc, value) {
if (value !== undefined && typeof value !== "function") {
throw new Error(`${msg(loc)} must be a function, or undefined`);
}
return value;
}
function assertBoolean(loc, value) {
if (value !== undefined && typeof value !== "boolean") {
throw new Error(`${msg(loc)} must be a boolean, or undefined`);
}
return value;
}
function assertObject(loc, value) {
if (value !== undefined && (typeof value !== "object" || Array.isArray(value) || !value)) {
throw new Error(`${msg(loc)} must be an object, or undefined`);
}
return value;
}
function assertArray(loc, value) {
if (value != null && !Array.isArray(value)) {
throw new Error(`${msg(loc)} must be an array, or undefined`);
}
return value;
}
function assertIgnoreList(loc, value) {
const arr = assertArray(loc, value);
if (arr) {
arr.forEach((item, i) => assertIgnoreItem(access(loc, i), item));
}
return arr;
}
function assertIgnoreItem(loc, value) {
if (typeof value !== "string" && typeof value !== "function" && !(value instanceof RegExp)) {
throw new Error(`${msg(loc)} must be an array of string/Function/RegExp values, or undefined`);
}
return value;
}
function assertConfigApplicableTest(loc, value) {
if (value === undefined) return value;
if (Array.isArray(value)) {
value.forEach((item, i) => {
if (!checkValidTest(item)) {
throw new Error(`${msg(access(loc, i))} must be a string/Function/RegExp.`);
}
});
} else if (!checkValidTest(value)) {
throw new Error(`${msg(loc)} must be a string/Function/RegExp, or an array of those`);
}
return value;
}
function checkValidTest(value) {
return typeof value === "string" || typeof value === "function" || value instanceof RegExp;
}
function assertConfigFileSearch(loc, value) {
if (value !== undefined && typeof value !== "boolean" && typeof value !== "string") {
throw new Error(`${msg(loc)} must be a undefined, a boolean, a string, ` + `got ${JSON.stringify(value)}`);
}
return value;
}
function assertBabelrcSearch(loc, value) {
if (value === undefined || typeof value === "boolean") return value;
if (Array.isArray(value)) {
value.forEach((item, i) => {
if (!checkValidTest(item)) {
throw new Error(`${msg(access(loc, i))} must be a string/Function/RegExp.`);
}
});
} else if (!checkValidTest(value)) {
throw new Error(`${msg(loc)} must be a undefined, a boolean, a string/Function/RegExp ` + `or an array of those, got ${JSON.stringify(value)}`);
}
return value;
}
function assertPluginList(loc, value) {
const arr = assertArray(loc, value);
if (arr) {
arr.forEach((item, i) => assertPluginItem(access(loc, i), item));
}
return arr;
}
function assertPluginItem(loc, value) {
if (Array.isArray(value)) {
if (value.length === 0) {
throw new Error(`${msg(loc)} must include an object`);
}
if (value.length > 3) {
throw new Error(`${msg(loc)} may only be a two-tuple or three-tuple`);
}
assertPluginTarget(access(loc, 0), value[0]);
if (value.length > 1) {
const opts = value[1];
if (opts !== undefined && opts !== false && (typeof opts !== "object" || Array.isArray(opts) || opts === null)) {
throw new Error(`${msg(access(loc, 1))} must be an object, false, or undefined`);
}
}
if (value.length === 3) {
const name = value[2];
if (name !== undefined && typeof name !== "string") {
throw new Error(`${msg(access(loc, 2))} must be a string, or undefined`);
}
}
} else {
assertPluginTarget(loc, value);
}
return value;
}
function assertPluginTarget(loc, value) {
if ((typeof value !== "object" || !value) && typeof value !== "string" && typeof value !== "function") {
throw new Error(`${msg(loc)} must be a string, object, function`);
}
return value;
}
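
msg() renders a nested option location as a readable path, which is what lets the assertions above point at the exact offending key. A small sketch using the same shape of location objects:

```js
function msg(loc) {
  switch (loc.type) {
    case "root":      return ``;
    case "env":       return `${msg(loc.parent)}.env["${loc.name}"]`;
    case "overrides": return `${msg(loc.parent)}.overrides[${loc.index}]`;
    case "option":    return `${msg(loc.parent)}.${loc.name}`;
    case "access":    return `${msg(loc.parent)}[${JSON.stringify(loc.name)}]`;
    default: throw new Error(`Assertion failure: Unknown type ${loc.type}`);
  }
}

const root = { type: "root", source: "configfile" };
const env = { type: "env", name: "production", parent: root };
const opt = { type: "option", name: "presets", parent: env };
const item = { type: "access", name: 0, parent: opt };

console.log(msg(opt));  // .env["production"].presets
console.log(msg(item)); // .env["production"].presets[0]
```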

View File

@ -1,188 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.validate = validate;
var _plugin = _interopRequireDefault(require("../plugin"));
var _removed = _interopRequireDefault(require("./removed"));
var _optionAssertions = require("./option-assertions");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const ROOT_VALIDATORS = {
cwd: _optionAssertions.assertString,
root: _optionAssertions.assertString,
rootMode: _optionAssertions.assertRootMode,
configFile: _optionAssertions.assertConfigFileSearch,
caller: _optionAssertions.assertCallerMetadata,
filename: _optionAssertions.assertString,
filenameRelative: _optionAssertions.assertString,
code: _optionAssertions.assertBoolean,
ast: _optionAssertions.assertBoolean,
envName: _optionAssertions.assertString
};
const BABELRC_VALIDATORS = {
babelrc: _optionAssertions.assertBoolean,
babelrcRoots: _optionAssertions.assertBabelrcSearch
};
const NONPRESET_VALIDATORS = {
extends: _optionAssertions.assertString,
ignore: _optionAssertions.assertIgnoreList,
only: _optionAssertions.assertIgnoreList
};
const COMMON_VALIDATORS = {
inputSourceMap: _optionAssertions.assertInputSourceMap,
presets: _optionAssertions.assertPluginList,
plugins: _optionAssertions.assertPluginList,
passPerPreset: _optionAssertions.assertBoolean,
env: assertEnvSet,
overrides: assertOverridesList,
test: _optionAssertions.assertConfigApplicableTest,
include: _optionAssertions.assertConfigApplicableTest,
exclude: _optionAssertions.assertConfigApplicableTest,
retainLines: _optionAssertions.assertBoolean,
comments: _optionAssertions.assertBoolean,
shouldPrintComment: _optionAssertions.assertFunction,
compact: _optionAssertions.assertCompact,
minified: _optionAssertions.assertBoolean,
auxiliaryCommentBefore: _optionAssertions.assertString,
auxiliaryCommentAfter: _optionAssertions.assertString,
sourceType: _optionAssertions.assertSourceType,
wrapPluginVisitorMethod: _optionAssertions.assertFunction,
highlightCode: _optionAssertions.assertBoolean,
sourceMaps: _optionAssertions.assertSourceMaps,
sourceMap: _optionAssertions.assertSourceMaps,
sourceFileName: _optionAssertions.assertString,
sourceRoot: _optionAssertions.assertString,
getModuleId: _optionAssertions.assertFunction,
moduleRoot: _optionAssertions.assertString,
moduleIds: _optionAssertions.assertBoolean,
moduleId: _optionAssertions.assertString,
parserOpts: _optionAssertions.assertObject,
generatorOpts: _optionAssertions.assertObject
};
function getSource(loc) {
return loc.type === "root" ? loc.source : getSource(loc.parent);
}
function validate(type, opts) {
return validateNested({
type: "root",
source: type
}, opts);
}
function validateNested(loc, opts) {
const type = getSource(loc);
assertNoDuplicateSourcemap(opts);
Object.keys(opts).forEach(key => {
const optLoc = {
type: "option",
name: key,
parent: loc
};
if (type === "preset" && NONPRESET_VALIDATORS[key]) {
throw new Error(`${(0, _optionAssertions.msg)(optLoc)} is not allowed in preset options`);
}
if (type !== "arguments" && ROOT_VALIDATORS[key]) {
throw new Error(`${(0, _optionAssertions.msg)(optLoc)} is only allowed in root programmatic options`);
}
if (type !== "arguments" && type !== "configfile" && BABELRC_VALIDATORS[key]) {
if (type === "babelrcfile" || type === "extendsfile") {
throw new Error(`${(0, _optionAssertions.msg)(optLoc)} is not allowed in .babelrc or "extends"ed files, only in root programmatic options, ` + `or babel.config.js/config file options`);
}
throw new Error(`${(0, _optionAssertions.msg)(optLoc)} is only allowed in root programmatic options, or babel.config.js/config file options`);
}
const validator = COMMON_VALIDATORS[key] || NONPRESET_VALIDATORS[key] || BABELRC_VALIDATORS[key] || ROOT_VALIDATORS[key] || throwUnknownError;
validator(optLoc, opts[key]);
});
return opts;
}
function throwUnknownError(loc) {
const key = loc.name;
if (_removed.default[key]) {
const {
message,
version = 5
} = _removed.default[key];
throw new ReferenceError(`Using removed Babel ${version} option: ${(0, _optionAssertions.msg)(loc)} - ${message}`);
} else {
const unknownOptErr = `Unknown option: ${(0, _optionAssertions.msg)(loc)}. Check out https://babeljs.io/docs/en/babel-core/#options for more information about options.`;
throw new ReferenceError(unknownOptErr);
}
}
function has(obj, key) {
return Object.prototype.hasOwnProperty.call(obj, key);
}
function assertNoDuplicateSourcemap(opts) {
if (has(opts, "sourceMap") && has(opts, "sourceMaps")) {
throw new Error(".sourceMap is an alias for .sourceMaps, cannot use both");
}
}
function assertEnvSet(loc, value) {
if (loc.parent.type === "env") {
throw new Error(`${(0, _optionAssertions.msg)(loc)} is not allowed inside of another .env block`);
}
const parent = loc.parent;
const obj = (0, _optionAssertions.assertObject)(loc, value);
if (obj) {
for (const envName of Object.keys(obj)) {
const env = (0, _optionAssertions.assertObject)((0, _optionAssertions.access)(loc, envName), obj[envName]);
if (!env) continue;
const envLoc = {
type: "env",
name: envName,
parent
};
validateNested(envLoc, env);
}
}
return obj;
}
function assertOverridesList(loc, value) {
if (loc.parent.type === "env") {
throw new Error(`${(0, _optionAssertions.msg)(loc)} is not allowed inside an .env block`);
}
if (loc.parent.type === "overrides") {
throw new Error(`${(0, _optionAssertions.msg)(loc)} is not allowed inside an .overrides block`);
}
const parent = loc.parent;
const arr = (0, _optionAssertions.assertArray)(loc, value);
if (arr) {
for (const [index, item] of arr.entries()) {
const objLoc = (0, _optionAssertions.access)(loc, index);
const env = (0, _optionAssertions.assertObject)(objLoc, item);
if (!env) throw new Error(`${(0, _optionAssertions.msg)(objLoc)} must be an object`);
const overridesLoc = {
type: "overrides",
index,
parent
};
validateNested(overridesLoc, env);
}
}
return arr;
}

View File

@ -1,64 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.validatePluginObject = validatePluginObject;
var _optionAssertions = require("./option-assertions");
const VALIDATORS = {
name: _optionAssertions.assertString,
manipulateOptions: _optionAssertions.assertFunction,
pre: _optionAssertions.assertFunction,
post: _optionAssertions.assertFunction,
inherits: _optionAssertions.assertFunction,
visitor: assertVisitorMap,
parserOverride: _optionAssertions.assertFunction,
generatorOverride: _optionAssertions.assertFunction
};
function assertVisitorMap(key, value) {
const obj = (0, _optionAssertions.assertObject)(key, value);
if (obj) {
Object.keys(obj).forEach(prop => assertVisitorHandler(prop, obj[prop]));
if (obj.enter || obj.exit) {
throw new Error(`.${key} cannot contain catch-all "enter" or "exit" handlers. Please target individual nodes.`);
}
}
return obj;
}
function assertVisitorHandler(key, value) {
if (value && typeof value === "object") {
Object.keys(value).forEach(handler => {
if (handler !== "enter" && handler !== "exit") {
throw new Error(`.visitor["${key}"] may only have .enter and/or .exit handlers.`);
}
});
} else if (typeof value !== "function") {
throw new Error(`.visitor["${key}"] must be a function`);
}
return value;
}
function validatePluginObject(obj) {
const rootPath = {
type: "root",
source: "plugin"
};
Object.keys(obj).forEach(key => {
const validator = VALIDATORS[key];
const optLoc = {
type: "option",
name: key,
parent: rootPath
};
if (validator) validator(optLoc, obj[key]);else throw new Error(`.${key} is not a valid Plugin property`);
});
return obj;
}

View File

@ -1,66 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _default = {
auxiliaryComment: {
message: "Use `auxiliaryCommentBefore` or `auxiliaryCommentAfter`"
},
blacklist: {
message: "Put the specific transforms you want in the `plugins` option"
},
breakConfig: {
message: "This is not a necessary option in Babel 6"
},
experimental: {
message: "Put the specific transforms you want in the `plugins` option"
},
externalHelpers: {
message: "Use the `external-helpers` plugin instead. " + "Check out http://babeljs.io/docs/plugins/external-helpers/"
},
extra: {
message: ""
},
jsxPragma: {
message: "use the `pragma` option in the `react-jsx` plugin. " + "Check out http://babeljs.io/docs/plugins/transform-react-jsx/"
},
loose: {
message: "Specify the `loose` option for the relevant plugin you are using " + "or use a preset that sets the option."
},
metadataUsedHelpers: {
message: "Not required anymore as this is enabled by default"
},
modules: {
message: "Use the corresponding module transform plugin in the `plugins` option. " + "Check out http://babeljs.io/docs/plugins/#modules"
},
nonStandard: {
message: "Use the `react-jsx` and `flow-strip-types` plugins to support JSX and Flow. " + "Also check out the react preset http://babeljs.io/docs/plugins/preset-react/"
},
optional: {
message: "Put the specific transforms you want in the `plugins` option"
},
sourceMapName: {
message: "The `sourceMapName` option has been removed because it makes more sense for the " + "tooling that calls Babel to assign `map.file` themselves."
},
stage: {
message: "Check out the corresponding stage-x presets http://babeljs.io/docs/plugins/#presets"
},
whitelist: {
message: "Put the specific transforms you want in the `plugins` option"
},
resolveModuleSource: {
version: 6,
message: "Use `babel-plugin-module-resolver@3`'s 'resolvePath' options"
},
metadata: {
version: 6,
message: "Generated plugin metadata is always included in the output result"
},
sourceMapTarget: {
version: 6,
message: "The `sourceMapTarget` option has been removed because it makes more sense for the tooling " + "that calls Babel to assign `map.file` themselves."
}
};
exports.default = _default;

View File

@ -1,89 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.maybeAsync = maybeAsync;
exports.forwardAsync = forwardAsync;
exports.isThenable = isThenable;
exports.waitFor = exports.onFirstPause = exports.isAsync = void 0;
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const id = x => x;
const runGenerator = (0, _gensync().default)(function* (item) {
return yield* item;
});
const isAsync = (0, _gensync().default)({
sync: () => false,
errback: cb => cb(null, true)
});
exports.isAsync = isAsync;
function maybeAsync(fn, message) {
return (0, _gensync().default)({
sync(...args) {
const result = fn.apply(this, args);
if (isThenable(result)) throw new Error(message);
return result;
},
async(...args) {
return Promise.resolve(fn.apply(this, args));
}
});
}
const withKind = (0, _gensync().default)({
sync: cb => cb("sync"),
async: cb => cb("async")
});
function forwardAsync(action, cb) {
const g = (0, _gensync().default)(action);
return withKind(kind => {
const adapted = g[kind];
return cb(adapted);
});
}
const onFirstPause = (0, _gensync().default)({
name: "onFirstPause",
arity: 2,
sync: function (item) {
return runGenerator.sync(item);
},
errback: function (item, firstPause, cb) {
let completed = false;
runGenerator.errback(item, (err, value) => {
completed = true;
cb(err, value);
});
if (!completed) {
firstPause();
}
}
});
exports.onFirstPause = onFirstPause;
const waitFor = (0, _gensync().default)({
sync: id,
async: id
});
exports.waitFor = waitFor;
function isThenable(val) {
return !!val && (typeof val === "object" || typeof val === "function") && !!val.then && typeof val.then === "function";
}
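
isThenable treats any value with a callable .then as promise-like; maybeAsync relies on it to reject promises returned from a synchronous path. For instance:

```js
function isThenable(val) {
  return !!val
    && (typeof val === "object" || typeof val === "function")
    && !!val.then
    && typeof val.then === "function";
}

console.log(isThenable(Promise.resolve(1))); // true
console.log(isThenable({ then() {} }));      // true  — any thenable counts
console.log(isThenable({ then: 42 }));       // false — .then must be callable
console.log(isThenable(null));               // false
```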

View File

@ -1,48 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.exists = exports.readFile = void 0;
function _fs() {
const data = _interopRequireDefault(require("fs"));
_fs = function () {
return data;
};
return data;
}
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const readFile = (0, _gensync().default)({
sync: _fs().default.readFileSync,
errback: _fs().default.readFile
});
exports.readFile = readFile;
const exists = (0, _gensync().default)({
sync(path) {
try {
_fs().default.accessSync(path);
return true;
} catch (_unused) {
return false;
}
},
errback: (path, cb) => _fs().default.access(path, undefined, err => cb(null, !err))
});
exports.exists = exists;
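
Pairing a sync implementation with an errback one is the gensync pattern used throughout these modules: one definition, callable synchronously, with callbacks, or as a promise. A usage sketch, assuming the gensync package is installed:

```js
const fs = require("fs");
const gensync = require("gensync");

// One definition, three calling conventions.
const readFile = gensync({
  sync: fs.readFileSync,
  errback: fs.readFile,
});

const text = readFile.sync("package.json", "utf8");                 // synchronous
readFile.errback("package.json", "utf8", (err, data) => {});        // callback
readFile.async("package.json", "utf8").then(data => {});            // promise
console.log(text.length);
```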

View File

@ -1,35 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
function _resolve() {
const data = _interopRequireDefault(require("resolve"));
_resolve = function () {
return data;
};
return data;
}
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var _default = (0, _gensync().default)({
sync: _resolve().default.sync,
errback: _resolve().default
});
exports.default = _default;

266
node_modules/@babel/core/lib/index.js generated vendored
View File

@ -1,266 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.Plugin = Plugin;
Object.defineProperty(exports, "File", {
enumerable: true,
get: function () {
return _file.default;
}
});
Object.defineProperty(exports, "buildExternalHelpers", {
enumerable: true,
get: function () {
return _buildExternalHelpers.default;
}
});
Object.defineProperty(exports, "resolvePlugin", {
enumerable: true,
get: function () {
return _files.resolvePlugin;
}
});
Object.defineProperty(exports, "resolvePreset", {
enumerable: true,
get: function () {
return _files.resolvePreset;
}
});
Object.defineProperty(exports, "version", {
enumerable: true,
get: function () {
return _package.version;
}
});
Object.defineProperty(exports, "getEnv", {
enumerable: true,
get: function () {
return _environment.getEnv;
}
});
Object.defineProperty(exports, "tokTypes", {
enumerable: true,
get: function () {
return _parser().tokTypes;
}
});
Object.defineProperty(exports, "traverse", {
enumerable: true,
get: function () {
return _traverse().default;
}
});
Object.defineProperty(exports, "template", {
enumerable: true,
get: function () {
return _template().default;
}
});
Object.defineProperty(exports, "createConfigItem", {
enumerable: true,
get: function () {
return _item.createConfigItem;
}
});
Object.defineProperty(exports, "loadPartialConfig", {
enumerable: true,
get: function () {
return _config.loadPartialConfig;
}
});
Object.defineProperty(exports, "loadPartialConfigSync", {
enumerable: true,
get: function () {
return _config.loadPartialConfigSync;
}
});
Object.defineProperty(exports, "loadPartialConfigAsync", {
enumerable: true,
get: function () {
return _config.loadPartialConfigAsync;
}
});
Object.defineProperty(exports, "loadOptions", {
enumerable: true,
get: function () {
return _config.loadOptions;
}
});
Object.defineProperty(exports, "loadOptionsSync", {
enumerable: true,
get: function () {
return _config.loadOptionsSync;
}
});
Object.defineProperty(exports, "loadOptionsAsync", {
enumerable: true,
get: function () {
return _config.loadOptionsAsync;
}
});
Object.defineProperty(exports, "transform", {
enumerable: true,
get: function () {
return _transform.transform;
}
});
Object.defineProperty(exports, "transformSync", {
enumerable: true,
get: function () {
return _transform.transformSync;
}
});
Object.defineProperty(exports, "transformAsync", {
enumerable: true,
get: function () {
return _transform.transformAsync;
}
});
Object.defineProperty(exports, "transformFile", {
enumerable: true,
get: function () {
return _transformFile.transformFile;
}
});
Object.defineProperty(exports, "transformFileSync", {
enumerable: true,
get: function () {
return _transformFile.transformFileSync;
}
});
Object.defineProperty(exports, "transformFileAsync", {
enumerable: true,
get: function () {
return _transformFile.transformFileAsync;
}
});
Object.defineProperty(exports, "transformFromAst", {
enumerable: true,
get: function () {
return _transformAst.transformFromAst;
}
});
Object.defineProperty(exports, "transformFromAstSync", {
enumerable: true,
get: function () {
return _transformAst.transformFromAstSync;
}
});
Object.defineProperty(exports, "transformFromAstAsync", {
enumerable: true,
get: function () {
return _transformAst.transformFromAstAsync;
}
});
Object.defineProperty(exports, "parse", {
enumerable: true,
get: function () {
return _parse.parse;
}
});
Object.defineProperty(exports, "parseSync", {
enumerable: true,
get: function () {
return _parse.parseSync;
}
});
Object.defineProperty(exports, "parseAsync", {
enumerable: true,
get: function () {
return _parse.parseAsync;
}
});
exports.types = exports.OptionManager = exports.DEFAULT_EXTENSIONS = void 0;
var _file = _interopRequireDefault(require("./transformation/file/file"));
var _buildExternalHelpers = _interopRequireDefault(require("./tools/build-external-helpers"));
var _files = require("./config/files");
var _package = require("../package.json");
var _environment = require("./config/helpers/environment");
function _types() {
const data = _interopRequireWildcard(require("@babel/types"));
_types = function () {
return data;
};
return data;
}
Object.defineProperty(exports, "types", {
enumerable: true,
get: function () {
return _types();
}
});
function _parser() {
const data = require("@babel/parser");
_parser = function () {
return data;
};
return data;
}
function _traverse() {
const data = _interopRequireDefault(require("@babel/traverse"));
_traverse = function () {
return data;
};
return data;
}
function _template() {
const data = _interopRequireDefault(require("@babel/template"));
_template = function () {
return data;
};
return data;
}
var _item = require("./config/item");
var _config = require("./config");
var _transform = require("./transform");
var _transformFile = require("./transform-file");
var _transformAst = require("./transform-ast");
var _parse = require("./parse");
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const DEFAULT_EXTENSIONS = Object.freeze([".js", ".jsx", ".es6", ".es", ".mjs"]);
exports.DEFAULT_EXTENSIONS = DEFAULT_EXTENSIONS;
class OptionManager {
init(opts) {
return (0, _config.loadOptions)(opts);
}
}
exports.OptionManager = OptionManager;
function Plugin(alias) {
throw new Error(`The (${alias}) Babel 5 plugin is being run with an unsupported Babel version.`);
}
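
These re-exports are @babel/core's public surface. A typical usage sketch, assuming @babel/core and @babel/preset-env are installed:

```js
const babel = require("@babel/core");

const { code, map } = babel.transformSync("const double = (n) => n * 2;", {
  filename: "example.js",
  presets: ["@babel/preset-env"],
  sourceMaps: true,
  babelrc: false,     // skip .babelrc lookup
  configFile: false,  // skip babel.config.js lookup
});

console.log(code); // e.g. var double = function double(n) { return n * 2; }; (exact output depends on targets)
console.log(map ? "source map generated" : "no map");
```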

View File

@ -1,50 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.parseAsync = exports.parseSync = exports.parse = void 0;
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _config = _interopRequireDefault(require("./config"));
var _parser = _interopRequireDefault(require("./parser"));
var _normalizeOpts = _interopRequireDefault(require("./transformation/normalize-opts"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const parseRunner = (0, _gensync().default)(function* parse(code, opts) {
const config = yield* (0, _config.default)(opts);
if (config === null) {
return null;
}
return yield* (0, _parser.default)(config.passes, (0, _normalizeOpts.default)(config), code);
});
const parse = function parse(code, opts, callback) {
if (typeof opts === "function") {
callback = opts;
opts = undefined;
}
if (callback === undefined) return parseRunner.sync(code, opts);
parseRunner.errback(code, opts, callback);
};
exports.parse = parse;
const parseSync = parseRunner.sync;
exports.parseSync = parseSync;
const parseAsync = parseRunner.async;
exports.parseAsync = parseAsync;
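
parse follows the same convention as the other entry points: synchronous when no callback is given, errback-style when one is. For example:

```js
const babel = require("@babel/core");

const opts = { babelrc: false, configFile: false };

// Synchronous when no callback is given.
const ast = babel.parseSync("let x = 1;", opts);
console.log(ast.program.body[0].type); // "VariableDeclaration"

// Errback style when a callback is passed.
babel.parse("let x = 1;", opts, (err, result) => {
  if (err) throw err;
  console.log(result.program.body.length); // 1
});
```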

View File

@ -1,97 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = parser;
function _parser() {
const data = require("@babel/parser");
_parser = function () {
return data;
};
return data;
}
function _codeFrame() {
const data = require("@babel/code-frame");
_codeFrame = function () {
return data;
};
return data;
}
var _missingPluginHelper = _interopRequireDefault(require("./util/missing-plugin-helper"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function* parser(pluginPasses, {
parserOpts,
highlightCode = true,
filename = "unknown"
}, code) {
try {
const results = [];
for (const plugins of pluginPasses) {
for (const plugin of plugins) {
const {
parserOverride
} = plugin;
if (parserOverride) {
const ast = parserOverride(code, parserOpts, _parser().parse);
if (ast !== undefined) results.push(ast);
}
}
}
if (results.length === 0) {
return (0, _parser().parse)(code, parserOpts);
} else if (results.length === 1) {
yield* [];
if (typeof results[0].then === "function") {
throw new Error(`You appear to be using an async parser plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, you may need to upgrade ` + `your @babel/core version.`);
}
return results[0];
}
throw new Error("More than one plugin attempted to override parsing.");
} catch (err) {
if (err.code === "BABEL_PARSER_SOURCETYPE_MODULE_REQUIRED") {
err.message += "\nConsider renaming the file to '.mjs', or setting sourceType:module " + "or sourceType:unambiguous in your Babel config for this file.";
}
const {
loc,
missingPlugin
} = err;
if (loc) {
const codeFrame = (0, _codeFrame().codeFrameColumns)(code, {
start: {
line: loc.line,
column: loc.column + 1
}
}, {
highlightCode
});
if (missingPlugin) {
err.message = `${filename}: ` + (0, _missingPluginHelper.default)(missingPlugin[0], loc, codeFrame);
} else {
err.message = `${filename}: ${err.message}\n\n` + codeFrame;
}
err.code = "BABEL_PARSE_ERROR";
}
throw err;
}
}

View File

@ -1,239 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = generateMissingPluginMessage;
const pluginNameMap = {
classProperties: {
syntax: {
name: "@babel/plugin-syntax-class-properties",
url: "https://git.io/vb4yQ"
},
transform: {
name: "@babel/plugin-proposal-class-properties",
url: "https://git.io/vb4SL"
}
},
decorators: {
syntax: {
name: "@babel/plugin-syntax-decorators",
url: "https://git.io/vb4y9"
},
transform: {
name: "@babel/plugin-proposal-decorators",
url: "https://git.io/vb4ST"
}
},
doExpressions: {
syntax: {
name: "@babel/plugin-syntax-do-expressions",
url: "https://git.io/vb4yh"
},
transform: {
name: "@babel/plugin-proposal-do-expressions",
url: "https://git.io/vb4S3"
}
},
dynamicImport: {
syntax: {
name: "@babel/plugin-syntax-dynamic-import",
url: "https://git.io/vb4Sv"
}
},
exportDefaultFrom: {
syntax: {
name: "@babel/plugin-syntax-export-default-from",
url: "https://git.io/vb4SO"
},
transform: {
name: "@babel/plugin-proposal-export-default-from",
url: "https://git.io/vb4yH"
}
},
exportNamespaceFrom: {
syntax: {
name: "@babel/plugin-syntax-export-namespace-from",
url: "https://git.io/vb4Sf"
},
transform: {
name: "@babel/plugin-proposal-export-namespace-from",
url: "https://git.io/vb4SG"
}
},
flow: {
syntax: {
name: "@babel/plugin-syntax-flow",
url: "https://git.io/vb4yb"
},
transform: {
name: "@babel/plugin-transform-flow-strip-types",
url: "https://git.io/vb49g"
}
},
functionBind: {
syntax: {
name: "@babel/plugin-syntax-function-bind",
url: "https://git.io/vb4y7"
},
transform: {
name: "@babel/plugin-proposal-function-bind",
url: "https://git.io/vb4St"
}
},
functionSent: {
syntax: {
name: "@babel/plugin-syntax-function-sent",
url: "https://git.io/vb4yN"
},
transform: {
name: "@babel/plugin-proposal-function-sent",
url: "https://git.io/vb4SZ"
}
},
importMeta: {
syntax: {
name: "@babel/plugin-syntax-import-meta",
url: "https://git.io/vbKK6"
}
},
jsx: {
syntax: {
name: "@babel/plugin-syntax-jsx",
url: "https://git.io/vb4yA"
},
transform: {
name: "@babel/plugin-transform-react-jsx",
url: "https://git.io/vb4yd"
}
},
logicalAssignment: {
syntax: {
name: "@babel/plugin-syntax-logical-assignment-operators",
url: "https://git.io/vAlBp"
},
transform: {
name: "@babel/plugin-proposal-logical-assignment-operators",
url: "https://git.io/vAlRe"
}
},
numericSeparator: {
syntax: {
name: "@babel/plugin-syntax-numeric-separator",
url: "https://git.io/vb4Sq"
},
transform: {
name: "@babel/plugin-proposal-numeric-separator",
url: "https://git.io/vb4yS"
}
},
optionalChaining: {
syntax: {
name: "@babel/plugin-syntax-optional-chaining",
url: "https://git.io/vb4Sc"
},
transform: {
name: "@babel/plugin-proposal-optional-chaining",
url: "https://git.io/vb4Sk"
}
},
pipelineOperator: {
syntax: {
name: "@babel/plugin-syntax-pipeline-operator",
url: "https://git.io/vb4yj"
},
transform: {
name: "@babel/plugin-proposal-pipeline-operator",
url: "https://git.io/vb4SU"
}
},
throwExpressions: {
syntax: {
name: "@babel/plugin-syntax-throw-expressions",
url: "https://git.io/vb4SJ"
},
transform: {
name: "@babel/plugin-proposal-throw-expressions",
url: "https://git.io/vb4yF"
}
},
typescript: {
syntax: {
name: "@babel/plugin-syntax-typescript",
url: "https://git.io/vb4SC"
},
transform: {
name: "@babel/plugin-transform-typescript",
url: "https://git.io/vb4Sm"
}
},
asyncGenerators: {
syntax: {
name: "@babel/plugin-syntax-async-generators",
url: "https://git.io/vb4SY"
},
transform: {
name: "@babel/plugin-proposal-async-generator-functions",
url: "https://git.io/vb4yp"
}
},
nullishCoalescingOperator: {
syntax: {
name: "@babel/plugin-syntax-nullish-coalescing-operator",
url: "https://git.io/vb4yx"
},
transform: {
name: "@babel/plugin-proposal-nullish-coalescing-operator",
url: "https://git.io/vb4Se"
}
},
objectRestSpread: {
syntax: {
name: "@babel/plugin-syntax-object-rest-spread",
url: "https://git.io/vb4y5"
},
transform: {
name: "@babel/plugin-proposal-object-rest-spread",
url: "https://git.io/vb4Ss"
}
},
optionalCatchBinding: {
syntax: {
name: "@babel/plugin-syntax-optional-catch-binding",
url: "https://git.io/vb4Sn"
},
transform: {
name: "@babel/plugin-proposal-optional-catch-binding",
url: "https://git.io/vb4SI"
}
}
};
const getNameURLCombination = ({
name,
url
}) => `${name} (${url})`;
function generateMissingPluginMessage(missingPluginName, loc, codeFrame) {
let helpMessage = `Support for the experimental syntax '${missingPluginName}' isn't currently enabled ` + `(${loc.line}:${loc.column + 1}):\n\n` + codeFrame;
const pluginInfo = pluginNameMap[missingPluginName];
if (pluginInfo) {
const {
syntax: syntaxPlugin,
transform: transformPlugin
} = pluginInfo;
if (syntaxPlugin) {
if (transformPlugin) {
const transformPluginInfo = getNameURLCombination(transformPlugin);
helpMessage += `\n\nAdd ${transformPluginInfo} to the 'plugins' section of your Babel config ` + `to enable transformation.`;
} else {
const syntaxPluginInfo = getNameURLCombination(syntaxPlugin);
helpMessage += `\n\nAdd ${syntaxPluginInfo} to the 'plugins' section of your Babel config ` + `to enable parsing.`;
}
}
}
return helpMessage;
}

View File

@ -1,145 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = _default;
function helpers() {
const data = _interopRequireWildcard(require("@babel/helpers"));
helpers = function () {
return data;
};
return data;
}
function _generator() {
const data = _interopRequireDefault(require("@babel/generator"));
_generator = function () {
return data;
};
return data;
}
function _template() {
const data = _interopRequireDefault(require("@babel/template"));
_template = function () {
return data;
};
return data;
}
function t() {
const data = _interopRequireWildcard(require("@babel/types"));
t = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
const buildUmdWrapper = replacements => _template().default`
(function (root, factory) {
if (typeof define === "function" && define.amd) {
define(AMD_ARGUMENTS, factory);
} else if (typeof exports === "object") {
factory(COMMON_ARGUMENTS);
} else {
factory(BROWSER_ARGUMENTS);
}
})(UMD_ROOT, function (FACTORY_PARAMETERS) {
FACTORY_BODY
});
`(replacements);
function buildGlobal(whitelist) {
const namespace = t().identifier("babelHelpers");
const body = [];
const container = t().functionExpression(null, [t().identifier("global")], t().blockStatement(body));
const tree = t().program([t().expressionStatement(t().callExpression(container, [t().conditionalExpression(t().binaryExpression("===", t().unaryExpression("typeof", t().identifier("global")), t().stringLiteral("undefined")), t().identifier("self"), t().identifier("global"))]))]);
body.push(t().variableDeclaration("var", [t().variableDeclarator(namespace, t().assignmentExpression("=", t().memberExpression(t().identifier("global"), namespace), t().objectExpression([])))]));
buildHelpers(body, namespace, whitelist);
return tree;
}
function buildModule(whitelist) {
const body = [];
const refs = buildHelpers(body, null, whitelist);
body.unshift(t().exportNamedDeclaration(null, Object.keys(refs).map(name => {
return t().exportSpecifier(t().cloneNode(refs[name]), t().identifier(name));
})));
return t().program(body, [], "module");
}
function buildUmd(whitelist) {
const namespace = t().identifier("babelHelpers");
const body = [];
body.push(t().variableDeclaration("var", [t().variableDeclarator(namespace, t().identifier("global"))]));
buildHelpers(body, namespace, whitelist);
return t().program([buildUmdWrapper({
FACTORY_PARAMETERS: t().identifier("global"),
BROWSER_ARGUMENTS: t().assignmentExpression("=", t().memberExpression(t().identifier("root"), namespace), t().objectExpression([])),
COMMON_ARGUMENTS: t().identifier("exports"),
AMD_ARGUMENTS: t().arrayExpression([t().stringLiteral("exports")]),
FACTORY_BODY: body,
UMD_ROOT: t().identifier("this")
})]);
}
function buildVar(whitelist) {
const namespace = t().identifier("babelHelpers");
const body = [];
body.push(t().variableDeclaration("var", [t().variableDeclarator(namespace, t().objectExpression([]))]));
const tree = t().program(body);
buildHelpers(body, namespace, whitelist);
body.push(t().expressionStatement(namespace));
return tree;
}
function buildHelpers(body, namespace, whitelist) {
const getHelperReference = name => {
return namespace ? t().memberExpression(namespace, t().identifier(name)) : t().identifier(`_${name}`);
};
const refs = {};
helpers().list.forEach(function (name) {
if (whitelist && whitelist.indexOf(name) < 0) return;
const ref = refs[name] = getHelperReference(name);
const {
nodes
} = helpers().get(name, getHelperReference, ref);
body.push(...nodes);
});
return refs;
}
function _default(whitelist, outputType = "global") {
let tree;
const build = {
global: buildGlobal,
module: buildModule,
umd: buildUmd,
var: buildVar
}[outputType];
if (build) {
tree = build(whitelist);
} else {
throw new Error(`Unsupported output type ${outputType}`);
}
return (0, _generator().default)(tree).code;
}

View File

@ -1,48 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.transformFromAstAsync = exports.transformFromAstSync = exports.transformFromAst = void 0;
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _config = _interopRequireDefault(require("./config"));
var _transformation = require("./transformation");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const transformFromAstRunner = (0, _gensync().default)(function* (ast, code, opts) {
const config = yield* (0, _config.default)(opts);
if (config === null) return null;
if (!ast) throw new Error("No AST given");
return yield* (0, _transformation.run)(config, code, ast);
});
const transformFromAst = function transformFromAst(ast, code, opts, callback) {
if (typeof opts === "function") {
callback = opts;
opts = undefined;
}
if (callback === undefined) {
return transformFromAstRunner.sync(ast, code, opts);
}
transformFromAstRunner.errback(ast, code, opts, callback);
};
exports.transformFromAst = transformFromAst;
const transformFromAstSync = transformFromAstRunner.sync;
exports.transformFromAstSync = transformFromAstSync;
const transformFromAstAsync = transformFromAstRunner.async;
exports.transformFromAstAsync = transformFromAstAsync;

View File

@ -1,26 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.transformFileSync = transformFileSync;
exports.transformFileAsync = transformFileAsync;
exports.transformFile = void 0;
const transformFile = function transformFile(filename, opts, callback) {
if (typeof opts === "function") {
callback = opts;
}
callback(new Error("Transforming files is not supported in browsers"), null);
};
exports.transformFile = transformFile;
function transformFileSync() {
throw new Error("Transforming files is not supported in browsers");
}
function transformFileAsync() {
return Promise.reject(new Error("Transforming files is not supported in browsers"));
}

View File

@ -1,54 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.transformFileAsync = exports.transformFileSync = exports.transformFile = void 0;
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _config = _interopRequireDefault(require("./config"));
var _transformation = require("./transformation");
var fs = _interopRequireWildcard(require("./gensync-utils/fs"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
({});
const transformFileRunner = (0, _gensync().default)(function* (filename, opts) {
let options;
if (opts == null) {
options = {
filename
};
} else if (opts && typeof opts === "object") {
options = Object.assign({}, opts, {
filename
});
}
const config = yield* (0, _config.default)(options);
if (config === null) return null;
const code = yield* fs.readFile(filename, "utf8");
return yield* (0, _transformation.run)(config, code);
});
const transformFile = transformFileRunner.errback;
exports.transformFile = transformFile;
const transformFileSync = transformFileRunner.sync;
exports.transformFileSync = transformFileSync;
const transformFileAsync = transformFileRunner.async;
exports.transformFileAsync = transformFileAsync;

View File

@ -1,44 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.transformAsync = exports.transformSync = exports.transform = void 0;
function _gensync() {
const data = _interopRequireDefault(require("gensync"));
_gensync = function () {
return data;
};
return data;
}
var _config = _interopRequireDefault(require("./config"));
var _transformation = require("./transformation");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const transformRunner = (0, _gensync().default)(function* transform(code, opts) {
const config = yield* (0, _config.default)(opts);
if (config === null) return null;
return yield* (0, _transformation.run)(config, code);
});
const transform = function transform(code, opts, callback) {
if (typeof opts === "function") {
callback = opts;
opts = undefined;
}
if (callback === undefined) return transformRunner.sync(code, opts);
transformRunner.errback(code, opts, callback);
};
exports.transform = transform;
const transformSync = transformRunner.sync;
exports.transformSync = transformSync;
const transformAsync = transformRunner.async;
exports.transformAsync = transformAsync;

View File

@ -1,68 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = loadBlockHoistPlugin;
function _sortBy() {
const data = _interopRequireDefault(require("lodash/sortBy"));
_sortBy = function () {
return data;
};
return data;
}
var _config = _interopRequireDefault(require("../config"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
let LOADED_PLUGIN;
function loadBlockHoistPlugin() {
if (!LOADED_PLUGIN) {
const config = _config.default.sync({
babelrc: false,
configFile: false,
plugins: [blockHoistPlugin]
});
LOADED_PLUGIN = config ? config.passes[0][0] : undefined;
if (!LOADED_PLUGIN) throw new Error("Assertion failure");
}
return LOADED_PLUGIN;
}
const blockHoistPlugin = {
name: "internal.blockHoist",
visitor: {
Block: {
exit({
node
}) {
let hasChange = false;
for (let i = 0; i < node.body.length; i++) {
const bodyNode = node.body[i];
if (bodyNode && bodyNode._blockHoist != null) {
hasChange = true;
break;
}
}
if (!hasChange) return;
node.body = (0, _sortBy().default)(node.body, function (bodyNode) {
let priority = bodyNode && bodyNode._blockHoist;
if (priority == null) priority = 1;
if (priority === true) priority = 2;
return -1 * priority;
});
}
}
}
};

View File

@ -1,272 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
function helpers() {
const data = _interopRequireWildcard(require("@babel/helpers"));
helpers = function () {
return data;
};
return data;
}
function _traverse() {
const data = _interopRequireWildcard(require("@babel/traverse"));
_traverse = function () {
return data;
};
return data;
}
function _codeFrame() {
const data = require("@babel/code-frame");
_codeFrame = function () {
return data;
};
return data;
}
function t() {
const data = _interopRequireWildcard(require("@babel/types"));
t = function () {
return data;
};
return data;
}
function _semver() {
const data = _interopRequireDefault(require("semver"));
_semver = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
const errorVisitor = {
enter(path, state) {
const loc = path.node.loc;
if (loc) {
state.loc = loc;
path.stop();
}
}
};
class File {
constructor(options, {
code,
ast,
inputMap
}) {
this._map = new Map();
this.declarations = {};
this.path = null;
this.ast = {};
this.metadata = {};
this.code = "";
this.inputMap = null;
this.hub = {
file: this,
getCode: () => this.code,
getScope: () => this.scope,
addHelper: this.addHelper.bind(this),
buildError: this.buildCodeFrameError.bind(this)
};
this.opts = options;
this.code = code;
this.ast = ast;
this.inputMap = inputMap;
this.path = _traverse().NodePath.get({
hub: this.hub,
parentPath: null,
parent: this.ast,
container: this.ast,
key: "program"
}).setContext();
this.scope = this.path.scope;
}
get shebang() {
const {
interpreter
} = this.path.node;
return interpreter ? interpreter.value : "";
}
set shebang(value) {
if (value) {
this.path.get("interpreter").replaceWith(t().interpreterDirective(value));
} else {
this.path.get("interpreter").remove();
}
}
set(key, val) {
if (key === "helpersNamespace") {
throw new Error("Babel 7.0.0-beta.56 has dropped support for the 'helpersNamespace' utility." + "If you are using @babel/plugin-external-helpers you will need to use a newer " + "version than the one you currently have installed. " + "If you have your own implementation, you'll want to explore using 'helperGenerator' " + "alongside 'file.availableHelper()'.");
}
this._map.set(key, val);
}
get(key) {
return this._map.get(key);
}
has(key) {
return this._map.has(key);
}
getModuleName() {
const {
filename,
filenameRelative = filename,
moduleId,
moduleIds = !!moduleId,
getModuleId,
sourceRoot: sourceRootTmp,
moduleRoot = sourceRootTmp,
sourceRoot = moduleRoot
} = this.opts;
if (!moduleIds) return null;
if (moduleId != null && !getModuleId) {
return moduleId;
}
let moduleName = moduleRoot != null ? moduleRoot + "/" : "";
if (filenameRelative) {
const sourceRootReplacer = sourceRoot != null ? new RegExp("^" + sourceRoot + "/?") : "";
moduleName += filenameRelative.replace(sourceRootReplacer, "").replace(/\.(\w*?)$/, "");
}
moduleName = moduleName.replace(/\\/g, "/");
if (getModuleId) {
return getModuleId(moduleName) || moduleName;
} else {
return moduleName;
}
}
addImport() {
throw new Error("This API has been removed. If you're looking for this " + "functionality in Babel 7, you should import the " + "'@babel/helper-module-imports' module and use the functions exposed " + " from that module, such as 'addNamed' or 'addDefault'.");
}
availableHelper(name, versionRange) {
let minVersion;
try {
minVersion = helpers().minVersion(name);
} catch (err) {
if (err.code !== "BABEL_HELPER_UNKNOWN") throw err;
return false;
}
if (typeof versionRange !== "string") return true;
if (_semver().default.valid(versionRange)) versionRange = `^${versionRange}`;
return !_semver().default.intersects(`<${minVersion}`, versionRange) && !_semver().default.intersects(`>=8.0.0`, versionRange);
}
addHelper(name) {
const declar = this.declarations[name];
if (declar) return t().cloneNode(declar);
const generator = this.get("helperGenerator");
if (generator) {
const res = generator(name);
if (res) return res;
}
helpers().ensure(name);
const uid = this.declarations[name] = this.scope.generateUidIdentifier(name);
const dependencies = {};
for (const dep of helpers().getDependencies(name)) {
dependencies[dep] = this.addHelper(dep);
}
const {
nodes,
globals
} = helpers().get(name, dep => dependencies[dep], uid, Object.keys(this.scope.getAllBindings()));
globals.forEach(name => {
if (this.path.scope.hasBinding(name, true)) {
this.path.scope.rename(name);
}
});
nodes.forEach(node => {
node._compact = true;
});
this.path.unshiftContainer("body", nodes);
this.path.get("body").forEach(path => {
if (nodes.indexOf(path.node) === -1) return;
if (path.isVariableDeclaration()) this.scope.registerDeclaration(path);
});
return uid;
}
addTemplateObject() {
throw new Error("This function has been moved into the template literal transform itself.");
}
buildCodeFrameError(node, msg, Error = SyntaxError) {
let loc = node && (node.loc || node._loc);
if (!loc && node) {
const state = {
loc: null
};
(0, _traverse().default)(node, errorVisitor, this.scope, state);
loc = state.loc;
let txt = "This is an error on an internal node. Probably an internal error.";
if (loc) txt += " Location has been estimated.";
msg += ` (${txt})`;
}
if (loc) {
const {
highlightCode = true
} = this.opts;
msg += "\n" + (0, _codeFrame().codeFrameColumns)(this.code, {
start: {
line: loc.start.line,
column: loc.start.column + 1
},
end: loc.end && loc.start.line === loc.end.line ? {
line: loc.end.line,
column: loc.end.column + 1
} : undefined
}, {
highlightCode
});
}
return new Error(msg);
}
}
exports.default = File;

View File

@ -1,89 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = generateCode;
function _convertSourceMap() {
const data = _interopRequireDefault(require("convert-source-map"));
_convertSourceMap = function () {
return data;
};
return data;
}
function _generator() {
const data = _interopRequireDefault(require("@babel/generator"));
_generator = function () {
return data;
};
return data;
}
var _mergeMap = _interopRequireDefault(require("./merge-map"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function generateCode(pluginPasses, file) {
const {
opts,
ast,
code,
inputMap
} = file;
const results = [];
for (const plugins of pluginPasses) {
for (const plugin of plugins) {
const {
generatorOverride
} = plugin;
if (generatorOverride) {
const result = generatorOverride(ast, opts.generatorOpts, code, _generator().default);
if (result !== undefined) results.push(result);
}
}
}
let result;
if (results.length === 0) {
result = (0, _generator().default)(ast, opts.generatorOpts, code);
} else if (results.length === 1) {
result = results[0];
if (typeof result.then === "function") {
throw new Error(`You appear to be using an async codegen plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, ` + `you may need to upgrade your @babel/core version.`);
}
} else {
throw new Error("More than one plugin attempted to override codegen.");
}
let {
code: outputCode,
map: outputMap
} = result;
if (outputMap && inputMap) {
outputMap = (0, _mergeMap.default)(inputMap.toObject(), outputMap);
}
if (opts.sourceMaps === "inline" || opts.sourceMaps === "both") {
outputCode += "\n" + _convertSourceMap().default.fromObject(outputMap).toComment();
}
if (opts.sourceMaps === "inline") {
outputMap = null;
}
return {
outputCode,
outputMap
};
}

View File

@ -1,247 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = mergeSourceMap;
function _sourceMap() {
const data = _interopRequireDefault(require("source-map"));
_sourceMap = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function mergeSourceMap(inputMap, map) {
const input = buildMappingData(inputMap);
const output = buildMappingData(map);
const mergedGenerator = new (_sourceMap().default.SourceMapGenerator)();
for (const {
source
} of input.sources) {
if (typeof source.content === "string") {
mergedGenerator.setSourceContent(source.path, source.content);
}
}
if (output.sources.length === 1) {
const defaultSource = output.sources[0];
const insertedMappings = new Map();
eachInputGeneratedRange(input, (generated, original, source) => {
eachOverlappingGeneratedOutputRange(defaultSource, generated, item => {
const key = makeMappingKey(item);
if (insertedMappings.has(key)) return;
insertedMappings.set(key, item);
mergedGenerator.addMapping({
source: source.path,
original: {
line: original.line,
column: original.columnStart
},
generated: {
line: item.line,
column: item.columnStart
},
name: original.name
});
});
});
for (const item of insertedMappings.values()) {
if (item.columnEnd === Infinity) {
continue;
}
const clearItem = {
line: item.line,
columnStart: item.columnEnd
};
const key = makeMappingKey(clearItem);
if (insertedMappings.has(key)) {
continue;
}
mergedGenerator.addMapping({
generated: {
line: clearItem.line,
column: clearItem.columnStart
}
});
}
}
const result = mergedGenerator.toJSON();
if (typeof input.sourceRoot === "string") {
result.sourceRoot = input.sourceRoot;
}
return result;
}
function makeMappingKey(item) {
return `${item.line}/${item.columnStart}`;
}
function eachOverlappingGeneratedOutputRange(outputFile, inputGeneratedRange, callback) {
const overlappingOriginal = filterApplicableOriginalRanges(outputFile, inputGeneratedRange);
for (const {
generated
} of overlappingOriginal) {
for (const item of generated) {
callback(item);
}
}
}
function filterApplicableOriginalRanges({
mappings
}, {
line,
columnStart,
columnEnd
}) {
return filterSortedArray(mappings, ({
original: outOriginal
}) => {
if (line > outOriginal.line) return -1;
if (line < outOriginal.line) return 1;
if (columnStart >= outOriginal.columnEnd) return -1;
if (columnEnd <= outOriginal.columnStart) return 1;
return 0;
});
}
function eachInputGeneratedRange(map, callback) {
for (const {
source,
mappings
} of map.sources) {
for (const {
original,
generated
} of mappings) {
for (const item of generated) {
callback(item, original, source);
}
}
}
}
function buildMappingData(map) {
const consumer = new (_sourceMap().default.SourceMapConsumer)(Object.assign({}, map, {
sourceRoot: null
}));
const sources = new Map();
const mappings = new Map();
let last = null;
consumer.computeColumnSpans();
consumer.eachMapping(m => {
if (m.originalLine === null) return;
let source = sources.get(m.source);
if (!source) {
source = {
path: m.source,
content: consumer.sourceContentFor(m.source, true)
};
sources.set(m.source, source);
}
let sourceData = mappings.get(source);
if (!sourceData) {
sourceData = {
source,
mappings: []
};
mappings.set(source, sourceData);
}
const obj = {
line: m.originalLine,
columnStart: m.originalColumn,
columnEnd: Infinity,
name: m.name
};
if (last && last.source === source && last.mapping.line === m.originalLine) {
last.mapping.columnEnd = m.originalColumn;
}
last = {
source,
mapping: obj
};
sourceData.mappings.push({
original: obj,
generated: consumer.allGeneratedPositionsFor({
source: m.source,
line: m.originalLine,
column: m.originalColumn
}).map(item => ({
line: item.line,
columnStart: item.column,
columnEnd: item.lastColumn + 1
}))
});
}, null, _sourceMap().default.SourceMapConsumer.ORIGINAL_ORDER);
return {
file: map.file,
sourceRoot: map.sourceRoot,
sources: Array.from(mappings.values())
};
}
function findInsertionLocation(array, callback) {
let left = 0;
let right = array.length;
while (left < right) {
const mid = Math.floor((left + right) / 2);
const item = array[mid];
const result = callback(item);
if (result === 0) {
left = mid;
break;
}
if (result >= 0) {
right = mid;
} else {
left = mid + 1;
}
}
let i = left;
if (i < array.length) {
while (i >= 0 && callback(array[i]) >= 0) {
i--;
}
return i + 1;
}
return i;
}
function filterSortedArray(array, callback) {
const start = findInsertionLocation(array, callback);
const results = [];
for (let i = start; i < array.length && callback(array[i]) === 0; i++) {
results.push(array[i]);
}
return results;
}

View File

@ -1,126 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.run = run;
function _traverse() {
const data = _interopRequireDefault(require("@babel/traverse"));
_traverse = function () {
return data;
};
return data;
}
var _pluginPass = _interopRequireDefault(require("./plugin-pass"));
var _blockHoistPlugin = _interopRequireDefault(require("./block-hoist-plugin"));
var _normalizeOpts = _interopRequireDefault(require("./normalize-opts"));
var _normalizeFile = _interopRequireDefault(require("./normalize-file"));
var _generate = _interopRequireDefault(require("./file/generate"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function* run(config, code, ast) {
const file = yield* (0, _normalizeFile.default)(config.passes, (0, _normalizeOpts.default)(config), code, ast);
const opts = file.opts;
try {
yield* transformFile(file, config.passes);
} catch (e) {
var _opts$filename;
e.message = `${(_opts$filename = opts.filename) != null ? _opts$filename : "unknown"}: ${e.message}`;
if (!e.code) {
e.code = "BABEL_TRANSFORM_ERROR";
}
throw e;
}
let outputCode, outputMap;
try {
if (opts.code !== false) {
({
outputCode,
outputMap
} = (0, _generate.default)(config.passes, file));
}
} catch (e) {
var _opts$filename2;
e.message = `${(_opts$filename2 = opts.filename) != null ? _opts$filename2 : "unknown"}: ${e.message}`;
if (!e.code) {
e.code = "BABEL_GENERATE_ERROR";
}
throw e;
}
return {
metadata: file.metadata,
options: opts,
ast: opts.ast === true ? file.ast : null,
code: outputCode === undefined ? null : outputCode,
map: outputMap === undefined ? null : outputMap,
sourceType: file.ast.program.sourceType
};
}
function* transformFile(file, pluginPasses) {
for (const pluginPairs of pluginPasses) {
const passPairs = [];
const passes = [];
const visitors = [];
for (const plugin of pluginPairs.concat([(0, _blockHoistPlugin.default)()])) {
const pass = new _pluginPass.default(file, plugin.key, plugin.options);
passPairs.push([plugin, pass]);
passes.push(pass);
visitors.push(plugin.visitor);
}
for (const [plugin, pass] of passPairs) {
const fn = plugin.pre;
if (fn) {
const result = fn.call(pass, file);
yield* [];
if (isThenable(result)) {
throw new Error(`You appear to be using a plugin with an async .pre, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, you may need to upgrade ` + `your @babel/core version.`);
}
}
}
const visitor = _traverse().default.visitors.merge(visitors, passes, file.opts.wrapPluginVisitorMethod);
(0, _traverse().default)(file.ast, visitor, file.scope);
for (const [plugin, pass] of passPairs) {
const fn = plugin.post;
if (fn) {
const result = fn.call(pass, file);
yield* [];
if (isThenable(result)) {
throw new Error(`You appear to be using a plugin with an async .post, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, you may need to upgrade ` + `your @babel/core version.`);
}
}
}
}
}
function isThenable(val) {
return !!val && (typeof val === "object" || typeof val === "function") && !!val.then && typeof val.then === "function";
}

View File

@ -1,173 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = normalizeFile;
function _fs() {
const data = _interopRequireDefault(require("fs"));
_fs = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _debug() {
const data = _interopRequireDefault(require("debug"));
_debug = function () {
return data;
};
return data;
}
function _cloneDeep() {
const data = _interopRequireDefault(require("lodash/cloneDeep"));
_cloneDeep = function () {
return data;
};
return data;
}
function t() {
const data = _interopRequireWildcard(require("@babel/types"));
t = function () {
return data;
};
return data;
}
function _convertSourceMap() {
const data = _interopRequireDefault(require("convert-source-map"));
_convertSourceMap = function () {
return data;
};
return data;
}
var _file = _interopRequireDefault(require("./file/file"));
var _parser = _interopRequireDefault(require("../parser"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const debug = (0, _debug().default)("babel:transform:file");
const LARGE_INPUT_SOURCEMAP_THRESHOLD = 1000000;
function* normalizeFile(pluginPasses, options, code, ast) {
code = `${code || ""}`;
if (ast) {
if (ast.type === "Program") {
ast = t().file(ast, [], []);
} else if (ast.type !== "File") {
throw new Error("AST root must be a Program or File node");
}
ast = (0, _cloneDeep().default)(ast);
} else {
ast = yield* (0, _parser.default)(pluginPasses, options, code);
}
let inputMap = null;
if (options.inputSourceMap !== false) {
if (typeof options.inputSourceMap === "object") {
inputMap = _convertSourceMap().default.fromObject(options.inputSourceMap);
}
if (!inputMap) {
const lastComment = extractComments(INLINE_SOURCEMAP_REGEX, ast);
if (lastComment) {
try {
inputMap = _convertSourceMap().default.fromComment(lastComment);
} catch (err) {
debug("discarding unknown inline input sourcemap", err);
}
}
}
if (!inputMap) {
const lastComment = extractComments(EXTERNAL_SOURCEMAP_REGEX, ast);
if (typeof options.filename === "string" && lastComment) {
try {
const match = EXTERNAL_SOURCEMAP_REGEX.exec(lastComment);
const inputMapContent = _fs().default.readFileSync(_path().default.resolve(_path().default.dirname(options.filename), match[1]));
if (inputMapContent.length > LARGE_INPUT_SOURCEMAP_THRESHOLD) {
debug("skip merging input map > 1 MB");
} else {
inputMap = _convertSourceMap().default.fromJSON(inputMapContent);
}
} catch (err) {
debug("discarding unknown file input sourcemap", err);
}
} else if (lastComment) {
debug("discarding un-loadable file input sourcemap");
}
}
}
return new _file.default(options, {
code,
ast,
inputMap
});
}
const INLINE_SOURCEMAP_REGEX = /^[@#]\s+sourceMappingURL=data:(?:application|text)\/json;(?:charset[:=]\S+?;)?base64,(?:.*)$/;
const EXTERNAL_SOURCEMAP_REGEX = /^[@#][ \t]+sourceMappingURL=([^\s'"`]+)[ \t]*$/;
function extractCommentsFromList(regex, comments, lastComment) {
if (comments) {
comments = comments.filter(({
value
}) => {
if (regex.test(value)) {
lastComment = value;
return false;
}
return true;
});
}
return [comments, lastComment];
}
function extractComments(regex, ast) {
let lastComment = null;
t().traverseFast(ast, node => {
[node.leadingComments, lastComment] = extractCommentsFromList(regex, node.leadingComments, lastComment);
[node.innerComments, lastComment] = extractCommentsFromList(regex, node.innerComments, lastComment);
[node.trailingComments, lastComment] = extractCommentsFromList(regex, node.trailingComments, lastComment);
});
return lastComment;
}

View File

@ -1,65 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = normalizeOptions;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function normalizeOptions(config) {
const {
filename,
cwd,
filenameRelative = typeof filename === "string" ? _path().default.relative(cwd, filename) : "unknown",
sourceType = "module",
inputSourceMap,
sourceMaps = !!inputSourceMap,
moduleRoot,
sourceRoot = moduleRoot,
sourceFileName = _path().default.basename(filenameRelative),
comments = true,
compact = "auto"
} = config.options;
const opts = config.options;
const options = Object.assign({}, opts, {
parserOpts: Object.assign({
sourceType: _path().default.extname(filenameRelative) === ".mjs" ? "module" : sourceType,
sourceFileName: filename,
plugins: []
}, opts.parserOpts),
generatorOpts: Object.assign({
filename,
auxiliaryCommentBefore: opts.auxiliaryCommentBefore,
auxiliaryCommentAfter: opts.auxiliaryCommentAfter,
retainLines: opts.retainLines,
comments,
shouldPrintComment: opts.shouldPrintComment,
compact,
minified: opts.minified,
sourceMaps,
sourceRoot,
sourceFileName
}, opts.generatorOpts)
});
for (const plugins of config.passes) {
for (const plugin of plugins) {
if (plugin.manipulateOptions) {
plugin.manipulateOptions(options, options.parserOpts);
}
}
}
return options;
}

View File

@ -1,48 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
class PluginPass {
constructor(file, key, options) {
this._map = new Map();
this.key = key;
this.file = file;
this.opts = options || {};
this.cwd = file.opts.cwd;
this.filename = file.opts.filename;
}
set(key, val) {
this._map.set(key, val);
}
get(key) {
return this._map.get(key);
}
availableHelper(name, versionRange) {
return this.file.availableHelper(name, versionRange);
}
addHelper(name) {
return this.file.addHelper(name);
}
addImport() {
return this.file.addImport();
}
getModuleName() {
return this.file.getModuleName();
}
buildCodeFrameError(node, msg, Error) {
return this.file.buildCodeFrameError(node, msg, Error);
}
}
exports.default = PluginPass;

View File

@ -1 +0,0 @@
../semver/bin/semver

View File

@ -1,39 +0,0 @@
# changes log
## 5.7
* Add `minVersion` method
## 5.6
* Move boolean `loose` param to an options object, with
backwards-compatibility protection.
* Add ability to opt out of special prerelease version handling with
the `includePrerelease` option flag.
## 5.5
* Add version coercion capabilities
## 5.4
* Add intersection checking
## 5.3
* Add `minSatisfying` method
## 5.2
* Add `prerelease(v)` that returns prerelease components
## 5.1
* Add Backus-Naur for ranges
* Remove excessively cute inspection methods
## 5.0
* Remove AMD/Browserified build artifacts
* Fix ltr and gtr when using the `*` range
* Fix for range `*` with a prerelease identifier

View File

@ -1,15 +0,0 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@ -1,412 +0,0 @@
semver(1) -- The semantic versioner for npm
===========================================
## Install
```bash
npm install --save semver
```
## Usage
As a node module:
```js
const semver = require('semver')
semver.valid('1.2.3') // '1.2.3'
semver.valid('a.b.c') // null
semver.clean(' =v1.2.3 ') // '1.2.3'
semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
semver.gt('1.2.3', '9.8.7') // false
semver.lt('1.2.3', '9.8.7') // true
semver.minVersion('>=1.0.0') // '1.0.0'
semver.valid(semver.coerce('v2')) // '2.0.0'
semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
```
As a command-line utility:
```
$ semver -h
A JavaScript implementation of the https://semver.org/ specification
Copyright Isaac Z. Schlueter
Usage: semver [options] <version> [<version> [...]]
Prints valid versions sorted by SemVer precedence
Options:
-r --range <range>
Print versions that match the specified range.
-i --increment [<level>]
Increment a version by the specified level. Level can
be one of: major, minor, patch, premajor, preminor,
prepatch, or prerelease. Default level is 'patch'.
Only one version may be specified.
--preid <identifier>
Identifier to be used to prefix premajor, preminor,
prepatch or prerelease version increments.
-l --loose
Interpret versions and ranges loosely
-p --include-prerelease
Always include prerelease versions in range matching
-c --coerce
Coerce a string into SemVer if possible
(does not imply --loose)
Program exits successfully if any valid version satisfies
all supplied ranges, and prints all satisfying versions.
If no satisfying versions are found, then exits failure.
Versions are printed in ascending order, so supplying
multiple versions to the utility will just sort them.
```
## Versions
A "version" is described by the `v2.0.0` specification found at
<https://semver.org/>.
A leading `"="` or `"v"` character is stripped off and ignored.
## Ranges
A `version range` is a set of `comparators` which specify versions
that satisfy the range.
A `comparator` is composed of an `operator` and a `version`. The set
of primitive `operators` is:
* `<` Less than
* `<=` Less than or equal to
* `>` Greater than
* `>=` Greater than or equal to
* `=` Equal. If no operator is specified, then equality is assumed,
so this operator is optional, but MAY be included.
For example, the comparator `>=1.2.7` would match the versions
`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
or `1.1.0`.
Comparators can be joined by whitespace to form a `comparator set`,
which is satisfied by the **intersection** of all of the comparators
it includes.
A range is composed of one or more comparator sets, joined by `||`. A
version matches a range if and only if every comparator in at least
one of the `||`-separated comparator sets is satisfied by the version.
For example, the range `>=1.2.7 <1.3.0` would match the versions
`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
or `1.1.0`.
The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
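As a rough sketch of the comparator-set rules above (version strings are arbitrary illustrative values; results assume the semver 5.x build vendored here):
```js
const semver = require('semver')

// a comparator set is the intersection of its comparators
semver.satisfies('1.2.8', '>=1.2.7 <1.3.0') // true
semver.satisfies('1.3.0', '>=1.2.7 <1.3.0') // false

// `||` joins comparator sets; a version only needs to satisfy one of them
semver.satisfies('1.2.7', '1.2.7 || >=1.2.9 <2.0.0') // true
semver.satisfies('1.2.8', '1.2.7 || >=1.2.9 <2.0.0') // false
```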
### Prerelease Tags
If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
it will only be allowed to satisfy comparator sets if at least one
comparator with the same `[major, minor, patch]` tuple also has a
prerelease tag.
For example, the range `>1.2.3-alpha.3` would be allowed to match the
version `1.2.3-alpha.7`, but it would *not* be satisfied by
`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
range only accepts prerelease tags on the `1.2.3` version. The
version `3.4.5` *would* satisfy the range, because it does not have a
prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
The purpose for this behavior is twofold. First, prerelease versions
frequently are updated very quickly, and contain many breaking changes
that are (by the author's design) not yet fit for public consumption.
Therefore, by default, they are excluded from range matching
semantics.
Second, a user who has opted into using a prerelease version has
clearly indicated the intent to use *that specific* set of
alpha/beta/rc versions. By including a prerelease tag in the range,
the user is indicating that they are aware of the risk. However, it
is still not appropriate to assume that they have opted into taking a
similar risk on the *next* set of prerelease versions.
Note that this behavior can be suppressed (treating all prerelease
versions as if they were normal versions, for the purpose of range
matching) by setting the `includePrerelease` flag on the options
object to any
[functions](https://github.com/npm/node-semver#functions) that do
range matching.
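A small sketch of the prerelease behavior described above (illustrative values):
```js
const semver = require('semver')

// the range has a prerelease tag on the 1.2.3 tuple, so 1.2.3 prereleases match
semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true

// 3.4.5-alpha.9 sorts higher, but its [major, minor, patch] tuple has no
// prerelease comparator in the range, so it is excluded by default
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false

// opting in with includePrerelease lifts that restriction
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3', { includePrerelease: true }) // true
```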
#### Prerelease Identifiers
The method `.inc` takes an additional `identifier` string argument that
will append the value of the string as a prerelease identifier:
```javascript
semver.inc('1.2.3', 'prerelease', 'beta')
// '1.2.4-beta.0'
```
command-line example:
```bash
$ semver 1.2.3 -i prerelease --preid beta
1.2.4-beta.0
```
Which then can be used to increment further:
```bash
$ semver 1.2.4-beta.0 -i prerelease
1.2.4-beta.1
```
### Advanced Range Syntax
Advanced range syntax desugars to primitive comparators in
deterministic ways.
Advanced ranges may be combined in the same way as primitive
comparators using white space or `||`.
#### Hyphen Ranges `X.Y.Z - A.B.C`
Specifies an inclusive set.
* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
If a partial version is provided as the first version in the inclusive
range, then the missing pieces are replaced with zeroes.
* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
If a partial version is provided as the second version in the
inclusive range, then all versions that start with the supplied parts
of the tuple are accepted, but nothing that would be greater than the
provided tuple parts.
* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
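For example, applying the hyphen-range desugarings above (values chosen purely for illustration):
```js
const semver = require('semver')

semver.satisfies('1.5.0', '1.2.3 - 2.3.4') // true  (>=1.2.3 <=2.3.4)
semver.satisfies('2.3.5', '1.2.3 - 2.3.4') // false
semver.satisfies('2.3.9', '1.2.3 - 2.3')   // true  (>=1.2.3 <2.4.0)
semver.satisfies('2.4.0', '1.2.3 - 2.3')   // false
```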
#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
Any of `X`, `x`, or `*` may be used to "stand in" for one of the
numeric values in the `[major, minor, patch]` tuple.
* `*` := `>=0.0.0` (Any version satisfies)
* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
A partial version range is treated as an X-Range, so the special
character is in fact optional.
* `""` (empty string) := `*` := `>=0.0.0`
* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
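Correspondingly, a quick sketch (`validRange` prints the desugared form; versions are arbitrary examples):
```js
const semver = require('semver')

semver.validRange('1.2.x') // '>=1.2.0 <1.3.0'
semver.validRange('1')     // '>=1.0.0 <2.0.0'
semver.satisfies('1.9.9', '1.x') // true
semver.satisfies('2.0.0', '1.x') // false
```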
#### Tilde Ranges `~1.2.3` `~1.2` `~1`
Allows patch-level changes if a minor version is specified on the
comparator. Allows minor-level changes if not.
* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
the `1.2.3` version will be allowed, if they are greater than or
equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
`1.2.4-beta.2` would not, because it is a prerelease of a
different `[major, minor, patch]` tuple.
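For instance (illustrative values, following the tilde desugarings above):
```js
const semver = require('semver')

semver.satisfies('1.2.9', '~1.2.3') // true  (>=1.2.3 <1.3.0)
semver.satisfies('1.3.0', '~1.2.3') // false
semver.satisfies('1.9.0', '~1')     // true  (>=1.0.0 <2.0.0)
semver.satisfies('1.2.3-beta.4', '~1.2.3-beta.2') // true
semver.satisfies('1.2.4-beta.2', '~1.2.3-beta.2') // false
```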
#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
Allows changes that do not modify the left-most non-zero digit in the
`[major, minor, patch]` tuple. In other words, this allows patch and
minor updates for versions `1.0.0` and above, patch updates for
versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
Many authors treat a `0.x` version as if the `x` were the major
"breaking-change" indicator.
Caret ranges are ideal when an author may make breaking changes
between `0.2.4` and `0.3.0` releases, which is a common practice.
However, it presumes that there will *not* be breaking changes between
`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
additive (but non-breaking), according to commonly observed practices.
* `^1.2.3` := `>=1.2.3 <2.0.0`
* `^0.2.3` := `>=0.2.3 <0.3.0`
* `^0.0.3` := `>=0.0.3 <0.0.4`
* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
the `1.2.3` version will be allowed, if they are greater than or
equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
`1.2.4-beta.2` would not, because it is a prerelease of a
different `[major, minor, patch]` tuple.
* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
`0.0.3` version *only* will be allowed, if they are greater than or
equal to `beta`. So, `0.0.3-pr.2` would be allowed.
When parsing caret ranges, a missing `patch` value desugars to the
number `0`, but will allow flexibility within that value, even if the
major and minor versions are both `0`.
* `^1.2.x` := `>=1.2.0 <2.0.0`
* `^0.0.x` := `>=0.0.0 <0.1.0`
* `^0.0` := `>=0.0.0 <0.1.0`
Missing `minor` and `patch` values will desugar to zero, but also
allow flexibility within those values, even if the major version is
zero.
* `^1.x` := `>=1.0.0 <2.0.0`
* `^0.x` := `>=0.0.0 <1.0.0`
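For example (illustrative values, per the caret desugarings above):
```js
const semver = require('semver')

semver.satisfies('1.9.0', '^1.2.3') // true  (>=1.2.3 <2.0.0)
semver.satisfies('2.0.0', '^1.2.3') // false
semver.satisfies('0.2.5', '^0.2.3') // true  (>=0.2.3 <0.3.0)
semver.satisfies('0.3.0', '^0.2.3') // false
semver.satisfies('0.0.4', '^0.0.3') // false (>=0.0.3 <0.0.4)
```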
### Range Grammar
Putting all this together, here is a Backus-Naur grammar for ranges,
for the benefit of parser authors:
```bnf
range-set ::= range ( logical-or range ) *
logical-or ::= ( ' ' ) * '||' ( ' ' ) *
range ::= hyphen | simple ( ' ' simple ) * | ''
hyphen ::= partial ' - ' partial
simple ::= primitive | partial | tilde | caret
primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
xr ::= 'x' | 'X' | '*' | nr
nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
tilde ::= '~' partial
caret ::= '^' partial
qualifier ::= ( '-' pre )? ( '+' build )?
pre ::= parts
build ::= parts
parts ::= part ( '.' part ) *
part ::= nr | [-0-9A-Za-z]+
```
## Functions
All methods and classes take a final `options` object argument. All
options in this object are `false` by default. The options supported
are:
- `loose` Be more forgiving about not-quite-valid semver strings.
(Any resulting output will always be 100% strict compliant, of
course.) For backwards compatibility reasons, if the `options`
argument is a boolean value instead of an object, it is interpreted
to be the `loose` param.
- `includePrerelease` Set to suppress the [default
behavior](https://github.com/npm/node-semver#prerelease-tags) of
excluding prerelease tagged versions from ranges unless they are
explicitly opted into.
Strict-mode Comparators and Ranges will be strict about the SemVer
strings that they parse.
* `valid(v)`: Return the parsed version, or null if it's not valid.
* `inc(v, release)`: Return the version incremented by the release
type (`major`, `premajor`, `minor`, `preminor`, `patch`,
`prepatch`, or `prerelease`), or null if it's not valid
* `premajor` in one call will bump the version up to the next major
version and down to a prerelease of that major version.
`preminor` and `prepatch` work the same way.
* If called from a non-prerelease version, the `prerelease` will work the
same as `prepatch`. It increments the patch version, then makes a
prerelease. If the input version is already a prerelease it simply
increments it.
* `prerelease(v)`: Returns an array of prerelease components, or null
if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
* `major(v)`: Return the major version number.
* `minor(v)`: Return the minor version number.
* `patch(v)`: Return the patch version number.
* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
or comparators intersect.
* `parse(v)`: Attempt to parse a string as a semantic version, returning either
a `SemVer` object or `null`.
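A short sketch of the functions listed above (inputs are arbitrary illustrative values):
```js
const semver = require('semver')

semver.valid('1.2.3')           // '1.2.3'
semver.valid('not a version')   // null
semver.inc('1.2.3', 'minor')    // '1.3.0'
semver.inc('1.2.3', 'premajor') // '2.0.0-0'
semver.prerelease('1.2.3-alpha.1') // ['alpha', 1]
semver.major('1.2.3')           // 1
semver.intersects('^1.2.0', '1.4.x') // true
```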
### Comparison
* `gt(v1, v2)`: `v1 > v2`
* `gte(v1, v2)`: `v1 >= v2`
* `lt(v1, v2)`: `v1 < v2`
* `lte(v1, v2)`: `v1 <= v2`
* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
even if they're not the exact same string. You already know how to
compare strings.
* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
the corresponding function above. `"==="` and `"!=="` do simple
string comparison, but are included for completeness. Throws if an
invalid comparison string is provided.
* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
`v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
in descending order when passed to `Array.sort()`.
* `diff(v1, v2)`: Returns difference between two versions by the release type
(`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
or null if the versions are the same.
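For example (illustrative values):
```js
const semver = require('semver')

semver.gt('1.2.3', '9.8.7')       // false
semver.eq('1.2.3', 'v1.2.3')      // true
semver.cmp('1.2.3', '<', '2.0.0') // true
semver.diff('1.2.3', '1.3.0')     // 'minor'

const versions = ['3.0.0', '1.2.3', '2.0.0']
versions.sort(semver.compare)  // ['1.2.3', '2.0.0', '3.0.0']
versions.sort(semver.rcompare) // ['3.0.0', '2.0.0', '1.2.3']
```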
### Comparators
* `intersects(comparator)`: Return true if the comparators intersect
### Ranges
* `validRange(range)`: Return the valid range or null if it's not valid
* `satisfies(version, range)`: Return true if the version satisfies the
range.
* `maxSatisfying(versions, range)`: Return the highest version in the list
that satisfies the range, or `null` if none of them do.
* `minSatisfying(versions, range)`: Return the lowest version in the list
that satisfies the range, or `null` if none of them do.
* `minVersion(range)`: Return the lowest version that can possibly match
the given range.
* `gtr(version, range)`: Return `true` if version is greater than all the
versions possible in the range.
* `ltr(version, range)`: Return `true` if version is less than all the
versions possible in the range.
* `outside(version, range, hilo)`: Return true if the version is outside
the bounds of the range in either the high or low direction. The
`hilo` argument must be either the string `'>'` or `'<'`. (This is
the function called by `gtr` and `ltr`.)
* `intersects(range)`: Return true if any of the range's comparators intersect
Note that, since ranges may be non-contiguous, a version might not be
greater than a range, less than a range, *or* satisfy a range! For
example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
until `2.0.0`, so the version `1.2.10` would not be greater than the
range (because `2.0.1` satisfies, which is higher), nor less than the
range (since `1.2.8` satisfies, which is lower), and it also does not
satisfy the range.
If you want to know if a version satisfies or does not satisfy a
range, use the `satisfies(version, range)` function.
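A brief sketch of the range helpers above (the version list is an arbitrary example):
```js
const semver = require('semver')

const available = ['1.2.3', '1.2.9', '1.4.6', '2.0.0']
semver.maxSatisfying(available, '~1.2.0')  // '1.2.9'
semver.minSatisfying(available, '>=1.2.5') // '1.2.9'
semver.minVersion('>=1.2.5')        // '1.2.5'
semver.gtr('2.0.1', '1.x')          // true
semver.ltr('0.9.0', '1.x')          // true
semver.outside('2.0.1', '1.x', '>') // true
```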
### Coercion
* `coerce(version)`: Coerces a string to semver if possible
This aims to provide a very forgiving translation of a non-semver string to
semver. It looks for the first digit in a string, and consumes all
remaining characters which satisfy at least a partial semver (e.g., `1`,
`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes
`3.4.0`). Only text which lacks digits will fail coercion (`version one`
is not valid). The maximum length for any semver component considered for
coercion is 16 characters; longer components will be ignored
(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any
semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value
components are invalid (`9999999999999999.4.7.4` is likely invalid).
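For instance, the coercion examples from the prose above, spelled out (illustrative inputs):
```js
const semver = require('semver')

semver.coerce('v2').version                         // '2.0.0'
semver.valid(semver.coerce('42.6.7.9.3-alpha'))     // '42.6.7'
semver.valid(semver.coerce('v3.4 replaces v3.3.1')) // '3.4.0'
semver.coerce('version one')                        // null
```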

View File

@ -1,160 +0,0 @@
#!/usr/bin/env node
// Standalone semver comparison program.
// Exits successfully and prints matching version(s) if
// any supplied version is valid and passes all tests.
var argv = process.argv.slice(2)
var versions = []
var range = []
var inc = null
var version = require('../package.json').version
var loose = false
var includePrerelease = false
var coerce = false
var identifier
var semver = require('../semver')
var reverse = false
var options = {}
main()
function main () {
if (!argv.length) return help()
while (argv.length) {
var a = argv.shift()
var indexOfEqualSign = a.indexOf('=')
if (indexOfEqualSign !== -1) {
a = a.slice(0, indexOfEqualSign)
argv.unshift(a.slice(indexOfEqualSign + 1))
}
switch (a) {
case '-rv': case '-rev': case '--rev': case '--reverse':
reverse = true
break
case '-l': case '--loose':
loose = true
break
case '-p': case '--include-prerelease':
includePrerelease = true
break
case '-v': case '--version':
versions.push(argv.shift())
break
case '-i': case '--inc': case '--increment':
switch (argv[0]) {
case 'major': case 'minor': case 'patch': case 'prerelease':
case 'premajor': case 'preminor': case 'prepatch':
inc = argv.shift()
break
default:
inc = 'patch'
break
}
break
case '--preid':
identifier = argv.shift()
break
case '-r': case '--range':
range.push(argv.shift())
break
case '-c': case '--coerce':
coerce = true
break
case '-h': case '--help': case '-?':
return help()
default:
versions.push(a)
break
}
}
var options = { loose: loose, includePrerelease: includePrerelease }
versions = versions.map(function (v) {
return coerce ? (semver.coerce(v) || { version: v }).version : v
}).filter(function (v) {
return semver.valid(v)
})
if (!versions.length) return fail()
if (inc && (versions.length !== 1 || range.length)) { return failInc() }
for (var i = 0, l = range.length; i < l; i++) {
versions = versions.filter(function (v) {
return semver.satisfies(v, range[i], options)
})
if (!versions.length) return fail()
}
return success(versions)
}
function failInc () {
console.error('--inc can only be used on a single version with no range')
fail()
}
function fail () { process.exit(1) }
function success () {
var compare = reverse ? 'rcompare' : 'compare'
versions.sort(function (a, b) {
return semver[compare](a, b, options)
}).map(function (v) {
return semver.clean(v, options)
}).map(function (v) {
return inc ? semver.inc(v, inc, options, identifier) : v
}).forEach(function (v, i, _) { console.log(v) })
}
function help () {
console.log(['SemVer ' + version,
'',
'A JavaScript implementation of the https://semver.org/ specification',
'Copyright Isaac Z. Schlueter',
'',
'Usage: semver [options] <version> [<version> [...]]',
'Prints valid versions sorted by SemVer precedence',
'',
'Options:',
'-r --range <range>',
' Print versions that match the specified range.',
'',
'-i --increment [<level>]',
' Increment a version by the specified level. Level can',
' be one of: major, minor, patch, premajor, preminor,',
" prepatch, or prerelease. Default level is 'patch'.",
' Only one version may be specified.',
'',
'--preid <identifier>',
' Identifier to be used to prefix premajor, preminor,',
' prepatch or prerelease version increments.',
'',
'-l --loose',
' Interpret versions and ranges loosely',
'',
'-p --include-prerelease',
' Always include prerelease versions in range matching',
'',
'-c --coerce',
' Coerce a string into SemVer if possible',
' (does not imply --loose)',
'',
'Program exits successfully if any valid version satisfies',
'all supplied ranges, and prints all satisfying versions.',
'',
'If no satisfying versions are found, then exits failure.',
'',
'Versions are printed in ascending order, so supplying',
'multiple versions to the utility will just sort them.'
].join('\n'))
}
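
The flags above are thin wrappers around the programmatic API in `semver.js`; a minimal sketch of the equivalent library calls, using made-up example versions:

```js
var semver = require('semver')

// -r / --range: keep only versions that satisfy a range
semver.satisfies('1.2.4', '^1.2.3')       // true

// -i / --increment: bump a single version by a release level
semver.inc('1.2.3', 'patch')              // '1.2.4'
semver.inc('1.2.3', 'premajor', 'beta')   // '2.0.0-beta.0'

// -c / --coerce: turn a loose string into strict SemVer if possible
String(semver.coerce('v2'))               // '2.0.0'
```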

View File

@ -1,60 +0,0 @@
{
"_from": "semver@^5.4.1",
"_id": "semver@5.7.1",
"_inBundle": false,
"_integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
"_location": "/@babel/core/semver",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "semver@^5.4.1",
"name": "semver",
"escapedName": "semver",
"rawSpec": "^5.4.1",
"saveSpec": null,
"fetchSpec": "^5.4.1"
},
"_requiredBy": [
"/@babel/core"
],
"_resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
"_shasum": "a954f931aeba508d307bbf069eff0c01c96116f7",
"_spec": "semver@^5.4.1",
"_where": "/home/runner/work/ghaction-upx/ghaction-upx/node_modules/@babel/core",
"bin": {
"semver": "bin/semver"
},
"bugs": {
"url": "https://github.com/npm/node-semver/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "The semantic version parser used by npm.",
"devDependencies": {
"tap": "^13.0.0-rc.18"
},
"files": [
"bin",
"range.bnf",
"semver.js"
],
"homepage": "https://github.com/npm/node-semver#readme",
"license": "ISC",
"main": "semver.js",
"name": "semver",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/node-semver.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap"
},
"tap": {
"check-coverage": true
},
"version": "5.7.1"
}

View File

@ -1,16 +0,0 @@
range-set ::= range ( logical-or range ) *
logical-or ::= ( ' ' ) * '||' ( ' ' ) *
range ::= hyphen | simple ( ' ' simple ) * | ''
hyphen ::= partial ' - ' partial
simple ::= primitive | partial | tilde | caret
primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
xr ::= 'x' | 'X' | '*' | nr
nr ::= '0' | [1-9] ( [0-9] ) *
tilde ::= '~' partial
caret ::= '^' partial
qualifier ::= ( '-' pre )? ( '+' build )?
pre ::= parts
build ::= parts
parts ::= part ( '.' part ) *
part ::= nr | [-0-9A-Za-z]+

File diff suppressed because it is too large

View File

@ -1,301 +0,0 @@
# Change Log
## 0.5.6
* Fix for regression when people were using numbers as names in source maps. See
#236.
## 0.5.5
* Fix "regression" of unsupported, implementation behavior that half the world
happens to have come to depend on. See #235.
* Fix regression involving function hoisting in SpiderMonkey. See #233.
## 0.5.4
* Large performance improvements to source-map serialization. See #228 and #229.
## 0.5.3
* Do not include unnecessary distribution files. See
commit ef7006f8d1647e0a83fdc60f04f5a7ca54886f86.
## 0.5.2
* Include browser distributions of the library in package.json's `files`. See
issue #212.
## 0.5.1
* Fix latent bugs in IndexedSourceMapConsumer.prototype._parseMappings. See
ff05274becc9e6e1295ed60f3ea090d31d843379.
## 0.5.0
* Node 0.8 is no longer supported.
* Use webpack instead of dryice for bundling.
* Big speedups serializing source maps. See pull request #203.
* Fix a bug with `SourceMapConsumer.prototype.sourceContentFor` and sources that
explicitly start with the source root. See issue #199.
## 0.4.4
* Fix an issue where using a `SourceMapGenerator` after having created a
`SourceMapConsumer` from it via `SourceMapConsumer.fromSourceMap` failed. See
issue #191.
* Fix an issue with where `SourceMapGenerator` would mistakenly consider
different mappings as duplicates of each other and avoid generating them. See
issue #192.
## 0.4.3
* A very large number of performance improvements, particularly when parsing
source maps. Collectively about 75% of time shaved off of the source map
parsing benchmark!
* Fix a bug in `SourceMapConsumer.prototype.allGeneratedPositionsFor` and fuzzy
searching in the presence of a column option. See issue #177.
* Fix a bug with joining a source and its source root when the source is above
the root. See issue #182.
* Add the `SourceMapConsumer.prototype.hasContentsOfAllSources` method to
determine when all sources' contents are inlined into the source map. See
issue #190.
## 0.4.2
* Add an `.npmignore` file so that the benchmarks aren't pulled down by
dependent projects. Issue #169.
* Add an optional `column` argument to
`SourceMapConsumer.prototype.allGeneratedPositionsFor` and better handle lines
with no mappings. Issues #172 and #173.
## 0.4.1
* Fix accidentally defining a global variable. #170.
## 0.4.0
* The default direction for fuzzy searching was changed back to its original
direction. See #164.
* There is now a `bias` option you can supply to `SourceMapConsumer` to control
the fuzzy searching direction. See #167.
* About an 8% speed up in parsing source maps. See #159.
* Added a benchmark for parsing and generating source maps.
## 0.3.0
* Change the default direction that searching for positions fuzzes when there is
not an exact match. See #154.
* Support for environments using json2.js for JSON serialization. See #156.
## 0.2.0
* Support for consuming "indexed" source maps which do not have any remote
sections. See pull request #127. This introduces a minor backwards
incompatibility if you are monkey patching `SourceMapConsumer.prototype`
methods.
## 0.1.43
* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue
#148 for some discussion and issues #150, #151, and #152 for implementations.
## 0.1.42
* Fix an issue where `SourceNode`s from different versions of the source-map
library couldn't be used in conjunction with each other. See issue #142.
## 0.1.41
* Fix a bug with getting the source content of relative sources with a "./"
prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768).
* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the
column span of each mapping.
* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find
all generated positions associated with a given original source and line.
## 0.1.40
* Performance improvements for parsing source maps in SourceMapConsumer.
## 0.1.39
* Fix a bug where setting a source's contents to null before any source content
had been set threw a TypeError. See issue #131.
## 0.1.38
* Fix a bug where finding relative paths from an empty path was creating
absolute paths. See issue #129.
## 0.1.37
* Fix a bug where if the source root was an empty string, relative source paths
would turn into absolute source paths. Issue #124.
## 0.1.36
* Allow the `names` mapping property to be an empty string. Issue #121.
## 0.1.35
* A third optional parameter was added to `SourceNode.fromStringWithSourceMap`
to specify a path that relative sources in the second parameter should be
relative to. Issue #105.
* If no file property is given to a `SourceMapGenerator`, then the resulting
source map will no longer have a `null` file property. The property will
simply not exist. Issue #104.
* Fixed a bug where consecutive newlines were ignored in `SourceNode`s.
Issue #116.
## 0.1.34
* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103.
* Fix bug involving source contents and the
`SourceMapGenerator.prototype.applySourceMap`. Issue #100.
## 0.1.33
* Fix some edge cases surrounding path joining and URL resolution.
* Add a third parameter for relative path to
`SourceMapGenerator.prototype.applySourceMap`.
* Fix issues with mappings and EOLs.
## 0.1.32
* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns
(issue 92).
* Fixed test runner to actually report number of failed tests as its process
exit code.
* Fixed a typo when reporting bad mappings (issue 87).
## 0.1.31
* Delay parsing the mappings in SourceMapConsumer until queried for a source
location.
* Support Sass source maps (which at the time of writing deviate from the spec
in small ways) in SourceMapConsumer.
## 0.1.30
* Do not join source root with a source, when the source is a data URI.
* Extend the test runner to allow running single specific test files at a time.
* Performance improvements in `SourceNode.prototype.walk` and
`SourceMapConsumer.prototype.eachMapping`.
* Source map browser builds will now work inside Workers.
* Better error messages when attempting to add an invalid mapping to a
`SourceMapGenerator`.
## 0.1.29
* Allow duplicate entries in the `names` and `sources` arrays of source maps
(usually from TypeScript) we are parsing. Fixes github issue 72.
## 0.1.28
* Skip duplicate mappings when creating source maps from SourceNode; github
issue 75.
## 0.1.27
* Don't throw an error when the `file` property is missing in SourceMapConsumer,
we don't use it anyway.
## 0.1.26
* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70.
## 0.1.25
* Make compatible with browserify
## 0.1.24
* Fix issue with absolute paths and `file://` URIs. See
https://bugzilla.mozilla.org/show_bug.cgi?id=885597
## 0.1.23
* Fix issue with absolute paths and sourcesContent, github issue 64.
## 0.1.22
* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
## 0.1.21
* Fixed handling of sources that start with a slash so that they are relative to
the source root's host.
## 0.1.20
* Fixed github issue #43: absolute URLs aren't joined with the source root
anymore.
## 0.1.19
* Using Travis CI to run tests.
## 0.1.18
* Fixed a bug in the handling of sourceRoot.
## 0.1.17
* Added SourceNode.fromStringWithSourceMap.
## 0.1.16
* Added missing documentation.
* Fixed the generating of empty mappings in SourceNode.
## 0.1.15
* Added SourceMapGenerator.applySourceMap.
## 0.1.14
* The sourceRoot is now handled consistently.
## 0.1.13
* Added SourceMapGenerator.fromSourceMap.
## 0.1.12
* SourceNode now generates empty mappings too.
## 0.1.11
* Added name support to SourceNode.
## 0.1.10
* Added sourcesContent support to the consumer and generator.

View File

@ -1,28 +0,0 @@
Copyright (c) 2009-2011, Mozilla Foundation and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of the Mozilla Foundation nor the names of project
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -1,729 +0,0 @@
# Source Map
[![Build Status](https://travis-ci.org/mozilla/source-map.png?branch=master)](https://travis-ci.org/mozilla/source-map)
[![NPM](https://nodei.co/npm/source-map.png?downloads=true&downloadRank=true)](https://www.npmjs.com/package/source-map)
This is a library to generate and consume the source map format
[described here][format].
[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
## Use with Node
$ npm install source-map
## Use on the Web
<script src="https://raw.githubusercontent.com/mozilla/source-map/master/dist/source-map.min.js" defer></script>
--------------------------------------------------------------------------------
<!-- `npm run toc` to regenerate the Table of Contents -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
## Table of Contents
- [Examples](#examples)
- [Consuming a source map](#consuming-a-source-map)
- [Generating a source map](#generating-a-source-map)
- [With SourceNode (high level API)](#with-sourcenode-high-level-api)
- [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api)
- [API](#api)
- [SourceMapConsumer](#sourcemapconsumer)
- [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap)
- [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans)
- [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition)
- [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition)
- [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition)
- [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources)
- [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing)
- [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order)
- [SourceMapGenerator](#sourcemapgenerator)
- [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap)
- [SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer)
- [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping)
- [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent)
- [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath)
- [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring)
- [SourceNode](#sourcenode)
- [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name)
- [SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath)
- [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk)
- [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk)
- [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent)
- [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn)
- [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn)
- [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep)
- [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement)
- [SourceNode.prototype.toString()](#sourcenodeprototypetostring)
- [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Examples
### Consuming a source map
```js
var rawSourceMap = {
version: 3,
file: 'min.js',
names: ['bar', 'baz', 'n'],
sources: ['one.js', 'two.js'],
sourceRoot: 'http://example.com/www/js/',
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
};
var smc = new SourceMapConsumer(rawSourceMap);
console.log(smc.sources);
// [ 'http://example.com/www/js/one.js',
// 'http://example.com/www/js/two.js' ]
console.log(smc.originalPositionFor({
line: 2,
column: 28
}));
// { source: 'http://example.com/www/js/two.js',
// line: 2,
// column: 10,
// name: 'n' }
console.log(smc.generatedPositionFor({
source: 'http://example.com/www/js/two.js',
line: 2,
column: 10
}));
// { line: 2, column: 28 }
smc.eachMapping(function (m) {
// ...
});
```
### Generating a source map
In depth guide:
[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/)
#### With SourceNode (high level API)
```js
function compile(ast) {
switch (ast.type) {
case 'BinaryExpression':
return new SourceNode(
ast.location.line,
ast.location.column,
ast.location.source,
[compile(ast.left), " + ", compile(ast.right)]
);
case 'Literal':
return new SourceNode(
ast.location.line,
ast.location.column,
ast.location.source,
String(ast.value)
);
// ...
default:
throw new Error("Bad AST");
}
}
var ast = parse("40 + 2", "add.js");
console.log(compile(ast).toStringWithSourceMap({
file: 'add.js'
}));
// { code: '40 + 2',
// map: [object SourceMapGenerator] }
```
#### With SourceMapGenerator (low level API)
```js
var map = new SourceMapGenerator({
file: "source-mapped.js"
});
map.addMapping({
generated: {
line: 10,
column: 35
},
source: "foo.js",
original: {
line: 33,
column: 2
},
name: "christopher"
});
console.log(map.toString());
// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}'
```
## API
Get a reference to the module:
```js
// Node.js
var sourceMap = require('source-map');
// Browser builds
var sourceMap = window.sourceMap;
// Inside Firefox
const sourceMap = require("devtools/toolkit/sourcemap/source-map.js");
```
### SourceMapConsumer
A SourceMapConsumer instance represents a parsed source map which we can query
for information about the original file positions by giving it a file position
in the generated source.
#### new SourceMapConsumer(rawSourceMap)
The only parameter is the raw source map (either as a string which can be
`JSON.parse`'d, or an object). According to the spec, source maps have the
following attributes:
* `version`: Which version of the source map spec this map is following.
* `sources`: An array of URLs to the original source files.
* `names`: An array of identifiers which can be referenced by individual
mappings.
* `sourceRoot`: Optional. The URL root from which all sources are relative.
* `sourcesContent`: Optional. An array of contents of the original source files.
* `mappings`: A string of base64 VLQs which contain the actual mappings.
* `file`: Optional. The generated filename this source map is associated with.
```js
var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData);
```
#### SourceMapConsumer.prototype.computeColumnSpans()
Compute the last column for each generated mapping. The last column is
inclusive.
```js
// Before:
consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
// [ { line: 2,
// column: 1 },
// { line: 2,
// column: 10 },
// { line: 2,
// column: 20 } ]
consumer.computeColumnSpans();
// After:
consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
// [ { line: 2,
// column: 1,
// lastColumn: 9 },
// { line: 2,
// column: 10,
// lastColumn: 19 },
// { line: 2,
// column: 20,
// lastColumn: Infinity } ]
```
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
Returns the original source, line, and column information for the generated
source's line and column positions provided. The only argument is an object with
the following properties:
* `line`: The line number in the generated source.
* `column`: The column number in the generated source.
* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or
`SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest
element that is smaller than or greater than the one we are searching for,
respectively, if the exact element cannot be found. Defaults to
`SourceMapConsumer.GREATEST_LOWER_BOUND`.
and an object is returned with the following properties:
* `source`: The original source file, or null if this information is not
available.
* `line`: The line number in the original source, or null if this information is
not available.
* `column`: The column number in the original source, or null if this
information is not available.
* `name`: The original identifier, or null if this information is not available.
```js
consumer.originalPositionFor({ line: 2, column: 10 })
// { source: 'foo.coffee',
// line: 2,
// column: 2,
// name: null }
consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 })
// { source: null,
// line: null,
// column: null,
// name: null }
```
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
Returns the generated line and column information for the original source,
line, and column positions provided. The only argument is an object with
the following properties:
* `source`: The filename of the original source.
* `line`: The line number in the original source.
* `column`: The column number in the original source.
and an object is returned with the following properties:
* `line`: The line number in the generated source, or null.
* `column`: The column number in the generated source, or null.
```js
consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 })
// { line: 1,
// column: 56 }
```
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
Returns all generated line and column information for the original source, line,
and column provided. If no column is provided, returns all mappings
corresponding to either the line we are searching for or the next closest line
that has any mappings. Otherwise, returns all mappings corresponding to the
given line and either the column we are searching for or the next closest column
that has any offsets.
The only argument is an object with the following properties:
* `source`: The filename of the original source.
* `line`: The line number in the original source.
* `column`: Optional. The column number in the original source.
and an array of objects is returned, each with the following properties:
* `line`: The line number in the generated source, or null.
* `column`: The column number in the generated source, or null.
```js
consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
// [ { line: 2,
// column: 1 },
// { line: 2,
// column: 10 },
// { line: 2,
// column: 20 } ]
```
#### SourceMapConsumer.prototype.hasContentsOfAllSources()
Return true if we have the embedded source content for every source listed in
the source map, false otherwise.
In other words, if this method returns `true`, then
`consumer.sourceContentFor(s)` will succeed for every source `s` in
`consumer.sources`.
```js
// ...
if (consumer.hasContentsOfAllSources()) {
consumerReadyCallback(consumer);
} else {
fetchSources(consumer, consumerReadyCallback);
}
// ...
```
#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
Returns the original source content for the source provided. The only
argument is the URL of the original source file.
If the source content for the given source is not found, then an error is
thrown. Optionally, pass `true` as the second param to have `null` returned
instead.
```js
consumer.sources
// [ "my-cool-lib.clj" ]
consumer.sourceContentFor("my-cool-lib.clj")
// "..."
consumer.sourceContentFor("this is not in the source map");
// Error: "this is not in the source map" is not in the source map
consumer.sourceContentFor("this is not in the source map", true);
// null
```
#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
Iterate over each mapping between an original source/line/column and a
generated line/column in this source map.
* `callback`: The function that is called with each mapping. Mappings have the
form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,
name }`
* `context`: Optional. If specified, this object will be the value of `this`
every time that `callback` is called.
* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
`SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
the mappings sorted by the generated file's line/column order or the
original's source/line/column order, respectively. Defaults to
`SourceMapConsumer.GENERATED_ORDER`.
```js
consumer.eachMapping(function (m) { console.log(m); })
// ...
// { source: 'illmatic.js',
// generatedLine: 1,
// generatedColumn: 0,
// originalLine: 1,
// originalColumn: 0,
// name: null }
// { source: 'illmatic.js',
// generatedLine: 2,
// generatedColumn: 0,
// originalLine: 2,
// originalColumn: 0,
// name: null }
// ...
```
### SourceMapGenerator
An instance of the SourceMapGenerator represents a source map which is being
built incrementally.
#### new SourceMapGenerator([startOfSourceMap])
You may pass an object with the following properties:
* `file`: The filename of the generated source that this source map is
associated with.
* `sourceRoot`: A root for all relative URLs in this source map.
* `skipValidation`: Optional. When `true`, disables validation of mappings as
they are added. This can improve performance but should be used with
discretion, as a last resort. Even then, one should avoid using this flag when
running tests, if possible.
```js
var generator = new sourceMap.SourceMapGenerator({
file: "my-generated-javascript-file.js",
sourceRoot: "http://example.com/app/js/"
});
```
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance.
* `sourceMapConsumer` The SourceMap.
```js
var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer);
```
#### SourceMapGenerator.prototype.addMapping(mapping)
Add a single mapping from original source line and column to the generated
source's line and column for this source map being created. The mapping object
should have the following properties:
* `generated`: An object with the generated line and column positions.
* `original`: An object with the original line and column positions.
* `source`: The original source file (relative to the sourceRoot).
* `name`: An optional original token name for this mapping.
```js
generator.addMapping({
source: "module-one.scm",
original: { line: 128, column: 0 },
generated: { line: 3, column: 456 }
})
```
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
Set the source content for an original source file.
* `sourceFile` the URL of the original source file.
* `sourceContent` the content of the source file.
```js
generator.setSourceContent("module-one.scm",
fs.readFileSync("path/to/module-one.scm"))
```
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
Applies a SourceMap for a source file to the SourceMap.
Each mapping to the supplied source file is rewritten using the
supplied SourceMap. Note: The resolution for the resulting mappings
is the minimum of this map and the supplied map.
* `sourceMapConsumer`: The SourceMap to be applied.
* `sourceFile`: Optional. The filename of the source file.
If omitted, sourceMapConsumer.file will be used, if it exists.
Otherwise an error will be thrown.
* `sourceMapPath`: Optional. The dirname of the path to the SourceMap
to be applied. If relative, it is relative to the SourceMap.
This parameter is needed when the two SourceMaps aren't in the same
directory, and the SourceMap to be applied contains relative source
paths. If so, those relative source paths need to be rewritten
relative to the SourceMap.
If omitted, it is assumed that both SourceMaps are in the same directory,
thus not needing any rewriting. (Supplying `'.'` has the same effect.)
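The README stops short of a snippet here, so the following is a minimal sketch under assumed names: `generator` already maps `bundle.js` back to `transpiled.js`, and `transpiledToOriginalMap` is a raw source map from `transpiled.js` back to `original.coffee`.
```js
var intermediateConsumer = new sourceMap.SourceMapConsumer(transpiledToOriginalMap);

// Rewrite every mapping that points at transpiled.js so it points at
// original.coffee instead.
generator.applySourceMap(intermediateConsumer, "transpiled.js");

console.log(generator.toString());
// generator now maps bundle.js straight back to original.coffee.
```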
#### SourceMapGenerator.prototype.toString()
Renders the source map being generated to a string.
```js
generator.toString()
// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}'
```
### SourceNode
SourceNodes provide a way to abstract over interpolating and/or concatenating
snippets of generated JavaScript source code, while maintaining the line and
column information associated between those snippets and the original source
code. This is useful as the final intermediate representation a compiler might
use before outputting the generated JS and source map.
#### new SourceNode([line, column, source[, chunk[, name]]])
* `line`: The original line number associated with this source node, or null if
it isn't associated with an original line.
* `column`: The original column number associated with this source node, or null
if it isn't associated with an original column.
* `source`: The original source's filename; null if no filename is provided.
* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
below.
* `name`: Optional. The original identifier.
```js
var node = new SourceNode(1, 2, "a.cpp", [
new SourceNode(3, 4, "b.cpp", "extern int status;\n"),
new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"),
new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"),
]);
```
#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])
Creates a SourceNode from generated code and a SourceMapConsumer.
* `code`: The generated code
* `sourceMapConsumer` The SourceMap for the generated code
* `relativePath` The optional path that relative sources in `sourceMapConsumer`
should be relative to.
```js
var consumer = new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map", "utf8"));
var node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js"),
consumer);
```
#### SourceNode.prototype.add(chunk)
Add a chunk of generated JS to this source node.
* `chunk`: A string snippet of generated JS code, another instance of
`SourceNode`, or an array where each member is one of those things.
```js
node.add(" + ");
node.add(otherNode);
node.add([leftHandOperandNode, " + ", rightHandOperandNode]);
```
#### SourceNode.prototype.prepend(chunk)
Prepend a chunk of generated JS to this source node.
* `chunk`: A string snippet of generated JS code, another instance of
`SourceNode`, or an array where each member is one of those things.
```js
node.prepend("/** Build Id: f783haef86324gf **/\n\n");
```
#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)
Set the source content for a source file. This will be added to the
`SourceMap` in the `sourcesContent` field.
* `sourceFile`: The filename of the source file
* `sourceContent`: The content of the source file
```js
node.setSourceContent("module-one.scm",
fs.readFileSync("path/to/module-one.scm"))
```
#### SourceNode.prototype.walk(fn)
Walk over the tree of JS snippets in this node and its children. The walking
function is called once for each snippet of JS and is passed that snippet and
its original associated source's line/column location.
* `fn`: The traversal function.
```js
var node = new SourceNode(1, 2, "a.js", [
new SourceNode(3, 4, "b.js", "uno"),
"dos",
[
"tres",
new SourceNode(5, 6, "c.js", "quatro")
]
]);
node.walk(function (code, loc) { console.log("WALK:", code, loc); })
// WALK: uno { source: 'b.js', line: 3, column: 4, name: null }
// WALK: dos { source: 'a.js', line: 1, column: 2, name: null }
// WALK: tres { source: 'a.js', line: 1, column: 2, name: null }
// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null }
```
#### SourceNode.prototype.walkSourceContents(fn)
Walk over the tree of SourceNodes. The walking function is called for each
source file content and is passed the filename and source content.
* `fn`: The traversal function.
```js
var a = new SourceNode(1, 2, "a.js", "generated from a");
a.setSourceContent("a.js", "original a");
var b = new SourceNode(1, 2, "b.js", "generated from b");
b.setSourceContent("b.js", "original b");
var c = new SourceNode(1, 2, "c.js", "generated from c");
c.setSourceContent("c.js", "original c");
var node = new SourceNode(null, null, null, [a, b, c]);
node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); })
// WALK: a.js : original a
// WALK: b.js : original b
// WALK: c.js : original c
```
#### SourceNode.prototype.join(sep)
Like `Array.prototype.join` except for SourceNodes. Inserts the separator
between each of this source node's children.
* `sep`: The separator.
```js
var lhs = new SourceNode(1, 2, "a.rs", "my_copy");
var operand = new SourceNode(3, 4, "a.rs", "=");
var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()");
var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]);
var joinedNode = node.join(" ");
```
#### SourceNode.prototype.replaceRight(pattern, replacement)
Call `String.prototype.replace` on the very right-most source snippet. Useful
for trimming white space from the end of a source node, etc.
* `pattern`: The pattern to replace.
* `replacement`: The thing to replace the pattern with.
```js
// Trim trailing white space.
node.replaceRight(/\s*$/, "");
```
#### SourceNode.prototype.toString()
Return the string representation of this source node. Walks over the tree and
concatenates all the various snippets together to one string.
```js
var node = new SourceNode(1, 2, "a.js", [
new SourceNode(3, 4, "b.js", "uno"),
"dos",
[
"tres",
new SourceNode(5, 6, "c.js", "quatro")
]
]);
node.toString()
// 'unodostresquatro'
```
#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])
Returns the string representation of this tree of source nodes, plus a
SourceMapGenerator which contains all the mappings between the generated and
original sources.
The arguments are the same as those to `new SourceMapGenerator`.
```js
var node = new SourceNode(1, 2, "a.js", [
new SourceNode(3, 4, "b.js", "uno"),
"dos",
[
"tres",
new SourceNode(5, 6, "c.js", "quatro")
]
]);
node.toStringWithSourceMap({ file: "my-output-file.js" })
// { code: 'unodostresquatro',
// map: [object SourceMapGenerator] }
```

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,121 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
var util = require('./util');
var has = Object.prototype.hasOwnProperty;
var hasNativeMap = typeof Map !== "undefined";
/**
* A data structure which is a combination of an array and a set. Adding a new
* member is O(1), testing for membership is O(1), and finding the index of an
* element is O(1). Removing elements from the set is not supported. Only
* strings are supported for membership.
*/
function ArraySet() {
this._array = [];
this._set = hasNativeMap ? new Map() : Object.create(null);
}
/**
* Static method for creating ArraySet instances from an existing array.
*/
ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {
var set = new ArraySet();
for (var i = 0, len = aArray.length; i < len; i++) {
set.add(aArray[i], aAllowDuplicates);
}
return set;
};
/**
* Return how many unique items are in this ArraySet. If duplicates have been
* added, then those do not count towards the size.
*
* @returns Number
*/
ArraySet.prototype.size = function ArraySet_size() {
return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length;
};
/**
* Add the given string to this set.
*
* @param String aStr
*/
ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {
var sStr = hasNativeMap ? aStr : util.toSetString(aStr);
var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr);
var idx = this._array.length;
if (!isDuplicate || aAllowDuplicates) {
this._array.push(aStr);
}
if (!isDuplicate) {
if (hasNativeMap) {
this._set.set(aStr, idx);
} else {
this._set[sStr] = idx;
}
}
};
/**
* Is the given string a member of this set?
*
* @param String aStr
*/
ArraySet.prototype.has = function ArraySet_has(aStr) {
if (hasNativeMap) {
return this._set.has(aStr);
} else {
var sStr = util.toSetString(aStr);
return has.call(this._set, sStr);
}
};
/**
* What is the index of the given string in the array?
*
* @param String aStr
*/
ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {
if (hasNativeMap) {
var idx = this._set.get(aStr);
if (idx >= 0) {
return idx;
}
} else {
var sStr = util.toSetString(aStr);
if (has.call(this._set, sStr)) {
return this._set[sStr];
}
}
throw new Error('"' + aStr + '" is not in the set.');
};
/**
* What is the element at the given index?
*
* @param Number aIdx
*/
ArraySet.prototype.at = function ArraySet_at(aIdx) {
if (aIdx >= 0 && aIdx < this._array.length) {
return this._array[aIdx];
}
throw new Error('No element indexed by ' + aIdx);
};
/**
* Returns the array representation of this set (which has the proper indices
* indicated by indexOf). Note that this is a copy of the internal array used
* for storing the members so that no one can mess with internal state.
*/
ArraySet.prototype.toArray = function ArraySet_toArray() {
return this._array.slice();
};
exports.ArraySet = ArraySet;
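
A short usage sketch of this internal module (the relative require path assumes the `lib/` directory):

```js
var ArraySet = require('./array-set').ArraySet;

var set = ArraySet.fromArray(['one.js', 'two.js']);
set.add('three.js');
set.has('two.js');      // true
set.indexOf('two.js');  // 1
set.at(2);              // 'three.js'
set.toArray();          // ['one.js', 'two.js', 'three.js'] (a copy)
```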

View File

@ -1,140 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*
* Based on the Base 64 VLQ implementation in Closure Compiler:
* https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
*
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
var base64 = require('./base64');
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
// length quantities we use in the source map spec, the first bit is the sign,
// the next four bits are the actual value, and the 6th bit is the
// continuation bit. The continuation bit tells us whether there are more
// digits in this value following this digit.
//
// Continuation
// | Sign
// | |
// V V
// 101011
var VLQ_BASE_SHIFT = 5;
// binary: 100000
var VLQ_BASE = 1 << VLQ_BASE_SHIFT;
// binary: 011111
var VLQ_BASE_MASK = VLQ_BASE - 1;
// binary: 100000
var VLQ_CONTINUATION_BIT = VLQ_BASE;
/**
* Converts from a two's complement value to a value where the sign bit is
* placed in the least significant bit. For example, as decimals:
* 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
* 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
*/
function toVLQSigned(aValue) {
return aValue < 0
? ((-aValue) << 1) + 1
: (aValue << 1) + 0;
}
/**
* Converts to a two's complement value from a value where the sign bit is
* placed in the least significant bit. For example, as decimals:
* 2 (10 binary) becomes 1, 3 (11 binary) becomes -1
* 4 (100 binary) becomes 2, 5 (101 binary) becomes -2
*/
function fromVLQSigned(aValue) {
var isNegative = (aValue & 1) === 1;
var shifted = aValue >> 1;
return isNegative
? -shifted
: shifted;
}
/**
* Returns the base 64 VLQ encoded value.
*/
exports.encode = function base64VLQ_encode(aValue) {
var encoded = "";
var digit;
var vlq = toVLQSigned(aValue);
do {
digit = vlq & VLQ_BASE_MASK;
vlq >>>= VLQ_BASE_SHIFT;
if (vlq > 0) {
// There are still more digits in this value, so we must make sure the
// continuation bit is marked.
digit |= VLQ_CONTINUATION_BIT;
}
encoded += base64.encode(digit);
} while (vlq > 0);
return encoded;
};
/**
* Decodes the next base 64 VLQ value from the given string and returns the
* value and the rest of the string via the out parameter.
*/
exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {
var strLen = aStr.length;
var result = 0;
var shift = 0;
var continuation, digit;
do {
if (aIndex >= strLen) {
throw new Error("Expected more digits in base 64 VLQ value.");
}
digit = base64.decode(aStr.charCodeAt(aIndex++));
if (digit === -1) {
throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1));
}
continuation = !!(digit & VLQ_CONTINUATION_BIT);
digit &= VLQ_BASE_MASK;
result = result + (digit << shift);
shift += VLQ_BASE_SHIFT;
} while (continuation);
aOutParam.value = fromVLQSigned(result);
aOutParam.rest = aIndex;
};
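
A round-trip sketch of the two exports above (internal module, `lib/`-relative require path assumed):

```js
var base64VLQ = require('./base64-vlq');

var encoded = base64VLQ.encode(-17);   // a short base 64 VLQ string
var out = {};
base64VLQ.decode(encoded, 0, out);
out.value;                             // -17
out.rest;                              // index just past the decoded value
```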

View File

@ -1,67 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');
/**
* Encode an integer in the range of 0 to 63 to a single base 64 digit.
*/
exports.encode = function (number) {
if (0 <= number && number < intToCharMap.length) {
return intToCharMap[number];
}
throw new TypeError("Must be between 0 and 63: " + number);
};
/**
* Decode a single base 64 character code digit to an integer. Returns -1 on
* failure.
*/
exports.decode = function (charCode) {
var bigA = 65; // 'A'
var bigZ = 90; // 'Z'
var littleA = 97; // 'a'
var littleZ = 122; // 'z'
var zero = 48; // '0'
var nine = 57; // '9'
var plus = 43; // '+'
var slash = 47; // '/'
var littleOffset = 26;
var numberOffset = 52;
// 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ
if (bigA <= charCode && charCode <= bigZ) {
return (charCode - bigA);
}
// 26 - 51: abcdefghijklmnopqrstuvwxyz
if (littleA <= charCode && charCode <= littleZ) {
return (charCode - littleA + littleOffset);
}
// 52 - 61: 0123456789
if (zero <= charCode && charCode <= nine) {
return (charCode - zero + numberOffset);
}
// 62: +
if (charCode == plus) {
return 62;
}
// 63: /
if (charCode == slash) {
return 63;
}
// Invalid base64 digit.
return -1;
};
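
For completeness, a tiny sketch of the digit-level encode/decode pair defined above:

```js
var base64 = require('./base64');

base64.encode(0);                      // 'A'
base64.encode(63);                     // '/'
base64.decode('A'.charCodeAt(0));      // 0
base64.decode('?'.charCodeAt(0));      // -1 (not a base 64 digit)
```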

View File

@ -1,111 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
exports.GREATEST_LOWER_BOUND = 1;
exports.LEAST_UPPER_BOUND = 2;
/**
* Recursive implementation of binary search.
*
* @param aLow Indices here and lower do not contain the needle.
* @param aHigh Indices here and higher do not contain the needle.
* @param aNeedle The element being searched for.
* @param aHaystack The non-empty array being searched.
* @param aCompare Function which takes two elements and returns -1, 0, or 1.
* @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
* 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
* closest element that is smaller than or greater than the one we are
* searching for, respectively, if the exact element cannot be found.
*/
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {
// This function terminates when one of the following is true:
//
// 1. We find the exact element we are looking for.
//
// 2. We did not find the exact element, but we can return the index of
// the next-closest element.
//
// 3. We did not find the exact element, and there is no next-closest
// element than the one we are searching for, so we return -1.
var mid = Math.floor((aHigh - aLow) / 2) + aLow;
var cmp = aCompare(aNeedle, aHaystack[mid], true);
if (cmp === 0) {
// Found the element we are looking for.
return mid;
}
else if (cmp > 0) {
// Our needle is greater than aHaystack[mid].
if (aHigh - mid > 1) {
// The element is in the upper half.
return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);
}
// The exact needle element was not found in this haystack. Determine if
// we are in termination case (3) or (2) and return the appropriate thing.
if (aBias == exports.LEAST_UPPER_BOUND) {
return aHigh < aHaystack.length ? aHigh : -1;
} else {
return mid;
}
}
else {
// Our needle is less than aHaystack[mid].
if (mid - aLow > 1) {
// The element is in the lower half.
return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);
}
// we are in termination case (3) or (2) and return the appropriate thing.
if (aBias == exports.LEAST_UPPER_BOUND) {
return mid;
} else {
return aLow < 0 ? -1 : aLow;
}
}
}
/**
* This is an implementation of binary search which will always try and return
* the index of the closest element if there is no exact hit. This is because
* mappings between original and generated line/col pairs are single points,
* and there is an implicit region between each of them, so a miss just means
* that you aren't on the very start of a region.
*
* @param aNeedle The element you are looking for.
* @param aHaystack The array that is being searched.
* @param aCompare A function which takes the needle and an element in the
* array and returns -1, 0, or 1 depending on whether the needle is less
* than, equal to, or greater than the element, respectively.
* @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
* 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
* closest element that is smaller than or greater than the one we are
* searching for, respectively, if the exact element cannot be found.
* Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.
*/
exports.search = function search(aNeedle, aHaystack, aCompare, aBias) {
if (aHaystack.length === 0) {
return -1;
}
var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,
aCompare, aBias || exports.GREATEST_LOWER_BOUND);
if (index < 0) {
return -1;
}
// We have found either the exact element, or the next-closest element than
// the one we are searching for. However, there may be more than one such
// element. Make sure we always return the smallest of these.
while (index - 1 >= 0) {
if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {
break;
}
--index;
}
return index;
};
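
A usage sketch with a plain numeric comparator (the haystack values are arbitrary):

```js
var binarySearch = require('./binary-search');

var haystack = [2, 4, 6, 8];
function compare(needle, element) {
  return needle - element;
}

// 5 is not present; the default GREATEST_LOWER_BOUND bias returns the
// index of the closest smaller element, 4.
binarySearch.search(5, haystack, compare);                                   // 1

// With LEAST_UPPER_BOUND the closest greater element, 6, is returned instead.
binarySearch.search(5, haystack, compare, binarySearch.LEAST_UPPER_BOUND);   // 2
```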

View File

@ -1,79 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2014 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
var util = require('./util');
/**
* Determine whether mappingB is after mappingA with respect to generated
* position.
*/
function generatedPositionAfter(mappingA, mappingB) {
// Optimized for most common case
var lineA = mappingA.generatedLine;
var lineB = mappingB.generatedLine;
var columnA = mappingA.generatedColumn;
var columnB = mappingB.generatedColumn;
return lineB > lineA || lineB == lineA && columnB >= columnA ||
util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;
}
/**
* A data structure to provide a sorted view of accumulated mappings in a
* performance conscious manner. It trades a negligible overhead in the general
* case for a large speedup when mappings are added in order.
*/
function MappingList() {
this._array = [];
this._sorted = true;
// Serves as infimum
this._last = {generatedLine: -1, generatedColumn: 0};
}
/**
* Iterate through internal items. This method takes the same arguments that
* `Array.prototype.forEach` takes.
*
* NOTE: The order of the mappings is NOT guaranteed.
*/
MappingList.prototype.unsortedForEach =
function MappingList_forEach(aCallback, aThisArg) {
this._array.forEach(aCallback, aThisArg);
};
/**
* Add the given source mapping.
*
* @param Object aMapping
*/
MappingList.prototype.add = function MappingList_add(aMapping) {
if (generatedPositionAfter(this._last, aMapping)) {
this._last = aMapping;
this._array.push(aMapping);
} else {
this._sorted = false;
this._array.push(aMapping);
}
};
/**
* Returns the flat, sorted array of mappings. The mappings are sorted by
* generated position.
*
* WARNING: This method returns internal data without copying, for
* performance. The return value must NOT be mutated, and should be treated as
* an immutable borrow. If you want to take ownership, you must make your own
* copy.
*/
MappingList.prototype.toArray = function MappingList_toArray() {
if (!this._sorted) {
this._array.sort(util.compareByGeneratedPositionsInflated);
this._sorted = true;
}
return this._array;
};
exports.MappingList = MappingList;
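
A short sketch of how the list keeps insertion cheap while still handing back a sorted array:

```js
var MappingList = require('./mapping-list').MappingList;

var list = new MappingList();
list.add({ generatedLine: 1, generatedColumn: 0 });
list.add({ generatedLine: 3, generatedColumn: 5 });   // still in order: no sort needed later
list.add({ generatedLine: 2, generatedColumn: 0 });   // out of order: flags a deferred sort
list.toArray();   // sorted by generated position; treat the result as read-only
```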

View File

@ -1,114 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
// It turns out that some (most?) JavaScript engines don't self-host
// `Array.prototype.sort`. This makes sense because C++ will likely remain
// faster than JS when doing raw CPU-intensive sorting. However, when using a
// custom comparator function, calling back and forth between the VM's C++ and
// JIT'd JS is rather slow *and* loses JIT type information, resulting in
// worse generated code for the comparator function than would be optimal. In
// fact, when sorting with a comparator, these costs outweigh the benefits of
// sorting in C++. By using our own JS-implemented Quick Sort (below), we get
// a ~3500ms mean speed-up in `bench/bench.html`.
/**
* Swap the elements indexed by `x` and `y` in the array `ary`.
*
* @param {Array} ary
* The array.
* @param {Number} x
* The index of the first item.
* @param {Number} y
* The index of the second item.
*/
function swap(ary, x, y) {
var temp = ary[x];
ary[x] = ary[y];
ary[y] = temp;
}
/**
* Returns a random integer within the range `low .. high` inclusive.
*
* @param {Number} low
* The lower bound on the range.
* @param {Number} high
* The upper bound on the range.
*/
function randomIntInRange(low, high) {
return Math.round(low + (Math.random() * (high - low)));
}
/**
* The Quick Sort algorithm.
*
* @param {Array} ary
* An array to sort.
* @param {function} comparator
* Function to use to compare two items.
* @param {Number} p
* Start index of the array
* @param {Number} r
* End index of the array
*/
function doQuickSort(ary, comparator, p, r) {
// If our lower bound is less than our upper bound, we (1) partition the
// array into two pieces and (2) recurse on each half. If it is not, this is
// the empty array and our base case.
if (p < r) {
// (1) Partitioning.
//
// The partitioning chooses a pivot between `p` and `r` and moves all
// elements that are less than or equal to the pivot before it, and
// all the elements that are greater than it after it. The effect is that
// once partition is done, the pivot is in the exact place it will be when
// the array is put in sorted order, and it will not need to be moved
// again. This runs in O(n) time.
// Always choose a random pivot so that an input array which is reverse
// sorted does not cause O(n^2) running time.
var pivotIndex = randomIntInRange(p, r);
var i = p - 1;
swap(ary, pivotIndex, r);
var pivot = ary[r];
// Immediately after `j` is incremented in this loop, the following hold
// true:
//
// * Every element in `ary[p .. i]` is less than or equal to the pivot.
//
// * Every element in `ary[i+1 .. j-1]` is greater than the pivot.
for (var j = p; j < r; j++) {
if (comparator(ary[j], pivot) <= 0) {
i += 1;
swap(ary, i, j);
}
}
swap(ary, i + 1, j);
var q = i + 1;
// (2) Recurse on each half.
doQuickSort(ary, comparator, p, q - 1);
doQuickSort(ary, comparator, q + 1, r);
}
}
/**
* Sort the given array in-place with the given comparator function.
*
* @param {Array} ary
* An array to sort.
* @param {function} comparator
* Function to use to compare two items.
*/
exports.quickSort = function (ary, comparator) {
doQuickSort(ary, comparator, 0, ary.length - 1);
};
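
The export sorts in place; a tiny sketch with a numeric comparator:

```js
var quickSort = require('./quick-sort').quickSort;

var ary = [3, 1, 2];
quickSort(ary, function (a, b) { return a - b; });
ary;   // [1, 2, 3]
```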

File diff suppressed because it is too large

View File

@ -1,416 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
var base64VLQ = require('./base64-vlq');
var util = require('./util');
var ArraySet = require('./array-set').ArraySet;
var MappingList = require('./mapping-list').MappingList;
/**
* An instance of the SourceMapGenerator represents a source map which is
* being built incrementally. You may pass an object with the following
* properties:
*
* - file: The filename of the generated source.
* - sourceRoot: A root for all relative URLs in this source map.
*/
function SourceMapGenerator(aArgs) {
if (!aArgs) {
aArgs = {};
}
this._file = util.getArg(aArgs, 'file', null);
this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);
this._skipValidation = util.getArg(aArgs, 'skipValidation', false);
this._sources = new ArraySet();
this._names = new ArraySet();
this._mappings = new MappingList();
this._sourcesContents = null;
}
SourceMapGenerator.prototype._version = 3;
/**
* Creates a new SourceMapGenerator based on a SourceMapConsumer
*
* @param aSourceMapConsumer The SourceMap.
*/
SourceMapGenerator.fromSourceMap =
function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {
var sourceRoot = aSourceMapConsumer.sourceRoot;
var generator = new SourceMapGenerator({
file: aSourceMapConsumer.file,
sourceRoot: sourceRoot
});
aSourceMapConsumer.eachMapping(function (mapping) {
var newMapping = {
generated: {
line: mapping.generatedLine,
column: mapping.generatedColumn
}
};
if (mapping.source != null) {
newMapping.source = mapping.source;
if (sourceRoot != null) {
newMapping.source = util.relative(sourceRoot, newMapping.source);
}
newMapping.original = {
line: mapping.originalLine,
column: mapping.originalColumn
};
if (mapping.name != null) {
newMapping.name = mapping.name;
}
}
generator.addMapping(newMapping);
});
aSourceMapConsumer.sources.forEach(function (sourceFile) {
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
if (content != null) {
generator.setSourceContent(sourceFile, content);
}
});
return generator;
};
/**
* Add a single mapping from original source line and column to the generated
* source's line and column for this source map being created. The mapping
* object should have the following properties:
*
* - generated: An object with the generated line and column positions.
* - original: An object with the original line and column positions.
* - source: The original source file (relative to the sourceRoot).
* - name: An optional original token name for this mapping.
*/
SourceMapGenerator.prototype.addMapping =
function SourceMapGenerator_addMapping(aArgs) {
var generated = util.getArg(aArgs, 'generated');
var original = util.getArg(aArgs, 'original', null);
var source = util.getArg(aArgs, 'source', null);
var name = util.getArg(aArgs, 'name', null);
if (!this._skipValidation) {
this._validateMapping(generated, original, source, name);
}
if (source != null) {
source = String(source);
if (!this._sources.has(source)) {
this._sources.add(source);
}
}
if (name != null) {
name = String(name);
if (!this._names.has(name)) {
this._names.add(name);
}
}
this._mappings.add({
generatedLine: generated.line,
generatedColumn: generated.column,
originalLine: original != null && original.line,
originalColumn: original != null && original.column,
source: source,
name: name
});
};
/**
* Set the source content for a source file.
*/
SourceMapGenerator.prototype.setSourceContent =
function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {
var source = aSourceFile;
if (this._sourceRoot != null) {
source = util.relative(this._sourceRoot, source);
}
if (aSourceContent != null) {
// Add the source content to the _sourcesContents map.
// Create a new _sourcesContents map if the property is null.
if (!this._sourcesContents) {
this._sourcesContents = Object.create(null);
}
this._sourcesContents[util.toSetString(source)] = aSourceContent;
} else if (this._sourcesContents) {
// Remove the source file from the _sourcesContents map.
// If the _sourcesContents map is empty, set the property to null.
delete this._sourcesContents[util.toSetString(source)];
if (Object.keys(this._sourcesContents).length === 0) {
this._sourcesContents = null;
}
}
};
/**
* Applies the mappings of a sub-source-map for a specific source file to the
* source map being generated. Each mapping to the supplied source file is
* rewritten using the supplied source map. Note: The resolution for the
 * resulting mappings is the minimum of this map and the supplied map.
*
* @param aSourceMapConsumer The source map to be applied.
* @param aSourceFile Optional. The filename of the source file.
* If omitted, SourceMapConsumer's file property will be used.
* @param aSourceMapPath Optional. The dirname of the path to the source map
* to be applied. If relative, it is relative to the SourceMapConsumer.
* This parameter is needed when the two source maps aren't in the same
* directory, and the source map to be applied contains relative source
* paths. If so, those relative source paths need to be rewritten
* relative to the SourceMapGenerator.
*/
SourceMapGenerator.prototype.applySourceMap =
function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {
var sourceFile = aSourceFile;
// If aSourceFile is omitted, we will use the file property of the SourceMap
if (aSourceFile == null) {
if (aSourceMapConsumer.file == null) {
throw new Error(
'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +
'or the source map\'s "file" property. Both were omitted.'
);
}
sourceFile = aSourceMapConsumer.file;
}
var sourceRoot = this._sourceRoot;
// Make "sourceFile" relative if an absolute Url is passed.
if (sourceRoot != null) {
sourceFile = util.relative(sourceRoot, sourceFile);
}
// Applying the SourceMap can add and remove items from the sources and
// the names array.
var newSources = new ArraySet();
var newNames = new ArraySet();
// Find mappings for the "sourceFile"
this._mappings.unsortedForEach(function (mapping) {
if (mapping.source === sourceFile && mapping.originalLine != null) {
// Check if it can be mapped by the source map, then update the mapping.
var original = aSourceMapConsumer.originalPositionFor({
line: mapping.originalLine,
column: mapping.originalColumn
});
if (original.source != null) {
// Copy mapping
mapping.source = original.source;
if (aSourceMapPath != null) {
          mapping.source = util.join(aSourceMapPath, mapping.source);
}
if (sourceRoot != null) {
mapping.source = util.relative(sourceRoot, mapping.source);
}
mapping.originalLine = original.line;
mapping.originalColumn = original.column;
if (original.name != null) {
mapping.name = original.name;
}
}
}
var source = mapping.source;
if (source != null && !newSources.has(source)) {
newSources.add(source);
}
var name = mapping.name;
if (name != null && !newNames.has(name)) {
newNames.add(name);
}
}, this);
this._sources = newSources;
this._names = newNames;
// Copy sourcesContents of applied map.
aSourceMapConsumer.sources.forEach(function (sourceFile) {
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
if (content != null) {
if (aSourceMapPath != null) {
sourceFile = util.join(aSourceMapPath, sourceFile);
}
if (sourceRoot != null) {
sourceFile = util.relative(sourceRoot, sourceFile);
}
this.setSourceContent(sourceFile, content);
}
}, this);
};
/**
* A mapping can have one of the three levels of data:
*
* 1. Just the generated position.
* 2. The Generated position, original position, and original source.
* 3. Generated and original position, original source, as well as a name
* token.
*
* To maintain consistency, we validate that any new mapping being added falls
* in to one of these categories.
*/
SourceMapGenerator.prototype._validateMapping =
function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,
aName) {
// When aOriginal is truthy but has empty values for .line and .column,
// it is most likely a programmer error. In this case we throw a very
// specific error message to try to guide them the right way.
// For example: https://github.com/Polymer/polymer-bundler/pull/519
if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') {
throw new Error(
'original.line and original.column are not numbers -- you probably meant to omit ' +
'the original mapping entirely and only map the generated position. If so, pass ' +
'null for the original mapping instead of an object with empty or null values.'
);
}
if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
&& aGenerated.line > 0 && aGenerated.column >= 0
&& !aOriginal && !aSource && !aName) {
// Case 1.
return;
}
else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
&& aOriginal && 'line' in aOriginal && 'column' in aOriginal
&& aGenerated.line > 0 && aGenerated.column >= 0
&& aOriginal.line > 0 && aOriginal.column >= 0
&& aSource) {
// Cases 2 and 3.
return;
}
else {
throw new Error('Invalid mapping: ' + JSON.stringify({
generated: aGenerated,
source: aSource,
original: aOriginal,
name: aName
}));
}
};
/**
* Serialize the accumulated mappings in to the stream of base 64 VLQs
* specified by the source map format.
*/
SourceMapGenerator.prototype._serializeMappings =
function SourceMapGenerator_serializeMappings() {
var previousGeneratedColumn = 0;
var previousGeneratedLine = 1;
var previousOriginalColumn = 0;
var previousOriginalLine = 0;
var previousName = 0;
var previousSource = 0;
var result = '';
var next;
var mapping;
var nameIdx;
var sourceIdx;
var mappings = this._mappings.toArray();
for (var i = 0, len = mappings.length; i < len; i++) {
mapping = mappings[i];
    next = '';
if (mapping.generatedLine !== previousGeneratedLine) {
previousGeneratedColumn = 0;
while (mapping.generatedLine !== previousGeneratedLine) {
next += ';';
previousGeneratedLine++;
}
}
else {
if (i > 0) {
if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {
continue;
}
next += ',';
}
}
next += base64VLQ.encode(mapping.generatedColumn
- previousGeneratedColumn);
previousGeneratedColumn = mapping.generatedColumn;
if (mapping.source != null) {
sourceIdx = this._sources.indexOf(mapping.source);
next += base64VLQ.encode(sourceIdx - previousSource);
previousSource = sourceIdx;
// lines are stored 0-based in SourceMap spec version 3
next += base64VLQ.encode(mapping.originalLine - 1
- previousOriginalLine);
previousOriginalLine = mapping.originalLine - 1;
next += base64VLQ.encode(mapping.originalColumn
- previousOriginalColumn);
previousOriginalColumn = mapping.originalColumn;
if (mapping.name != null) {
nameIdx = this._names.indexOf(mapping.name);
next += base64VLQ.encode(nameIdx - previousName);
previousName = nameIdx;
}
}
result += next;
}
return result;
};
SourceMapGenerator.prototype._generateSourcesContent =
function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {
return aSources.map(function (source) {
if (!this._sourcesContents) {
return null;
}
if (aSourceRoot != null) {
source = util.relative(aSourceRoot, source);
}
var key = util.toSetString(source);
return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)
? this._sourcesContents[key]
: null;
}, this);
};
/**
* Externalize the source map.
*/
SourceMapGenerator.prototype.toJSON =
function SourceMapGenerator_toJSON() {
var map = {
version: this._version,
sources: this._sources.toArray(),
names: this._names.toArray(),
mappings: this._serializeMappings()
};
if (this._file != null) {
map.file = this._file;
}
if (this._sourceRoot != null) {
map.sourceRoot = this._sourceRoot;
}
if (this._sourcesContents) {
map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);
}
return map;
};
/**
* Render the source map being generated to a string.
*/
SourceMapGenerator.prototype.toString =
function SourceMapGenerator_toString() {
return JSON.stringify(this.toJSON());
};
exports.SourceMapGenerator = SourceMapGenerator;
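// A minimal usage sketch of the API defined above (the constructor options,
// addMapping, setSourceContent and toString); the file names and positions
// below are illustrative only, not taken from any real build.
var generator = new SourceMapGenerator({ file: 'bundle.js' });
generator.addMapping({
  generated: { line: 1, column: 0 },
  original: { line: 10, column: 4 },
  source: 'a.js',
  name: 'answer'
});
generator.setSourceContent('a.js', 'var answer = 42;');
// toString() serializes toJSON(): version, sources, names, mappings, file, sourcesContent.
console.log(generator.toString());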

View File

@ -1,413 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;
var util = require('./util');
// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other
// operating systems these days (capturing the result).
var REGEX_NEWLINE = /(\r?\n)/;
// Newline character code for charCodeAt() comparisons
var NEWLINE_CODE = 10;
// Private symbol for identifying `SourceNode`s when multiple versions of
// the source-map library are loaded. This MUST NOT CHANGE across
// versions!
var isSourceNode = "$$$isSourceNode$$$";
/**
* SourceNodes provide a way to abstract over interpolating/concatenating
* snippets of generated JavaScript source code while maintaining the line and
* column information associated with the original source code.
*
* @param aLine The original line number.
* @param aColumn The original column number.
* @param aSource The original source's filename.
* @param aChunks Optional. An array of strings which are snippets of
* generated JS, or other SourceNodes.
* @param aName The original identifier.
*/
function SourceNode(aLine, aColumn, aSource, aChunks, aName) {
this.children = [];
this.sourceContents = {};
this.line = aLine == null ? null : aLine;
this.column = aColumn == null ? null : aColumn;
this.source = aSource == null ? null : aSource;
this.name = aName == null ? null : aName;
this[isSourceNode] = true;
if (aChunks != null) this.add(aChunks);
}
/**
* Creates a SourceNode from generated code and a SourceMapConsumer.
*
* @param aGeneratedCode The generated code
* @param aSourceMapConsumer The SourceMap for the generated code
* @param aRelativePath Optional. The path that relative sources in the
* SourceMapConsumer should be relative to.
*/
SourceNode.fromStringWithSourceMap =
function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {
// The SourceNode we want to fill with the generated code
// and the SourceMap
var node = new SourceNode();
// All even indices of this array are one line of the generated code,
// while all odd indices are the newlines between two adjacent lines
// (since `REGEX_NEWLINE` captures its match).
// Processed fragments are accessed by calling `shiftNextLine`.
var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);
var remainingLinesIndex = 0;
var shiftNextLine = function() {
var lineContents = getNextLine();
// The last line of a file might not have a newline.
var newLine = getNextLine() || "";
return lineContents + newLine;
function getNextLine() {
return remainingLinesIndex < remainingLines.length ?
remainingLines[remainingLinesIndex++] : undefined;
}
};
// We need to remember the position of "remainingLines"
var lastGeneratedLine = 1, lastGeneratedColumn = 0;
  // To generate SourceNodes we need a code range.
  // To extract it, the current and last mapping are used.
// Here we store the last mapping.
var lastMapping = null;
aSourceMapConsumer.eachMapping(function (mapping) {
if (lastMapping !== null) {
// We add the code from "lastMapping" to "mapping":
// First check if there is a new line in between.
if (lastGeneratedLine < mapping.generatedLine) {
// Associate first line with "lastMapping"
addMappingWithCode(lastMapping, shiftNextLine());
lastGeneratedLine++;
lastGeneratedColumn = 0;
// The remaining code is added without mapping
} else {
// There is no new line in between.
// Associate the code between "lastGeneratedColumn" and
// "mapping.generatedColumn" with "lastMapping"
var nextLine = remainingLines[remainingLinesIndex];
var code = nextLine.substr(0, mapping.generatedColumn -
lastGeneratedColumn);
remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -
lastGeneratedColumn);
lastGeneratedColumn = mapping.generatedColumn;
addMappingWithCode(lastMapping, code);
// No more remaining code, continue
lastMapping = mapping;
return;
}
}
// We add the generated code until the first mapping
// to the SourceNode without any mapping.
    // Each line is added as a separate string.
while (lastGeneratedLine < mapping.generatedLine) {
node.add(shiftNextLine());
lastGeneratedLine++;
}
if (lastGeneratedColumn < mapping.generatedColumn) {
var nextLine = remainingLines[remainingLinesIndex];
node.add(nextLine.substr(0, mapping.generatedColumn));
remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);
lastGeneratedColumn = mapping.generatedColumn;
}
lastMapping = mapping;
}, this);
// We have processed all mappings.
if (remainingLinesIndex < remainingLines.length) {
if (lastMapping) {
// Associate the remaining code in the current line with "lastMapping"
addMappingWithCode(lastMapping, shiftNextLine());
}
// and add the remaining lines without any mapping
node.add(remainingLines.splice(remainingLinesIndex).join(""));
}
// Copy sourcesContent into SourceNode
aSourceMapConsumer.sources.forEach(function (sourceFile) {
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
if (content != null) {
if (aRelativePath != null) {
sourceFile = util.join(aRelativePath, sourceFile);
}
node.setSourceContent(sourceFile, content);
}
});
return node;
function addMappingWithCode(mapping, code) {
if (mapping === null || mapping.source === undefined) {
node.add(code);
} else {
var source = aRelativePath
? util.join(aRelativePath, mapping.source)
: mapping.source;
node.add(new SourceNode(mapping.originalLine,
mapping.originalColumn,
source,
code,
mapping.name));
}
}
};
/**
* Add a chunk of generated JS to this source node.
*
* @param aChunk A string snippet of generated JS code, another instance of
* SourceNode, or an array where each member is one of those things.
*/
SourceNode.prototype.add = function SourceNode_add(aChunk) {
if (Array.isArray(aChunk)) {
aChunk.forEach(function (chunk) {
this.add(chunk);
}, this);
}
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
if (aChunk) {
this.children.push(aChunk);
}
}
else {
throw new TypeError(
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
);
}
return this;
};
/**
* Add a chunk of generated JS to the beginning of this source node.
*
* @param aChunk A string snippet of generated JS code, another instance of
* SourceNode, or an array where each member is one of those things.
*/
SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {
if (Array.isArray(aChunk)) {
for (var i = aChunk.length-1; i >= 0; i--) {
this.prepend(aChunk[i]);
}
}
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
this.children.unshift(aChunk);
}
else {
throw new TypeError(
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
);
}
return this;
};
/**
* Walk over the tree of JS snippets in this node and its children. The
* walking function is called once for each snippet of JS and is passed that
 * snippet and its original associated source's line/column location.
*
* @param aFn The traversal function.
*/
SourceNode.prototype.walk = function SourceNode_walk(aFn) {
var chunk;
for (var i = 0, len = this.children.length; i < len; i++) {
chunk = this.children[i];
if (chunk[isSourceNode]) {
chunk.walk(aFn);
}
else {
if (chunk !== '') {
aFn(chunk, { source: this.source,
line: this.line,
column: this.column,
name: this.name });
}
}
}
};
/**
 * Like `Array.prototype.join` except for SourceNodes. Inserts `aSep` between
* each of `this.children`.
*
* @param aSep The separator.
*/
SourceNode.prototype.join = function SourceNode_join(aSep) {
var newChildren;
var i;
var len = this.children.length;
if (len > 0) {
newChildren = [];
for (i = 0; i < len-1; i++) {
newChildren.push(this.children[i]);
newChildren.push(aSep);
}
newChildren.push(this.children[i]);
this.children = newChildren;
}
return this;
};
/**
* Call String.prototype.replace on the very right-most source snippet. Useful
* for trimming whitespace from the end of a source node, etc.
*
* @param aPattern The pattern to replace.
* @param aReplacement The thing to replace the pattern with.
*/
SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {
var lastChild = this.children[this.children.length - 1];
if (lastChild[isSourceNode]) {
lastChild.replaceRight(aPattern, aReplacement);
}
else if (typeof lastChild === 'string') {
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
}
else {
this.children.push(''.replace(aPattern, aReplacement));
}
return this;
};
/**
* Set the source content for a source file. This will be added to the SourceMapGenerator
* in the sourcesContent field.
*
* @param aSourceFile The filename of the source file
* @param aSourceContent The content of the source file
*/
SourceNode.prototype.setSourceContent =
function SourceNode_setSourceContent(aSourceFile, aSourceContent) {
this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
};
/**
* Walk over the tree of SourceNodes. The walking function is called for each
* source file content and is passed the filename and source content.
*
* @param aFn The traversal function.
*/
SourceNode.prototype.walkSourceContents =
function SourceNode_walkSourceContents(aFn) {
for (var i = 0, len = this.children.length; i < len; i++) {
if (this.children[i][isSourceNode]) {
this.children[i].walkSourceContents(aFn);
}
}
var sources = Object.keys(this.sourceContents);
for (var i = 0, len = sources.length; i < len; i++) {
aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);
}
};
/**
* Return the string representation of this source node. Walks over the tree
 * and concatenates all the various snippets together into one string.
*/
SourceNode.prototype.toString = function SourceNode_toString() {
var str = "";
this.walk(function (chunk) {
str += chunk;
});
return str;
};
/**
* Returns the string representation of this source node along with a source
* map.
*/
SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {
var generated = {
code: "",
line: 1,
column: 0
};
var map = new SourceMapGenerator(aArgs);
var sourceMappingActive = false;
var lastOriginalSource = null;
var lastOriginalLine = null;
var lastOriginalColumn = null;
var lastOriginalName = null;
this.walk(function (chunk, original) {
generated.code += chunk;
if (original.source !== null
&& original.line !== null
&& original.column !== null) {
if(lastOriginalSource !== original.source
|| lastOriginalLine !== original.line
|| lastOriginalColumn !== original.column
|| lastOriginalName !== original.name) {
map.addMapping({
source: original.source,
original: {
line: original.line,
column: original.column
},
generated: {
line: generated.line,
column: generated.column
},
name: original.name
});
}
lastOriginalSource = original.source;
lastOriginalLine = original.line;
lastOriginalColumn = original.column;
lastOriginalName = original.name;
sourceMappingActive = true;
} else if (sourceMappingActive) {
map.addMapping({
generated: {
line: generated.line,
column: generated.column
}
});
lastOriginalSource = null;
sourceMappingActive = false;
}
for (var idx = 0, length = chunk.length; idx < length; idx++) {
if (chunk.charCodeAt(idx) === NEWLINE_CODE) {
generated.line++;
generated.column = 0;
// Mappings end at eol
if (idx + 1 === length) {
lastOriginalSource = null;
sourceMappingActive = false;
} else if (sourceMappingActive) {
map.addMapping({
source: original.source,
original: {
line: original.line,
column: original.column
},
generated: {
line: generated.line,
column: generated.column
},
name: original.name
});
}
} else {
generated.column++;
}
}
});
this.walkSourceContents(function (sourceFile, sourceContent) {
map.setSourceContent(sourceFile, sourceContent);
});
return { code: generated.code, map: map };
};
exports.SourceNode = SourceNode;
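// A minimal usage sketch of the SourceNode API defined above; the snippet,
// file name and positions are illustrative only. The plain root node holds
// unmapped chunks, while the nested node maps `answer` back to a.js:1:4.
var root = new SourceNode(null, null, null, [
  'var ',
  new SourceNode(1, 4, 'a.js', 'answer', 'answer'),
  ' = 42;\n'
]);
root.setSourceContent('a.js', 'let answer = 42');
var out = root.toStringWithSourceMap({ file: 'out.js' });
// out.code === 'var answer = 42;\n'
// out.map is a SourceMapGenerator; out.map.toString() yields the JSON map.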

View File

@ -1,417 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
/**
* This is a helper function for getting values from parameter/options
* objects.
*
 * @param aArgs The object we are extracting values from
 * @param aName The name of the property we are getting.
 * @param aDefaultValue An optional value to return if the property is missing
* from the object. If this is not specified and the property is missing, an
* error will be thrown.
*/
function getArg(aArgs, aName, aDefaultValue) {
if (aName in aArgs) {
return aArgs[aName];
} else if (arguments.length === 3) {
return aDefaultValue;
} else {
throw new Error('"' + aName + '" is a required argument.');
}
}
exports.getArg = getArg;
var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/;
var dataUrlRegexp = /^data:.+\,.+$/;
function urlParse(aUrl) {
var match = aUrl.match(urlRegexp);
if (!match) {
return null;
}
return {
scheme: match[1],
auth: match[2],
host: match[3],
port: match[4],
path: match[5]
};
}
exports.urlParse = urlParse;
function urlGenerate(aParsedUrl) {
var url = '';
if (aParsedUrl.scheme) {
url += aParsedUrl.scheme + ':';
}
url += '//';
if (aParsedUrl.auth) {
url += aParsedUrl.auth + '@';
}
if (aParsedUrl.host) {
url += aParsedUrl.host;
}
if (aParsedUrl.port) {
url += ":" + aParsedUrl.port
}
if (aParsedUrl.path) {
url += aParsedUrl.path;
}
return url;
}
exports.urlGenerate = urlGenerate;
/**
* Normalizes a path, or the path portion of a URL:
*
* - Replaces consecutive slashes with one slash.
* - Removes unnecessary '.' parts.
* - Removes unnecessary '<dir>/..' parts.
*
* Based on code in the Node.js 'path' core module.
*
* @param aPath The path or url to normalize.
*/
function normalize(aPath) {
var path = aPath;
var url = urlParse(aPath);
if (url) {
if (!url.path) {
return aPath;
}
path = url.path;
}
var isAbsolute = exports.isAbsolute(path);
var parts = path.split(/\/+/);
for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {
part = parts[i];
if (part === '.') {
parts.splice(i, 1);
} else if (part === '..') {
up++;
} else if (up > 0) {
if (part === '') {
// The first part is blank if the path is absolute. Trying to go
// above the root is a no-op. Therefore we can remove all '..' parts
// directly after the root.
parts.splice(i + 1, up);
up = 0;
} else {
parts.splice(i, 2);
up--;
}
}
}
path = parts.join('/');
if (path === '') {
path = isAbsolute ? '/' : '.';
}
if (url) {
url.path = path;
return urlGenerate(url);
}
return path;
}
exports.normalize = normalize;
/**
* Joins two paths/URLs.
*
* @param aRoot The root path or URL.
* @param aPath The path or URL to be joined with the root.
*
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
* first.
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
* is updated with the result and aRoot is returned. Otherwise the result
* is returned.
* - If aPath is absolute, the result is aPath.
* - Otherwise the two paths are joined with a slash.
* - Joining for example 'http://' and 'www.example.com' is also supported.
*/
function join(aRoot, aPath) {
if (aRoot === "") {
aRoot = ".";
}
if (aPath === "") {
aPath = ".";
}
var aPathUrl = urlParse(aPath);
var aRootUrl = urlParse(aRoot);
if (aRootUrl) {
aRoot = aRootUrl.path || '/';
}
// `join(foo, '//www.example.org')`
if (aPathUrl && !aPathUrl.scheme) {
if (aRootUrl) {
aPathUrl.scheme = aRootUrl.scheme;
}
return urlGenerate(aPathUrl);
}
if (aPathUrl || aPath.match(dataUrlRegexp)) {
return aPath;
}
// `join('http://', 'www.example.com')`
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
aRootUrl.host = aPath;
return urlGenerate(aRootUrl);
}
var joined = aPath.charAt(0) === '/'
? aPath
: normalize(aRoot.replace(/\/+$/, '') + '/' + aPath);
if (aRootUrl) {
aRootUrl.path = joined;
return urlGenerate(aRootUrl);
}
return joined;
}
exports.join = join;
exports.isAbsolute = function (aPath) {
return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp);
};
/**
* Make a path relative to a URL or another path.
*
* @param aRoot The root path or URL.
* @param aPath The path or URL to be made relative to aRoot.
*/
function relative(aRoot, aPath) {
if (aRoot === "") {
aRoot = ".";
}
aRoot = aRoot.replace(/\/$/, '');
// It is possible for the path to be above the root. In this case, simply
// checking whether the root is a prefix of the path won't work. Instead, we
// need to remove components from the root one by one, until either we find
// a prefix that fits, or we run out of components to remove.
var level = 0;
while (aPath.indexOf(aRoot + '/') !== 0) {
var index = aRoot.lastIndexOf("/");
if (index < 0) {
return aPath;
}
// If the only part of the root that is left is the scheme (i.e. http://,
// file:///, etc.), one or more slashes (/), or simply nothing at all, we
// have exhausted all components, so the path is not relative to the root.
aRoot = aRoot.slice(0, index);
if (aRoot.match(/^([^\/]+:\/)?\/*$/)) {
return aPath;
}
++level;
}
// Make sure we add a "../" for each component we removed from the root.
return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1);
}
exports.relative = relative;
var supportsNullProto = (function () {
var obj = Object.create(null);
return !('__proto__' in obj);
}());
function identity (s) {
return s;
}
/**
* Because behavior goes wacky when you set `__proto__` on objects, we
* have to prefix all the strings in our set with an arbitrary character.
*
* See https://github.com/mozilla/source-map/pull/31 and
* https://github.com/mozilla/source-map/issues/30
*
* @param String aStr
*/
function toSetString(aStr) {
if (isProtoString(aStr)) {
return '$' + aStr;
}
return aStr;
}
exports.toSetString = supportsNullProto ? identity : toSetString;
function fromSetString(aStr) {
if (isProtoString(aStr)) {
return aStr.slice(1);
}
return aStr;
}
exports.fromSetString = supportsNullProto ? identity : fromSetString;
function isProtoString(s) {
if (!s) {
return false;
}
var length = s.length;
if (length < 9 /* "__proto__".length */) {
return false;
}
if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||
s.charCodeAt(length - 2) !== 95 /* '_' */ ||
s.charCodeAt(length - 3) !== 111 /* 'o' */ ||
s.charCodeAt(length - 4) !== 116 /* 't' */ ||
s.charCodeAt(length - 5) !== 111 /* 'o' */ ||
s.charCodeAt(length - 6) !== 114 /* 'r' */ ||
s.charCodeAt(length - 7) !== 112 /* 'p' */ ||
s.charCodeAt(length - 8) !== 95 /* '_' */ ||
s.charCodeAt(length - 9) !== 95 /* '_' */) {
return false;
}
for (var i = length - 10; i >= 0; i--) {
if (s.charCodeAt(i) !== 36 /* '$' */) {
return false;
}
}
return true;
}
/**
* Comparator between two mappings where the original positions are compared.
*
 * Optionally pass in `true` as `onlyCompareOriginal` to consider two
* mappings with the same original source/line/column, but different generated
* line and column the same. Useful when searching for a mapping with a
* stubbed out mapping.
*/
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
var cmp = mappingA.source - mappingB.source;
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.originalLine - mappingB.originalLine;
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.originalColumn - mappingB.originalColumn;
if (cmp !== 0 || onlyCompareOriginal) {
return cmp;
}
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.generatedLine - mappingB.generatedLine;
if (cmp !== 0) {
return cmp;
}
return mappingA.name - mappingB.name;
}
exports.compareByOriginalPositions = compareByOriginalPositions;
/**
* Comparator between two mappings with deflated source and name indices where
* the generated positions are compared.
*
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
* mappings with the same generated line and column, but different
* source/name/original line and column the same. Useful when searching for a
* mapping with a stubbed out mapping.
*/
function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {
var cmp = mappingA.generatedLine - mappingB.generatedLine;
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
if (cmp !== 0 || onlyCompareGenerated) {
return cmp;
}
cmp = mappingA.source - mappingB.source;
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.originalLine - mappingB.originalLine;
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.originalColumn - mappingB.originalColumn;
if (cmp !== 0) {
return cmp;
}
return mappingA.name - mappingB.name;
}
exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;
function strcmp(aStr1, aStr2) {
if (aStr1 === aStr2) {
return 0;
}
if (aStr1 > aStr2) {
return 1;
}
return -1;
}
/**
* Comparator between two mappings with inflated source and name strings where
* the generated positions are compared.
*/
function compareByGeneratedPositionsInflated(mappingA, mappingB) {
var cmp = mappingA.generatedLine - mappingB.generatedLine;
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
if (cmp !== 0) {
return cmp;
}
cmp = strcmp(mappingA.source, mappingB.source);
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.originalLine - mappingB.originalLine;
if (cmp !== 0) {
return cmp;
}
cmp = mappingA.originalColumn - mappingB.originalColumn;
if (cmp !== 0) {
return cmp;
}
return strcmp(mappingA.name, mappingB.name);
}
exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;
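// A few illustrative calls against the path/URL helpers above; the expected
// values in the trailing comments follow from the implementations and their
// doc comments, not from the library's test suite.
join('http://example.com/a', 'b.js');      // 'http://example.com/a/b.js'
join('http://', 'www.example.com');        // 'http://www.example.com'
normalize('/a/./b/../c//d');               // '/a/c/d'
relative('/the/root', '/the/root/one.js'); // 'one.js'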

View File

@ -1,211 +0,0 @@
{
"_from": "source-map@^0.5.0",
"_id": "source-map@0.5.7",
"_inBundle": false,
"_integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=",
"_location": "/@babel/core/source-map",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "source-map@^0.5.0",
"name": "source-map",
"escapedName": "source-map",
"rawSpec": "^0.5.0",
"saveSpec": null,
"fetchSpec": "^0.5.0"
},
"_requiredBy": [
"/@babel/core"
],
"_resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
"_shasum": "8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc",
"_spec": "source-map@^0.5.0",
"_where": "/home/runner/work/ghaction-upx/ghaction-upx/node_modules/@babel/core",
"author": {
"name": "Nick Fitzgerald",
"email": "nfitzgerald@mozilla.com"
},
"bugs": {
"url": "https://github.com/mozilla/source-map/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Tobias Koppers",
"email": "tobias.koppers@googlemail.com"
},
{
"name": "Duncan Beevers",
"email": "duncan@dweebd.com"
},
{
"name": "Stephen Crane",
"email": "scrane@mozilla.com"
},
{
"name": "Ryan Seddon",
"email": "seddon.ryan@gmail.com"
},
{
"name": "Miles Elam",
"email": "miles.elam@deem.com"
},
{
"name": "Mihai Bazon",
"email": "mihai.bazon@gmail.com"
},
{
"name": "Michael Ficarra",
"email": "github.public.email@michael.ficarra.me"
},
{
"name": "Todd Wolfson",
"email": "todd@twolfson.com"
},
{
"name": "Alexander Solovyov",
"email": "alexander@solovyov.net"
},
{
"name": "Felix Gnass",
"email": "fgnass@gmail.com"
},
{
"name": "Conrad Irwin",
"email": "conrad.irwin@gmail.com"
},
{
"name": "usrbincc",
"email": "usrbincc@yahoo.com"
},
{
"name": "David Glasser",
"email": "glasser@davidglasser.net"
},
{
"name": "Chase Douglas",
"email": "chase@newrelic.com"
},
{
"name": "Evan Wallace",
"email": "evan.exe@gmail.com"
},
{
"name": "Heather Arthur",
"email": "fayearthur@gmail.com"
},
{
"name": "Hugh Kennedy",
"email": "hughskennedy@gmail.com"
},
{
"name": "David Glasser",
"email": "glasser@davidglasser.net"
},
{
"name": "Simon Lydell",
"email": "simon.lydell@gmail.com"
},
{
"name": "Jmeas Smith",
"email": "jellyes2@gmail.com"
},
{
"name": "Michael Z Goddard",
"email": "mzgoddard@gmail.com"
},
{
"name": "azu",
"email": "azu@users.noreply.github.com"
},
{
"name": "John Gozde",
"email": "john@gozde.ca"
},
{
"name": "Adam Kirkton",
"email": "akirkton@truefitinnovation.com"
},
{
"name": "Chris Montgomery",
"email": "christopher.montgomery@dowjones.com"
},
{
"name": "J. Ryan Stinnett",
"email": "jryans@gmail.com"
},
{
"name": "Jack Herrington",
"email": "jherrington@walmartlabs.com"
},
{
"name": "Chris Truter",
"email": "jeffpalentine@gmail.com"
},
{
"name": "Daniel Espeset",
"email": "daniel@danielespeset.com"
},
{
"name": "Jamie Wong",
"email": "jamie.lf.wong@gmail.com"
},
{
"name": "Eddy Bruël",
"email": "ejpbruel@mozilla.com"
},
{
"name": "Hawken Rives",
"email": "hawkrives@gmail.com"
},
{
"name": "Gilad Peleg",
"email": "giladp007@gmail.com"
},
{
"name": "djchie",
"email": "djchie.dev@gmail.com"
},
{
"name": "Gary Ye",
"email": "garysye@gmail.com"
},
{
"name": "Nicolas Lalevée",
"email": "nicolas.lalevee@hibnet.org"
}
],
"deprecated": false,
"description": "Generates and consumes source maps",
"devDependencies": {
"doctoc": "^0.15.0",
"webpack": "^1.12.0"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"source-map.js",
"lib/",
"dist/source-map.debug.js",
"dist/source-map.js",
"dist/source-map.min.js",
"dist/source-map.min.js.map"
],
"homepage": "https://github.com/mozilla/source-map",
"license": "BSD-3-Clause",
"main": "./source-map.js",
"name": "source-map",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/mozilla/source-map.git"
},
"scripts": {
"build": "webpack --color",
"test": "npm run build && node test/run-tests.js",
"toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md"
},
"typings": "source-map",
"version": "0.5.7"
}

View File

@ -1,8 +0,0 @@
/*
* Copyright 2009-2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE.txt or:
* http://opensource.org/licenses/BSD-3-Clause
*/
exports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator;
exports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer;
exports.SourceNode = require('./lib/source-node').SourceNode;

View File

@ -1,96 +0,0 @@
{
"_from": "@babel/core@^7.1.0",
"_id": "@babel/core@7.8.3",
"_inBundle": false,
"_integrity": "sha512-4XFkf8AwyrEG7Ziu3L2L0Cv+WyY47Tcsp70JFmpftbAA1K7YL/sgE9jh9HyNj08Y/U50ItUchpN0w6HxAoX1rA==",
"_location": "/@babel/core",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@babel/core@^7.1.0",
"name": "@babel/core",
"escapedName": "@babel%2fcore",
"scope": "@babel",
"rawSpec": "^7.1.0",
"saveSpec": null,
"fetchSpec": "^7.1.0"
},
"_requiredBy": [
"/@jest/transform",
"/istanbul-lib-instrument",
"/jest-config"
],
"_resolved": "https://registry.npmjs.org/@babel/core/-/core-7.8.3.tgz",
"_shasum": "30b0ebb4dd1585de6923a0b4d179e0b9f5d82941",
"_spec": "@babel/core@^7.1.0",
"_where": "/home/runner/work/ghaction-upx/ghaction-upx/node_modules/@jest/transform",
"author": {
"name": "Sebastian McKenzie",
"email": "sebmck@gmail.com"
},
"browser": {
"./lib/config/files/index.js": "./lib/config/files/index-browser.js",
"./lib/transform-file.js": "./lib/transform-file-browser.js",
"./src/config/files/index.js": "./src/config/files/index-browser.js",
"./src/transform-file.js": "./src/transform-file-browser.js"
},
"bundleDependencies": false,
"dependencies": {
"@babel/code-frame": "^7.8.3",
"@babel/generator": "^7.8.3",
"@babel/helpers": "^7.8.3",
"@babel/parser": "^7.8.3",
"@babel/template": "^7.8.3",
"@babel/traverse": "^7.8.3",
"@babel/types": "^7.8.3",
"convert-source-map": "^1.7.0",
"debug": "^4.1.0",
"gensync": "^1.0.0-beta.1",
"json5": "^2.1.0",
"lodash": "^4.17.13",
"resolve": "^1.3.2",
"semver": "^5.4.1",
"source-map": "^0.5.0"
},
"deprecated": false,
"description": "Babel compiler core.",
"devDependencies": {
"@babel/helper-transform-fixture-test-runner": "^7.8.3"
},
"engines": {
"node": ">=6.9.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/babel"
},
"gitHead": "a7620bd266ae1345975767bbc7abf09034437017",
"homepage": "https://babeljs.io/",
"keywords": [
"6to5",
"babel",
"classes",
"const",
"es6",
"harmony",
"let",
"modules",
"transpile",
"transpiler",
"var",
"babel-core",
"compiler"
],
"license": "MIT",
"main": "lib/index.js",
"name": "@babel/core",
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "https://github.com/babel/babel/tree/master/packages/babel-core"
},
"version": "7.8.3"
}

View File

@ -1,22 +0,0 @@
MIT License
Copyright (c) 2014-present Sebastian McKenzie and other contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,19 +0,0 @@
# @babel/generator
> Turns an AST into code.
See our website [@babel/generator](https://babeljs.io/docs/en/next/babel-generator.html) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20generator%22+is%3Aopen) associated with this package.
## Install
Using npm:
```sh
npm install --save-dev @babel/generator
```
or using yarn:
```sh
yarn add @babel/generator --dev
```
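## Usage
A minimal usage sketch of the package's default `generate` export, assuming `@babel/parser` is installed alongside to produce the AST:
```js
const { parse } = require("@babel/parser");
const generate = require("@babel/generator").default;
const ast = parse("const answer = 42;");
// Pass the original source as the third argument so a source map can be built.
const { code, map } = generate(ast, { sourceMaps: true }, "const answer = 42;");
console.log(code); // => const answer = 42;
```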

Some files were not shown because too many files have changed in this diff