mirror of
https://github.com/smogon/pokemon-showdown-client.git
synced 2026-03-21 17:50:29 -05:00
Remove babel-cli dependency
At this point, we might as well use babel-core directly. New system supports source maps for `battledata` and `graphics`! Woo! It also logs the compile step.
This commit is contained in:
parent
8aa6db2d81
commit
2cd13d9de2
2
.github/workflows/test.yml
vendored
2
.github/workflows/test.yml
vendored
|
|
@ -16,7 +16,7 @@ jobs:
|
|||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [12.x]
|
||||
node-version: [14.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
|
|
|||
|
|
@ -1,19 +0,0 @@
|
|||
# @babel/cli
|
||||
|
||||
> Babel command line.
|
||||
|
||||
See our website [@babel/cli](https://babeljs.io/docs/en/babel-cli) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20cli%22+is%3Aopen) associated with this package.
|
||||
|
||||
## Install
|
||||
|
||||
Using npm:
|
||||
|
||||
```sh
|
||||
npm install --save-dev @babel/cli
|
||||
```
|
||||
|
||||
or using yarn:
|
||||
|
||||
```sh
|
||||
yarn add @babel/cli --dev
|
||||
```
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require("../lib/babel");
|
||||
|
|
@ -1,208 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = _default;
|
||||
|
||||
var util = _interopRequireWildcard(require("./util"));
|
||||
|
||||
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
|
||||
|
||||
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
|
||||
|
||||
const debounce = require("lodash/debounce");
|
||||
|
||||
const slash = require("slash");
|
||||
|
||||
const path = require("path");
|
||||
|
||||
const fs = require("fs");
|
||||
|
||||
const FILE_TYPE = Object.freeze({
|
||||
NON_COMPILABLE: "NON_COMPILABLE",
|
||||
COMPILED: "COMPILED",
|
||||
IGNORED: "IGNORED",
|
||||
ERR_COMPILATION: "ERR_COMPILATION",
|
||||
NO_REBUILD_NEEDED: "NO_REBUILD_NEEDED"
|
||||
});
|
||||
|
||||
function outputFileSync(filePath, data) {
|
||||
fs.mkdirSync(path.dirname(filePath), {
|
||||
recursive: true
|
||||
});
|
||||
fs.writeFileSync(filePath, data);
|
||||
}
|
||||
|
||||
async function _default({
|
||||
cliOptions,
|
||||
babelOptions
|
||||
}) {
|
||||
const filenames = cliOptions.filenames;
|
||||
|
||||
async function write(src, base) {
|
||||
let relative = path.relative(base, src);
|
||||
|
||||
if (!util.isCompilableExtension(relative, cliOptions.extensions)) {
|
||||
return FILE_TYPE.NON_COMPILABLE;
|
||||
}
|
||||
|
||||
relative = util.withExtension(relative, cliOptions.keepFileExtension ? path.extname(relative) : cliOptions.outFileExtension);
|
||||
const dest = getDest(relative, base);
|
||||
if (noRebuildNeeded(src, dest)) return FILE_TYPE.NO_REBUILD_NEEDED;
|
||||
|
||||
try {
|
||||
const res = await util.compile(src, Object.assign({}, babelOptions, {
|
||||
sourceFileName: slash(path.relative(dest + "/..", src))
|
||||
}));
|
||||
if (!res) return FILE_TYPE.IGNORED;
|
||||
|
||||
if (res.map && babelOptions.sourceMaps && babelOptions.sourceMaps !== "inline") {
|
||||
const mapLoc = dest + ".map";
|
||||
res.code = util.addSourceMappingUrl(res.code, mapLoc);
|
||||
res.map.file = path.basename(relative);
|
||||
outputFileSync(mapLoc, JSON.stringify(res.map));
|
||||
}
|
||||
|
||||
outputFileSync(dest, res.code);
|
||||
util.chmod(src, dest);
|
||||
|
||||
if (cliOptions.verbose) {
|
||||
console.log(src + " -> " + dest);
|
||||
}
|
||||
|
||||
return FILE_TYPE.COMPILED;
|
||||
} catch (err) {
|
||||
if (cliOptions.watch) {
|
||||
console.error(err);
|
||||
return FILE_TYPE.ERR_COMPILATION;
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
function getDest(filename, base) {
|
||||
if (cliOptions.relative) {
|
||||
return path.join(base, cliOptions.outDir, filename);
|
||||
}
|
||||
|
||||
return path.join(cliOptions.outDir, filename);
|
||||
}
|
||||
|
||||
function noRebuildNeeded(src, dest) {
|
||||
if (!cliOptions.incremental) return false;
|
||||
|
||||
try {
|
||||
const srcStat = fs.statSync(src);
|
||||
const destStat = fs.statSync(dest);
|
||||
if (srcStat.ctimeMs < destStat.ctimeMs) return true;
|
||||
} catch (e) {}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
async function handleFile(src, base) {
|
||||
const written = await write(src, base);
|
||||
|
||||
if (cliOptions.copyFiles && written === FILE_TYPE.NON_COMPILABLE || cliOptions.copyIgnored && written === FILE_TYPE.IGNORED) {
|
||||
const filename = path.relative(base, src);
|
||||
const dest = getDest(filename, base);
|
||||
if (noRebuildNeeded(src, dest)) return false;
|
||||
outputFileSync(dest, fs.readFileSync(src));
|
||||
util.chmod(src, dest);
|
||||
return false;
|
||||
}
|
||||
|
||||
return written === FILE_TYPE.COMPILED;
|
||||
}
|
||||
|
||||
async function handle(filenameOrDir) {
|
||||
if (!fs.existsSync(filenameOrDir)) return 0;
|
||||
const stat = fs.statSync(filenameOrDir);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
const dirname = filenameOrDir;
|
||||
let count = 0;
|
||||
const files = util.readdir(dirname, cliOptions.includeDotfiles);
|
||||
|
||||
for (const filename of files) {
|
||||
const src = path.join(dirname, filename);
|
||||
const written = await handleFile(src, dirname);
|
||||
if (written) count += 1;
|
||||
}
|
||||
|
||||
return count;
|
||||
} else {
|
||||
const filename = filenameOrDir;
|
||||
const written = await handleFile(filename, path.dirname(filename));
|
||||
return written ? 1 : 0;
|
||||
}
|
||||
}
|
||||
|
||||
let compiledFiles = 0;
|
||||
let startTime = null;
|
||||
const logSuccess = debounce(function () {
|
||||
if (startTime === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
const diff = process.hrtime(startTime);
|
||||
console.log(`Successfully compiled ${compiledFiles} ${compiledFiles !== 1 ? "files" : "file"} with Babel (${diff[0] * 1e3 + Math.round(diff[1] / 1e6)}ms).`);
|
||||
compiledFiles = 0;
|
||||
startTime = null;
|
||||
}, 100, {
|
||||
trailing: true
|
||||
});
|
||||
|
||||
if (!cliOptions.skipInitialBuild) {
|
||||
if (cliOptions.deleteDirOnStart) {
|
||||
util.deleteDir(cliOptions.outDir);
|
||||
}
|
||||
|
||||
fs.mkdirSync(cliOptions.outDir, {
|
||||
recursive: true
|
||||
});
|
||||
startTime = process.hrtime();
|
||||
|
||||
for (const filename of cliOptions.filenames) {
|
||||
compiledFiles += await handle(filename);
|
||||
}
|
||||
|
||||
if (!cliOptions.quiet) {
|
||||
logSuccess();
|
||||
logSuccess.flush();
|
||||
}
|
||||
}
|
||||
|
||||
if (cliOptions.watch) {
|
||||
const chokidar = util.requireChokidar();
|
||||
filenames.forEach(function (filenameOrDir) {
|
||||
const watcher = chokidar.watch(filenameOrDir, {
|
||||
persistent: true,
|
||||
ignoreInitial: true,
|
||||
awaitWriteFinish: {
|
||||
stabilityThreshold: 50,
|
||||
pollInterval: 10
|
||||
}
|
||||
});
|
||||
let processing = 0;
|
||||
["add", "change"].forEach(function (type) {
|
||||
watcher.on(type, async function (filename) {
|
||||
processing++;
|
||||
if (startTime === null) startTime = process.hrtime();
|
||||
|
||||
try {
|
||||
await handleFile(filename, filename === filenameOrDir ? path.dirname(filenameOrDir) : filenameOrDir);
|
||||
compiledFiles++;
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
}
|
||||
|
||||
processing--;
|
||||
if (processing === 0 && !cliOptions.quiet) logSuccess();
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -1,200 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = _default;
|
||||
|
||||
var util = _interopRequireWildcard(require("./util"));
|
||||
|
||||
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
|
||||
|
||||
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
|
||||
|
||||
const convertSourceMap = require("convert-source-map");
|
||||
|
||||
const sourceMap = require("source-map");
|
||||
|
||||
const slash = require("slash");
|
||||
|
||||
const path = require("path");
|
||||
|
||||
const fs = require("fs");
|
||||
|
||||
async function _default({
|
||||
cliOptions,
|
||||
babelOptions
|
||||
}) {
|
||||
function buildResult(fileResults) {
|
||||
const map = new sourceMap.SourceMapGenerator({
|
||||
file: cliOptions.sourceMapTarget || path.basename(cliOptions.outFile || "") || "stdout",
|
||||
sourceRoot: babelOptions.sourceRoot
|
||||
});
|
||||
let code = "";
|
||||
let offset = 0;
|
||||
|
||||
for (const result of fileResults) {
|
||||
if (!result) continue;
|
||||
code += result.code + "\n";
|
||||
|
||||
if (result.map) {
|
||||
const consumer = new sourceMap.SourceMapConsumer(result.map);
|
||||
const sources = new Set();
|
||||
consumer.eachMapping(function (mapping) {
|
||||
if (mapping.source != null) sources.add(mapping.source);
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: mapping.generatedLine + offset,
|
||||
column: mapping.generatedColumn
|
||||
},
|
||||
source: mapping.source,
|
||||
original: mapping.source == null ? null : {
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
}
|
||||
});
|
||||
});
|
||||
sources.forEach(source => {
|
||||
const content = consumer.sourceContentFor(source, true);
|
||||
|
||||
if (content !== null) {
|
||||
map.setSourceContent(source, content);
|
||||
}
|
||||
});
|
||||
offset = code.split("\n").length - 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (babelOptions.sourceMaps === "inline" || !cliOptions.outFile && babelOptions.sourceMaps) {
|
||||
code += "\n" + convertSourceMap.fromObject(map).toComment();
|
||||
}
|
||||
|
||||
return {
|
||||
map: map,
|
||||
code: code
|
||||
};
|
||||
}
|
||||
|
||||
function output(fileResults) {
|
||||
const result = buildResult(fileResults);
|
||||
|
||||
if (cliOptions.outFile) {
|
||||
fs.mkdirSync(path.dirname(cliOptions.outFile), {
|
||||
recursive: true
|
||||
});
|
||||
|
||||
if (babelOptions.sourceMaps && babelOptions.sourceMaps !== "inline") {
|
||||
const mapLoc = cliOptions.outFile + ".map";
|
||||
result.code = util.addSourceMappingUrl(result.code, mapLoc);
|
||||
fs.writeFileSync(mapLoc, JSON.stringify(result.map));
|
||||
}
|
||||
|
||||
fs.writeFileSync(cliOptions.outFile, result.code);
|
||||
} else {
|
||||
process.stdout.write(result.code + "\n");
|
||||
}
|
||||
}
|
||||
|
||||
function readStdin() {
|
||||
return new Promise((resolve, reject) => {
|
||||
let code = "";
|
||||
process.stdin.setEncoding("utf8");
|
||||
process.stdin.on("readable", function () {
|
||||
const chunk = process.stdin.read();
|
||||
if (chunk !== null) code += chunk;
|
||||
});
|
||||
process.stdin.on("end", function () {
|
||||
resolve(code);
|
||||
});
|
||||
process.stdin.on("error", reject);
|
||||
});
|
||||
}
|
||||
|
||||
async function stdin() {
|
||||
const code = await readStdin();
|
||||
const res = await util.transform(cliOptions.filename, code, Object.assign({}, babelOptions, {
|
||||
sourceFileName: "stdin"
|
||||
}));
|
||||
output([res]);
|
||||
}
|
||||
|
||||
async function walk(filenames) {
|
||||
const _filenames = [];
|
||||
filenames.forEach(function (filename) {
|
||||
if (!fs.existsSync(filename)) return;
|
||||
const stat = fs.statSync(filename);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
const dirname = filename;
|
||||
util.readdirForCompilable(filename, cliOptions.includeDotfiles, cliOptions.extensions).forEach(function (filename) {
|
||||
_filenames.push(path.join(dirname, filename));
|
||||
});
|
||||
} else {
|
||||
_filenames.push(filename);
|
||||
}
|
||||
});
|
||||
const results = await Promise.all(_filenames.map(async function (filename) {
|
||||
let sourceFilename = filename;
|
||||
|
||||
if (cliOptions.outFile) {
|
||||
sourceFilename = path.relative(path.dirname(cliOptions.outFile), sourceFilename);
|
||||
}
|
||||
|
||||
sourceFilename = slash(sourceFilename);
|
||||
|
||||
try {
|
||||
return await util.compile(filename, Object.assign({}, babelOptions, {
|
||||
sourceFileName: sourceFilename,
|
||||
sourceMaps: babelOptions.sourceMaps === "inline" ? true : babelOptions.sourceMaps
|
||||
}));
|
||||
} catch (err) {
|
||||
if (!cliOptions.watch) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.error(err);
|
||||
return null;
|
||||
}
|
||||
}));
|
||||
output(results);
|
||||
}
|
||||
|
||||
async function files(filenames) {
|
||||
if (!cliOptions.skipInitialBuild) {
|
||||
await walk(filenames);
|
||||
}
|
||||
|
||||
if (cliOptions.watch) {
|
||||
const chokidar = util.requireChokidar();
|
||||
chokidar.watch(filenames, {
|
||||
disableGlobbing: true,
|
||||
persistent: true,
|
||||
ignoreInitial: true,
|
||||
awaitWriteFinish: {
|
||||
stabilityThreshold: 50,
|
||||
pollInterval: 10
|
||||
}
|
||||
}).on("all", function (type, filename) {
|
||||
if (!util.isCompilableExtension(filename, cliOptions.extensions) && !filenames.includes(filename)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (type === "add" || type === "change") {
|
||||
if (cliOptions.verbose) {
|
||||
console.log(type + " " + filename);
|
||||
}
|
||||
|
||||
walk(filenames).catch(err => {
|
||||
console.error(err);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (cliOptions.filenames.length) {
|
||||
await files(cliOptions.filenames);
|
||||
} else {
|
||||
await stdin();
|
||||
}
|
||||
}
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
|
||||
var _options = _interopRequireDefault(require("./options"));
|
||||
|
||||
var _dir = _interopRequireDefault(require("./dir"));
|
||||
|
||||
var _file = _interopRequireDefault(require("./file"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
const opts = (0, _options.default)(process.argv);
|
||||
|
||||
if (opts) {
|
||||
const fn = opts.cliOptions.outDir ? _dir.default : _file.default;
|
||||
fn(opts).catch(err => {
|
||||
console.error(err);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
} else {
|
||||
process.exitCode = 2;
|
||||
}
|
||||
|
|
@ -1,218 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = parseArgv;
|
||||
|
||||
function _core() {
|
||||
const data = require("@babel/core");
|
||||
|
||||
_core = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
const fs = require("fs");
|
||||
|
||||
const commander = require("commander");
|
||||
|
||||
const glob = require("glob");
|
||||
|
||||
commander.option("-f, --filename [filename]", "The filename to use when reading from stdin. This will be used in source-maps, errors etc.");
|
||||
commander.option("--presets [list]", "A comma-separated list of preset names.", collect);
|
||||
commander.option("--plugins [list]", "A comma-separated list of plugin names.", collect);
|
||||
commander.option("--config-file [path]", "Path to a .babelrc file to use.");
|
||||
commander.option("--env-name [name]", "The name of the 'env' to use when loading configs and plugins. " + "Defaults to the value of BABEL_ENV, or else NODE_ENV, or else 'development'.");
|
||||
commander.option("--root-mode [mode]", "The project-root resolution mode. " + "One of 'root' (the default), 'upward', or 'upward-optional'.");
|
||||
commander.option("--source-type [script|module]", "");
|
||||
commander.option("--no-babelrc", "Whether or not to look up .babelrc and .babelignore files.");
|
||||
commander.option("--ignore [list]", "List of glob paths to **not** compile.", collect);
|
||||
commander.option("--only [list]", "List of glob paths to **only** compile.", collect);
|
||||
commander.option("--no-highlight-code", "Enable or disable ANSI syntax highlighting of code frames. (on by default)");
|
||||
commander.option("--no-comments", "Write comments to generated output. (true by default)");
|
||||
commander.option("--retain-lines", "Retain line numbers. This will result in really ugly code.");
|
||||
commander.option("--compact [true|false|auto]", "Do not include superfluous whitespace characters and line terminators.", booleanify);
|
||||
commander.option("--minified", "Save as many bytes when printing. (false by default)");
|
||||
commander.option("--auxiliary-comment-before [string]", "Print a comment before any injected non-user code.");
|
||||
commander.option("--auxiliary-comment-after [string]", "Print a comment after any injected non-user code.");
|
||||
commander.option("-s, --source-maps [true|false|inline|both]", "", booleanify);
|
||||
commander.option("--source-map-target [string]", "Set `file` on returned source map.");
|
||||
commander.option("--source-file-name [string]", "Set `sources[0]` on returned source map.");
|
||||
commander.option("--source-root [filename]", "The root from which all sources are relative.");
|
||||
|
||||
if (!process.env.BABEL_8_BREAKING) {
|
||||
commander.option("--module-root [filename]", "Optional prefix for the AMD module formatter that will be prepended to the filename on module definitions.");
|
||||
commander.option("-M, --module-ids", "Insert an explicit id for modules.");
|
||||
commander.option("--module-id [string]", "Specify a custom name for module ids.");
|
||||
}
|
||||
|
||||
commander.option("-x, --extensions [extensions]", "List of extensions to compile when a directory has been the input. [.es6,.js,.es,.jsx,.mjs]", collect);
|
||||
commander.option("--keep-file-extension", "Preserve the file extensions of the input files.");
|
||||
commander.option("-w, --watch", "Recompile files on changes.");
|
||||
commander.option("--skip-initial-build", "Do not compile files before watching.");
|
||||
commander.option("--incremental", "Only compile files with modification time before corresponding output file");
|
||||
commander.option("-o, --out-file [out]", "Compile all input files into a single file.");
|
||||
commander.option("-d, --out-dir [out]", "Compile an input directory of modules into an output directory.");
|
||||
commander.option("--relative", "Compile into an output directory relative to input directory or file. Requires --out-dir [out]");
|
||||
commander.option("-D, --copy-files", "When compiling a directory copy over non-compilable files.");
|
||||
commander.option("--include-dotfiles", "Include dotfiles when compiling and copying non-compilable files.");
|
||||
commander.option("--no-copy-ignored", "Exclude ignored files when copying non-compilable files.");
|
||||
commander.option("--verbose", "Log everything. This option conflicts with --quiet");
|
||||
commander.option("--quiet", "Don't log anything. This option conflicts with --verbose");
|
||||
commander.option("--delete-dir-on-start", "Delete the out directory before compilation.");
|
||||
commander.option("--out-file-extension [string]", "Use a specific extension for the output files");
|
||||
commander.version("7.13.14" + " (@babel/core " + _core().version + ")");
|
||||
commander.usage("[options] <files ...>");
|
||||
commander.action(() => {});
|
||||
|
||||
function parseArgv(args) {
|
||||
commander.parse(args);
|
||||
const errors = [];
|
||||
let filenames = commander.args.reduce(function (globbed, input) {
|
||||
let files = glob.sync(input);
|
||||
if (!files.length) files = [input];
|
||||
return globbed.concat(files);
|
||||
}, []);
|
||||
filenames = Array.from(new Set(filenames));
|
||||
filenames.forEach(function (filename) {
|
||||
if (!fs.existsSync(filename)) {
|
||||
errors.push(filename + " does not exist");
|
||||
}
|
||||
});
|
||||
|
||||
if (commander.outDir && !filenames.length) {
|
||||
errors.push("--out-dir requires filenames");
|
||||
}
|
||||
|
||||
if (commander.outFile && commander.outDir) {
|
||||
errors.push("--out-file and --out-dir cannot be used together");
|
||||
}
|
||||
|
||||
if (commander.relative && !commander.outDir) {
|
||||
errors.push("--relative requires --out-dir usage");
|
||||
}
|
||||
|
||||
if (commander.watch) {
|
||||
if (!commander.outFile && !commander.outDir) {
|
||||
errors.push("--watch requires --out-file or --out-dir");
|
||||
}
|
||||
|
||||
if (!filenames.length) {
|
||||
errors.push("--watch requires filenames");
|
||||
}
|
||||
}
|
||||
|
||||
if (commander.skipInitialBuild && !commander.watch) {
|
||||
errors.push("--skip-initial-build requires --watch");
|
||||
}
|
||||
|
||||
if (commander.incremental && !commander.outDir) {
|
||||
errors.push("--incremental requires --out-dir");
|
||||
}
|
||||
|
||||
if (commander.deleteDirOnStart && !commander.outDir) {
|
||||
errors.push("--delete-dir-on-start requires --out-dir");
|
||||
}
|
||||
|
||||
if (commander.verbose && commander.quiet) {
|
||||
errors.push("--verbose and --quiet cannot be used together");
|
||||
}
|
||||
|
||||
if (!commander.outDir && filenames.length === 0 && typeof commander.filename !== "string" && commander.babelrc !== false) {
|
||||
errors.push("stdin compilation requires either -f/--filename [filename] or --no-babelrc");
|
||||
}
|
||||
|
||||
if (commander.keepFileExtension && commander.outFileExtension) {
|
||||
errors.push("--out-file-extension cannot be used with --keep-file-extension");
|
||||
}
|
||||
|
||||
if (errors.length) {
|
||||
console.error("babel:");
|
||||
errors.forEach(function (e) {
|
||||
console.error(" " + e);
|
||||
});
|
||||
return null;
|
||||
}
|
||||
|
||||
const opts = commander.opts();
|
||||
const babelOptions = {
|
||||
presets: opts.presets,
|
||||
plugins: opts.plugins,
|
||||
rootMode: opts.rootMode,
|
||||
configFile: opts.configFile,
|
||||
envName: opts.envName,
|
||||
sourceType: opts.sourceType,
|
||||
ignore: opts.ignore,
|
||||
only: opts.only,
|
||||
retainLines: opts.retainLines,
|
||||
compact: opts.compact,
|
||||
minified: opts.minified,
|
||||
auxiliaryCommentBefore: opts.auxiliaryCommentBefore,
|
||||
auxiliaryCommentAfter: opts.auxiliaryCommentAfter,
|
||||
sourceMaps: opts.sourceMaps,
|
||||
sourceFileName: opts.sourceFileName,
|
||||
sourceRoot: opts.sourceRoot,
|
||||
babelrc: opts.babelrc === true ? undefined : opts.babelrc,
|
||||
highlightCode: opts.highlightCode === true ? undefined : opts.highlightCode,
|
||||
comments: opts.comments === true ? undefined : opts.comments
|
||||
};
|
||||
|
||||
if (!process.env.BABEL_8_BREAKING) {
|
||||
Object.assign(babelOptions, {
|
||||
moduleRoot: opts.moduleRoot,
|
||||
moduleIds: opts.moduleIds,
|
||||
moduleId: opts.moduleId
|
||||
});
|
||||
}
|
||||
|
||||
for (const key of Object.keys(babelOptions)) {
|
||||
if (babelOptions[key] === undefined) {
|
||||
delete babelOptions[key];
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
babelOptions,
|
||||
cliOptions: {
|
||||
filename: opts.filename,
|
||||
filenames,
|
||||
extensions: opts.extensions,
|
||||
keepFileExtension: opts.keepFileExtension,
|
||||
outFileExtension: opts.outFileExtension,
|
||||
watch: opts.watch,
|
||||
skipInitialBuild: opts.skipInitialBuild,
|
||||
incremental: opts.incremental,
|
||||
outFile: opts.outFile,
|
||||
outDir: opts.outDir,
|
||||
relative: opts.relative,
|
||||
copyFiles: opts.copyFiles,
|
||||
copyIgnored: opts.copyFiles && opts.copyIgnored,
|
||||
includeDotfiles: opts.includeDotfiles,
|
||||
verbose: opts.verbose,
|
||||
quiet: opts.quiet,
|
||||
deleteDirOnStart: opts.deleteDirOnStart,
|
||||
sourceMapTarget: opts.sourceMapTarget
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function booleanify(val) {
|
||||
if (val === "true" || val == 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (val === "false" || val == 0 || !val) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
function collect(value, previousValue) {
|
||||
if (typeof value !== "string") return previousValue;
|
||||
const values = value.split(",");
|
||||
return previousValue ? previousValue.concat(values) : values;
|
||||
}
|
||||
|
|
@ -1,138 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.chmod = chmod;
|
||||
exports.readdir = readdir;
|
||||
exports.readdirForCompilable = readdirForCompilable;
|
||||
exports.isCompilableExtension = isCompilableExtension;
|
||||
exports.addSourceMappingUrl = addSourceMappingUrl;
|
||||
exports.transform = transform;
|
||||
exports.compile = compile;
|
||||
exports.deleteDir = deleteDir;
|
||||
exports.requireChokidar = requireChokidar;
|
||||
exports.withExtension = withExtension;
|
||||
|
||||
function babel() {
|
||||
const data = _interopRequireWildcard(require("@babel/core"));
|
||||
|
||||
babel = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _module() {
|
||||
const data = require("module");
|
||||
|
||||
_module = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
|
||||
|
||||
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
|
||||
|
||||
const readdirRecursive = require("fs-readdir-recursive");
|
||||
|
||||
const path = require("path");
|
||||
|
||||
const fs = require("fs");
|
||||
|
||||
function chmod(src, dest) {
|
||||
try {
|
||||
fs.chmodSync(dest, fs.statSync(src).mode);
|
||||
} catch (err) {
|
||||
console.warn(`Cannot change permissions of ${dest}`);
|
||||
}
|
||||
}
|
||||
|
||||
function readdir(dirname, includeDotfiles, filter) {
|
||||
return readdirRecursive(dirname, (filename, _index, currentDirectory) => {
|
||||
const stat = fs.statSync(path.join(currentDirectory, filename));
|
||||
if (stat.isDirectory()) return true;
|
||||
return (includeDotfiles || filename[0] !== ".") && (!filter || filter(filename));
|
||||
});
|
||||
}
|
||||
|
||||
function readdirForCompilable(dirname, includeDotfiles, altExts) {
|
||||
return readdir(dirname, includeDotfiles, function (filename) {
|
||||
return isCompilableExtension(filename, altExts);
|
||||
});
|
||||
}
|
||||
|
||||
function isCompilableExtension(filename, altExts) {
|
||||
const exts = altExts || babel().DEFAULT_EXTENSIONS;
|
||||
const ext = path.extname(filename);
|
||||
return exts.includes(ext);
|
||||
}
|
||||
|
||||
function addSourceMappingUrl(code, loc) {
|
||||
return code + "\n//# sourceMappingURL=" + path.basename(loc);
|
||||
}
|
||||
|
||||
const CALLER = {
|
||||
name: "@babel/cli"
|
||||
};
|
||||
|
||||
function transform(filename, code, opts) {
|
||||
opts = Object.assign({}, opts, {
|
||||
caller: CALLER,
|
||||
filename
|
||||
});
|
||||
return new Promise((resolve, reject) => {
|
||||
babel().transform(code, opts, (err, result) => {
|
||||
if (err) reject(err);else resolve(result);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function compile(filename, opts) {
|
||||
opts = Object.assign({}, opts, {
|
||||
caller: CALLER
|
||||
});
|
||||
return new Promise((resolve, reject) => {
|
||||
babel().transformFile(filename, opts, (err, result) => {
|
||||
if (err) reject(err);else resolve(result);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function deleteDir(path) {
|
||||
if (fs.existsSync(path)) {
|
||||
fs.readdirSync(path).forEach(function (file) {
|
||||
const curPath = path + "/" + file;
|
||||
|
||||
if (fs.lstatSync(curPath).isDirectory()) {
|
||||
deleteDir(curPath);
|
||||
} else {
|
||||
fs.unlinkSync(curPath);
|
||||
}
|
||||
});
|
||||
fs.rmdirSync(path);
|
||||
}
|
||||
}
|
||||
|
||||
process.on("uncaughtException", function (err) {
|
||||
console.error(err);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
|
||||
function requireChokidar() {
|
||||
try {
|
||||
return parseInt(process.versions.node) >= 8 ? require("chokidar") : require("@nicolo-ribaudo/chokidar-2");
|
||||
} catch (err) {
|
||||
console.error("The optional dependency chokidar failed to install and is required for " + "--watch. Chokidar is likely not supported on your platform.");
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
function withExtension(filename, ext = ".js") {
|
||||
const newBasename = path.basename(filename, path.extname(filename)) + ext;
|
||||
return path.join(path.dirname(filename), newBasename);
|
||||
}
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
{
|
||||
"name": "@babel/cli",
|
||||
"version": "7.13.14",
|
||||
"description": "Babel command line.",
|
||||
"author": "Sebastian McKenzie <sebmck@gmail.com>",
|
||||
"homepage": "https://babel.dev/docs/en/next/babel-cli",
|
||||
"bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20cli%22+is%3Aopen",
|
||||
"license": "MIT",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/babel/babel.git",
|
||||
"directory": "packages/babel-cli"
|
||||
},
|
||||
"keywords": [
|
||||
"6to5",
|
||||
"babel",
|
||||
"es6",
|
||||
"transpile",
|
||||
"transpiler",
|
||||
"babel-cli",
|
||||
"compiler"
|
||||
],
|
||||
"dependencies": {
|
||||
"commander": "^4.0.1",
|
||||
"convert-source-map": "^1.1.0",
|
||||
"fs-readdir-recursive": "^1.1.0",
|
||||
"glob": "^7.0.0",
|
||||
"lodash": "^4.17.19",
|
||||
"make-dir": "^2.1.0",
|
||||
"slash": "condition:BABEL_8_BREAKING ? ^3.0.0 : ^2.0.0",
|
||||
"source-map": "^0.5.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@nicolo-ribaudo/chokidar-2": "condition:BABEL_8_BREAKING ? : 2.1.8-no-fsevents",
|
||||
"chokidar": "^3.4.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@babel/core": "^7.0.0-0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "workspace:*",
|
||||
"@babel/helper-fixtures": "workspace:*",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"babel": "./bin/babel.js",
|
||||
"babel-external-helpers": "./bin/babel-external-helpers.js"
|
||||
}
|
||||
}
|
||||
224
build-tools/compiler.js
Normal file
224
build-tools/compiler.js
Normal file
|
|
@ -0,0 +1,224 @@
|
|||
/**
|
||||
* Tiny wrapper around babel/core to do most of the things babel-cli does,
|
||||
* plus incremental compilation
|
||||
*
|
||||
* Adds one option in addition to babel's built-in options: `incremental`
|
||||
*
|
||||
* Heavily copied from `babel-cli`: https://github.com/babel/babel/tree/main/packages/babel-cli
|
||||
*
|
||||
* @author Guangcong Luo <guangcongluo@gmail.com>
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
const babel = require('@babel/core');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sourceMap = require('source-map');
|
||||
|
||||
const VERBOSE = false;
|
||||
|
||||
/**
 * Writes a Babel compile result to `filePath`, creating parent directories
 * as needed. When `opts.sourceMaps` requests an external map (any truthy
 * value other than "inline") and the result carries a map, also writes
 * `filePath + ".map"` and appends a `sourceMappingURL` footer to the code.
 * Note: mutates `res.code` / `res.map` in the external-map case.
 */
function outputFileSync(filePath, res, opts) {
	fs.mkdirSync(path.dirname(filePath), {recursive: true});

	const wantsExternalMap = opts.sourceMaps && opts.sourceMaps !== "inline";
	if (res.map && wantsExternalMap) {
		const mapPath = filePath + ".map";
		res.code += "\n//# sourceMappingURL=" + path.basename(mapPath);
		res.map.file = path.basename(filePath);
		fs.writeFileSync(mapPath, JSON.stringify(res.map));
	}

	fs.writeFileSync(filePath, res.code);
}
|
||||
|
||||
/**
 * Converts Windows backslash path separators to forward slashes.
 * Extended-length paths (`\\?\...`) and paths containing non-ASCII
 * characters are returned unchanged, since converting those can change
 * their meaning on Windows.
 *
 * Fix: the parameter was previously named `path`, shadowing the
 * module-level `path` require in this file; renamed to `p` (positional,
 * so callers are unaffected).
 */
function slash(p) {
	const isExtendedLengthPath = /^\\\\\?\\/.test(p);
	const hasNonAscii = /[^\u0000-\u0080]+/.test(p);

	if (isExtendedLengthPath || hasNonAscii) {
		return p;
	}

	return p.replace(/\\/g, '/');
}
|
||||
|
||||
/**
 * Concatenates several Babel file results into one `{code, map}` bundle.
 *
 * Each result's code is appended (with a trailing newline) and, when the
 * result has a source map, its mappings are re-emitted into a combined
 * SourceMapGenerator shifted down by the number of lines already emitted.
 *
 * Uses the `source-map` package's synchronous SourceMapConsumer API —
 * matches the ^0.5.0 dependency in this repo (0.7+ is async); verify if
 * that dependency is ever upgraded.
 */
function combineResults(fileResults, sourceMapOptions, opts) {
	// Only allocate a generator if at least one input actually has a map.
	let map = null;
	if (fileResults.some(result => result?.map)) {
		map = new sourceMap.SourceMapGenerator(sourceMapOptions);
	}

	let code = "";
	let offset = 0; // line offset of the next file within the combined output

	for (const result of fileResults) {
		if (!result) continue;

		code += result.code + "\n";

		if (result.map) {
			const consumer = new sourceMap.SourceMapConsumer(result.map);
			const sources = new Set();

			consumer.eachMapping(function (mapping) {
				if (mapping.source != null) sources.add(mapping.source);

				// Copy the mapping, shifting generated lines by the lines
				// already emitted before this file.
				map.addMapping({
					generated: {
						line: mapping.generatedLine + offset,
						column: mapping.generatedColumn,
					},
					source: mapping.source,
					original:
						mapping.source == null
							? null
							: {
									line: mapping.originalLine,
									column: mapping.originalColumn,
								},
				});
			});

			// Carry over embedded source contents so the map is self-contained.
			for (const source of sources) {
				const content = consumer.sourceContentFor(source, true);
				if (content !== null) {
					map.setSourceContent(source, content);
				}
			}

			// Recompute the offset from the full accumulated code. Note the
			// offset only advances for map-bearing results; map-less code is
			// folded into the next file's shift.
			offset = code.split("\n").length - 1;
		}
	}

	if (opts.sourceMaps === "inline") {
		// NOTE(review): if no input had a map, `map` is null here and this
		// embeds the literal string "null" as the inline map — confirm
		// callers never request inline maps without map-bearing inputs.
		const json = JSON.stringify(map);
		const base64 = Buffer.from(json, 'utf8').toString('base64');
		code += "\n//# sourceMappingURL=data:application/json;charset=utf-8;base64," + base64;
	}

	return {
		map: map,
		code: code,
	};
}
|
||||
|
||||
/**
 * Incremental-build check: returns true when compilation can be skipped —
 * either `src` no longer exists, or `dest` is strictly newer than `src`
 * (by ctime). Any stat failure on `dest` means it must be (re)built.
 */
function noRebuildNeeded(src, dest) {
	try {
		const srcStat = fs.statSync(src, {throwIfNoEntry: false});
		if (!srcStat) return true;
		if (srcStat.ctimeMs < fs.statSync(dest).ctimeMs) return true;
	} catch (e) {}
	return false;
}
|
||||
|
||||
/**
 * Compiles every `.ts`/`.tsx` file under `srcDir` (a directory path or an
 * array of them) into `.js` files under `destDir`, mirroring the directory
 * layout. With `opts.incremental`, files whose output is already newer than
 * the source are skipped. Returns the number of files compiled.
 */
function compileToDir(srcDir, destDir, opts = {}) {
	const incremental = opts.incremental;
	// babel rejects unknown options, so strip ours before transforming
	delete opts.incremental;

	// Compiles a single source file; returns 1 if compiled, 0 if skipped.
	function compileOne(src, base) {
		const relative = path.relative(base, src);
		if (!relative.endsWith('.ts') && !relative.endsWith('.tsx')) return 0;

		const relativeJs = relative.slice(0, relative.endsWith('.tsx') ? -4 : -3) + '.js';
		const dest = path.join(destDir, relativeJs);

		if (incremental && noRebuildNeeded(src, dest)) return 0;

		const res = babel.transformFileSync(src, {
			...opts,
			sourceFileName: slash(path.relative(dest + "/..", src)),
		});
		if (!res) return 0;

		outputFileSync(dest, res, opts);
		fs.chmodSync(dest, fs.statSync(src).mode);

		if (VERBOSE) console.log(src + " -> " + dest);
		return 1;
	}

	// Recursively walks `entry` (file or directory, dotfiles skipped);
	// returns the number of files compiled beneath it.
	function walk(entry, base) {
		const stat = fs.statSync(entry, {throwIfNoEntry: false});
		if (!stat) return 0;

		if (!stat.isDirectory()) {
			return compileOne(entry, base || path.dirname(entry));
		}

		const root = base || entry;
		let count = 0;
		for (const filename of fs.readdirSync(entry)) {
			if (filename.startsWith('.')) continue;
			count += walk(path.join(entry, filename), root);
		}
		return count;
	}

	let total = 0;
	fs.mkdirSync(destDir, {recursive: true});
	const srcDirs = typeof srcDir === 'string' ? [srcDir] : srcDir;
	for (const dir of srcDirs) total += walk(dir);
	if (incremental) opts.incremental = true; // incredibly dumb hack to preserve the option
	return total;
}
|
||||
|
||||
/**
 * Compiles one or more source files (`srcFile` may be a string or an array)
 * and concatenates the results into a single bundle at `destFile`, with a
 * combined source map. With `opts.incremental`, does nothing when every
 * source is already older than the bundle. Returns the number of files
 * compiled (missing sources are silently skipped).
 */
function compileToFile(srcFile, destFile, opts) {
	const incremental = opts.incremental;
	// babel rejects unknown options, so strip ours before transforming
	delete opts.incremental;

	const srcFiles = typeof srcFile === 'string' ? [srcFile] : srcFile;

	if (incremental && srcFiles.every(src => noRebuildNeeded(src, destFile))) {
		opts.incremental = true; // incredibly dumb hack to preserve the option
		return 0;
	}

	const results = [];
	for (const src of srcFiles) {
		if (!fs.existsSync(src)) continue;

		const res = babel.transformFileSync(src, opts);
		if (res) results.push(res);

		if (VERBOSE) console.log(src + " ->");
	}

	const sourceMapOptions = {
		file: path.basename(destFile),
		sourceRoot: opts.sourceRoot,
	};
	const combined = combineResults(results, sourceMapOptions, opts);
	outputFileSync(destFile, combined, opts);

	if (VERBOSE) console.log("-> " + destFile);
	if (incremental) opts.incremental = true; // incredibly dumb hack to preserve the option
	return results.length;
}
|
||||
|
||||
// Public entry points (consumed by the build script):
// - compileToDir: per-file compilation of a source tree
// - compileToFile: concatenated single-file bundle with a combined source map
exports.compileToDir = compileToDir;

exports.compileToFile = compileToFile;
|
||||
|
|
@ -12,6 +12,7 @@ const path = require('path');
|
|||
const fs = require('fs');
|
||||
const crypto = require('crypto');
|
||||
const child_process = require('child_process');
|
||||
const compiler = require('./compiler');
|
||||
|
||||
const thisDir = __dirname;
|
||||
const rootDir = path.resolve(thisDir, '..');
|
||||
|
|
@ -71,46 +72,54 @@ console.log("DONE");
|
|||
* Compile TS files
|
||||
*********************************************************/
|
||||
|
||||
let ignoreGraphics = ' --ignore "src/battle-animations.js","src/battle-animations-moves.js"';
|
||||
process.stdout.write("Compiling TS files... ");
|
||||
|
||||
let compileStartTime = process.hrtime();
|
||||
let compiledFiles = 0;
|
||||
|
||||
let compileOpts = {
|
||||
incremental: true,
|
||||
ignore: ['src/battle-animations.js', 'src/battle-animations-moves.js'],
|
||||
};
|
||||
if (process.argv[2] === 'full') {
|
||||
ignoreGraphics = '';
|
||||
delete compileOpts.ignore;
|
||||
} else {
|
||||
try {
|
||||
fs.statSync('data/graphics.js');
|
||||
// graphics.js exists, recompile it
|
||||
ignoreGraphics = '';
|
||||
delete compileOpts.ignore;
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
child_process.execSync(`node build-tools/babel-cli/bin/babel.js src --out-dir js --extensions ".ts,.tsx" --incremental${ignoreGraphics} --source-maps`);
|
||||
compiledFiles += compiler.compileToDir(`src`, `js`, compileOpts);
|
||||
|
||||
let textData = '';
|
||||
try {
|
||||
textData = fs.readFileSync('data/text.js');
|
||||
} catch (e) {}
|
||||
compiledFiles += compiler.compileToDir(`src`, `js`, compileOpts);
|
||||
|
||||
fs.writeFileSync(
|
||||
compiledFiles += compiler.compileToFile(
|
||||
['src/battle-dex.ts', 'src/battle-dex-data.ts', 'src/battle-log.ts', 'src/battle-log-misc.js', 'data/text.js', 'src/battle-text-parser.ts'],
|
||||
'js/battledata.js',
|
||||
fs.readFileSync('js/battle-dex.js') + '\n\n' +
|
||||
fs.readFileSync('js/battle-dex-data.js') + '\n\n' +
|
||||
fs.readFileSync('js/battle-log.js') + '\n\n' +
|
||||
fs.readFileSync('src/battle-log-misc.js') +
|
||||
textData + '\n\n' +
|
||||
fs.readFileSync('js/battle-text-parser.js')
|
||||
compileOpts
|
||||
);
|
||||
|
||||
if (!ignoreGraphics) {
|
||||
fs.writeFileSync(
|
||||
if (!compileOpts.ignore) {
|
||||
compiledFiles += compiler.compileToFile(
|
||||
['src/battle-animations.ts', 'src/battle-animations-moves.ts'],
|
||||
'data/graphics.js',
|
||||
fs.readFileSync('js/battle-animations.js') + '\n\n' +
|
||||
fs.readFileSync('js/battle-animations-moves.js')
|
||||
compileOpts
|
||||
);
|
||||
}
|
||||
|
||||
const diff = process.hrtime(compileStartTime);
|
||||
console.log(
|
||||
`(${compiledFiles} ${compiledFiles !== 1 ? "files" : "file"} in ${diff[0] + Math.round(diff[1] / 1e6) / 1e3}s) DONE`
|
||||
);
|
||||
|
||||
/*********************************************************
|
||||
* Update cachebuster and News
|
||||
*********************************************************/
|
||||
|
||||
process.stdout.write("Updating cachebuster and URLs... ");
|
||||
|
||||
const URL_REGEX = /(src|href)="\/(.*?)(\?[a-z0-9]*?)?"/g;
|
||||
|
||||
function updateURL(a, b, c, d) {
|
||||
|
|
@ -134,19 +143,15 @@ function updateURL(a, b, c, d) {
|
|||
}
|
||||
|
||||
/**
 * Writes the rewritten HTML entry points and the generated replay-embed.js
 * to disk, logging progress to stdout.
 */
function writeFiles(indexContents, preactIndexContents, crossprotocolContents, replayEmbedContents) {
	process.stdout.write("Writing new HTML files... ");
	const htmlOutputs = [
		['index.html', indexContents],
		['preactalpha.html', preactIndexContents],
		['crossprotocol.html', crossprotocolContents],
	];
	for (const [filename, contents] of htmlOutputs) {
		fs.writeFileSync(filename, contents);
	}
	console.log("DONE");
	process.stdout.write("Writing replay-embed.js... ");
	fs.writeFileSync('js/replay-embed.js', replayEmbedContents);
	console.log("DONE");
}
|
||||
|
||||
function updateFiles() {
|
||||
// add hashes to js and css files and rewrite URLs
|
||||
process.stdout.write("Updating hashes and URLs... ");
|
||||
let indexContents = fs.readFileSync('index.template.html', {encoding: 'utf8'});
|
||||
indexContents = indexContents.replace(URL_REGEX, updateURL);
|
||||
let preactIndexContents = fs.readFileSync('preactalpha.template.html', {encoding: 'utf8'});
|
||||
|
|
@ -155,11 +160,10 @@ function updateFiles() {
|
|||
crossprotocolContents = crossprotocolContents.replace(URL_REGEX, updateURL);
|
||||
let replayEmbedContents = fs.readFileSync('js/replay-embed.template.js', {encoding: 'utf8'});
|
||||
replayEmbedContents = replayEmbedContents.replace(/play\.pokemonshowdown\.com/g, routes.client);
|
||||
console.log("DONE");
|
||||
|
||||
// add news, only if it's actually likely to exist
|
||||
if (__dirname.endsWith('play.pokemonshowdown.com/build-tools')) {
|
||||
process.stdout.write("Updating news... ");
|
||||
process.stdout.write("and news... ");
|
||||
child_process.exec('php ' + path.resolve(thisDir, 'news-data.php'), function (error, stdout, stderr) {
|
||||
let newsData = [0, '[failed to retrieve news]'];
|
||||
if (!error && !stderr) {
|
||||
|
|
@ -174,7 +178,6 @@ function updateFiles() {
|
|||
|
||||
indexContents = indexContents.replace(/<!-- newsid -->/g, newsData[0]);
|
||||
indexContents = indexContents.replace(/<!-- news -->/g, newsData[1]);
|
||||
console.log("DONE");
|
||||
|
||||
writeFiles(indexContents, preactIndexContents, crossprotocolContents, replayEmbedContents);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -16,7 +16,6 @@
|
|||
"build-full": "node build full"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/cli": "^7.13.14",
|
||||
"@babel/core": "^7.13.15",
|
||||
"@babel/plugin-proposal-class-properties": "^7.13.0",
|
||||
"@babel/plugin-transform-react-jsx": "^7.13.12",
|
||||
|
|
|
|||
Loading…
Reference in New Issue
Block a user