pve-eslint.git: eslint/Makefile.js (from the "import 8.23.1 source" commit)
1 /**
2 * @fileoverview Build file
3 * @author nzakas
4 */
5
6 /* eslint no-use-before-define: "off", no-console: "off" -- CLI */
7 "use strict";
8
9 //------------------------------------------------------------------------------
10 // Requirements
11 //------------------------------------------------------------------------------
12
13 const checker = require("npm-license"),
14 ReleaseOps = require("eslint-release"),
15 fs = require("fs"),
16 glob = require("glob"),
17 marked = require("marked"),
18 markdownlint = require("markdownlint"),
19 os = require("os"),
20 path = require("path"),
21 semver = require("semver"),
22 ejs = require("ejs"),
23 loadPerf = require("load-perf"),
24 yaml = require("js-yaml"),
25 ignore = require("ignore"),
26 { CLIEngine } = require("./lib/cli-engine"),
27 builtinRules = require("./lib/rules/index");
28
29 require("shelljs/make");
30 /* global target -- global.target is declared in `shelljs/make.js` */
31 /**
32 * global.target = {};
33 * @see https://github.com/shelljs/shelljs/blob/124d3349af42cb794ae8f78fc9b0b538109f7ca7/make.js#L4
34 * @see https://github.com/DefinitelyTyped/DefinitelyTyped/blob/3aa2d09b6408380598cfb802743b07e1edb725f3/types/shelljs/make.d.ts#L8-L11
35 */
36 const { cat, cd, echo, exec, exit, find, ls, mkdir, pwd, test } = require("shelljs");
37
38 //------------------------------------------------------------------------------
39 // Settings
40 //------------------------------------------------------------------------------
41
42 /*
43  * A little bit fuzzy. My machine reports a first-CPU speed of 3392, and the perf test
44  * always completes in < 3800ms. However, Travis is less predictable because it uses
45  * multiple different VM types, so I'm fudging this for now in the hope that it
46  * at least provides some sort of useful signal.
47 */
48 const PERF_MULTIPLIER = 13e6;
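/*
 * For reference (illustrative arithmetic only): `runPerformanceTest()` below computes the
 * time budget as `multiplier / cpuSpeed`, so on the machine described above this works out
 * to 13e6 / 3392 ≈ 3833ms, just above the < 3800ms the single-file test takes there.
 */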
49
50 const OPEN_SOURCE_LICENSES = [
51 /MIT/u, /BSD/u, /Apache/u, /ISC/u, /WTF/u, /Public Domain/u, /LGPL/u, /Python/u
52 ];
53
54 //------------------------------------------------------------------------------
55 // Data
56 //------------------------------------------------------------------------------
57
58 const NODE = "node ", // intentional extra space
59 NODE_MODULES = "./node_modules/",
60 TEMP_DIR = "./tmp/",
61 DEBUG_DIR = "./debug/",
62 BUILD_DIR = "build",
63 SITE_DIR = "../eslint.org",
64 DOCS_DIR = "./docs",
65 DOCS_SRC_DIR = path.join(DOCS_DIR, "src"),
66 DOCS_DATA_DIR = path.join(DOCS_SRC_DIR, "_data"),
67 PERF_TMP_DIR = path.join(TEMP_DIR, "eslint", "performance"),
68
69 // Utilities - intentional extra space at the end of each string
70 MOCHA = `${NODE_MODULES}mocha/bin/_mocha `,
71 ESLINT = `${NODE} bin/eslint.js --report-unused-disable-directives `,
72
73 // Files
74 RULE_FILES = glob.sync("lib/rules/*.js").filter(filePath => path.basename(filePath) !== "index.js"),
75 JSON_FILES = find("conf/").filter(fileType("json")),
76 MARKDOWNLINT_IGNORE_INSTANCE = ignore().add(fs.readFileSync(path.join(__dirname, ".markdownlintignore"), "utf-8")),
77 MARKDOWN_FILES_ARRAY = MARKDOWNLINT_IGNORE_INSTANCE.filter(find("docs/").concat(ls(".")).filter(fileType("md"))),
78 TEST_FILES = "\"tests/{bin,conf,lib,tools}/**/*.js\"",
79 PERF_ESLINTRC = path.join(PERF_TMP_DIR, "eslint.config.js"),
80 PERF_MULTIFILES_TARGET_DIR = path.join(PERF_TMP_DIR, "eslint"),
81
82 /*
83 * glob arguments with Windows separator `\` don't work:
84 * https://github.com/eslint/eslint/issues/16259
85 */
86 PERF_MULTIFILES_TARGETS = `"${TEMP_DIR}eslint/performance/eslint/{lib,tests/lib}/**/*.js"`,
87
88 // Settings
89 MOCHA_TIMEOUT = parseInt(process.env.ESLINT_MOCHA_TIMEOUT, 10) || 10000;
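// Note: when ESLINT_MOCHA_TIMEOUT is unset, parseInt() returns NaN (falsy), so the
// `|| 10000` fallback gives Mocha a 10-second timeout.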
90
91 //------------------------------------------------------------------------------
92 // Helpers
93 //------------------------------------------------------------------------------
94
95 /**
96  * Simple JSON file validation that relies on the built-in JSON parser.
97  * @param {string} filePath Path to the JSON file.
98  * @throws Error If the file contents are not valid JSON.
99 * @returns {undefined}
100 */
101 function validateJsonFile(filePath) {
102 const contents = fs.readFileSync(filePath, "utf8");
103
104 JSON.parse(contents);
105 }
106
107 /**
108 * Generates a function that matches files with a particular extension.
109  * @param {string} extension The file extension (e.g. "js")
110 * @returns {Function} The function to pass into a filter method.
111 * @private
112 */
113 function fileType(extension) {
114 return function(filename) {
115 return filename.slice(filename.lastIndexOf(".") + 1) === extension;
116 };
117 }
118
119 /**
120 * Executes a command and returns the output instead of printing it to stdout.
121 * @param {string} cmd The command string to execute.
122 * @returns {string} The result of the executed command.
123 */
124 function execSilent(cmd) {
125 return exec(cmd, { silent: true }).stdout;
126 }
127
128 /**
129 * Generates a release blog post for eslint.org
130 * @param {Object} releaseInfo The release metadata.
131 * @param {string} [prereleaseMajorVersion] If this is a prerelease, the next major version after this prerelease
132 * @returns {void}
133 * @private
134 */
135 function generateBlogPost(releaseInfo, prereleaseMajorVersion) {
136 const ruleList = RULE_FILES
137
138 // Strip the .js extension
139 .map(ruleFileName => path.basename(ruleFileName, ".js"))
140
141 /*
142 * Sort by length descending. This ensures that rule names which are substrings of other rule names are not
143 * matched incorrectly. For example, the string "no-undefined" should get matched with the `no-undefined` rule,
144 * instead of getting matched with the `no-undef` rule followed by the string "ined".
145 */
146 .sort((ruleA, ruleB) => ruleB.length - ruleA.length);
147
148 const renderContext = Object.assign({ prereleaseMajorVersion, ruleList }, releaseInfo);
149
150 const output = ejs.render(cat("./templates/blogpost.md.ejs"), renderContext),
151 now = new Date(),
152 month = now.getMonth() + 1,
153 day = now.getDate(),
154 filename = path.join(SITE_DIR, `src/content/blog/${now.getFullYear()}-${
155 month < 10 ? `0${month}` : month}-${
156 day < 10 ? `0${day}` : day}-eslint-v${
157 releaseInfo.version}-released.md`);
158
159 output.to(filename);
160 }
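/*
 * Illustrative example (hypothetical version and date): a v9.9.9 release generated on
 * 2024-03-09 would be written to
 * `../eslint.org/src/content/blog/2024-03-09-eslint-v9.9.9-released.md`, with the month
 * and day zero-padded by the checks above.
 */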
161
162 /**
163 * Generates a doc page with formatter result examples
164 * @param {Object} formatterInfo Linting results from each formatter
165 * @returns {void}
166 */
167 function generateFormatterExamples(formatterInfo) {
168 const output = ejs.render(cat("./templates/formatter-examples.md.ejs"), formatterInfo);
169 const outputDir = path.join(DOCS_SRC_DIR, "user-guide/formatters/"),
170 filename = path.join(outputDir, "index.md"),
171 htmlFilename = path.join(outputDir, "html-formatter-example.html");
172
173 if (!test("-d", outputDir)) {
174 mkdir(outputDir);
175 }
176
177 output.to(filename);
178 formatterInfo.formatterResults.html.result.to(htmlFilename);
179 }
180
181 /**
182  * Generates a doc page that lists all of the rules and links to them
183 * @returns {void}
184 */
185 function generateRuleIndexPage() {
186 const docsSiteOutputFile = path.join(DOCS_DATA_DIR, "rules.json"),
187 docsSiteMetaOutputFile = path.join(DOCS_DATA_DIR, "rules_meta.json"),
188 ruleTypes = "conf/rule-type-list.json",
189 ruleTypesData = JSON.parse(cat(path.resolve(ruleTypes)));
190
191 const meta = {};
192
193 RULE_FILES
194 .map(filename => [filename, path.basename(filename, ".js")])
195 .sort((a, b) => a[1].localeCompare(b[1]))
196 .forEach(pair => {
197 const filename = pair[0];
198 const basename = pair[1];
199 const rule = require(path.resolve(filename));
200
201 /*
202 * Eleventy interprets the {{ }} in messages as being variables,
203 * which can cause an error if there's syntax it doesn't expect.
204 * Because we don't use this info in the website anyway, it's safer
205 * to just remove it.
206 *
207 * Also removing the schema because we don't need it.
208 */
209 meta[basename] = {
210 ...rule.meta,
211 schema: void 0,
212 messages: void 0
213 };
214
215 if (rule.meta.deprecated) {
216 ruleTypesData.deprecated.rules.push({
217 name: basename,
218 replacedBy: rule.meta.replacedBy || []
219 });
220 } else {
221 const output = {
222 name: basename,
223 description: rule.meta.docs.description,
224 recommended: rule.meta.docs.recommended || false,
225 fixable: !!rule.meta.fixable,
226 hasSuggestions: !!rule.meta.hasSuggestions
227 },
228 ruleType = ruleTypesData.types.find(c => c.name === rule.meta.type);
229
230 if (!ruleType.rules) {
231 ruleType.rules = [];
232 }
233
234 ruleType.rules.push(output);
235 }
236 });
237
238     // `.rules` will be `undefined` if all rules in a category are deprecated.
239 ruleTypesData.types = ruleTypesData.types.filter(ruleType => !!ruleType.rules);
240
241 JSON.stringify(ruleTypesData, null, 4).to(docsSiteOutputFile);
242 JSON.stringify(meta, null, 4).to(docsSiteMetaOutputFile);
243
244 }
245
246 /**
247  * Creates a git commit and tag in an adjacent `eslint.org` repository, without pushing it to
248 * the remote. This assumes that the repository has already been modified somehow (e.g. by adding a blogpost).
249 * @param {string} [tag] The string to tag the commit with
250 * @returns {void}
251 */
252 function commitSiteToGit(tag) {
253 const currentDir = pwd();
254
255 cd(SITE_DIR);
256 exec("git add -A .");
257 exec(`git commit -m "Added release blog post for ${tag}"`);
258 exec(`git tag ${tag}`);
259 exec("git fetch origin && git rebase origin/main");
260 cd(currentDir);
261 }
262
263 /**
264 * Publishes the changes in an adjacent `eslint.org` repository to the remote. The
265 * site should already have local commits (e.g. from running `commitSiteToGit`).
266 * @returns {void}
267 */
268 function publishSite() {
269 const currentDir = pwd();
270
271 cd(SITE_DIR);
272 exec("git push origin HEAD --tags");
273 cd(currentDir);
274 }
275
276 /**
277 * Updates the changelog, bumps the version number in package.json, creates a local git commit and tag,
278  * and generates the site in an adjacent `eslint.org` folder.
279 * @returns {void}
280 */
281 function generateRelease() {
282 ReleaseOps.generateRelease();
283 const releaseInfo = JSON.parse(cat(".eslint-release-info.json"));
284
285 echo("Generating site");
286 target.gensite();
287 generateBlogPost(releaseInfo);
288 commitSiteToGit(`v${releaseInfo.version}`);
289
290 echo("Updating version in docs package");
291 const docsPackagePath = path.join(__dirname, "docs", "package.json");
292 const docsPackage = require(docsPackagePath);
293
294 docsPackage.version = releaseInfo.version;
295 fs.writeFileSync(docsPackagePath, `${JSON.stringify(docsPackage, null, 4)}\n`);
296
297 echo("Updating commit with docs data");
298 exec("git add docs/ && git commit --amend --no-edit");
299 exec(`git tag -a -f v${releaseInfo.version} -m ${releaseInfo.version}`);
300 }
301
302 /**
303 * Updates the changelog, bumps the version number in package.json, creates a local git commit and tag,
304  * and generates the site in an adjacent `eslint.org` folder.
305 * @param {string} prereleaseId The prerelease identifier (alpha, beta, etc.)
306 * @returns {void}
307 */
308 function generatePrerelease(prereleaseId) {
309 ReleaseOps.generateRelease(prereleaseId);
310 const releaseInfo = JSON.parse(cat(".eslint-release-info.json"));
311 const nextMajor = semver.inc(releaseInfo.version, "major");
312
313 echo("Generating site");
314
315 // always write docs into the next major directory (so 2.0.0-alpha.0 writes to 2.0.0)
316 target.gensite(nextMajor);
317
318 /*
319      * A premajor prerelease has the same major version as the computed "next major version";
320      * preminor and prepatch prereleases do not.
321 * 5.0.0-alpha.0 --> next major = 5, current major = 5
322 * 4.4.0-alpha.0 --> next major = 5, current major = 4
323 * 4.0.1-alpha.0 --> next major = 5, current major = 4
324 */
325 if (semver.major(releaseInfo.version) === semver.major(nextMajor)) {
326
327 /*
328 * This prerelease is for a major release (not preminor/prepatch).
329 * Blog post generation logic needs to be aware of this (as well as
330 * know what the next major version is actually supposed to be).
331 */
332 generateBlogPost(releaseInfo, nextMajor);
333 } else {
334 generateBlogPost(releaseInfo);
335 }
336
337 commitSiteToGit(`v${releaseInfo.version}`);
338 }
339
340 /**
341  * Publishes a generated release to npm and GitHub, and pushes the changes in the adjacent
342  * `eslint.org` repo to its remote.
343 * @returns {void}
344 */
345 function publishRelease() {
346 ReleaseOps.publishRelease();
347
348 // push to latest branch to trigger docs deploy
349 exec("git push origin HEAD:latest -f");
350
351 publishSite();
352 }
353
354 /**
355  * Splits a command result into separate lines.
356 * @param {string} result The command result string.
357 * @returns {Array} The separated lines.
358 */
359 function splitCommandResultToLines(result) {
360 return result.trim().split("\n");
361 }
362
363 /**
364 * Gets the first commit sha of the given file.
365 * @param {string} filePath The file path which should be checked.
366 * @returns {string} The commit sha.
367 */
368 function getFirstCommitOfFile(filePath) {
369 let commits = execSilent(`git rev-list HEAD -- ${filePath}`);
370
371 commits = splitCommandResultToLines(commits);
372 return commits[commits.length - 1].trim();
373 }
374
375 /**
376 * Gets the tag name where a given file was introduced first.
377 * @param {string} filePath The file path to check.
378 * @returns {string} The tag name.
379 */
380 function getFirstVersionOfFile(filePath) {
381 const firstCommit = getFirstCommitOfFile(filePath);
382 let tags = execSilent(`git tag --contains ${firstCommit}`);
383
384 tags = splitCommandResultToLines(tags);
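    // Keep only tags that are valid semver versions, sort them ascending, and return the
    // earliest one, i.e. the first release whose history contains the commit.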
385 return tags.reduce((list, version) => {
386 const validatedVersion = semver.valid(version.trim());
387
388 if (validatedVersion) {
389 list.push(validatedVersion);
390 }
391 return list;
392 }, []).sort(semver.compare)[0];
393 }
394
395 /**
396 * Gets the commit that deleted a file.
397 * @param {string} filePath The path to the deleted file.
398 * @returns {string} The commit sha.
399 */
400 function getCommitDeletingFile(filePath) {
401 const commits = execSilent(`git rev-list HEAD -- ${filePath}`);
402
403 return splitCommandResultToLines(commits)[0];
404 }
405
406 /**
407 * Gets the first version number where a given file is no longer present.
408 * @param {string} filePath The path to the deleted file.
409 * @returns {string} The version number.
410 */
411 function getFirstVersionOfDeletion(filePath) {
412 const deletionCommit = getCommitDeletingFile(filePath),
413 tags = execSilent(`git tag --contains ${deletionCommit}`);
414
415 return splitCommandResultToLines(tags)
416 .map(version => semver.valid(version.trim()))
417 .filter(version => version)
418 .sort(semver.compare)[0];
419 }
420
421 /**
422 * Lints Markdown files.
423 * @param {Array} files Array of file names to lint.
424 * @returns {Object} exec-style exit code object.
425 * @private
426 */
427 function lintMarkdown(files) {
428 const config = yaml.load(fs.readFileSync(path.join(__dirname, "./.markdownlint.yml"), "utf8")),
429 result = markdownlint.sync({
430 files,
431 config,
432 resultVersion: 1
433 }),
434 resultString = result.toString(),
435 returnCode = resultString ? 1 : 0;
436
437 if (resultString) {
438 console.error(resultString);
439 }
440 return { code: returnCode };
441 }
442
443 /**
444 * Gets linting results from every formatter, based on a hard-coded snippet and config
445 * @returns {Object} Output from each formatter
446 */
447 function getFormatterResults() {
448 const stripAnsi = require("strip-ansi");
449
450 const formatterFiles = fs.readdirSync("./lib/cli-engine/formatters/"),
451 rules = {
452 "no-else-return": "warn",
453 indent: ["warn", 4],
454 "space-unary-ops": "error",
455 semi: ["warn", "always"],
456 "consistent-return": "error"
457 },
458 cli = new CLIEngine({
459 useEslintrc: false,
460 baseConfig: { extends: "eslint:recommended" },
461 rules
462 }),
463 codeString = [
464 "function addOne(i) {",
465 " if (i != NaN) {",
466 " return i ++",
467 " } else {",
468 " return",
469 " }",
470 "};"
471 ].join("\n"),
472 rawMessages = cli.executeOnText(codeString, "fullOfProblems.js", true),
473 rulesMap = cli.getRules(),
474 rulesMeta = {};
475
476 Object.keys(rules).forEach(ruleId => {
477 rulesMeta[ruleId] = rulesMap.get(ruleId).meta;
478 });
479
480 return formatterFiles.reduce((data, filename) => {
481 const fileExt = path.extname(filename),
482 name = path.basename(filename, fileExt);
483
484 if (fileExt === ".js") {
485 const formattedOutput = cli.getFormatter(name)(
486 rawMessages.results,
487 { rulesMeta }
488 );
489
490 data.formatterResults[name] = {
491 result: stripAnsi(formattedOutput)
492 };
493 }
494 return data;
495 }, { formatterResults: {} });
496 }
497
498 /**
499 * Gets a path to an executable in node_modules/.bin
500 * @param {string} command The executable name
501 * @returns {string} The executable path
502 */
503 function getBinFile(command) {
504 return path.join("node_modules", ".bin", command);
505 }
506
507 //------------------------------------------------------------------------------
508 // Tasks
509 //------------------------------------------------------------------------------
510
511 target.lint = function([fix = false] = []) {
512 let errors = 0,
513 lastReturn;
514
515 /*
516 * In order to successfully lint JavaScript files in the `docs` directory, dependencies declared in `docs/package.json`
517 * would have to be installed in `docs/node_modules`. In particular, eslint-plugin-node rules examine `docs/node_modules`
518 * when analyzing `require()` calls from CJS modules in the `docs` directory. Since our release process does not run `npm install`
519 * in the `docs` directory, linting would fail and break the release. Also, working on the main `eslint` package does not require
520 * installing dependencies declared in `docs/package.json`, so most contributors will not have `docs/node_modules` locally.
521 * Therefore, we add `--ignore-pattern docs` to exclude linting the `docs` directory from this command.
522 * There is a separate command `target.lintDocsJS` for linting JavaScript files in the `docs` directory.
523 */
524 echo("Validating JavaScript files");
525 lastReturn = exec(`${ESLINT}${fix ? "--fix" : ""} . --ignore-pattern docs`);
526 if (lastReturn.code !== 0) {
527 errors++;
528 }
529
530 echo("Validating JSON Files");
531 JSON_FILES.forEach(validateJsonFile);
532
533 echo("Validating Markdown Files");
534 lastReturn = lintMarkdown(MARKDOWN_FILES_ARRAY);
535 if (lastReturn.code !== 0) {
536 errors++;
537 }
538
539 if (errors) {
540 exit(1);
541 }
542 };
543
544 target.lintDocsJS = function([fix = false] = []) {
545 let errors = 0;
546
547 echo("Validating JavaScript files in the docs directory");
548 const lastReturn = exec(`${ESLINT}${fix ? "--fix" : ""} docs/.eleventy.js`);
549
550 if (lastReturn.code !== 0) {
551 errors++;
552 }
553
554 if (errors) {
555 exit(1);
556 }
557 };
558
559 target.fuzz = function({ amount = 1000, fuzzBrokenAutofixes = false } = {}) {
560 const fuzzerRunner = require("./tools/fuzzer-runner");
561 const fuzzResults = fuzzerRunner.run({ amount, fuzzBrokenAutofixes });
562
563 if (fuzzResults.length) {
564
565 const uniqueStackTraceCount = new Set(fuzzResults.map(result => result.error)).size;
566
567 echo(`The fuzzer reported ${fuzzResults.length} error${fuzzResults.length === 1 ? "" : "s"} with a total of ${uniqueStackTraceCount} unique stack trace${uniqueStackTraceCount === 1 ? "" : "s"}.`);
568
569 const formattedResults = JSON.stringify({ results: fuzzResults }, null, 4);
570
571 if (process.env.CI) {
572 echo("More details can be found below.");
573 echo(formattedResults);
574 } else {
575 if (!test("-d", DEBUG_DIR)) {
576 mkdir(DEBUG_DIR);
577 }
578
579 let fuzzLogPath;
580 let fileSuffix = 0;
581
582 // To avoid overwriting any existing fuzzer log files, append a numeric suffix to the end of the filename.
583 do {
584 fuzzLogPath = path.join(DEBUG_DIR, `fuzzer-log-${fileSuffix}.json`);
585 fileSuffix++;
586 } while (test("-f", fuzzLogPath));
587
588 formattedResults.to(fuzzLogPath);
589
590 // TODO: (not-an-aardvark) Create a better way to isolate and test individual fuzzer errors from the log file
591 echo(`More details can be found in ${fuzzLogPath}.`);
592 }
593
594 exit(1);
595 }
596 };
597
598 target.mocha = () => {
599 let errors = 0,
600 lastReturn;
601
602 echo("Running unit tests");
603
604 lastReturn = exec(`${getBinFile("c8")} -- ${MOCHA} --forbid-only -R progress -t ${MOCHA_TIMEOUT} -c ${TEST_FILES}`);
605 if (lastReturn.code !== 0) {
606 errors++;
607 }
608
609 lastReturn = exec(`${getBinFile("c8")} check-coverage --statement 98 --branch 97 --function 98 --lines 98`);
610 if (lastReturn.code !== 0) {
611 errors++;
612 }
613
614 if (errors) {
615 exit(1);
616 }
617 };
618
619 target.karma = () => {
620 echo("Running unit tests on browsers");
621
622 target.webpack("production");
623
624 const lastReturn = exec(`${getBinFile("karma")} start karma.conf.js`);
625
626 if (lastReturn.code !== 0) {
627 exit(1);
628 }
629 };
630
631 target.test = function() {
632 target.lint();
633 target.checkRuleFiles();
634 target.mocha();
635 target.karma();
636 target.fuzz({ amount: 150, fuzzBrokenAutofixes: false });
637 target.checkLicenses();
638 };
639
640 target.gensite = function() {
641 echo("Generating documentation");
642
643 const DOCS_RULES_DIR = path.join(DOCS_SRC_DIR, "rules");
644 const RULE_VERSIONS_FILE = path.join(DOCS_SRC_DIR, "_data/rule_versions.json");
645
646 // Set up rule version information
647 let versions = test("-f", RULE_VERSIONS_FILE) ? JSON.parse(cat(RULE_VERSIONS_FILE)) : {};
648
649 if (!versions.added) {
650 versions = {
651 added: versions,
652 removed: {}
653 };
654 }
655
656 // 1. Update rule meta data by checking rule docs - important to catch removed rules
657 echo("> Updating rule version meta data (Step 1)");
658 const ruleDocsFiles = find(DOCS_RULES_DIR);
659
660 ruleDocsFiles.forEach((filename, i) => {
661 if (test("-f", filename) && path.extname(filename) === ".md") {
662
663 echo(`> Updating rule version meta data (Step 1: ${i + 1}/${ruleDocsFiles.length}): ${filename}`);
664
665 const baseName = path.basename(filename, ".md"),
666 sourceBaseName = `${baseName}.js`,
667 sourcePath = path.join("lib/rules", sourceBaseName);
668
669 if (!versions.added[baseName]) {
670 versions.added[baseName] = getFirstVersionOfFile(sourcePath);
671 }
672
673 if (!versions.removed[baseName] && !test("-f", sourcePath)) {
674 versions.removed[baseName] = getFirstVersionOfDeletion(sourcePath);
675 }
676
677 }
678 });
679
680 JSON.stringify(versions, null, 4).to(RULE_VERSIONS_FILE);
681
682 // 2. Generate rules index page meta data
683 echo("> Generating the rules index page (Step 2)");
684 generateRuleIndexPage();
685
686 // 3. Create Example Formatter Output Page
687 echo("> Creating the formatter examples (Step 3)");
688 generateFormatterExamples(getFormatterResults());
689
690 echo("Done generating documentation");
691 };
692
693 target.generateRuleIndexPage = generateRuleIndexPage;
694
695 target.webpack = function(mode = "none") {
696 exec(`${getBinFile("webpack")} --mode=${mode} --output-path=${BUILD_DIR}`);
697 };
698
699 target.checkRuleFiles = function() {
700
701 echo("Validating rules");
702
703 const ruleTypes = require("./tools/rule-types.json");
704 let errors = 0;
705
706 RULE_FILES.forEach(filename => {
707 const basename = path.basename(filename, ".js");
708 const docFilename = `docs/src/rules/${basename}.md`;
709 const docText = cat(docFilename);
710 const docMarkdown = marked.lexer(docText, { gfm: true, silent: false });
711 const ruleCode = cat(filename);
712 const knownHeaders = ["Rule Details", "Options", "Environments", "Examples", "Known Limitations", "When Not To Use It", "Compatibility"];
713
714 /**
715 * Check if basename is present in rule-types.json file.
716 * @returns {boolean} true if present
717 * @private
718 */
719 function isInRuleTypes() {
720 return Object.prototype.hasOwnProperty.call(ruleTypes, basename);
721 }
722
723 /**
724 * Check if id is present in title
725 * @param {string} id id to check for
726 * @returns {boolean} true if present
727 * @private
728 * @todo Will remove this check when the main heading is automatically generated from rule metadata.
729 */
730 function hasIdInTitle(id) {
731 return new RegExp(`title: ${id}`, "u").test(docText);
732 }
733
734 /**
735 * Check if all H2 headers are known and in the expected order
736 * Only H2 headers are checked as H1 and H3 are variable and/or rule specific.
737 * @returns {boolean} true if all headers are known and in the right order
738 */
739 function hasKnownHeaders() {
740 const headers = docMarkdown.filter(token => token.type === "heading" && token.depth === 2).map(header => header.text);
741
742 for (const header of headers) {
743 if (!knownHeaders.includes(header)) {
744 return false;
745 }
746 }
747
748 /*
749 * Check only the subset of used headers for the correct order
750 */
751 const presentHeaders = knownHeaders.filter(header => headers.includes(header));
752
753 for (let i = 0; i < presentHeaders.length; ++i) {
754 if (presentHeaders[i] !== headers[i]) {
755 return false;
756 }
757 }
758
759 return true;
760 }
761
762 /**
763 * Check if deprecated information is in rule code and README.md.
764 * @returns {boolean} true if present
765 * @private
766 */
767 function hasDeprecatedInfo() {
768 const deprecatedTagRegExp = /@deprecated in ESLint/u;
769 const deprecatedInfoRegExp = /This rule was .+deprecated.+in ESLint/u;
770
771 return deprecatedTagRegExp.test(ruleCode) && deprecatedInfoRegExp.test(docText);
772 }
773
774 /**
775 * Check if the rule code has the jsdoc comment with the rule type annotation.
776 * @returns {boolean} true if present
777 * @private
778 */
779 function hasRuleTypeJSDocComment() {
780 const comment = "/** @type {import('../shared/types').Rule} */";
781
782 return ruleCode.includes(comment);
783 }
784
785 // check for docs
786 if (!test("-f", docFilename)) {
787 console.error("Missing documentation for rule %s", basename);
788 errors++;
789 } else {
790
791 // check for proper doc h1 format
792 if (!hasIdInTitle(basename)) {
793 console.error("Missing id in the doc page's title of rule %s", basename);
794 errors++;
795 }
796
797 // check for proper doc headers
798 if (!hasKnownHeaders()) {
799 console.error("Unknown or misplaced header in the doc page of rule %s, allowed headers (and their order) are: '%s'", basename, knownHeaders.join("', '"));
800 errors++;
801 }
802 }
803
804 // check for recommended configuration
805 if (!isInRuleTypes()) {
806 console.error("Missing setting for %s in tools/rule-types.json", basename);
807 errors++;
808 }
809
810 // check parity between rules index file and rules directory
811 const ruleIdsInIndex = require("./lib/rules/index");
812 const ruleDef = ruleIdsInIndex.get(basename);
813
814 if (!ruleDef) {
815 console.error(`Missing rule from index (./lib/rules/index.js): ${basename}. If you just added a new rule then add an entry for it in this file.`);
816 errors++;
817 } else {
818
819 // check deprecated
820 if (ruleDef.meta.deprecated && !hasDeprecatedInfo()) {
821 console.error(`Missing deprecated information in ${basename} rule code or README.md. Please write @deprecated tag in code or 「This rule was deprecated in ESLint ...」 in README.md.`);
822 errors++;
823 }
824
825 // check eslint:recommended
826 const recommended = require("./conf/eslint-recommended");
827
828 if (ruleDef.meta.docs.recommended) {
829 if (recommended.rules[basename] !== "error") {
830 console.error(`Missing rule from eslint:recommended (./conf/eslint-recommended.js): ${basename}. If you just made a rule recommended then add an entry for it in this file.`);
831 errors++;
832 }
833 } else {
834 if (basename in recommended.rules) {
835 console.error(`Extra rule in eslint:recommended (./conf/eslint-recommended.js): ${basename}. If you just added a rule then don't add an entry for it in this file.`);
836 errors++;
837 }
838 }
839
840 if (!hasRuleTypeJSDocComment()) {
841 console.error(`Missing rule type JSDoc comment from ${basename} rule code.`);
842 errors++;
843 }
844 }
845
846 // check for tests
847 if (!test("-f", `tests/lib/rules/${basename}.js`)) {
848 console.error("Missing tests for rule %s", basename);
849 errors++;
850 }
851
852 });
853
854 if (errors) {
855 exit(1);
856 }
857
858 };
859
860 target.checkLicenses = function() {
861
862 /**
863 * Check if a dependency is eligible to be used by us
864 * @param {Object} dependency dependency to check
865 * @returns {boolean} true if we have permission
866 * @private
867 */
868 function isPermissible(dependency) {
869 const licenses = dependency.licenses;
870
871 if (Array.isArray(licenses)) {
872 return licenses.some(license => isPermissible({
873 name: dependency.name,
874 licenses: license
875 }));
876 }
877
878 return OPEN_SOURCE_LICENSES.some(license => license.test(licenses));
879 }
880
881 echo("Validating licenses");
882
883 checker.init({
884 start: __dirname
885 }, deps => {
886 const impermissible = Object.keys(deps).map(dependency => ({
887 name: dependency,
888 licenses: deps[dependency].licenses
889 })).filter(dependency => !isPermissible(dependency));
890
891 if (impermissible.length) {
892 impermissible.forEach(dependency => {
893 console.error(
894 "%s license for %s is impermissible.",
895 dependency.licenses,
896 dependency.name
897 );
898 });
899 exit(1);
900 }
901 });
902 };
903
904 /**
905  * Downloads a repository that has many JS files, for the multi-file performance test.
906  * Here, it's eslint@1.10.3 (450 files).
907 * @param {Function} cb A callback function.
908 * @returns {void}
909 */
910 function downloadMultifilesTestTarget(cb) {
911 if (test("-d", PERF_MULTIFILES_TARGET_DIR)) {
912 process.nextTick(cb);
913 } else {
914 mkdir("-p", PERF_MULTIFILES_TARGET_DIR);
915 echo("Downloading the repository of multi-files performance test target.");
916 exec(`git clone -b v1.10.3 --depth 1 https://github.com/eslint/eslint.git "${PERF_MULTIFILES_TARGET_DIR}"`, { silent: true }, cb);
917 }
918 }
919
920 /**
921  * Creates a config file to use for performance tests.
922  * This config turns all core rules on.
923 * @returns {void}
924 */
925 function createConfigForPerformanceTest() {
926 let rules = "";
927
928 for (const [ruleId] of builtinRules) {
929 rules += (` "${ruleId}": 1,\n`);
930 }
931
932 const content = `
933 module.exports = [{
934 "languageOptions": {sourceType: "commonjs"},
935 "rules": {
936 ${rules}
937 }
938 }];`;
939
940 content.to(PERF_ESLINTRC);
941 }
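/*
 * For illustration only, the generated eslint.config.js looks roughly like:
 *
 *     module.exports = [{
 *         "languageOptions": {sourceType: "commonjs"},
 *         "rules": {
 *             "accessor-pairs": 1,
 *             "array-callback-return": 1,
 *             ...one entry per built-in rule...
 *         }
 *     }];
 *
 * ("accessor-pairs" and "array-callback-return" are just examples of core rule names.)
 */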
942
943 /**
944 * @callback TimeCallback
945  * @param {?int[]} results The per-run timings in milliseconds, or null if a run failed.
946 * @returns {void}
947 */
948
949 /**
950  * Times each run of a performance test command.
951 * @param {string} cmd cmd
952 * @param {int} runs Total number of runs to do
953 * @param {int} runNumber Current run number
954 * @param {int[]} results Collection results from each run
955 * @param {TimeCallback} cb Function to call when everything is done
956 * @returns {void} calls the cb with all the results
957 * @private
958 */
959 function time(cmd, runs, runNumber, results, cb) {
960 const start = process.hrtime();
961
962 exec(cmd, { maxBuffer: 64 * 1024 * 1024, silent: true }, (code, stdout, stderr) => {
963 const diff = process.hrtime(start),
964 actual = (diff[0] * 1e3 + diff[1] / 1e6); // ms
965
966 if (code) {
967 echo(` Performance Run #${runNumber} failed.`);
968 if (stdout) {
969 echo(`STDOUT:\n${stdout}\n\n`);
970 }
971
972 if (stderr) {
973 echo(`STDERR:\n${stderr}\n\n`);
974 }
975 return cb(null);
976 }
977
978 results.push(actual);
979 echo(` Performance Run #${runNumber}: %dms`, actual);
980 if (runs > 1) {
981 return time(cmd, runs - 1, runNumber + 1, results, cb);
982 }
983 return cb(results);
984
985 });
986
987 }
988
989 /**
990 * Run a performance test.
991 * @param {string} title A title.
992 * @param {string} targets Test targets.
993 * @param {number} multiplier A multiplier for limitation.
994 * @param {Function} cb A callback function.
995 * @returns {void}
996 */
997 function runPerformanceTest(title, targets, multiplier, cb) {
998 const cpuSpeed = os.cpus()[0].speed,
999 max = multiplier / cpuSpeed,
1000 cmd = `${ESLINT}--config "${PERF_ESLINTRC}" --no-config-lookup --no-ignore ${targets}`;
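    /*
     * For the single-file test this expands to roughly (illustrative):
     * node bin/eslint.js --report-unused-disable-directives --config "tmp/eslint/performance/eslint.config.js" --no-config-lookup --no-ignore tests/performance/jshint.js
     */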
1001
1002 echo("");
1003 echo(title);
1004 echo(" CPU Speed is %d with multiplier %d", cpuSpeed, multiplier);
1005
1006 time(cmd, 5, 1, [], results => {
1007 if (!results || results.length === 0) { // No results? Something is wrong.
1008 throw new Error("Performance test failed.");
1009 }
1010
1011 results.sort((a, b) => a - b);
1012
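        // `~~` truncates to an integer, so with the five runs requested above this picks
        // index 2, the middle value of the sorted results.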
1013 const median = results[~~(results.length / 2)];
1014
1015 echo("");
1016 if (median > max) {
1017 echo(" Performance budget exceeded: %dms (limit: %dms)", median, max);
1018 } else {
1019 echo(" Performance budget ok: %dms (limit: %dms)", median, max);
1020 }
1021 echo("");
1022 cb();
1023 });
1024 }
1025
1026 /**
1027  * Runs the load-time performance test for ESLint.
1028 * @returns {void}
1029 * @private
1030 */
1031 function loadPerformance() {
1032 echo("");
1033 echo("Loading:");
1034
1035 const results = [];
1036
1037 for (let cnt = 0; cnt < 5; cnt++) {
1038 const loadPerfData = loadPerf({
1039 checkDependencies: false
1040 });
1041
1042 echo(` Load performance Run #${cnt + 1}: %dms`, loadPerfData.loadTime);
1043 results.push(loadPerfData.loadTime);
1044 }
1045
1046 results.sort((a, b) => a - b);
1047 const median = results[~~(results.length / 2)];
1048
1049 echo("");
1050 echo(" Load Performance median: %dms", median);
1051 echo("");
1052 }
1053
1054 target.perf = function() {
1055 downloadMultifilesTestTarget(() => {
1056 createConfigForPerformanceTest();
1057
1058 loadPerformance();
1059
1060 runPerformanceTest(
1061 "Single File:",
1062 "tests/performance/jshint.js",
1063 PERF_MULTIPLIER,
1064 () => {
1065
1066 // Count test target files.
1067 const count = glob.sync(
1068 (
1069 process.platform === "win32"
1070 ? PERF_MULTIFILES_TARGETS.replace(/\\/gu, "/")
1071 : PERF_MULTIFILES_TARGETS
1072 )
1073 .slice(1, -1) // strip quotes
1074 ).length;
1075
1076 runPerformanceTest(
1077 `Multi Files (${count} files):`,
1078 PERF_MULTIFILES_TARGETS,
1079 3 * PERF_MULTIPLIER,
1080 () => {}
1081 );
1082 }
1083 );
1084 });
1085 };
1086
1087 target.generateRelease = generateRelease;
1088 target.generatePrerelease = ([prereleaseType]) => generatePrerelease(prereleaseType);
1089 target.publishRelease = publishRelease;