]>
Commit | Line | Data |
---|---|---|
eb39fafa DC |
1 | /** |
2 | * @fileoverview Build file | |
3 | * @author nzakas | |
4 | */ | |
5 | ||
6 | /* global target */ | |
7 | /* eslint no-use-before-define: "off", no-console: "off" */ | |
8 | "use strict"; | |
9 | ||
10 | //------------------------------------------------------------------------------ | |
11 | // Requirements | |
12 | //------------------------------------------------------------------------------ | |
13 | ||
14 | require("shelljs/make"); | |
15 | ||
5422a9cc | 16 | const checker = require("npm-license"), |
eb39fafa DC |
17 | ReleaseOps = require("eslint-release"), |
18 | dateformat = require("dateformat"), | |
19 | fs = require("fs"), | |
20 | glob = require("glob"), | |
21 | markdownlint = require("markdownlint"), | |
22 | os = require("os"), | |
23 | path = require("path"), | |
24 | semver = require("semver"), | |
25 | ejs = require("ejs"), | |
26 | loadPerf = require("load-perf"), | |
27 | yaml = require("js-yaml"), | |
28 | { CLIEngine } = require("./lib/cli-engine"), | |
29 | builtinRules = require("./lib/rules/index"); | |
30 | ||
31 | const { cat, cd, cp, echo, exec, exit, find, ls, mkdir, pwd, rm, test } = require("shelljs"); | |
32 | ||
33 | //------------------------------------------------------------------------------ | |
34 | // Settings | |
35 | //------------------------------------------------------------------------------ | |
36 | ||
/*
 * A little bit fuzzy. My computer has a first CPU speed of 3392 and the perf test
 * always completes in < 3800ms. However, Travis is less predictable due to
 * multiple different VM types. So I'm fudging this for now in the hopes that it
 * at least provides some sort of useful signal.
 */
const PERF_MULTIPLIER = 13e6;

// License-name patterns treated as acceptable for dependencies (used by checkLicenses).
const OPEN_SOURCE_LICENSES = [
    /MIT/u, /BSD/u, /Apache/u, /ISC/u, /WTF/u, /Public Domain/u, /LGPL/u
];

//------------------------------------------------------------------------------
// Data
//------------------------------------------------------------------------------

const NODE = "node ", // intentional extra space
    NODE_MODULES = "./node_modules/",
    TEMP_DIR = "./tmp/",
    DEBUG_DIR = "./debug/",
    BUILD_DIR = "build",

    // The website repo is assumed to be checked out next to this repo.
    DOCS_DIR = "../website/docs",
    SITE_DIR = "../website/",
    PERF_TMP_DIR = path.join(TEMP_DIR, "eslint", "performance"),

    // Utilities - intentional extra space at the end of each string
    MOCHA = `${NODE_MODULES}mocha/bin/_mocha `,
    ESLINT = `${NODE} bin/eslint.js --report-unused-disable-directives `,

    // Files (index.js is excluded because it is not itself a rule)
    RULE_FILES = glob.sync("lib/rules/*.js").filter(filePath => path.basename(filePath) !== "index.js"),
    JSON_FILES = find("conf/").filter(fileType("json")),
    MARKDOWN_FILES_ARRAY = find("docs/").concat(ls(".")).filter(fileType("md")),
    TEST_FILES = "\"tests/{bin,lib,tools}/**/*.js\"",
    PERF_ESLINTRC = path.join(PERF_TMP_DIR, "eslintrc.yml"),
    PERF_MULTIFILES_TARGET_DIR = path.join(PERF_TMP_DIR, "eslint"),
    PERF_MULTIFILES_TARGETS = `"${PERF_MULTIFILES_TARGET_DIR + path.sep}{lib,tests${path.sep}lib}${path.sep}**${path.sep}*.js"`,

    // Settings
    MOCHA_TIMEOUT = 10000;
77 | ||
78 | //------------------------------------------------------------------------------ | |
79 | // Helpers | |
80 | //------------------------------------------------------------------------------ | |
81 | ||
/**
 * Simple JSON file validation that relies on the ES JSON parser.
 * Reads the file as UTF-8 and attempts to parse it; the parsed value
 * is intentionally discarded.
 * @param {string} filePath Path to JSON.
 * @throws {Error} If the file contents are not valid JSON.
 * @returns {undefined}
 */
function validateJsonFile(filePath) {
    JSON.parse(fs.readFileSync(filePath, "utf8"));
}
93 | ||
/**
 * Generates a predicate that matches files with a particular extension.
 * The match is done on the text after the last "." in the name, so a
 * name without a dot compares its whole text against the extension.
 * @param {string} extension The file extension (i.e. "js")
 * @returns {Function} The function to pass into a filter method.
 * @private
 */
function fileType(extension) {
    return filename => filename.slice(filename.lastIndexOf(".") + 1) === extension;
}
105 | ||
/**
 * Executes a shell command without echoing it to stdout and returns
 * whatever the command printed.
 * @param {string} cmd The command string to execute.
 * @returns {string} The stdout of the executed command.
 */
function execSilent(cmd) {
    const { stdout } = exec(cmd, { silent: true });

    return stdout;
}
114 | ||
/**
 * Generates a release blog post for eslint.org from the blogpost EJS template
 * and writes it into the website repo's `_posts` directory using a
 * Jekyll-style `YYYY-MM-DD-eslint-vX.Y.Z-released.md` filename.
 * @param {Object} releaseInfo The release metadata (must include `version`).
 * @param {string} [prereleaseMajorVersion] If this is a prerelease, the next major version after this prerelease
 * @returns {void}
 * @private
 */
function generateBlogPost(releaseInfo, prereleaseMajorVersion) {
    const ruleList = RULE_FILES

        // Strip the .js extension
        .map(ruleFileName => path.basename(ruleFileName, ".js"))

        /*
         * Sort by length descending. This ensures that rule names which are substrings of other rule names are not
         * matched incorrectly. For example, the string "no-undefined" should get matched with the `no-undefined` rule,
         * instead of getting matched with the `no-undef` rule followed by the string "ined".
         */
        .sort((ruleA, ruleB) => ruleB.length - ruleA.length);

    const renderContext = Object.assign({ prereleaseMajorVersion, ruleList }, releaseInfo);

    // Zero-pad month/day so the filename sorts correctly (padStart replaces the
    // previous `x < 10 ? \`0${x}\` : x` ternaries).
    const output = ejs.render(cat("./templates/blogpost.md.ejs"), renderContext),
        now = new Date(),
        month = String(now.getMonth() + 1).padStart(2, "0"),
        day = String(now.getDate()).padStart(2, "0"),
        filename = `../website/_posts/${now.getFullYear()}-${month}-${day}-eslint-v${releaseInfo.version}-released.md`;

    output.to(filename);
}
148 | ||
/**
 * Generates a doc page with formatter result examples and writes it (plus the
 * standalone HTML formatter example) into the website repo.
 * @param {Object} formatterInfo Linting results from each formatter
 * @param {string} [prereleaseVersion] The version used for a prerelease. This
 *      changes where the output is stored.
 * @returns {void}
 */
function generateFormatterExamples(formatterInfo, prereleaseVersion) {
    const rendered = ejs.render(cat("./templates/formatter-examples.md.ejs"), formatterInfo);
    let filename = "../website/docs/user-guide/formatters/index.md";
    let htmlFilename = "../website/docs/user-guide/formatters/html-formatter-example.html";

    if (prereleaseVersion) {

        // Prereleases are written under a versioned docs subdirectory.
        filename = filename.replace("/docs", `/docs/${prereleaseVersion}`);
        htmlFilename = htmlFilename.replace("/docs", `/docs/${prereleaseVersion}`);

        if (!test("-d", path.dirname(filename))) {
            mkdir(path.dirname(filename));
        }
    }

    rendered.to(filename);
    formatterInfo.formatterResults.html.result.to(htmlFilename);
}
172 | ||
/**
 * Generate a doc page that lists all of the rules and links to them.
 * Reads the category list from conf/category-list.json, buckets every rule
 * (via its exported `meta`) into its category or the deprecated section,
 * and dumps the result as YAML for the website's _data directory.
 * @returns {void}
 */
function generateRuleIndexPage() {
    const outputFile = "../website/_data/rules.yml",
        categoryList = "conf/category-list.json",
        categoriesData = JSON.parse(cat(path.resolve(categoryList)));

    RULE_FILES
        .map(filename => [filename, path.basename(filename, ".js")])
        .sort((a, b) => a[1].localeCompare(b[1]))
        .forEach(pair => {
            const filename = pair[0];
            const basename = pair[1];
            const rule = require(path.resolve(filename));

            if (rule.meta.deprecated) {

                // Deprecated rules get their own section instead of a category.
                categoriesData.deprecated.rules.push({
                    name: basename,
                    replacedBy: rule.meta.replacedBy || []
                });
            } else {
                const output = {
                        name: basename,
                        description: rule.meta.docs.description,
                        recommended: rule.meta.docs.recommended || false,
                        fixable: !!rule.meta.fixable
                    },

                    // NOTE(review): assumes every non-deprecated rule's category exists
                    // in category-list.json; `category` would be undefined otherwise.
                    category = categoriesData.categories.find(c => c.name === rule.meta.docs.category);

                if (!category.rules) {
                    category.rules = [];
                }

                category.rules.push(output);
            }
        });

    // `.rules` will be `undefined` if all rules in category are deprecated.
    categoriesData.categories = categoriesData.categories.filter(category => !!category.rules);

    const output = yaml.safeDump(categoriesData, { sortKeys: true });

    output.to(outputFile);
}
219 | ||
/**
 * Creates a git commit and tag in an adjacent `website` repository, without pushing it to
 * the remote. This assumes that the repository has already been modified somehow (e.g. by adding a blogpost).
 * @param {string} [tag] The string to tag the commit with
 * @returns {void}
 */
function commitSiteToGit(tag) {
    const returnDir = pwd();

    cd(SITE_DIR);
    exec("git add -A .");
    exec(`git commit -m "Autogenerated new docs and demo at ${dateformat(new Date())}"`);

    if (tag) {
        exec(`git tag ${tag}`);
    }

    // Rebase onto the remote so a later push is fast-forward.
    exec("git fetch origin && git rebase origin/master");
    cd(returnDir);
}
240 | ||
/**
 * Publishes the changes in an adjacent `website` repository to the remote. The
 * site should already have local commits (e.g. from running `commitSiteToGit`).
 * @returns {void}
 */
function publishSite() {
    const previousDir = pwd();

    cd(SITE_DIR);
    exec("git push origin master --tags");
    cd(previousDir);
}
253 | ||
/**
 * Updates the changelog, bumps the version number in package.json, creates a local git commit and tag,
 * and generates the site in an adjacent `website` folder.
 * @returns {void}
 */
function generateRelease() {
    ReleaseOps.generateRelease();

    // ReleaseOps writes the release metadata to this file as a side effect.
    const releaseInfo = JSON.parse(cat(".eslint-release-info.json"));

    echo("Generating site");
    target.gensite();
    generateBlogPost(releaseInfo);
    commitSiteToGit(`v${releaseInfo.version}`);
}
268 | ||
/**
 * Updates the changelog, bumps the version number in package.json, creates a local git commit and tag,
 * and generates the site in an adjacent `website` folder.
 * @param {string} prereleaseId The prerelease identifier (alpha, beta, etc.)
 * @returns {void}
 */
function generatePrerelease(prereleaseId) {
    ReleaseOps.generateRelease(prereleaseId);

    // ReleaseOps writes the release metadata to this file as a side effect.
    const releaseInfo = JSON.parse(cat(".eslint-release-info.json"));
    const nextMajor = semver.inc(releaseInfo.version, "major");

    echo("Generating site");

    // always write docs into the next major directory (so 2.0.0-alpha.0 writes to 2.0.0)
    target.gensite(nextMajor);

    /*
     * Premajor release should have identical "next major version".
     * Preminor and prepatch release will not.
     * 5.0.0-alpha.0 --> next major = 5, current major = 5
     * 4.4.0-alpha.0 --> next major = 5, current major = 4
     * 4.0.1-alpha.0 --> next major = 5, current major = 4
     */
    if (semver.major(releaseInfo.version) === semver.major(nextMajor)) {

        /*
         * This prerelease is for a major release (not preminor/prepatch).
         * Blog post generation logic needs to be aware of this (as well as
         * know what the next major version is actually supposed to be).
         */
        generateBlogPost(releaseInfo, nextMajor);
    } else {
        generateBlogPost(releaseInfo);
    }

    commitSiteToGit(`v${releaseInfo.version}`);
}
306 | ||
/**
 * Publishes a generated release to npm and GitHub, and pushes changes to the adjacent `website` repo
 * to remote repo. Assumes `generateRelease`/`generatePrerelease` already ran.
 * @returns {void}
 */
function publishRelease() {
    ReleaseOps.publishRelease();
    publishSite();
}
316 | ||
/**
 * Splits a command result into separate lines, dropping surrounding
 * whitespace first so there is no trailing empty line.
 * @param {string} result The command result string.
 * @returns {Array} The separated lines.
 */
function splitCommandResultToLines(result) {
    const trimmed = result.trim();

    return trimmed.split("\n");
}
325 | ||
/**
 * Gets the first (oldest) commit sha that touched the given file.
 * `git rev-list` emits newest-first, so the last line is the earliest commit.
 * @param {string} filePath The file path which should be checked.
 * @returns {string} The commit sha.
 */
function getFirstCommitOfFile(filePath) {
    const commitList = splitCommandResultToLines(execSilent(`git rev-list HEAD -- ${filePath}`));

    return commitList[commitList.length - 1].trim();
}
337 | ||
/**
 * Gets the tag name where a given file was introduced first: the lowest
 * semver-valid tag that contains the file's first commit.
 * @param {string} filePath The file path to check.
 * @returns {string} The tag name.
 */
function getFirstVersionOfFile(filePath) {
    const firstCommit = getFirstCommitOfFile(filePath);
    const tags = execSilent(`git tag --contains ${firstCommit}`);

    // map/filter/sort mirrors getFirstVersionOfDeletion for consistency;
    // non-semver tags map to null and are filtered out.
    return splitCommandResultToLines(tags)
        .map(version => semver.valid(version.trim()))
        .filter(version => version)
        .sort(semver.compare)[0];
}
357 | ||
/**
 * Gets the commit that deleted a file (the most recent commit that
 * touched it, i.e. the first line of `git rev-list`).
 * @param {string} filePath The path to the deleted file.
 * @returns {string} The commit sha.
 */
function getCommitDeletingFile(filePath) {
    const [newestCommit] = splitCommandResultToLines(execSilent(`git rev-list HEAD -- ${filePath}`));

    return newestCommit;
}
368 | ||
/**
 * Gets the first version number where a given file is no longer present:
 * the lowest semver-valid tag containing the deleting commit.
 * @param {string} filePath The path to the deleted file.
 * @returns {string} The version number.
 */
function getFirstVersionOfDeletion(filePath) {
    const deletionCommit = getCommitDeletingFile(filePath);
    const tagOutput = execSilent(`git tag --contains ${deletionCommit}`);
    const versions = splitCommandResultToLines(tagOutput)
        .map(tagName => semver.valid(tagName.trim()))
        .filter(Boolean);

    return versions.sort(semver.compare)[0];
}
383 | ||
/**
 * Lints Markdown files with markdownlint using the repo's .markdownlint.yml
 * config. Violations are printed to stderr.
 * @param {Array} files Array of file names to lint.
 * @returns {Object} exec-style exit code object ({ code: 0 } on success, { code: 1 } on violations).
 * @private
 */
function lintMarkdown(files) {
    const config = yaml.safeLoad(fs.readFileSync(path.join(__dirname, "./.markdownlint.yml"), "utf8")),
        result = markdownlint.sync({
            files,
            config,
            resultVersion: 1
        }),

        // markdownlint's result stringifies to "" when there are no violations.
        resultString = result.toString(),
        returnCode = resultString ? 1 : 0;

    if (resultString) {
        console.error(resultString);
    }
    return { code: returnCode };
}
405 | ||
/**
 * Gets linting results from every formatter, based on a hard-coded snippet and config.
 * Runs CLIEngine once over a deliberately problem-filled snippet, then renders
 * those same results through each formatter in lib/cli-engine/formatters.
 * @returns {Object} Output from each formatter, keyed by formatter name under `formatterResults`.
 */
function getFormatterResults() {
    const stripAnsi = require("strip-ansi");

    const formatterFiles = fs.readdirSync("./lib/cli-engine/formatters/"),

        // Rules chosen to produce a mix of warnings, errors, and fixable problems.
        rules = {
            "no-else-return": "warn",
            indent: ["warn", 4],
            "space-unary-ops": "error",
            semi: ["warn", "always"],
            "consistent-return": "error"
        },
        cli = new CLIEngine({
            useEslintrc: false,
            baseConfig: { extends: "eslint:recommended" },
            rules
        }),

        // Fixture snippet that intentionally violates the rules above.
        codeString = [
            "function addOne(i) {",
            "    if (i != NaN) {",
            "        return i ++",
            "    } else {",
            "      return",
            "    }",
            "};"
        ].join("\n"),
        rawMessages = cli.executeOnText(codeString, "fullOfProblems.js", true),
        rulesMap = cli.getRules(),
        rulesMeta = {};

    // Formatters receive rule metadata so they can render rule descriptions/links.
    Object.keys(rules).forEach(ruleId => {
        rulesMeta[ruleId] = rulesMap.get(ruleId).meta;
    });

    return formatterFiles.reduce((data, filename) => {
        const fileExt = path.extname(filename),
            name = path.basename(filename, fileExt);

        // Skip non-JS entries (e.g. .d.ts or readme files) in the formatters directory.
        if (fileExt === ".js") {
            const formattedOutput = cli.getFormatter(name)(
                rawMessages.results,
                { rulesMeta }
            );

            data.formatterResults[name] = {

                // ANSI color codes are stripped so the output embeds cleanly in docs.
                result: stripAnsi(formattedOutput)
            };
        }
        return data;
    }, { formatterResults: {} });
}
460 | ||
/**
 * Gets a platform-appropriate path to an executable in node_modules/.bin.
 * @param {string} command The executable name
 * @returns {string} The executable path
 */
function getBinFile(command) {
    return path.join("node_modules", ".bin", command);
}
469 | ||
470 | //------------------------------------------------------------------------------ | |
471 | // Tasks | |
472 | //------------------------------------------------------------------------------ | |
473 | ||
// Default make target: run the whole test pipeline.
target.all = () => target.test();
477 | ||
/**
 * Lints JavaScript, JSON, and Markdown files in the repo, exiting with
 * code 1 if any category reports problems.
 * @param {Array} [args] Optional array whose first element truthy-enables --fix.
 * @returns {void}
 */
target.lint = function([fix = false] = []) {
    let errors = 0;

    echo("Validating JavaScript files");

    const jsReturn = exec(`${ESLINT}${fix ? "--fix" : ""} .`);

    if (jsReturn.code !== 0) {
        errors++;
    }

    echo("Validating JSON Files");

    // validateJsonFile throws (aborting the task) on malformed JSON.
    JSON_FILES.forEach(validateJsonFile);

    echo("Validating Markdown Files");

    const mdReturn = lintMarkdown(MARKDOWN_FILES_ARRAY);

    if (mdReturn.code !== 0) {
        errors++;
    }

    if (errors) {
        exit(1);
    }
};
501 | ||
/**
 * Runs the autofix fuzzer and, on any reported errors, prints a summary,
 * saves (or echoes, on CI) the full results, and exits with code 1.
 * @param {Object} [options] Fuzzer options.
 * @param {number} [options.amount=1000] How many fuzz iterations to run.
 * @param {boolean} [options.fuzzBrokenAutofixes=false] Whether to also fuzz known-broken autofixes.
 * @returns {void}
 */
target.fuzz = function({ amount = 1000, fuzzBrokenAutofixes = false } = {}) {
    const fuzzerRunner = require("./tools/fuzzer-runner");
    const fuzzResults = fuzzerRunner.run({ amount, fuzzBrokenAutofixes });

    if (fuzzResults.length) {

        // Multiple failures often share a root cause; count distinct stack traces.
        const uniqueStackTraceCount = new Set(fuzzResults.map(result => result.error)).size;

        echo(`The fuzzer reported ${fuzzResults.length} error${fuzzResults.length === 1 ? "" : "s"} with a total of ${uniqueStackTraceCount} unique stack trace${uniqueStackTraceCount === 1 ? "" : "s"}.`);

        const formattedResults = JSON.stringify({ results: fuzzResults }, null, 4);

        if (process.env.CI) {

            // CI has no persistent disk worth writing to, so dump to the log.
            echo("More details can be found below.");
            echo(formattedResults);
        } else {
            if (!test("-d", DEBUG_DIR)) {
                mkdir(DEBUG_DIR);
            }

            let fuzzLogPath;
            let fileSuffix = 0;

            // To avoid overwriting any existing fuzzer log files, append a numeric suffix to the end of the filename.
            do {
                fuzzLogPath = path.join(DEBUG_DIR, `fuzzer-log-${fileSuffix}.json`);
                fileSuffix++;
            } while (test("-f", fuzzLogPath));

            formattedResults.to(fuzzLogPath);

            // TODO: (not-an-aardvark) Create a better way to isolate and test individual fuzzer errors from the log file
            echo(`More details can be found in ${fuzzLogPath}.`);
        }

        exit(1);
    }
};
540 | ||
/**
 * Runs the Mocha unit tests under nyc coverage and then enforces the
 * coverage thresholds, exiting with code 1 if either step fails.
 * @returns {void}
 */
target.mocha = () => {
    let errors = 0;

    echo("Running unit tests");

    const testRun = exec(`${getBinFile("nyc")} -- ${MOCHA} -R progress -t ${MOCHA_TIMEOUT} -c ${TEST_FILES}`);

    if (testRun.code !== 0) {
        errors++;
    }

    const coverageRun = exec(`${getBinFile("nyc")} check-coverage --statement 98 --branch 97 --function 98 --lines 98`);

    if (coverageRun.code !== 0) {
        errors++;
    }

    if (errors) {
        exit(1);
    }
};
561 | ||
/**
 * Builds the browser bundle (production webpack mode) and runs the
 * browser unit tests through Karma, exiting with code 1 on failure.
 * @returns {void}
 */
target.karma = () => {
    echo("Running unit tests on browsers");

    target.webpack("production");

    if (exec(`${getBinFile("karma")} start karma.conf.js`).code !== 0) {
        exit(1);
    }
};
573 | ||
/**
 * Runs the full verification pipeline: lint, rule-file checks, Node unit
 * tests, browser tests, a short fuzz pass, and a dependency license audit.
 * Each sub-target exits the process itself on failure.
 * @returns {void}
 */
target.test = function() {
    target.lint();
    target.checkRuleFiles();
    target.mocha();
    target.karma();
    target.fuzz({ amount: 150, fuzzBrokenAutofixes: false });
    target.checkLicenses();
};
582 | ||
/**
 * Generates JSDoc API documentation for lib/ into the /jsdoc directory.
 * @returns {void}
 */
target.docs = function() {
    echo("Generating documentation");
    exec(`${getBinFile("jsdoc")} -d jsdoc lib`);
    echo("Documentation has been output to /jsdoc");
};
588 | ||
/**
 * Generates the eslint.org documentation site in the adjacent `website` repo:
 * clears the old docs, copies docs/ through a temp directory while rewriting
 * each Markdown file (front matter, recommended/fixable badges, version and
 * resource sections), then regenerates the rules index and formatter examples.
 * @param {string} [prereleaseVersion] If set, output goes under a versioned
 *      docs subdirectory and the rules index page is skipped.
 * @returns {void}
 */
target.gensite = function(prereleaseVersion) {
    echo("Generating eslint.org");

    let docFiles = [
        "/rules/",
        "/user-guide/",
        "/maintainer-guide/",
        "/developer-guide/",
        "/about/"
    ];

    // append version
    if (prereleaseVersion) {
        docFiles = docFiles.map(docFile => `/${prereleaseVersion}${docFile}`);
    }

    // 1. create temp and build directory
    echo("> Creating a temporary directory (Step 1)");
    if (!test("-d", TEMP_DIR)) {
        mkdir(TEMP_DIR);
    }

    // 2. remove old files from the site
    echo("> Removing old files (Step 2)");
    docFiles.forEach(filePath => {
        const fullPath = path.join(DOCS_DIR, filePath),
            htmlFullPath = fullPath.replace(".md", ".html");

        if (test("-f", fullPath)) {

            rm("-rf", fullPath);

            if (filePath.indexOf(".md") >= 0 && test("-f", htmlFullPath)) {
                rm("-rf", htmlFullPath);
            }
        }
    });

    // 3. Copy docs folder to a temporary directory
    echo("> Copying the docs folder (Step 3)");
    cp("-rf", "docs/*", TEMP_DIR);

    let versions = test("-f", "./versions.json") ? JSON.parse(cat("./versions.json")) : {};

    // Migrate the legacy flat versions.json shape into { added, removed }.
    if (!versions.added) {
        versions = {
            added: versions,
            removed: {}
        };
    }

    const rules = require(".").linter.getRules();

    const RECOMMENDED_TEXT = "\n\n(recommended) The `\"extends\": \"eslint:recommended\"` property in a configuration file enables this rule.";
    const FIXABLE_TEXT = "\n\n(fixable) The `--fix` option on the [command line](../user-guide/command-line-interface#fixing-problems) can automatically fix some of the problems reported by this rule.";

    // 4. Loop through all files in temporary directory
    process.stdout.write("> Updating files (Steps 4-9): 0/... - ...\r");
    const tempFiles = find(TEMP_DIR);
    const length = tempFiles.length;

    tempFiles.forEach((filename, i) => {
        if (test("-f", filename) && path.extname(filename) === ".md") {

            const rulesUrl = "https://github.com/eslint/eslint/tree/master/lib/rules/",
                docsUrl = "https://github.com/eslint/eslint/tree/master/docs/rules/",
                baseName = path.basename(filename),
                sourceBaseName = `${path.basename(filename, ".md")}.js`,
                sourcePath = path.join("lib/rules", sourceBaseName),
                ruleName = path.basename(filename, ".md"),
                filePath = path.join("docs", path.relative("tmp", filename));
            let text = cat(filename),
                ruleType = "",
                title;

            // Progress line overwrites itself via the trailing \r; the space
            // padding clears leftovers from longer previous paths.
            process.stdout.write(`> Updating files (Steps 4-9): ${i}/${length} - ${filePath + " ".repeat(30)}\r`);

            // 5. Prepend page title and layout variables at the top of rules
            if (path.dirname(filename).indexOf("rules") >= 0) {

                // Find out if the rule requires a special docs portion (e.g. if it is recommended and/or fixable)
                const rule = rules.get(ruleName);
                const isRecommended = rule && rule.meta.docs.recommended;
                const isFixable = rule && rule.meta.fixable;

                // Incorporate the special portion into the documentation content
                const textSplit = text.split("\n");
                const ruleHeading = textSplit[0];
                const ruleDocsContent = textSplit.slice(1).join("\n");

                text = `${ruleHeading}${isRecommended ? RECOMMENDED_TEXT : ""}${isFixable ? FIXABLE_TEXT : ""}\n${ruleDocsContent}`;
                title = `${ruleName} - Rules`;

                if (rule && rule.meta) {
                    ruleType = `rule_type: ${rule.meta.type}`;
                }
            } else {

                // extract the title from the file itself
                title = text.match(/#([^#].+)\n/u);
                if (title) {
                    title = title[1].trim();
                } else {
                    title = "Documentation";
                }
            }

            // Prepend Jekyll front matter.
            text = [
                "---",
                `title: ${title}`,
                "layout: doc",
                `edit_link: https://github.com/eslint/eslint/edit/master/${filePath}`,
                ruleType,
                "---",
                "<!-- Note: No pull requests accepted for this file. See README.md in the root directory for details. -->",
                "",
                text
            ].join("\n");

            // 6. Remove .md extension for relative links and change README to empty string
            text = text.replace(/\((?!https?:\/\/)([^)]*?)\.md(.*?)\)/gu, "($1$2)").replace("README.html", "");

            // 7. Check if there's a trailing white line at the end of the file, if there isn't one, add it
            if (!/\n$/u.test(text)) {
                text = `${text}\n`;
            }

            // 8. Append first version of ESLint rule was added at.
            if (filename.indexOf("rules/") !== -1) {
                if (!versions.added[baseName]) {
                    versions.added[baseName] = getFirstVersionOfFile(sourcePath);
                }
                const added = versions.added[baseName];

                // A missing source file means the rule was removed at some point.
                if (!versions.removed[baseName] && !test("-f", sourcePath)) {
                    versions.removed[baseName] = getFirstVersionOfDeletion(sourcePath);
                }
                const removed = versions.removed[baseName];

                text += "\n## Version\n\n";
                text += removed
                    ? `This rule was introduced in ESLint ${added} and removed in ${removed}.\n`
                    : `This rule was introduced in ESLint ${added}.\n`;

                text += "\n## Resources\n\n";
                if (!removed) {
                    text += `* [Rule source](${rulesUrl}${sourceBaseName})\n`;
                }
                text += `* [Documentation source](${docsUrl}${baseName})\n`;
            }

            // 9. Update content of the file with changes
            text.to(filename.replace("README.md", "index.md"));
        }
    });
    JSON.stringify(versions).to("./versions.json");
    echo(`> Updating files (Steps 4-9)${" ".repeat(50)}`);

    // 10. Copy temporary directory to site's docs folder
    echo("> Copying the temporary directory into the site's docs folder (Step 10)");
    let outputDir = DOCS_DIR;

    if (prereleaseVersion) {
        outputDir += `/${prereleaseVersion}`;
        if (!test("-d", outputDir)) {
            mkdir(outputDir);
        }
    }
    cp("-rf", `${TEMP_DIR}*`, outputDir);

    // 11. Generate rules index page
    if (prereleaseVersion) {
        echo("> Skipping generating rules index page because this is a prerelease (Step 11)");
    } else {
        echo("> Generating the rules index page (Step 11)");
        generateRuleIndexPage();
    }

    // 12. Delete temporary directory
    echo("> Removing the temporary directory (Step 12)");
    rm("-rf", TEMP_DIR);

    // 13. Create Example Formatter Output Page
    // NOTE(review): the echo below says "Step 14" but this is step 13 — label mismatch in the message string.
    echo("> Creating the formatter examples (Step 14)");
    generateFormatterExamples(getFormatterResults(), prereleaseVersion);

    echo("Done generating eslint.org");
};
776 | ||
/**
 * Bundles ESLint for the browser via webpack into the build directory.
 * @param {string} [mode="none"] The webpack mode ("production", "development", or "none").
 * @returns {void}
 */
target.webpack = function(mode = "none") {
    exec(`${getBinFile("webpack")} --mode=${mode} --output-path=${BUILD_DIR}`);
};
780 | ||
/**
 * Validates every rule file: presence of docs (with the rule id in the title),
 * an entry in tools/rule-types.json, inclusion in lib/rules/index.js, correct
 * membership in eslint:recommended, and a matching test file.
 * Exits with code 1 if any rule fails a check.
 * @returns {void}
 */
target.checkRuleFiles = function() {

    echo("Validating rules");

    const ruleTypes = require("./tools/rule-types.json");

    // Loop-invariant lookups hoisted out of the per-rule loop (require() is
    // cached, but resolving these once keeps the loop body focused).
    const ruleIdsInIndex = require("./lib/rules/index");
    const recommended = require("./conf/eslint-recommended");
    let errors = 0;

    RULE_FILES.forEach(filename => {
        const basename = path.basename(filename, ".js");
        const docFilename = `docs/rules/${basename}.md`;

        /**
         * Check if basename is present in rule-types.json file.
         * @returns {boolean} true if present
         * @private
         */
        function isInRuleTypes() {
            return Object.prototype.hasOwnProperty.call(ruleTypes, basename);
        }

        /**
         * Check if id is present in title
         * @param {string} id id to check for
         * @returns {boolean} true if present
         * @private
         */
        function hasIdInTitle(id) {
            const docText = cat(docFilename);
            const idOldAtEndOfTitleRegExp = new RegExp(`^# (.*?) \\(${id}\\)`, "u"); // original format
            const idNewAtBeginningOfTitleRegExp = new RegExp(`^# ${id}: `, "u"); // new format is same as rules index

            /*
             * 1. Added support for new format.
             * 2. Will remove support for old format after all docs files have new format.
             * 3. Will remove this check when the main heading is automatically generated from rule metadata.
             */
            return idNewAtBeginningOfTitleRegExp.test(docText) || idOldAtEndOfTitleRegExp.test(docText);
        }

        // check for docs
        if (!test("-f", docFilename)) {
            console.error("Missing documentation for rule %s", basename);
            errors++;
        } else {

            // check for proper doc format
            if (!hasIdInTitle(basename)) {
                console.error("Missing id in the doc page's title of rule %s", basename);
                errors++;
            }
        }

        // check for recommended configuration
        if (!isInRuleTypes()) {
            console.error("Missing setting for %s in tools/rule-types.json", basename);
            errors++;
        }

        // check parity between rules index file and rules directory
        const ruleDef = ruleIdsInIndex.get(basename);

        if (!ruleDef) {
            console.error(`Missing rule from index (./lib/rules/index.js): ${basename}. If you just added a new rule then add an entry for it in this file.`);
            errors++;
        }

        // check eslint:recommended
        if (ruleDef) {
            if (ruleDef.meta.docs.recommended) {
                if (recommended.rules[basename] !== "error") {
                    console.error(`Missing rule from eslint:recommended (./conf/eslint-recommended.js): ${basename}. If you just made a rule recommended then add an entry for it in this file.`);
                    errors++;
                }
            } else {
                if (basename in recommended.rules) {
                    console.error(`Extra rule in eslint:recommended (./conf/eslint-recommended.js): ${basename}. If you just added a rule then don't add an entry for it in this file.`);
                    errors++;
                }
            }
        }

        // check for tests
        if (!test("-f", `tests/lib/rules/${basename}.js`)) {
            console.error("Missing tests for rule %s", basename);
            errors++;
        }

    });

    if (errors) {
        exit(1);
    }

};
878 | ||
target.checkLicenses = function() {

    /**
     * Check if a dependency is eligible to be used by us
     * @param {Object} dependency dependency to check
     * @returns {boolean} true if we have permission
     * @private
     */
    function isPermissible(dependency) {
        const licenses = dependency.licenses;

        // Single license: accept it if any approved pattern matches.
        if (!Array.isArray(licenses)) {
            return OPEN_SOURCE_LICENSES.some(pattern => pattern.test(licenses));
        }

        // Multiple licenses: any one acceptable license is enough.
        return licenses.some(license => isPermissible({
            name: dependency.name,
            licenses: license
        }));
    }

    echo("Validating licenses");

    checker.init({
        start: __dirname
    }, deps => {
        const impermissible = Object.entries(deps)
            .map(([name, { licenses }]) => ({ name, licenses }))
            .filter(dependency => !isPermissible(dependency));

        if (impermissible.length) {
            for (const dependency of impermissible) {
                console.error(
                    "%s license for %s is impermissible.",
                    dependency.licenses,
                    dependency.name
                );
            }
            exit(1);
        }
    });
};
922 | ||
/**
 * Downloads a repository which has many js files to test performance with multi files.
 * Here, it's eslint@1.10.3 (450 files)
 * @param {Function} cb A callback function.
 * @returns {void}
 */
function downloadMultifilesTestTarget(cb) {

    // Fetch only when the target directory is missing; otherwise invoke
    // the callback asynchronously so both paths behave the same way.
    if (!test("-d", PERF_MULTIFILES_TARGET_DIR)) {
        mkdir("-p", PERF_MULTIFILES_TARGET_DIR);
        echo("Downloading the repository of multi-files performance test target.");
        exec(`git clone -b v1.10.3 --depth 1 https://github.com/eslint/eslint.git "${PERF_MULTIFILES_TARGET_DIR}"`, { silent: true }, cb);
    } else {
        process.nextTick(cb);
    }
}
938 | ||
/**
 * Creates a config file to use performance tests.
 * This config is turning all core rules on.
 * @returns {void}
 */
function createConfigForPerformanceTest() {

    // Fixed YAML preamble, then one "warn" entry per built-in rule.
    const header = [
        "root: true",
        "env:",
        " node: true",
        " es6: true",
        "rules:"
    ];
    const ruleLines = Array.from(builtinRules, ([ruleId]) => ` ${ruleId}: 1`);

    [...header, ...ruleLines].join("\n").to(PERF_ESLINTRC);
}
959 | ||
/**
 * Calculates the time for each run for performance
 * @param {string} cmd cmd
 * @param {int} runs Total number of runs to do
 * @param {int} runNumber Current run number
 * @param {int[]} results Collection results from each run
 * @param {Function} cb Function to call when everything is done
 * @returns {int[]} calls the cb with all the results
 * @private
 */
function time(cmd, runs, runNumber, results, cb) {
    const startedAt = process.hrtime();

    exec(cmd, { maxBuffer: 64 * 1024 * 1024, silent: true }, (code, stdout, stderr) => {
        const [seconds, nanoseconds] = process.hrtime(startedAt);
        const elapsedMs = seconds * 1e3 + nanoseconds / 1e6;

        // A nonzero exit code means this run failed: report whatever the
        // child process printed and abort the whole series with null.
        if (code) {
            echo(` Performance Run #${runNumber} failed.`);
            if (stdout) {
                echo(`STDOUT:\n${stdout}\n\n`);
            }
            if (stderr) {
                echo(`STDERR:\n${stderr}\n\n`);
            }
            return cb(null);
        }

        results.push(elapsedMs);
        echo(` Performance Run #${runNumber}: %dms`, elapsedMs);

        // Recurse until the requested number of runs is exhausted,
        // then hand the accumulated results to the callback.
        return runs > 1
            ? time(cmd, runs - 1, runNumber + 1, results, cb)
            : cb(results);
    });
}
999 | ||
/**
 * Run a performance test.
 * @param {string} title A title.
 * @param {string} targets Test targets.
 * @param {number} multiplier A multiplier for limitation.
 * @param {Function} cb A callback function.
 * @returns {void}
 */
function runPerformanceTest(title, targets, multiplier, cb) {
    const cpuSpeed = os.cpus()[0].speed;

    // Budget scales inversely with the first CPU's clock speed.
    const max = multiplier / cpuSpeed;

    // NOTE(review): there is no space between ESLINT and --config; this
    // assumes the ESLINT constant (defined earlier in the file) ends with
    // a trailing space — confirm against its definition.
    const cmd = `${ESLINT}--config "${PERF_ESLINTRC}" --no-eslintrc --no-ignore ${targets}`;

    echo("");
    echo(title);
    echo(" CPU Speed is %d with multiplier %d", cpuSpeed, multiplier);

    time(cmd, 5, 1, [], results => {
        if (!results || results.length === 0) { // No results? Something is wrong.
            throw new Error("Performance test failed.");
        }

        // Median of the successful runs decides pass/fail.
        const sorted = [...results].sort((a, b) => a - b);
        const median = sorted[Math.floor(sorted.length / 2)];

        echo("");
        echo(
            median > max
                ? " Performance budget exceeded: %dms (limit: %dms)"
                : " Performance budget ok: %dms (limit: %dms)",
            median,
            max
        );
        echo("");
        cb();
    });
}
1036 | ||
/**
 * Run the load performance for eslint
 * @returns {void}
 * @private
 */
function loadPerformance() {
    echo("");
    echo("Loading:");

    const loadTimes = [];

    // Five independent load measurements.
    for (let run = 1; run <= 5; run++) {
        const { loadTime } = loadPerf({
            checkDependencies: false
        });

        echo(` Load performance Run #${run}: %dms`, loadTime);
        loadTimes.push(loadTime);
    }

    loadTimes.sort((a, b) => a - b);
    const median = loadTimes[Math.floor(loadTimes.length / 2)];

    echo("");
    echo(" Load Performance median: %dms", median);
    echo("");
}
1064 | ||
target.perf = function() {
    downloadMultifilesTestTarget(() => {
        createConfigForPerformanceTest();

        loadPerformance();

        runPerformanceTest(
            "Single File:",
            "tests/performance/jshint.js",
            PERF_MULTIPLIER,
            () => {

                // Count test target files (glob needs forward slashes on Windows).
                const pattern = process.platform === "win32"
                    ? PERF_MULTIFILES_TARGETS.slice(2).replace(/\\/gu, "/")
                    : PERF_MULTIFILES_TARGETS;
                const count = glob.sync(pattern).length;

                // Multi-file run gets triple the single-file budget.
                runPerformanceTest(
                    `Multi Files (${count} files):`,
                    PERF_MULTIFILES_TARGETS,
                    3 * PERF_MULTIPLIER,
                    () => {}
                );
            }
        );
    });
};
1094 | ||
// Expose the release helpers (presumably defined earlier in this file —
// not visible here) as shelljs/make targets. generatePrerelease receives
// the make-target argument list and forwards its first element.
target.generateRelease = generateRelease;
target.generatePrerelease = ([prereleaseType]) => generatePrerelease(prereleaseType);
target.publishRelease = publishRelease;