]>
Commit | Line | Data |
---|---|---|
eb39fafa DC |
1 | /** |
2 | * @fileoverview Build file | |
3 | * @author nzakas | |
4 | */ | |
5 | ||
6 | /* global target */ | |
7 | /* eslint no-use-before-define: "off", no-console: "off" */ | |
8 | "use strict"; | |
9 | ||
10 | //------------------------------------------------------------------------------ | |
11 | // Requirements | |
12 | //------------------------------------------------------------------------------ | |
13 | ||
14 | require("shelljs/make"); | |
15 | ||
16 | const lodash = require("lodash"), | |
17 | checker = require("npm-license"), | |
18 | ReleaseOps = require("eslint-release"), | |
19 | dateformat = require("dateformat"), | |
20 | fs = require("fs"), | |
21 | glob = require("glob"), | |
22 | markdownlint = require("markdownlint"), | |
23 | os = require("os"), | |
24 | path = require("path"), | |
25 | semver = require("semver"), | |
26 | ejs = require("ejs"), | |
27 | loadPerf = require("load-perf"), | |
28 | yaml = require("js-yaml"), | |
29 | { CLIEngine } = require("./lib/cli-engine"), | |
30 | builtinRules = require("./lib/rules/index"); | |
31 | ||
32 | const { cat, cd, cp, echo, exec, exit, find, ls, mkdir, pwd, rm, test } = require("shelljs"); | |
33 | ||
34 | //------------------------------------------------------------------------------ | |
35 | // Settings | |
36 | //------------------------------------------------------------------------------ | |
37 | ||
/*
 * A little bit fuzzy. My computer has a first CPU speed of 3392 and the perf test
 * always completes in < 3800ms. However, Travis is less predictable due to
 * multiple different VM types. So I'm fudging this for now in the hopes that it
 * at least provides some sort of useful signal.
 */
const PERF_MULTIPLIER = 13e6;

// License-name patterns that dependencies are allowed to be published under
// (consumed by `target.checkLicenses` when scanning `npm-license` output).
const OPEN_SOURCE_LICENSES = [
    /MIT/u, /BSD/u, /Apache/u, /ISC/u, /WTF/u, /Public Domain/u, /LGPL/u
];
49 | ||
//------------------------------------------------------------------------------
// Data
//------------------------------------------------------------------------------

const NODE = "node ", // intentional extra space
    NODE_MODULES = "./node_modules/",
    TEMP_DIR = "./tmp/",
    DEBUG_DIR = "./debug/",
    BUILD_DIR = "build",

    // Both point into the adjacent `website` repository checkout.
    DOCS_DIR = "../website/docs",
    SITE_DIR = "../website/",
    PERF_TMP_DIR = path.join(TEMP_DIR, "eslint", "performance"),

    // Utilities - intentional extra space at the end of each string
    MOCHA = `${NODE_MODULES}mocha/bin/_mocha `,
    ESLINT = `${NODE} bin/eslint.js --report-unused-disable-directives `,

    // Files
    RULE_FILES = glob.sync("lib/rules/*.js").filter(filePath => path.basename(filePath) !== "index.js"),
    JSON_FILES = find("conf/").filter(fileType("json")),
    MARKDOWN_FILES_ARRAY = find("docs/").concat(ls(".")).filter(fileType("md")),
    TEST_FILES = "\"tests/{bin,lib,tools}/**/*.js\"",
    PERF_ESLINTRC = path.join(PERF_TMP_DIR, "eslintrc.yml"),
    PERF_MULTIFILES_TARGET_DIR = path.join(PERF_TMP_DIR, "eslint"),

    // Quoted so the shell passes the glob through to the tool unexpanded.
    PERF_MULTIFILES_TARGETS = `"${PERF_MULTIFILES_TARGET_DIR + path.sep}{lib,tests${path.sep}lib}${path.sep}**${path.sep}*.js"`,

    // Settings
    MOCHA_TIMEOUT = 10000; // per-test timeout in milliseconds
78 | ||
79 | //------------------------------------------------------------------------------ | |
80 | // Helpers | |
81 | //------------------------------------------------------------------------------ | |
82 | ||
/**
 * Validates that a file contains well-formed JSON by parsing it.
 * @param {string} filePath Path to the JSON file to check.
 * @throws {Error} If the file contents are not valid JSON.
 * @returns {undefined}
 */
function validateJsonFile(filePath) {
    JSON.parse(fs.readFileSync(filePath, "utf8"));
}
94 | ||
/**
 * Builds a predicate that tests whether a filename has the given extension.
 * @param {string} extension The file extension (i.e. "js")
 * @returns {Function} The function to pass into a filter method.
 * @private
 */
function fileType(extension) {
    return filename => filename.slice(filename.lastIndexOf(".") + 1) === extension;
}
106 | ||
/**
 * Runs a shell command without echoing its output and captures stdout.
 * @param {string} cmd The command string to execute.
 * @returns {string} The stdout produced by the command.
 */
function execSilent(cmd) {
    const { stdout } = exec(cmd, { silent: true });

    return stdout;
}
115 | ||
/**
 * Renders the release announcement blog post for eslint.org and writes it
 * into the adjacent website repository's `_posts` directory.
 * @param {Object} releaseInfo The release metadata.
 * @param {string} [prereleaseMajorVersion] If this is a prerelease, the next major version after this prerelease
 * @returns {void}
 * @private
 */
function generateBlogPost(releaseInfo, prereleaseMajorVersion) {

    /*
     * Rule names sorted by length, longest first. This ensures that rule names
     * which are substrings of other rule names are not matched incorrectly.
     * For example, the string "no-undefined" should get matched with the
     * `no-undefined` rule, instead of getting matched with the `no-undef` rule
     * followed by the string "ined".
     */
    const ruleList = RULE_FILES
        .map(ruleFileName => path.basename(ruleFileName, ".js"))
        .sort((ruleA, ruleB) => ruleB.length - ruleA.length);

    const renderContext = Object.assign({ prereleaseMajorVersion, ruleList }, releaseInfo);
    const output = ejs.render(cat("./templates/blogpost.md.ejs"), renderContext);

    // Zero-padded date components for the Jekyll post filename.
    const now = new Date();
    const month = String(now.getMonth() + 1).padStart(2, "0");
    const day = String(now.getDate()).padStart(2, "0");
    const filename = `../website/_posts/${now.getFullYear()}-${month}-${day}-eslint-v${releaseInfo.version}-released.md`;

    output.to(filename);
}
149 | ||
/**
 * Renders the formatter-examples doc page (plus the raw HTML formatter
 * sample) into the adjacent website repository.
 * @param {Object} formatterInfo Linting results from each formatter
 * @param {string} [prereleaseVersion] The version used for a prerelease. This
 *      changes where the output is stored.
 * @returns {void}
 */
function generateFormatterExamples(formatterInfo, prereleaseVersion) {
    const rendered = ejs.render(cat("./templates/formatter-examples.md.ejs"), formatterInfo);
    let mdPath = "../website/docs/user-guide/formatters/index.md";
    let htmlPath = "../website/docs/user-guide/formatters/html-formatter-example.html";

    if (prereleaseVersion) {

        // Prerelease docs live under a version-specific subdirectory.
        mdPath = mdPath.replace("/docs", `/docs/${prereleaseVersion}`);
        htmlPath = htmlPath.replace("/docs", `/docs/${prereleaseVersion}`);

        const targetDir = path.dirname(mdPath);

        if (!test("-d", targetDir)) {
            mkdir(targetDir);
        }
    }

    rendered.to(mdPath);
    formatterInfo.formatterResults.html.result.to(htmlPath);
}
173 | ||
/**
 * Builds the website's rules index data file (`rules.yml`) by grouping every
 * rule under its documented category, with deprecated rules listed separately.
 * @returns {void}
 */
function generateRuleIndexPage() {
    const outputFile = "../website/_data/rules.yml";
    const categoryList = "conf/category-list.json";
    const categoriesData = JSON.parse(cat(path.resolve(categoryList)));

    RULE_FILES
        .map(ruleFile => [ruleFile, path.basename(ruleFile, ".js")])
        .sort(([, nameA], [, nameB]) => nameA.localeCompare(nameB))
        .forEach(([ruleFile, ruleName]) => {
            const rule = require(path.resolve(ruleFile));

            if (rule.meta.deprecated) {
                categoriesData.deprecated.rules.push({
                    name: ruleName,
                    replacedBy: rule.meta.replacedBy || []
                });
            } else {
                const entry = {
                    name: ruleName,
                    description: rule.meta.docs.description,
                    recommended: rule.meta.docs.recommended || false,
                    fixable: !!rule.meta.fixable
                };
                const category = lodash.find(categoriesData.categories, { name: rule.meta.docs.category });

                if (!category.rules) {
                    category.rules = [];
                }

                category.rules.push(entry);
            }
        });

    // `.rules` will be `undefined` if all rules in a category are deprecated.
    categoriesData.categories = categoriesData.categories.filter(category => !!category.rules);

    yaml.safeDump(categoriesData, { sortKeys: true }).to(outputFile);
}
220 | ||
/**
 * Creates a git commit and tag in an adjacent `website` repository, without pushing it to
 * the remote. This assumes that the repository has already been modified somehow (e.g. by adding a blogpost).
 * @param {string} [tag] The string to tag the commit with
 * @returns {void}
 */
function commitSiteToGit(tag) {
    const returnDir = pwd();

    cd(SITE_DIR);
    exec("git add -A .");
    exec(`git commit -m "Autogenerated new docs and demo at ${dateformat(new Date())}"`);

    if (tag) {
        exec(`git tag ${tag}`);
    }

    // Rebase on top of any remote changes so the later push is fast-forward.
    exec("git fetch origin && git rebase origin/master");
    cd(returnDir);
}
241 | ||
/**
 * Pushes the adjacent `website` repository's local commits (and tags) to its
 * remote. The site should already have local commits (e.g. from running
 * `commitSiteToGit`).
 * @returns {void}
 */
function publishSite() {
    const returnDir = pwd();

    cd(SITE_DIR);
    exec("git push origin master --tags");
    cd(returnDir);
}
254 | ||
/**
 * Updates the changelog, bumps the version number in package.json, creates a local git commit and tag,
 * and generates the site in an adjacent `website` folder.
 * @returns {void}
 */
function generateRelease() {
    ReleaseOps.generateRelease();

    // eslint-release writes the release metadata to this file.
    const releaseInfo = JSON.parse(cat(".eslint-release-info.json"));

    echo("Generating site");
    target.gensite();
    generateBlogPost(releaseInfo);
    commitSiteToGit(`v${releaseInfo.version}`);
}
269 | ||
/**
 * Updates the changelog, bumps the version number in package.json, creates a local git commit and tag,
 * and generates the site in an adjacent `website` folder.
 * @param {string} prereleaseId The prerelease identifier (alpha, beta, etc.)
 * @returns {void}
 */
function generatePrerelease(prereleaseId) {
    ReleaseOps.generateRelease(prereleaseId);

    const releaseInfo = JSON.parse(cat(".eslint-release-info.json"));
    const nextMajor = semver.inc(releaseInfo.version, "major");

    echo("Generating site");

    // always write docs into the next major directory (so 2.0.0-alpha.0 writes to 2.0.0)
    target.gensite(nextMajor);

    /*
     * Premajor release should have identical "next major version".
     * Preminor and prepatch release will not.
     * 5.0.0-alpha.0 --> next major = 5, current major = 5
     * 4.4.0-alpha.0 --> next major = 5, current major = 4
     * 4.0.1-alpha.0 --> next major = 5, current major = 4
     */
    const isPremajor = semver.major(releaseInfo.version) === semver.major(nextMajor);

    if (isPremajor) {

        /*
         * This prerelease is for a major release (not preminor/prepatch).
         * Blog post generation logic needs to be aware of this (as well as
         * know what the next major version is actually supposed to be).
         */
        generateBlogPost(releaseInfo, nextMajor);
    } else {
        generateBlogPost(releaseInfo);
    }

    commitSiteToGit(`v${releaseInfo.version}`);
}
307 | ||
/**
 * Pushes an already-generated release to npm and GitHub, then publishes the
 * pending local commits of the adjacent `website` repository to its remote.
 * @returns {void}
 */
function publishRelease() {
    ReleaseOps.publishRelease();
    publishSite();
}
317 | ||
/**
 * Breaks a command's output into its individual lines.
 * @param {string} result The command result string.
 * @returns {Array} The separated lines.
 */
function splitCommandResultToLines(result) {
    const trimmed = result.trim();

    return trimmed.split("\n");
}
326 | ||
/**
 * Gets the first commit sha of the given file.
 * @param {string} filePath The file path which should be checked.
 * @returns {string} The commit sha.
 */
function getFirstCommitOfFile(filePath) {
    const revList = execSilent(`git rev-list HEAD -- ${filePath}`);
    const commits = splitCommandResultToLines(revList);

    // `git rev-list` is newest-first, so the last entry is the first commit.
    return commits[commits.length - 1].trim();
}
338 | ||
/**
 * Gets the tag name where a given file was introduced first.
 * @param {string} filePath The file path to check.
 * @returns {string} The tag name.
 */
function getFirstVersionOfFile(filePath) {
    const firstCommit = getFirstCommitOfFile(filePath);
    const tagOutput = execSilent(`git tag --contains ${firstCommit}`);

    // Keep only valid semver tags and return the earliest one.
    return splitCommandResultToLines(tagOutput)
        .map(version => semver.valid(version.trim()))
        .filter(version => version)
        .sort(semver.compare)[0];
}
358 | ||
/**
 * Gets the commit that deleted a file.
 * @param {string} filePath The path to the deleted file.
 * @returns {string} The commit sha.
 */
function getCommitDeletingFile(filePath) {
    const revList = execSilent(`git rev-list HEAD -- ${filePath}`);

    // Newest-first ordering: the first entry is the deleting commit.
    return splitCommandResultToLines(revList)[0];
}
369 | ||
/**
 * Gets the first version number where a given file is no longer present.
 * @param {string} filePath The path to the deleted file.
 * @returns {string} The version number.
 */
function getFirstVersionOfDeletion(filePath) {
    const deletionCommit = getCommitDeletingFile(filePath);
    const tagOutput = execSilent(`git tag --contains ${deletionCommit}`);
    const versions = [];

    for (const tag of splitCommandResultToLines(tagOutput)) {
        const validated = semver.valid(tag.trim());

        if (validated) {
            versions.push(validated);
        }
    }

    return versions.sort(semver.compare)[0];
}
384 | ||
/**
 * Lints Markdown files.
 * @param {Array} files Array of file names to lint.
 * @returns {Object} exec-style exit code object.
 * @private
 */
function lintMarkdown(files) {
    const configPath = path.join(__dirname, "./.markdownlint.yml");
    const config = yaml.safeLoad(fs.readFileSync(configPath, "utf8"));
    const result = markdownlint.sync({
        files,
        config,
        resultVersion: 1
    });
    const resultString = result.toString();

    // A non-empty result string means at least one lint problem was found.
    if (resultString) {
        console.error(resultString);
        return { code: 1 };
    }

    return { code: 0 };
}
406 | ||
/**
 * Gets linting results from every formatter, based on a hard-coded snippet and config
 * @returns {Object} Output from each formatter
 */
function getFormatterResults() {
    const stripAnsi = require("strip-ansi");

    const formatterFiles = fs.readdirSync("./lib/cli-engine/formatters/"),

        // Mix of warn/error severities so formatter output shows both.
        rules = {
            "no-else-return": "warn",
            indent: ["warn", 4],
            "space-unary-ops": "error",
            semi: ["warn", "always"],
            "consistent-return": "error"
        },
        cli = new CLIEngine({
            useEslintrc: false,
            baseConfig: { extends: "eslint:recommended" },
            rules
        }),

        // Deliberately problematic snippet that triggers each rule above.
        codeString = [
            "function addOne(i) {",
            "    if (i != NaN) {",
            "        return i ++",
            "    } else {",
            "      return",
            "    }",
            "};"
        ].join("\n"),
        rawMessages = cli.executeOnText(codeString, "fullOfProblems.js", true),
        rulesMap = cli.getRules(),
        rulesMeta = {};

    // Formatters need rule metadata to render links/descriptions.
    Object.keys(rules).forEach(ruleId => {
        rulesMeta[ruleId] = rulesMap.get(ruleId).meta;
    });

    // Run every built-in formatter (each .js file in the formatters dir)
    // over the same messages and collect its ANSI-stripped output.
    return formatterFiles.reduce((data, filename) => {
        const fileExt = path.extname(filename),
            name = path.basename(filename, fileExt);

        if (fileExt === ".js") {
            const formattedOutput = cli.getFormatter(name)(
                rawMessages.results,
                { rulesMeta }
            );

            data.formatterResults[name] = {
                result: stripAnsi(formattedOutput)
            };
        }
        return data;
    }, { formatterResults: {} });
}
461 | ||
/**
 * Resolves the path of a locally-installed executable under node_modules/.bin.
 * @param {string} command The executable name
 * @returns {string} The executable path
 */
function getBinFile(command) {
    return path.join("node_modules", ".bin", command);
}
470 | ||
471 | //------------------------------------------------------------------------------ | |
472 | // Tasks | |
473 | //------------------------------------------------------------------------------ | |
474 | ||
// Default target: run the full test suite.
target.all = () => target.test();
478 | ||
/**
 * Lints JavaScript, JSON and Markdown sources; exits non-zero on any failure.
 * @param {Array} [args] Optional `[fix]` flag; truthy runs ESLint with --fix.
 */
target.lint = function([fix = false] = []) {
    let errorCount = 0;

    echo("Validating JavaScript files");
    const jsResult = exec(`${ESLINT}${fix ? "--fix" : ""} .`);

    if (jsResult.code !== 0) {
        errorCount++;
    }

    // JSON validation throws on the first malformed file rather than counting.
    echo("Validating JSON Files");
    lodash.forEach(JSON_FILES, validateJsonFile);

    echo("Validating Markdown Files");
    const mdResult = lintMarkdown(MARKDOWN_FILES_ARRAY);

    if (mdResult.code !== 0) {
        errorCount++;
    }

    if (errorCount) {
        exit(1);
    }
};
502 | ||
/**
 * Runs the fuzzer and, if it finds errors, reports them (to the console on CI,
 * to a numbered log file under ./debug otherwise) and exits non-zero.
 * @param {Object} [options] `amount` of iterations and `fuzzBrokenAutofixes`.
 */
target.fuzz = function({ amount = 1000, fuzzBrokenAutofixes = false } = {}) {
    const fuzzerRunner = require("./tools/fuzzer-runner");
    const fuzzResults = fuzzerRunner.run({ amount, fuzzBrokenAutofixes });

    if (!fuzzResults.length) {
        return;
    }

    const uniqueStackTraceCount = new Set(fuzzResults.map(result => result.error)).size;

    echo(`The fuzzer reported ${fuzzResults.length} error${fuzzResults.length === 1 ? "" : "s"} with a total of ${uniqueStackTraceCount} unique stack trace${uniqueStackTraceCount === 1 ? "" : "s"}.`);

    const formattedResults = JSON.stringify({ results: fuzzResults }, null, 4);

    if (process.env.CI) {
        echo("More details can be found below.");
        echo(formattedResults);
    } else {
        if (!test("-d", DEBUG_DIR)) {
            mkdir(DEBUG_DIR);
        }

        // To avoid overwriting any existing fuzzer log files, append a numeric suffix to the end of the filename.
        let fileSuffix = 0;
        let fuzzLogPath = path.join(DEBUG_DIR, `fuzzer-log-${fileSuffix}.json`);

        while (test("-f", fuzzLogPath)) {
            fileSuffix++;
            fuzzLogPath = path.join(DEBUG_DIR, `fuzzer-log-${fileSuffix}.json`);
        }

        formattedResults.to(fuzzLogPath);

        // TODO: (not-an-aardvark) Create a better way to isolate and test individual fuzzer errors from the log file
        echo(`More details can be found in ${fuzzLogPath}.`);
    }

    exit(1);
};
541 | ||
/**
 * Runs the unit tests under nyc and enforces coverage thresholds; exits
 * non-zero if either step fails.
 */
target.mocha = () => {
    let errorCount = 0;

    echo("Running unit tests");

    const testRun = exec(`${getBinFile("nyc")} -- ${MOCHA} -R progress -t ${MOCHA_TIMEOUT} -c ${TEST_FILES}`);

    if (testRun.code !== 0) {
        errorCount++;
    }

    const coverageRun = exec(`${getBinFile("nyc")} check-coverage --statement 98 --branch 97 --function 98 --lines 98`);

    if (coverageRun.code !== 0) {
        errorCount++;
    }

    if (errorCount) {
        exit(1);
    }
};
562 | ||
/**
 * Builds the browser bundle and runs the browser unit tests via Karma;
 * exits non-zero on failure.
 */
target.karma = () => {
    echo("Running unit tests on browsers");

    // Karma tests run against the production webpack bundle.
    target.webpack("production");

    if (exec(`${getBinFile("karma")} start karma.conf.js`).code !== 0) {
        exit(1);
    }
};
574 | ||
/**
 * Full verification pipeline: lint, rule-file checks, unit tests, browser
 * tests, a short fuzz run, and license checks — in that order.
 */
target.test = () => {
    target.lint();
    target.checkRuleFiles();
    target.mocha();
    target.karma();
    target.fuzz({ amount: 150, fuzzBrokenAutofixes: false });
    target.checkLicenses();
};
583 | ||
/**
 * Generates API documentation for `lib/` into the /jsdoc directory.
 */
target.docs = () => {
    echo("Generating documentation");
    exec(`${getBinFile("jsdoc")} -d jsdoc lib`);
    echo("Documentation has been output to /jsdoc");
};
589 | ||
/**
 * Regenerates the eslint.org documentation from `docs/` into the adjacent
 * `../website` checkout: rewrites each Markdown file with Jekyll front matter,
 * appends version/resource sections to rule docs, and rebuilds the rules index.
 * @param {string} [prereleaseVersion] When set, output goes into a
 *      version-specific subdirectory (e.g. `docs/5.0.0/`) and the rules index
 *      page is skipped.
 */
target.gensite = function(prereleaseVersion) {
    echo("Generating eslint.org");

    let docFiles = [
        "/rules/",
        "/user-guide/",
        "/maintainer-guide/",
        "/developer-guide/",
        "/about/"
    ];

    // append version
    if (prereleaseVersion) {
        docFiles = docFiles.map(docFile => `/${prereleaseVersion}${docFile}`);
    }

    // 1. create temp and build directory
    echo("> Creating a temporary directory (Step 1)");
    if (!test("-d", TEMP_DIR)) {
        mkdir(TEMP_DIR);
    }

    // 2. remove old files from the site
    echo("> Removing old files (Step 2)");
    docFiles.forEach(filePath => {
        const fullPath = path.join(DOCS_DIR, filePath),
            htmlFullPath = fullPath.replace(".md", ".html");

        if (test("-f", fullPath)) {

            rm("-rf", fullPath);

            if (filePath.indexOf(".md") >= 0 && test("-f", htmlFullPath)) {
                rm("-rf", htmlFullPath);
            }
        }
    });

    // 3. Copy docs folder to a temporary directory
    echo("> Copying the docs folder (Step 3)");
    cp("-rf", "docs/*", TEMP_DIR);

    // versions.json tracks the ESLint version each rule was added/removed in.
    let versions = test("-f", "./versions.json") ? JSON.parse(cat("./versions.json")) : {};

    // Migrate the legacy flat format into { added, removed } shape.
    if (!versions.added) {
        versions = {
            added: versions,
            removed: {}
        };
    }

    const rules = require(".").linter.getRules();

    const RECOMMENDED_TEXT = "\n\n(recommended) The `\"extends\": \"eslint:recommended\"` property in a configuration file enables this rule.";
    const FIXABLE_TEXT = "\n\n(fixable) The `--fix` option on the [command line](../user-guide/command-line-interface#fixing-problems) can automatically fix some of the problems reported by this rule.";

    // 4. Loop through all files in temporary directory
    process.stdout.write("> Updating files (Steps 4-9): 0/... - ...\r");
    const tempFiles = find(TEMP_DIR);
    const length = tempFiles.length;

    tempFiles.forEach((filename, i) => {
        if (test("-f", filename) && path.extname(filename) === ".md") {

            const rulesUrl = "https://github.com/eslint/eslint/tree/master/lib/rules/",
                docsUrl = "https://github.com/eslint/eslint/tree/master/docs/rules/",
                baseName = path.basename(filename),
                sourceBaseName = `${path.basename(filename, ".md")}.js`,
                sourcePath = path.join("lib/rules", sourceBaseName),
                ruleName = path.basename(filename, ".md"),
                filePath = path.join("docs", path.relative("tmp", filename));
            let text = cat(filename),
                ruleType = "",
                title;

            // \r keeps the progress line in place on the same console row.
            process.stdout.write(`> Updating files (Steps 4-9): ${i}/${length} - ${filePath + " ".repeat(30)}\r`);

            // 5. Prepend page title and layout variables at the top of rules
            if (path.dirname(filename).indexOf("rules") >= 0) {

                // Find out if the rule requires a special docs portion (e.g. if it is recommended and/or fixable)
                const rule = rules.get(ruleName);
                const isRecommended = rule && rule.meta.docs.recommended;
                const isFixable = rule && rule.meta.fixable;

                // Incorporate the special portion into the documentation content
                const textSplit = text.split("\n");
                const ruleHeading = textSplit[0];
                const ruleDocsContent = textSplit.slice(1).join("\n");

                text = `${ruleHeading}${isRecommended ? RECOMMENDED_TEXT : ""}${isFixable ? FIXABLE_TEXT : ""}\n${ruleDocsContent}`;
                title = `${ruleName} - Rules`;

                if (rule && rule.meta) {
                    ruleType = `rule_type: ${rule.meta.type}`;
                }
            } else {

                // extract the title from the file itself
                title = text.match(/#([^#].+)\n/u);
                if (title) {
                    title = title[1].trim();
                } else {
                    title = "Documentation";
                }
            }

            // Prepend the Jekyll front matter block.
            text = [
                "---",
                `title: ${title}`,
                "layout: doc",
                `edit_link: https://github.com/eslint/eslint/edit/master/${filePath}`,
                ruleType,
                "---",
                "<!-- Note: No pull requests accepted for this file. See README.md in the root directory for details. -->",
                "",
                text
            ].join("\n");

            // 6. Remove .md extension for relative links and change README to empty string
            text = text.replace(/\((?!https?:\/\/)([^)]*?)\.md(.*?)\)/gu, "($1$2)").replace("README.html", "");

            // 7. Check if there's a trailing white line at the end of the file, if there isn't one, add it
            if (!/\n$/u.test(text)) {
                text = `${text}\n`;
            }

            // 8. Append first version of ESLint rule was added at.
            if (filename.indexOf("rules/") !== -1) {
                if (!versions.added[baseName]) {
                    versions.added[baseName] = getFirstVersionOfFile(sourcePath);
                }
                const added = versions.added[baseName];

                // A missing source file means the rule was removed at some point.
                if (!versions.removed[baseName] && !test("-f", sourcePath)) {
                    versions.removed[baseName] = getFirstVersionOfDeletion(sourcePath);
                }
                const removed = versions.removed[baseName];

                text += "\n## Version\n\n";
                text += removed
                    ? `This rule was introduced in ESLint ${added} and removed in ${removed}.\n`
                    : `This rule was introduced in ESLint ${added}.\n`;

                text += "\n## Resources\n\n";
                if (!removed) {
                    text += `* [Rule source](${rulesUrl}${sourceBaseName})\n`;
                }
                text += `* [Documentation source](${docsUrl}${baseName})\n`;
            }

            // 9. Update content of the file with changes
            text.to(filename.replace("README.md", "index.md"));
        }
    });
    JSON.stringify(versions).to("./versions.json");
    echo(`> Updating files (Steps 4-9)${" ".repeat(50)}`);

    // 10. Copy temporary directory to site's docs folder
    echo("> Copying the temporary directory into the site's docs folder (Step 10)");
    let outputDir = DOCS_DIR;

    if (prereleaseVersion) {
        outputDir += `/${prereleaseVersion}`;
        if (!test("-d", outputDir)) {
            mkdir(outputDir);
        }
    }
    cp("-rf", `${TEMP_DIR}*`, outputDir);

    // 11. Generate rules index page
    if (prereleaseVersion) {
        echo("> Skipping generating rules index page because this is a prerelease (Step 11)");
    } else {
        echo("> Generating the rules index page (Step 11)");
        generateRuleIndexPage();
    }

    // 12. Delete temporary directory
    echo("> Removing the temporary directory (Step 12)");
    rm("-rf", TEMP_DIR);

    // 13. Create Example Formatter Output Page
    // NOTE(review): the echoed step number (14) disagrees with this comment (13) —
    // looks like a leftover from a removed step; confirm before renumbering.
    echo("> Creating the formatter examples (Step 14)");
    generateFormatterExamples(getFormatterResults(), prereleaseVersion);

    echo("Done generating eslint.org");
};
777 | ||
/**
 * Builds the browser bundle with webpack.
 * @param {string} [mode] Webpack build mode ("none", "production", …).
 */
target.webpack = (mode = "none") => {
    exec(`${getBinFile("webpack")} --mode=${mode} --output-path=${BUILD_DIR}`);
};
781 | ||
/**
 * Validates that every rule file has documentation (with the rule id in its
 * title), a test file, a `tools/rule-types.json` entry, an entry in the rules
 * index, and a consistent `eslint:recommended` setting. Exits with code 1 if
 * any check fails.
 */
target.checkRuleFiles = function() {

    echo("Validating rules");

    const ruleTypes = require("./tools/rule-types.json");
    let errors = 0;

    RULE_FILES.forEach(filename => {
        const basename = path.basename(filename, ".js");
        const docFilename = `docs/rules/${basename}.md`;

        /**
         * Check if basename is present in rule-types.json file.
         * @returns {boolean} true if present
         * @private
         */
        function isInRuleTypes() {
            return Object.prototype.hasOwnProperty.call(ruleTypes, basename);
        }

        /**
         * Check if id is present in title
         * @param {string} id id to check for
         * @returns {boolean} true if present
         * @private
         */
        function hasIdInTitle(id) {
            const docText = cat(docFilename);
            const idOldAtEndOfTitleRegExp = new RegExp(`^# (.*?) \\(${id}\\)`, "u"); // original format
            const idNewAtBeginningOfTitleRegExp = new RegExp(`^# ${id}: `, "u"); // new format is same as rules index
            /*
             * 1. Added support for new format.
             * 2. Will remove support for old format after all docs files have new format.
             * 3. Will remove this check when the main heading is automatically generated from rule metadata.
             */

            return idNewAtBeginningOfTitleRegExp.test(docText) || idOldAtEndOfTitleRegExp.test(docText);
        }

        // check for docs
        if (!test("-f", docFilename)) {
            console.error("Missing documentation for rule %s", basename);
            errors++;
        } else {

            // check for proper doc format
            if (!hasIdInTitle(basename)) {
                console.error("Missing id in the doc page's title of rule %s", basename);
                errors++;
            }
        }

        // check for recommended configuration
        if (!isInRuleTypes()) {
            console.error("Missing setting for %s in tools/rule-types.json", basename);
            errors++;
        }

        // check parity between rules index file and rules directory
        // (Node caches the module, so this require is cheap inside the loop.)
        const ruleIdsInIndex = require("./lib/rules/index");
        const ruleDef = ruleIdsInIndex.get(basename);

        if (!ruleDef) {
            console.error(`Missing rule from index (./lib/rules/index.js): ${basename}. If you just added a new rule then add an entry for it in this file.`);
            errors++;
        }

        // check eslint:recommended
        const recommended = require("./conf/eslint-recommended");

        if (ruleDef) {
            if (ruleDef.meta.docs.recommended) {
                if (recommended.rules[basename] !== "error") {
                    console.error(`Missing rule from eslint:recommended (./conf/eslint-recommended.js): ${basename}. If you just made a rule recommended then add an entry for it in this file.`);
                    errors++;
                }
            } else {
                if (basename in recommended.rules) {
                    console.error(`Extra rule in eslint:recommended (./conf/eslint-recommended.js): ${basename}. If you just added a rule then don't add an entry for it in this file.`);
                    errors++;
                }
            }
        }

        // check for tests
        if (!test("-f", `tests/lib/rules/${basename}.js`)) {
            console.error("Missing tests for rule %s", basename);
            errors++;
        }

    });

    if (errors) {
        exit(1);
    }

};
879 | ||
target.checkLicenses = function() {

    /**
     * Determines whether we have permission to use a dependency's license.
     * A dependency may report a single license string or an array of them;
     * arrays are accepted if any one entry is permissible.
     * @param {Object} dependency dependency to check
     * @returns {boolean} true if we have permission
     * @private
     */
    function isPermissible(dependency) {
        const { licenses } = dependency;

        if (Array.isArray(licenses)) {
            return licenses.some(singleLicense => isPermissible({
                name: dependency.name,
                licenses: singleLicense
            }));
        }

        return OPEN_SOURCE_LICENSES.some(pattern => pattern.test(licenses));
    }

    echo("Validating licenses");

    checker.init({
        start: __dirname
    }, deps => {

        // Flatten the npm-license result into { name, licenses } records.
        const impermissible = Object.keys(deps)
            .map(name => ({ name, licenses: deps[name].licenses }))
            .filter(record => !isPermissible(record));

        if (impermissible.length) {
            for (const record of impermissible) {
                console.error(
                    "%s license for %s is impermissible.",
                    record.licenses,
                    record.name
                );
            }
            exit(1);
        }
    });
};
923 | ||
/**
 * Downloads a repository which has many js files to test performance with multi files.
 * Here, it's eslint@1.10.3 (450 files)
 * @param {Function} cb A callback function.
 * @returns {void}
 */
function downloadMultifilesTestTarget(cb) {

    // Reuse a previously-cloned checkout when present.
    if (!test("-d", PERF_MULTIFILES_TARGET_DIR)) {
        mkdir("-p", PERF_MULTIFILES_TARGET_DIR);
        echo("Downloading the repository of multi-files performance test target.");
        exec(`git clone -b v1.10.3 --depth 1 https://github.com/eslint/eslint.git "${PERF_MULTIFILES_TARGET_DIR}"`, { silent: true }, cb);
        return;
    }

    // Already present: still invoke the callback asynchronously for consistency.
    process.nextTick(cb);
}
939 | ||
/**
 * Creates a config file to use performance tests.
 * This config is turning all core rules on.
 * @returns {void}
 */
function createConfigForPerformanceTest() {
    const header = [
        "root: true",
        "env:",
        "    node: true",
        "    es6: true",
        "rules:"
    ];

    // One "warn" entry per built-in rule.
    const ruleLines = Array.from(builtinRules.keys(), ruleId => `    ${ruleId}: 1`);

    // shelljs extends String.prototype with .to() for writing files.
    [...header, ...ruleLines].join("\n").to(PERF_ESLINTRC);
}
960 | ||
/**
 * Calculates the time for each run for performance
 * @param {string} cmd cmd
 * @param {int} runs Total number of runs to do
 * @param {int} runNumber Current run number
 * @param {int[]} results Collection results from each run
 * @param {Function} cb Function to call when everything is done
 * @returns {int[]} calls the cb with all the results
 * @private
 */
function time(cmd, runs, runNumber, results, cb) {
    const startedAt = process.hrtime();

    exec(cmd, { maxBuffer: 64 * 1024 * 1024, silent: true }, (exitCode, stdout, stderr) => {
        const [elapsedSec, elapsedNano] = process.hrtime(startedAt);
        const elapsedMs = elapsedSec * 1e3 + elapsedNano / 1e6;

        // A nonzero exit code means the lint run itself failed; report and bail.
        if (exitCode) {
            echo(`  Performance Run #${runNumber} failed.`);
            if (stdout) {
                echo(`STDOUT:\n${stdout}\n\n`);
            }

            if (stderr) {
                echo(`STDERR:\n${stderr}\n\n`);
            }
            return cb(null);
        }

        results.push(elapsedMs);
        echo(`  Performance Run #${runNumber}: %dms`, elapsedMs);

        // Recurse until the requested number of runs has been collected.
        return runs > 1
            ? time(cmd, runs - 1, runNumber + 1, results, cb)
            : cb(results);
    });
}
1000 | ||
/**
 * Run a performance test.
 * @param {string} title A title.
 * @param {string} targets Test targets.
 * @param {number} multiplier A multiplier for limitation.
 * @param {Function} cb A callback function.
 * @returns {void}
 */
function runPerformanceTest(title, targets, multiplier, cb) {
    const cpuSpeed = os.cpus()[0].speed;

    // Budget scales inversely with CPU speed so slower machines get more headroom.
    const max = multiplier / cpuSpeed;

    // NOTE: ESLint is expected to end with a separator; do not add one here.
    const cmd = `${ESLINT}--config "${PERF_ESLINTRC}" --no-eslintrc --no-ignore ${targets}`;

    echo("");
    echo(title);
    echo("  CPU Speed is %d with multiplier %d", cpuSpeed, multiplier);

    time(cmd, 5, 1, [], results => {
        if (!results || results.length === 0) { // No results? Something is wrong.
            throw new Error("Performance test failed.");
        }

        // Use the median run as the representative measurement.
        const sorted = results.sort((a, b) => a - b);
        const median = sorted[~~(sorted.length / 2)];

        echo("");
        if (median > max) {
            echo("  Performance budget exceeded: %dms (limit: %dms)", median, max);
        } else {
            echo("  Performance budget ok:  %dms (limit: %dms)", median, max);
        }
        echo("");
        cb();
    });
}
1037 | ||
/**
 * Run the load performance for eslint
 * @returns {void}
 * @private
 */
function loadPerformance() {
    echo("");
    echo("Loading:");

    const loadTimes = [];
    let run = 0;

    // Measure module-load time five times to smooth out noise.
    while (run < 5) {
        const { loadTime } = loadPerf({
            checkDependencies: false
        });

        echo(`  Load performance Run #${run + 1}:  %dms`, loadTime);
        loadTimes.push(loadTime);
        run++;
    }

    loadTimes.sort((a, b) => a - b);

    // Report the median of the collected runs.
    const median = loadTimes[~~(loadTimes.length / 2)];

    echo("");
    echo("  Load Performance median:  %dms", median);
    echo("");
}
1065 | ||
target.perf = function() {
    downloadMultifilesTestTarget(() => {
        createConfigForPerformanceTest();

        loadPerformance();

        // Single-file benchmark first; the multi-file benchmark starts once it completes.
        runPerformanceTest("Single File:", "tests/performance/jshint.js", PERF_MULTIPLIER, () => {

            // Count test target files.
            const pattern = process.platform === "win32"
                ? PERF_MULTIFILES_TARGETS.slice(2).replace(/\\/gu, "/")
                : PERF_MULTIFILES_TARGETS;
            const fileCount = glob.sync(pattern).length;

            runPerformanceTest(
                `Multi Files (${fileCount} files):`,
                PERF_MULTIFILES_TARGETS,
                3 * PERF_MULTIPLIER,
                () => {}
            );
        });
    });
};
1095 | ||
// Release targets: thin wrappers around release helpers (presumably defined
// earlier in this file, backed by the eslint-release package required at the
// top — not visible in this chunk; confirm there).
target.generateRelease = generateRelease;

// Forwards the first CLI argument as the prerelease type (e.g. "alpha", "rc").
target.generatePrerelease = ([prereleaseType]) => generatePrerelease(prereleaseType);
target.publishRelease = publishRelease;