]>
Commit | Line | Data |
---|---|---|
eb39fafa DC |
1 | /** |
2 | * @fileoverview Build file | |
3 | * @author nzakas | |
4 | */ | |
5 | ||
609c276f | 6 | /* eslint no-use-before-define: "off", no-console: "off" -- CLI */ |
eb39fafa DC |
7 | "use strict"; |
8 | ||
9 | //------------------------------------------------------------------------------ | |
10 | // Requirements | |
11 | //------------------------------------------------------------------------------ | |
12 | ||
5422a9cc | 13 | const checker = require("npm-license"), |
eb39fafa DC |
14 | ReleaseOps = require("eslint-release"), |
15 | dateformat = require("dateformat"), | |
16 | fs = require("fs"), | |
17 | glob = require("glob"), | |
18 | markdownlint = require("markdownlint"), | |
19 | os = require("os"), | |
20 | path = require("path"), | |
21 | semver = require("semver"), | |
22 | ejs = require("ejs"), | |
23 | loadPerf = require("load-perf"), | |
24 | yaml = require("js-yaml"), | |
25 | { CLIEngine } = require("./lib/cli-engine"), | |
26 | builtinRules = require("./lib/rules/index"); | |
27 | ||
609c276f TL |
28 | require("shelljs/make"); |
29 | /* global target -- global.target is declared in `shelljs/make.js` */ | |
30 | /** | |
31 | * global.target = {}; | |
32 | * @see https://github.com/shelljs/shelljs/blob/124d3349af42cb794ae8f78fc9b0b538109f7ca7/make.js#L4 | |
33 | * @see https://github.com/DefinitelyTyped/DefinitelyTyped/blob/3aa2d09b6408380598cfb802743b07e1edb725f3/types/shelljs/make.d.ts#L8-L11 | |
34 | */ | |
eb39fafa DC |
35 | const { cat, cd, cp, echo, exec, exit, find, ls, mkdir, pwd, rm, test } = require("shelljs"); |
36 | ||
37 | //------------------------------------------------------------------------------ | |
38 | // Settings | |
39 | //------------------------------------------------------------------------------ | |
40 | ||
/*
 * A little bit fuzzy. My computer has a first CPU speed of 3392 and the perf test
 * always completes in < 3800ms. However, Travis is less predictable due to
 * multiple different VM types. So I'm fudging this for now in the hopes that it
 * at least provides some sort of useful signal.
 */
// NOTE(review): presumably scaled by CPU speed to derive a per-machine time
// budget for the perf targets — confirm against the perf target implementation.
const PERF_MULTIPLIER = 13e6;

// License-name patterns treated as acceptable open-source licenses.
// NOTE(review): presumably matched against dependency license strings by a
// checkLicenses target (not visible in this chunk) — confirm.
const OPEN_SOURCE_LICENSES = [
    /MIT/u, /BSD/u, /Apache/u, /ISC/u, /WTF/u, /Public Domain/u, /LGPL/u, /Python/u
];
52 | ||
53 | //------------------------------------------------------------------------------ | |
54 | // Data | |
55 | //------------------------------------------------------------------------------ | |
56 | ||
const NODE = "node ", // intentional extra space
    NODE_MODULES = "./node_modules/",
    TEMP_DIR = "./tmp/",           // scratch dir used by gensite; deleted at the end
    DEBUG_DIR = "./debug/",        // fuzzer log output dir
    BUILD_DIR = "build",           // webpack output path
    DOCS_DIR = "../website/docs",  // adjacent `website` repo checkout
    SITE_DIR = "../website/",
    PERF_TMP_DIR = path.join(TEMP_DIR, "eslint", "performance"),

    // Utilities - intentional extra space at the end of each string
    MOCHA = `${NODE_MODULES}mocha/bin/_mocha `,
    ESLINT = `${NODE} bin/eslint.js --report-unused-disable-directives `,

    // Files
    // All rule implementations except the barrel file.
    RULE_FILES = glob.sync("lib/rules/*.js").filter(filePath => path.basename(filePath) !== "index.js"),
    JSON_FILES = find("conf/").filter(fileType("json")),
    // NOTE(review): split("\n") keeps a trailing empty entry and any "\r" on
    // Windows checkouts — harmless for the `includes` filter below, but worth confirming.
    MARKDOWNLINT_IGNORED_FILES = fs.readFileSync(path.join(__dirname, ".markdownlintignore"), "utf-8").split("\n"),
    MARKDOWN_FILES_ARRAY = find("docs/").concat(ls(".")).filter(fileType("md")).filter(file => !MARKDOWNLINT_IGNORED_FILES.includes(file)),
    TEST_FILES = "\"tests/{bin,lib,tools}/**/*.js\"",
    PERF_ESLINTRC = path.join(PERF_TMP_DIR, "eslintrc.yml"),
    PERF_MULTIFILES_TARGET_DIR = path.join(PERF_TMP_DIR, "eslint"),
    PERF_MULTIFILES_TARGETS = `"${PERF_MULTIFILES_TARGET_DIR + path.sep}{lib,tests${path.sep}lib}${path.sep}**${path.sep}*.js"`,

    // Settings
    MOCHA_TIMEOUT = 10000; // per-test timeout in ms
82 | ||
83 | //------------------------------------------------------------------------------ | |
84 | // Helpers | |
85 | //------------------------------------------------------------------------------ | |
86 | ||
/**
 * Validates that a file contains well-formed JSON by parsing it with the
 * built-in JSON parser. The parsed value is discarded; only syntax matters.
 * @param {string} filePath Path to the JSON file to check.
 * @throws {SyntaxError} If the file contents are not valid JSON.
 * @returns {undefined}
 */
function validateJsonFile(filePath) {
    JSON.parse(fs.readFileSync(filePath, "utf8"));
}
98 | ||
/**
 * Generates a predicate that matches filenames with a particular extension.
 * The comparison uses everything after the last "." — note that a filename
 * with no dot at all compares its entire name (e.g. a file literally named
 * "md" matches extension "md"); this quirk is intentionally preserved.
 * @param {string} extension The file extension without the dot (i.e. "js").
 * @returns {Function} A predicate suitable for `Array.prototype.filter`.
 * @private
 */
function fileType(extension) {
    return filename => filename.slice(filename.lastIndexOf(".") + 1) === extension;
}
110 | ||
/**
 * Executes a shell command without echoing it to stdout and returns the
 * captured standard output.
 * @param {string} cmd The command string to execute.
 * @returns {string} The stdout produced by the command.
 */
function execSilent(cmd) {
    const { stdout } = exec(cmd, { silent: true });

    return stdout;
}
119 | ||
/**
 * Generates a release blog post for eslint.org by rendering the blogpost EJS
 * template and writing it into the adjacent website repo's `_posts` folder
 * with a date-stamped filename.
 * @param {Object} releaseInfo The release metadata.
 * @param {string} [prereleaseMajorVersion] If this is a prerelease, the next major version after this prerelease
 * @returns {void}
 * @private
 */
function generateBlogPost(releaseInfo, prereleaseMajorVersion) {

    /*
     * Rule names sorted by length descending. This ensures that rule names
     * which are substrings of other rule names are not matched incorrectly by
     * the template. For example, the string "no-undefined" should get matched
     * with the `no-undefined` rule, instead of getting matched with the
     * `no-undef` rule followed by the string "ined".
     */
    const ruleList = RULE_FILES
        .map(ruleFileName => path.basename(ruleFileName, ".js"))
        .sort((ruleA, ruleB) => ruleB.length - ruleA.length);

    const renderContext = Object.assign({ prereleaseMajorVersion, ruleList }, releaseInfo);
    const output = ejs.render(cat("./templates/blogpost.md.ejs"), renderContext);

    // Jekyll-style post filename: YYYY-MM-DD-eslint-vX.Y.Z-released.md
    const now = new Date();
    const pad = value => (value < 10 ? `0${value}` : value);
    const filename = `../website/_posts/${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}-eslint-v${releaseInfo.version}-released.md`;

    output.to(filename);
}
153 | ||
/**
 * Generates a doc page with formatter result examples by rendering the
 * formatter-examples EJS template into the website repo. The raw HTML
 * formatter output is additionally written to its own file.
 * @param {Object} formatterInfo Linting results from each formatter
 * @param {string} [prereleaseVersion] The version used for a prerelease. This
 *      changes where the output is stored.
 * @returns {void}
 */
function generateFormatterExamples(formatterInfo, prereleaseVersion) {
    const output = ejs.render(cat("./templates/formatter-examples.md.ejs"), formatterInfo);

    let filename = "../website/docs/user-guide/formatters/index.md";
    let htmlFilename = "../website/docs/user-guide/formatters/html-formatter-example.html";

    if (prereleaseVersion) {

        // Prerelease docs live under a versioned subdirectory that may not exist yet.
        filename = filename.replace("/docs", `/docs/${prereleaseVersion}`);
        htmlFilename = htmlFilename.replace("/docs", `/docs/${prereleaseVersion}`);
        if (!test("-d", path.dirname(filename))) {
            mkdir(path.dirname(filename));
        }
    }

    output.to(filename);
    formatterInfo.formatterResults.html.result.to(htmlFilename);
}
177 | ||
/**
 * Generate a doc page that lists all of the rules and links to them.
 * Reads the rule-type scaffolding from conf/rule-type-list.json, buckets every
 * rule (via its exported `meta`) into its type or into the deprecated list,
 * and writes the result as YAML to the website repo's `_data/rules.yml`.
 * @returns {void}
 */
function generateRuleIndexPage() {
    const outputFile = "../website/_data/rules.yml",
        ruleTypes = "conf/rule-type-list.json",
        ruleTypesData = JSON.parse(cat(path.resolve(ruleTypes)));

    // Sort alphabetically by rule name so the generated index is stable.
    RULE_FILES
        .map(filename => [filename, path.basename(filename, ".js")])
        .sort((a, b) => a[1].localeCompare(b[1]))
        .forEach(pair => {
            const filename = pair[0];
            const basename = pair[1];
            const rule = require(path.resolve(filename));

            if (rule.meta.deprecated) {
                ruleTypesData.deprecated.rules.push({
                    name: basename,
                    replacedBy: rule.meta.replacedBy || []
                });
            } else {
                const output = {
                        name: basename,
                        description: rule.meta.docs.description,
                        recommended: rule.meta.docs.recommended || false,
                        fixable: !!rule.meta.fixable,
                        hasSuggestions: !!rule.meta.hasSuggestions
                    },

                    // Bucket entry matching this rule's `meta.type` from the JSON scaffold.
                    ruleType = ruleTypesData.types.find(c => c.name === rule.meta.type);

                if (!ruleType.rules) {
                    ruleType.rules = [];
                }

                ruleType.rules.push(output);
            }
        });

    // `.rules` will be `undefined` if all rules in category are deprecated.
    ruleTypesData.types = ruleTypesData.types.filter(ruleType => !!ruleType.rules);

    const output = yaml.dump(ruleTypesData, { sortKeys: true });

    output.to(outputFile);
}
225 | ||
/**
 * Creates a git commit and tag in an adjacent `website` repository, without pushing it to
 * the remote. This assumes that the repository has already been modified somehow (e.g. by adding a blogpost).
 * Rebases onto origin/master before returning so the local branch is up to date.
 * @param {string} [tag] The string to tag the commit with
 * @returns {void}
 */
function commitSiteToGit(tag) {
    const returnDir = pwd();

    cd(SITE_DIR);
    exec("git add -A .");
    exec(`git commit -m "Autogenerated new docs and demo at ${dateformat(new Date())}"`);

    if (tag) {
        exec(`git tag ${tag}`);
    }

    exec("git fetch origin && git rebase origin/master");
    cd(returnDir);
}
246 | ||
/**
 * Publishes the changes in an adjacent `website` repository to the remote. The
 * site should already have local commits (e.g. from running `commitSiteToGit`).
 * Restores the working directory before returning.
 * @returns {void}
 */
function publishSite() {
    const previousDir = pwd();

    cd(SITE_DIR);
    exec("git push origin master --tags");
    cd(previousDir);
}
259 | ||
/**
 * Updates the changelog, bumps the version number in package.json, creates a local git commit and tag,
 * and generates the site in an adjacent `website` folder.
 * Reads the release metadata that `ReleaseOps.generateRelease()` writes to
 * `.eslint-release-info.json`.
 * @returns {void}
 */
function generateRelease() {
    ReleaseOps.generateRelease();

    const releaseInfo = JSON.parse(cat(".eslint-release-info.json"));

    echo("Generating site");
    target.gensite();
    generateBlogPost(releaseInfo);
    commitSiteToGit(`v${releaseInfo.version}`);
}
274 | ||
/**
 * Generates a prerelease: updates the changelog, bumps the version number in
 * package.json, creates a local git commit and tag, and generates the site in
 * an adjacent `website` folder under the next major version's directory.
 * @param {string} prereleaseId The prerelease identifier (alpha, beta, etc.)
 * @returns {void}
 */
function generatePrerelease(prereleaseId) {
    ReleaseOps.generateRelease(prereleaseId);
    const releaseInfo = JSON.parse(cat(".eslint-release-info.json"));
    const nextMajor = semver.inc(releaseInfo.version, "major");

    echo("Generating site");

    // always write docs into the next major directory (so 2.0.0-alpha.0 writes to 2.0.0)
    target.gensite(nextMajor);

    /*
     * Premajor release should have identical "next major version".
     * Preminor and prepatch release will not.
     * 5.0.0-alpha.0 --> next major = 5, current major = 5
     * 4.4.0-alpha.0 --> next major = 5, current major = 4
     * 4.0.1-alpha.0 --> next major = 5, current major = 4
     */
    if (semver.major(releaseInfo.version) === semver.major(nextMajor)) {

        /*
         * This prerelease is for a major release (not preminor/prepatch).
         * Blog post generation logic needs to be aware of this (as well as
         * know what the next major version is actually supposed to be).
         */
        generateBlogPost(releaseInfo, nextMajor);
    } else {
        generateBlogPost(releaseInfo);
    }

    commitSiteToGit(`v${releaseInfo.version}`);
}
312 | ||
/**
 * Publishes a generated release to npm and GitHub, and pushes changes to the adjacent `website` repo
 * to remote repo.
 * The package/tag publish happens first; the pre-committed website changes
 * (from `commitSiteToGit`) are pushed afterwards.
 * @returns {void}
 */
function publishRelease() {
    ReleaseOps.publishRelease();
    publishSite();
}
322 | ||
/**
 * Splits a command's output into individual lines, discarding surrounding
 * whitespace first so a trailing newline does not produce an empty entry.
 * @param {string} result The command result string.
 * @returns {Array} The separated lines.
 */
function splitCommandResultToLines(result) {
    const trimmed = result.trim();

    return trimmed.split("\n");
}
331 | ||
/**
 * Gets the first (oldest) commit sha that touched the given file.
 * `git rev-list` emits newest-first, so the last line is the oldest commit.
 * @param {string} filePath The file path which should be checked.
 * @returns {string} The commit sha.
 */
function getFirstCommitOfFile(filePath) {
    const shas = splitCommandResultToLines(execSilent(`git rev-list HEAD -- ${filePath}`));

    return shas[shas.length - 1].trim();
}
343 | ||
/**
 * Gets the tag name where a given file was introduced first.
 * Looks at all tags containing the file's first commit, keeps only valid
 * semver tags, and returns the lowest one.
 * @param {string} filePath The file path to check.
 * @returns {string} The tag name.
 */
function getFirstVersionOfFile(filePath) {
    const firstCommit = getFirstCommitOfFile(filePath);
    const tags = splitCommandResultToLines(execSilent(`git tag --contains ${firstCommit}`));

    return tags
        .map(tag => semver.valid(tag.trim()))
        .filter(validVersion => validVersion)
        .sort(semver.compare)[0];
}
363 | ||
/**
 * Gets the commit that deleted a file.
 * `git rev-list` emits newest-first, so the first line is the deleting commit.
 * @param {string} filePath The path to the deleted file.
 * @returns {string} The commit sha.
 */
function getCommitDeletingFile(filePath) {
    const revList = execSilent(`git rev-list HEAD -- ${filePath}`);

    return splitCommandResultToLines(revList)[0];
}
374 | ||
/**
 * Gets the first version number where a given file is no longer present.
 * Among all tags containing the deleting commit, returns the lowest valid
 * semver version.
 * @param {string} filePath The path to the deleted file.
 * @returns {string} The version number.
 */
function getFirstVersionOfDeletion(filePath) {
    const deletionCommit = getCommitDeletingFile(filePath);
    const tagOutput = execSilent(`git tag --contains ${deletionCommit}`);

    return splitCommandResultToLines(tagOutput)
        .map(version => semver.valid(version.trim()))
        .filter(version => version)
        .sort(semver.compare)[0];
}
389 | ||
/**
 * Lints Markdown files with markdownlint using the repo's .markdownlint.yml
 * config. Any findings are printed to stderr.
 * @param {Array} files Array of file names to lint.
 * @returns {Object} exec-style exit code object ({ code: 0 } when clean).
 * @private
 */
function lintMarkdown(files) {
    const config = yaml.load(fs.readFileSync(path.join(__dirname, "./.markdownlint.yml"), "utf8"));
    const result = markdownlint.sync({
        files,
        config,
        resultVersion: 1
    });
    const resultString = result.toString();

    if (resultString) {
        console.error(resultString);
    }
    return { code: resultString ? 1 : 0 };
}
411 | ||
/**
 * Gets linting results from every formatter, based on a hard-coded snippet and config.
 * Lints a deliberately problematic code sample with `eslint:recommended` plus a
 * few extra rules, then runs every formatter in lib/cli-engine/formatters over
 * the same results.
 * @returns {Object} Output from each formatter, keyed by formatter name under
 *      `formatterResults`, with ANSI escape codes stripped.
 */
function getFormatterResults() {
    const stripAnsi = require("strip-ansi");

    const formatterFiles = fs.readdirSync("./lib/cli-engine/formatters/"),

        // Rules chosen so the sample produces a mix of warnings and errors.
        rules = {
            "no-else-return": "warn",
            indent: ["warn", 4],
            "space-unary-ops": "error",
            semi: ["warn", "always"],
            "consistent-return": "error"
        },
        cli = new CLIEngine({
            useEslintrc: false,
            baseConfig: { extends: "eslint:recommended" },
            rules
        }),

        // Sample source that intentionally violates the rules above.
        codeString = [
            "function addOne(i) {",
            "    if (i != NaN) {",
            "        return i ++",
            "    } else {",
            "      return",
            "    }",
            "};"
        ].join("\n"),
        rawMessages = cli.executeOnText(codeString, "fullOfProblems.js", true),
        rulesMap = cli.getRules(),
        rulesMeta = {};

    // Formatters receive rule metadata so they can render docs links etc.
    Object.keys(rules).forEach(ruleId => {
        rulesMeta[ruleId] = rulesMap.get(ruleId).meta;
    });

    // Run each .js formatter over the same results and collect its output.
    return formatterFiles.reduce((data, filename) => {
        const fileExt = path.extname(filename),
            name = path.basename(filename, fileExt);

        if (fileExt === ".js") {
            const formattedOutput = cli.getFormatter(name)(
                rawMessages.results,
                { rulesMeta }
            );

            data.formatterResults[name] = {
                result: stripAnsi(formattedOutput)
            };
        }
        return data;
    }, { formatterResults: {} });
}
466 | ||
/**
 * Gets a path to an executable in node_modules/.bin
 * @param {string} command The executable name
 * @returns {string} The executable path
 */
function getBinFile(command) {
    const binDir = path.join("node_modules", ".bin");

    return path.join(binDir, command);
}
475 | ||
476 | //------------------------------------------------------------------------------ | |
477 | // Tasks | |
478 | //------------------------------------------------------------------------------ | |
479 | ||
eb39fafa DC |
/**
 * Lints JavaScript (via the local eslint binary), JSON (via JSON.parse), and
 * Markdown (via markdownlint). Exits with code 1 if any category fails.
 * JSON validation throws immediately on bad input rather than counting errors.
 * @param {Array} [args] Optional array whose first element, when truthy, runs eslint with --fix.
 * @returns {void}
 */
target.lint = function([fix = false] = []) {
    let errorCount = 0;

    echo("Validating JavaScript files");
    const eslintResult = exec(`${ESLINT}${fix ? "--fix" : ""} .`);

    if (eslintResult.code !== 0) {
        errorCount++;
    }

    echo("Validating JSON Files");
    JSON_FILES.forEach(validateJsonFile);

    echo("Validating Markdown Files");
    const markdownResult = lintMarkdown(MARKDOWN_FILES_ARRAY);

    if (markdownResult.code !== 0) {
        errorCount++;
    }

    if (errorCount) {
        exit(1);
    }
};
503 | ||
/**
 * Runs the autofix fuzzer. If any errors are found, they are echoed (on CI)
 * or written to a numbered log file under DEBUG_DIR (locally), and the
 * process exits with code 1.
 * @param {Object} [options] Fuzzer options.
 * @param {number} [options.amount=1000] Number of fuzz iterations.
 * @param {boolean} [options.fuzzBrokenAutofixes=false] Whether to also fuzz known-broken autofixes.
 * @returns {void}
 */
target.fuzz = function({ amount = 1000, fuzzBrokenAutofixes = false } = {}) {
    const fuzzerRunner = require("./tools/fuzzer-runner");
    const fuzzResults = fuzzerRunner.run({ amount, fuzzBrokenAutofixes });

    if (fuzzResults.length) {

        // Distinct stack traces give a better sense of how many unique bugs were hit.
        const uniqueStackTraceCount = new Set(fuzzResults.map(result => result.error)).size;

        echo(`The fuzzer reported ${fuzzResults.length} error${fuzzResults.length === 1 ? "" : "s"} with a total of ${uniqueStackTraceCount} unique stack trace${uniqueStackTraceCount === 1 ? "" : "s"}.`);

        const formattedResults = JSON.stringify({ results: fuzzResults }, null, 4);

        if (process.env.CI) {
            echo("More details can be found below.");
            echo(formattedResults);
        } else {
            if (!test("-d", DEBUG_DIR)) {
                mkdir(DEBUG_DIR);
            }

            let fuzzLogPath;
            let fileSuffix = 0;

            // To avoid overwriting any existing fuzzer log files, append a numeric suffix to the end of the filename.
            do {
                fuzzLogPath = path.join(DEBUG_DIR, `fuzzer-log-${fileSuffix}.json`);
                fileSuffix++;
            } while (test("-f", fuzzLogPath));

            formattedResults.to(fuzzLogPath);

            // TODO: (not-an-aardvark) Create a better way to isolate and test individual fuzzer errors from the log file
            echo(`More details can be found in ${fuzzLogPath}.`);
        }

        exit(1);
    }
};
542 | ||
/**
 * Runs the unit test suite under nyc coverage, then enforces the coverage
 * thresholds. Exits with code 1 if either the tests or the coverage check fail.
 * @returns {void}
 */
target.mocha = () => {
    let errors = 0;

    echo("Running unit tests");

    const testRun = exec(`${getBinFile("nyc")} -- ${MOCHA} --forbid-only -R progress -t ${MOCHA_TIMEOUT} -c ${TEST_FILES}`);

    if (testRun.code !== 0) {
        errors++;
    }

    const coverageRun = exec(`${getBinFile("nyc")} check-coverage --statement 98 --branch 97 --function 98 --lines 98`);

    if (coverageRun.code !== 0) {
        errors++;
    }

    if (errors) {
        exit(1);
    }
};
563 | ||
/**
 * Runs the unit tests in browsers via karma, after building the browser
 * bundle with webpack in production mode. Exits with code 1 on failure.
 * @returns {void}
 */
target.karma = () => {
    echo("Running unit tests on browsers");

    target.webpack("production");

    if (exec(`${getBinFile("karma")} start karma.conf.js`).code !== 0) {
        exit(1);
    }
};
575 | ||
/**
 * Full verification pass: lint, rule-file checks, unit tests, browser tests,
 * a short fuzz run, and the license check. Each sub-target exits the process
 * itself on failure, so ordering doubles as fail-fast priority.
 * @returns {void}
 */
target.test = function() {
    target.lint();
    target.checkRuleFiles();
    target.mocha();
    target.karma();
    target.fuzz({ amount: 150, fuzzBrokenAutofixes: false });
    target.checkLicenses();
};
584 | ||
eb39fafa DC |
/**
 * Generates the eslint.org documentation site into the adjacent `website`
 * repo: copies docs/ into a temp dir, injects front matter, recommended/
 * fixable/suggestions banners, version history and resource links into every
 * Markdown file, copies the result into the site's docs folder, regenerates
 * the rules index (non-prerelease only), and regenerates formatter examples.
 * @param {string} [prereleaseVersion] When set, output goes into a versioned
 *      subdirectory and the rules index page is skipped.
 * @returns {void}
 */
target.gensite = function(prereleaseVersion) {
    echo("Generating eslint.org");

    let docFiles = [
        "/rules/",
        "/user-guide/",
        "/maintainer-guide/",
        "/developer-guide/",
        "/about/"
    ];

    // append version
    if (prereleaseVersion) {
        docFiles = docFiles.map(docFile => `/${prereleaseVersion}${docFile}`);
    }

    // 1. create temp and build directory
    echo("> Creating a temporary directory (Step 1)");
    if (!test("-d", TEMP_DIR)) {
        mkdir(TEMP_DIR);
    }

    // 2. remove old files from the site
    echo("> Removing old files (Step 2)");
    docFiles.forEach(filePath => {
        const fullPath = path.join(DOCS_DIR, filePath),
            htmlFullPath = fullPath.replace(".md", ".html");

        if (test("-f", fullPath)) {

            rm("-rf", fullPath);

            if (filePath.indexOf(".md") >= 0 && test("-f", htmlFullPath)) {
                rm("-rf", htmlFullPath);
            }
        }
    });

    // 3. Copy docs folder to a temporary directory
    echo("> Copying the docs folder (Step 3)");
    cp("-rf", "docs/*", TEMP_DIR);

    // versions.json tracks which ESLint version each rule was added/removed in.
    // Older versions of the file were a flat added-only map; migrate that shape.
    let versions = test("-f", "./versions.json") ? JSON.parse(cat("./versions.json")) : {};

    if (!versions.added) {
        versions = {
            added: versions,
            removed: {}
        };
    }

    const { Linter } = require(".");
    const rules = new Linter().getRules();

    const RECOMMENDED_TEXT = "\n\n(recommended) The `\"extends\": \"eslint:recommended\"` property in a configuration file enables this rule.";
    const FIXABLE_TEXT = "\n\n(fixable) The `--fix` option on the [command line](../user-guide/command-line-interface#fixing-problems) can automatically fix some of the problems reported by this rule.";
    const HAS_SUGGESTIONS_TEXT = "\n\n(hasSuggestions) Some problems reported by this rule are manually fixable by editor [suggestions](../developer-guide/working-with-rules#providing-suggestions).";

    // 4. Loop through all files in temporary directory
    process.stdout.write("> Updating files (Steps 4-9): 0/... - ...\r");
    const tempFiles = find(TEMP_DIR);
    const length = tempFiles.length;

    tempFiles.forEach((filename, i) => {
        if (test("-f", filename) && path.extname(filename) === ".md") {

            const rulesUrl = "https://github.com/eslint/eslint/tree/HEAD/lib/rules/",
                testsUrl = "https://github.com/eslint/eslint/tree/HEAD/tests/lib/rules/",
                docsUrl = "https://github.com/eslint/eslint/tree/HEAD/docs/rules/",
                baseName = path.basename(filename),
                sourceBaseName = `${path.basename(filename, ".md")}.js`,
                sourcePath = path.join("lib/rules", sourceBaseName),
                ruleName = path.basename(filename, ".md"),
                filePath = path.join("docs", path.relative("tmp", filename));
            let text = cat(filename),
                ruleType = "",
                title;

            process.stdout.write(`> Updating files (Steps 4-9): ${i}/${length} - ${filePath + " ".repeat(30)}\r`);

            // 5. Prepend page title and layout variables at the top of rules
            if (path.dirname(filename).indexOf("rules") >= 0) {

                // Find out if the rule requires a special docs portion (e.g. if it is recommended and/or fixable)
                const rule = rules.get(ruleName);
                const isRecommended = rule && rule.meta.docs.recommended;
                const isFixable = rule && rule.meta.fixable;
                const hasSuggestions = rule && rule.meta.hasSuggestions;

                // Incorporate the special portion into the documentation content
                const textSplit = text.split("\n");
                const ruleHeading = textSplit[0];
                const ruleDocsContent = textSplit.slice(1).join("\n");

                text = `${ruleHeading}${isRecommended ? RECOMMENDED_TEXT : ""}${isFixable ? FIXABLE_TEXT : ""}${hasSuggestions ? HAS_SUGGESTIONS_TEXT : ""}\n${ruleDocsContent}`;
                title = `${ruleName} - Rules`;

                if (rule && rule.meta) {
                    ruleType = `rule_type: ${rule.meta.type}`;
                }
            } else {

                // extract the title from the file itself
                title = text.match(/#([^#].+)\n/u);
                if (title) {
                    title = title[1].trim();
                } else {
                    title = "Documentation";
                }
            }

            // Jekyll front matter prepended to every page.
            text = [
                "---",
                `title: ${title}`,
                "layout: doc",
                `edit_link: https://github.com/eslint/eslint/edit/main/${filePath}`,
                ruleType,
                "---",
                "<!-- Note: No pull requests accepted for this file. See README.md in the root directory for details. -->",
                "",
                text
            ].join("\n");

            // 6. Remove .md extension for relative links and change README to empty string
            text = text.replace(/\((?!https?:\/\/)([^)]*?)\.md(.*?)\)/gu, "($1$2)").replace("README.html", "");

            // 7. Check if there's a trailing white line at the end of the file, if there isn't one, add it
            if (!/\n$/u.test(text)) {
                text = `${text}\n`;
            }

            // 8. Append first version of ESLint rule was added at.
            if (filename.indexOf("rules/") !== -1) {
                if (!versions.added[baseName]) {
                    versions.added[baseName] = getFirstVersionOfFile(sourcePath);
                }
                const added = versions.added[baseName];

                // A doc file with no matching source file means the rule was removed.
                if (!versions.removed[baseName] && !test("-f", sourcePath)) {
                    versions.removed[baseName] = getFirstVersionOfDeletion(sourcePath);
                }
                const removed = versions.removed[baseName];

                text += "\n## Version\n\n";
                text += removed
                    ? `This rule was introduced in ESLint ${added} and removed in ${removed}.\n`
                    : `This rule was introduced in ESLint ${added}.\n`;

                text += "\n## Resources\n\n";
                if (!removed) {
                    text += `* [Rule source](${rulesUrl}${sourceBaseName})\n`;
                    text += `* [Test source](${testsUrl}${sourceBaseName})\n`;
                }
                text += `* [Documentation source](${docsUrl}${baseName})\n`;
            }

            // 9. Update content of the file with changes
            text.to(filename.replace("README.md", "index.md"));
        }
    });
    JSON.stringify(versions).to("./versions.json");
    echo(`> Updating files (Steps 4-9)${" ".repeat(50)}`);

    // 10. Copy temporary directory to site's docs folder
    echo("> Copying the temporary directory into the site's docs folder (Step 10)");
    let outputDir = DOCS_DIR;

    if (prereleaseVersion) {
        outputDir += `/${prereleaseVersion}`;
        if (!test("-d", outputDir)) {
            mkdir(outputDir);
        }
    }
    cp("-rf", `${TEMP_DIR}*`, outputDir);

    // 11. Generate rules index page
    if (prereleaseVersion) {
        echo("> Skipping generating rules index page because this is a prerelease (Step 11)");
    } else {
        echo("> Generating the rules index page (Step 11)");
        generateRuleIndexPage();
    }

    // 12. Delete temporary directory
    echo("> Removing the temporary directory (Step 12)");
    rm("-rf", TEMP_DIR);

    // 13. Create Example Formatter Output Page
    // NOTE(review): the message below says "(Step 14)" but this is step 13 —
    // looks like a stale label; confirm before changing the user-facing string.
    echo("> Creating the formatter examples (Step 14)");
    generateFormatterExamples(getFormatterResults(), prereleaseVersion);

    echo("Done generating eslint.org");
};
777 | ||
/**
 * Builds the browser bundle with webpack into BUILD_DIR.
 * @param {string} [mode="none"] The webpack mode ("production", "development", or "none").
 * @returns {void}
 */
target.webpack = function(mode = "none") {
    const webpackCommand = `${getBinFile("webpack")} --mode=${mode} --output-path=${BUILD_DIR}`;

    exec(webpackCommand);
};
781 | ||
/**
 * Validates every rule file listed in RULE_FILES: each rule must have a doc
 * page with a properly formatted title, a test file, an entry in
 * tools/rule-types.json, an entry in the rules index, consistent
 * eslint:recommended membership, a rule-type JSDoc comment, and (if
 * deprecated) deprecation notes in both code and docs.
 * Exits the process with code 1 if any check fails.
 * @returns {void}
 */
target.checkRuleFiles = function() {

    echo("Validating rules");

    // Hand-maintained map of ruleId -> rule type; every rule must appear here.
    const ruleTypes = require("./tools/rule-types.json");
    let errors = 0;

    RULE_FILES.forEach(filename => {
        const basename = path.basename(filename, ".js");
        const docFilename = `docs/rules/${basename}.md`;

        // shelljs cat() returns the file contents as a string.
        const docText = cat(docFilename);
        const ruleCode = cat(filename);

        /**
         * Check if basename is present in rule-types.json file.
         * @returns {boolean} true if present
         * @private
         */
        function isInRuleTypes() {
            return Object.prototype.hasOwnProperty.call(ruleTypes, basename);
        }

        /**
         * Check if id is present in title
         * @param {string} id id to check for
         * @returns {boolean} true if present
         * @private
         */
        function hasIdInTitle(id) {
            const idOldAtEndOfTitleRegExp = new RegExp(`^# (.*?) \\(${id}\\)`, "u"); // original format
            const idNewAtBeginningOfTitleRegExp = new RegExp(`^# ${id}: `, "u"); // new format is same as rules index

            /*
             * 1. Added support for new format.
             * 2. Will remove support for old format after all docs files have new format.
             * 3. Will remove this check when the main heading is automatically generated from rule metadata.
             */

            return idNewAtBeginningOfTitleRegExp.test(docText) || idOldAtEndOfTitleRegExp.test(docText);
        }

        /**
         * Check if deprecated information is in rule code and README.md.
         * (The doc-side regex actually runs against the rule's doc page text.)
         * @returns {boolean} true if present
         * @private
         */
        function hasDeprecatedInfo() {
            const deprecatedTagRegExp = /@deprecated in ESLint/u;
            const deprecatedInfoRegExp = /This rule was .+deprecated.+in ESLint/u;

            return deprecatedTagRegExp.test(ruleCode) && deprecatedInfoRegExp.test(docText);
        }

        /**
         * Check if the rule code has the jsdoc comment with the rule type annotation.
         * @returns {boolean} true if present
         * @private
         */
        function hasRuleTypeJSDocComment() {
            const comment = "/** @type {import('../shared/types').Rule} */";

            return ruleCode.includes(comment);
        }

        // check for docs
        if (!test("-f", docFilename)) {
            console.error("Missing documentation for rule %s", basename);
            errors++;
        } else {

            // check for proper doc format
            if (!hasIdInTitle(basename)) {
                console.error("Missing id in the doc page's title of rule %s", basename);
                errors++;
            }
        }

        // check for recommended configuration
        if (!isInRuleTypes()) {
            console.error("Missing setting for %s in tools/rule-types.json", basename);
            errors++;
        }

        // check parity between rules index file and rules directory
        const ruleIdsInIndex = require("./lib/rules/index");
        const ruleDef = ruleIdsInIndex.get(basename);

        if (!ruleDef) {
            console.error(`Missing rule from index (./lib/rules/index.js): ${basename}. If you just added a new rule then add an entry for it in this file.`);
            errors++;
        } else {

            // check deprecated
            if (ruleDef.meta.deprecated && !hasDeprecatedInfo()) {
                console.error(`Missing deprecated information in ${basename} rule code or README.md. Please write @deprecated tag in code or 「This rule was deprecated in ESLint ...」 in README.md.`);
                errors++;
            }

            // check eslint:recommended: a recommended rule must be "error" in the
            // shared config, and a non-recommended rule must not appear at all.
            const recommended = require("./conf/eslint-recommended");

            if (ruleDef.meta.docs.recommended) {
                if (recommended.rules[basename] !== "error") {
                    console.error(`Missing rule from eslint:recommended (./conf/eslint-recommended.js): ${basename}. If you just made a rule recommended then add an entry for it in this file.`);
                    errors++;
                }
            } else {
                if (basename in recommended.rules) {
                    console.error(`Extra rule in eslint:recommended (./conf/eslint-recommended.js): ${basename}. If you just added a rule then don't add an entry for it in this file.`);
                    errors++;
                }
            }

            if (!hasRuleTypeJSDocComment()) {
                console.error(`Missing rule type JSDoc comment from ${basename} rule code.`);
                errors++;
            }
        }

        // check for tests
        if (!test("-f", `tests/lib/rules/${basename}.js`)) {
            console.error("Missing tests for rule %s", basename);
            errors++;
        }

    });

    if (errors) {
        exit(1);
    }

};
913 | ||
/**
 * Scans every dependency's license via npm-license and fails the build
 * (exit code 1) when any license does not match OPEN_SOURCE_LICENSES.
 * @returns {void}
 */
target.checkLicenses = function() {

    /**
     * Check if a dependency is eligible to be used by us
     * @param {Object} dependency dependency to check
     * @returns {boolean} true if we have permission
     * @private
     */
    function isPermissible(dependency) {
        const { name, licenses } = dependency;

        // npm-license may report a single license or an array; any one match is enough.
        if (Array.isArray(licenses)) {
            return licenses.some(license => isPermissible({ name, licenses: license }));
        }

        return OPEN_SOURCE_LICENSES.some(license => license.test(licenses));
    }

    echo("Validating licenses");

    checker.init({ start: __dirname }, deps => {
        const impermissible = Object.entries(deps)
            .map(([name, info]) => ({ name, licenses: info.licenses }))
            .filter(dependency => !isPermissible(dependency));

        if (impermissible.length) {
            for (const dependency of impermissible) {
                console.error(
                    "%s license for %s is impermissible.",
                    dependency.licenses,
                    dependency.name
                );
            }
            exit(1);
        }
    });
};
957 | ||
/**
 * Downloads a repository which has many js files to test performance with multi files.
 * Here, it's eslint@1.10.3 (450 files). The clone is skipped when the target
 * directory already exists.
 * @param {Function} cb A callback function.
 * @returns {void}
 */
function downloadMultifilesTestTarget(cb) {
    if (test("-d", PERF_MULTIFILES_TARGET_DIR)) {

        // Already downloaded: still invoke the callback asynchronously.
        process.nextTick(cb);
        return;
    }

    mkdir("-p", PERF_MULTIFILES_TARGET_DIR);
    echo("Downloading the repository of multi-files performance test target.");
    exec(`git clone -b v1.10.3 --depth 1 https://github.com/eslint/eslint.git "${PERF_MULTIFILES_TARGET_DIR}"`, { silent: true }, cb);
}
973 | ||
/**
 * Creates a config file to use performance tests.
 * This config is turning all core rules on (severity 1).
 * @returns {void}
 */
function createConfigForPerformanceTest() {
    const header = [
        "root: true",
        "env:",
        " node: true",
        " es6: true",
        "rules:"
    ];

    // One "ruleId: 1" entry per built-in rule.
    const ruleSettings = Array.from(builtinRules.keys(), ruleId => ` ${ruleId}: 1`);

    // shelljs extends String with .to() for writing to a file.
    [...header, ...ruleSettings].join("\n").to(PERF_ESLINTRC);
}
994 | ||
609c276f TL |
995 | /** |
996 | * @callback TimeCallback | |
997 | * @param {?int[]} results | |
998 | * @returns {void} | |
999 | */ | |
1000 | ||
eb39fafa DC |
/**
 * Calculates the time for each run for performance
 * @param {string} cmd cmd
 * @param {int} runs Total number of runs to do
 * @param {int} runNumber Current run number
 * @param {int[]} results Collection results from each run
 * @param {TimeCallback} cb Function to call when everything is done
 * @returns {void} calls the cb with all the results
 * @private
 */
function time(cmd, runs, runNumber, results, cb) {
    const start = process.hrtime();

    exec(cmd, { maxBuffer: 64 * 1024 * 1024, silent: true }, (code, stdout, stderr) => {
        const [seconds, nanoseconds] = process.hrtime(start);
        const actual = (seconds * 1e3 + nanoseconds / 1e6); // ms

        // A non-zero exit code aborts the whole series: report and signal null.
        if (code) {
            echo(` Performance Run #${runNumber} failed.`);
            if (stdout) {
                echo(`STDOUT:\n${stdout}\n\n`);
            }
            if (stderr) {
                echo(`STDERR:\n${stderr}\n\n`);
            }
            cb(null);
            return;
        }

        results.push(actual);
        echo(` Performance Run #${runNumber}: %dms`, actual);

        // Recurse until all requested runs have completed.
        if (runs > 1) {
            time(cmd, runs - 1, runNumber + 1, results, cb);
        } else {
            cb(results);
        }
    });
}
1040 | ||
/**
 * Run a performance test.
 * @param {string} title A title.
 * @param {string} targets Test targets.
 * @param {number} multiplier A multiplier for limitation.
 * @param {Function} cb A callback function.
 * @returns {void}
 */
function runPerformanceTest(title, targets, multiplier, cb) {
    const cpuSpeed = os.cpus()[0].speed;

    // The time budget scales inversely with the first CPU's reported speed.
    const max = multiplier / cpuSpeed;
    const cmd = `${ESLINT}--config "${PERF_ESLINTRC}" --no-eslintrc --no-ignore ${targets}`;

    echo("");
    echo(title);
    echo(" CPU Speed is %d with multiplier %d", cpuSpeed, multiplier);

    time(cmd, 5, 1, [], results => {
        if (!results || results.length === 0) { // No results? Something is wrong.
            throw new Error("Performance test failed.");
        }

        // Compare the median run against the budget.
        const sorted = [...results].sort((a, b) => a - b);
        const median = sorted[~~(sorted.length / 2)];

        echo("");
        if (median > max) {
            echo(" Performance budget exceeded: %dms (limit: %dms)", median, max);
        } else {
            echo(" Performance budget ok: %dms (limit: %dms)", median, max);
        }
        echo("");
        cb();
    });
}
1077 | ||
/**
 * Run the load performance for eslint
 * Measures module load time five times and reports the median.
 * @returns {void}
 * @private
 */
function loadPerformance() {
    echo("");
    echo("Loading:");

    const loadTimes = [];

    for (let run = 1; run <= 5; run++) {
        const { loadTime } = loadPerf({ checkDependencies: false });

        echo(` Load performance Run #${run}: %dms`, loadTime);
        loadTimes.push(loadTime);
    }

    loadTimes.sort((a, b) => a - b);
    const median = loadTimes[~~(loadTimes.length / 2)];

    echo("");
    echo(" Load Performance median: %dms", median);
    echo("");
}
1105 | ||
/**
 * Runs the full performance suite: load time, a single-file lint,
 * and a multi-file lint over a downloaded eslint@1.10.3 checkout.
 * @returns {void}
 */
target.perf = function() {
    downloadMultifilesTestTarget(() => {
        createConfigForPerformanceTest();

        loadPerformance();

        runPerformanceTest(
            "Single File:",
            "tests/performance/jshint.js",
            PERF_MULTIPLIER,
            () => {

                // Count test target files (glob needs forward slashes on Windows).
                const pattern = process.platform === "win32"
                    ? PERF_MULTIFILES_TARGETS.replace(/\\/gu, "/")
                    : PERF_MULTIFILES_TARGETS;
                const count = glob.sync(pattern.slice(1, -1)).length; // strip quotes

                runPerformanceTest(
                    `Multi Files (${count} files):`,
                    PERF_MULTIFILES_TARGETS,
                    3 * PERF_MULTIPLIER,
                    () => {}
                );
            }
        );
    });
};
1138 | ||
// Release management targets: thin wrappers around the release helper
// functions defined earlier in this file (which use the eslint-release package).
target.generateRelease = generateRelease;

// Takes the prerelease type (e.g. "alpha", "beta", "rc") as the first CLI argument.
target.generatePrerelease = ([prereleaseType]) => generatePrerelease(prereleaseType);
target.publishRelease = publishRelease;