tools: update doctool dependencies, migrate to ESM #38966

Status: Closed · wants to merge 4 commits · showing changes from 2 commits
36 changes: 18 additions & 18 deletions Makefile
@@ -338,7 +338,7 @@ test-valgrind: all
test-check-deopts: all
$(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) --check-deopts parallel sequential

DOCBUILDSTAMP_PREREQS = tools/doc/addon-verify.js doc/api/addons.md
DOCBUILDSTAMP_PREREQS = tools/doc/addon-verify.mjs doc/api/addons.md

ifeq ($(OSTYPE),aix)
DOCBUILDSTAMP_PREREQS := $(DOCBUILDSTAMP_PREREQS) out/$(BUILDTYPE)/node.exp
@@ -589,12 +589,12 @@ test-doc: doc-only lint-md ## Builds, lints, and verifies the docs.
else \
$(PYTHON) tools/test.py $(PARALLEL_ARGS) doctool; \
fi
$(NODE) tools/doc/checkLinks.js .
$(NODE) tools/doc/checkLinks.mjs .

.PHONY: test-doc-ci
test-doc-ci: doc-only
$(PYTHON) tools/test.py --shell $(NODE) $(TEST_CI_ARGS) $(PARALLEL_ARGS) doctool
$(NODE) tools/doc/checkLinks.js .
$(NODE) tools/doc/checkLinks.mjs .

test-known-issues: all
$(PYTHON) tools/test.py $(PARALLEL_ARGS) known_issues
@@ -736,33 +736,33 @@ run-npm-ci = $(PWD)/$(NPM) ci

LINK_DATA = out/doc/apilinks.json
VERSIONS_DATA = out/previous-doc-versions.json
gen-api = tools/doc/generate.js --node-version=$(FULLVERSION) \
gen-api = tools/doc/generate.mjs --node-version=$(FULLVERSION) \
--apilinks=$(LINK_DATA) $< --output-directory=out/doc/api \
--versions-file=$(VERSIONS_DATA)
gen-apilink = tools/doc/apilinks.js $(LINK_DATA) $(wildcard lib/*.js)
gen-apilink = tools/doc/apilinks.mjs $(LINK_DATA) $(wildcard lib/*.js)

$(LINK_DATA): $(wildcard lib/*.js) tools/doc/apilinks.js | out/doc
$(LINK_DATA): $(wildcard lib/*.js) tools/doc/apilinks.mjs | out/doc
$(call available-node, $(gen-apilink))

# Regenerate previous versions data if the current version changes
$(VERSIONS_DATA): CHANGELOG.md src/node_version.h tools/doc/versions.js
$(call available-node, tools/doc/versions.js $@)
$(VERSIONS_DATA): CHANGELOG.md src/node_version.h tools/doc/versions.mjs
$(call available-node, tools/doc/versions.mjs $@)

out/doc/api/%.json out/doc/api/%.html: doc/api/%.md tools/doc/generate.js \
tools/doc/markdown.js tools/doc/html.js tools/doc/json.js \
tools/doc/apilinks.js $(VERSIONS_DATA) | $(LINK_DATA) out/doc/api
out/doc/api/%.json out/doc/api/%.html: doc/api/%.md tools/doc/generate.mjs \
tools/doc/markdown.mjs tools/doc/html.mjs tools/doc/json.mjs \
tools/doc/apilinks.mjs $(VERSIONS_DATA) | $(LINK_DATA) out/doc/api
$(call available-node, $(gen-api))

out/doc/api/all.html: $(apidocs_html) tools/doc/allhtml.js \
tools/doc/apilinks.js | out/doc/api
$(call available-node, tools/doc/allhtml.js)
out/doc/api/all.html: $(apidocs_html) tools/doc/allhtml.mjs \
tools/doc/apilinks.mjs | out/doc/api
$(call available-node, tools/doc/allhtml.mjs)

out/doc/api/all.json: $(apidocs_json) tools/doc/alljson.js | out/doc/api
$(call available-node, tools/doc/alljson.js)
out/doc/api/all.json: $(apidocs_json) tools/doc/alljson.mjs | out/doc/api
$(call available-node, tools/doc/alljson.mjs)

.PHONY: out/doc/api/stability
out/doc/api/stability: out/doc/api/all.json tools/doc/stability.js | out/doc/api
$(call available-node, tools/doc/stability.js)
out/doc/api/stability: out/doc/api/all.json tools/doc/stability.mjs | out/doc/api
$(call available-node, tools/doc/stability.mjs)

.PHONY: docopen
docopen: out/doc/api/all.html
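For context on why the Makefile rules now point at `.mjs` entry points: Node.js always treats a `.mjs` file as an ES module, regardless of any nearby `package.json` `"type"` field, so the renamed doctool scripts can rely on `import`/`export` and top-level `await`. A minimal sketch of what such an entry point can look like (the file name and behaviour are illustrative only, not part of this PR):

```js
// example-tool.mjs — illustrative only; the .mjs extension opts this file into ESM.
import { readFile } from 'fs/promises';

// Top-level await is available in ES modules, so no wrapper function is needed.
const source = await readFile(process.argv[2], 'utf8');
console.log(`Read ${source.length} characters from ${process.argv[2]}`);
```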
2 changes: 1 addition & 1 deletion doc/api/index.md
@@ -1,6 +1,6 @@
<!--
NB(chrisdickinson): if you move this file, be sure to update
tools/doc/html.js to point at the new location.
tools/doc/html.mjs to point at the new location.
-->

<!--introduced_in=v0.10.0-->
2 changes: 2 additions & 0 deletions test/common/index.mjs
@@ -30,6 +30,7 @@ const {
allowGlobals,
mustCall,
mustCallAtLeast,
mustSucceed,
hasMultiLocalhost,
skipIfDumbTerminal,
skipIfEslintMissing,
@@ -75,6 +76,7 @@ export {
allowGlobals,
mustCall,
mustCallAtLeast,
mustSucceed,
hasMultiLocalhost,
skipIfDumbTerminal,
skipIfEslintMissing,
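This hunk only re-exports `mustSucceed` through the ESM wrapper around `test/common`, so the migrated `.mjs` tests can use it. `mustSucceed(fn)` returns a callback that fails the test if it receives an error and otherwise forwards the remaining arguments to `fn`. A hedged usage sketch (the import path is the one used by these tests; everything else is illustrative):

```js
import { mustSucceed } from '../common/index.mjs';
import fs from 'fs';

// The wrapped callback asserts that no error was passed before calling through.
fs.readFile(new URL(import.meta.url), mustSucceed((contents) => {
  console.log(`read ${contents.length} bytes`);
}));
```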
23 changes: 12 additions & 11 deletions test/doctool/test-apilinks.js → test/doctool/test-apilinks.mjs
@@ -1,14 +1,15 @@
'use strict';

require('../common');
const fixtures = require('../common/fixtures');
const tmpdir = require('../common/tmpdir');
const fs = require('fs');
const assert = require('assert');
const path = require('path');
const { execFileSync } = require('child_process');

const script = path.join(__dirname, '..', '..', 'tools', 'doc', 'apilinks.js');
import '../common/index.mjs';
import * as fixtures from '../common/fixtures.mjs';
import tmpdir from '../common/tmpdir.js';

import assert from 'assert';
import { execFileSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';

const script = fileURLToPath(
new URL('../../tools/doc/apilinks.mjs', import.meta.url));
const apilinks = fixtures.path('apilinks');

tmpdir.refresh();
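The rewrite above uses the standard ESM replacement for `__dirname`: resolve a URL against `import.meta.url`, then convert it with `fileURLToPath` wherever a string path is required (for example when spawning a child process). A minimal sketch of the pattern, not tied to any file in this PR:

```js
import { fileURLToPath } from 'url';

// __dirname does not exist in ES modules; derive sibling paths from import.meta.url.
const scriptPath = fileURLToPath(new URL('./some-script.mjs', import.meta.url));
console.log(scriptPath);
```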
@@ -1,22 +1,14 @@
'use strict';
import '../common/index.mjs';
import * as fixtures from '../common/fixtures.mjs';

const common = require('../common');
// The doctool currently uses js-yaml from the tool/node_modules/eslint/ tree.
try {
require('../../tools/node_modules/eslint/node_modules/js-yaml');
} catch {
common.skip('missing js-yaml (eslint not present)');
}
import assert from 'assert';
import { readFileSync } from 'fs';
import { createRequire } from 'module';

const assert = require('assert');
const { readFileSync } = require('fs');
const fixtures = require('../common/fixtures');
const { replaceLinks } = require('../../tools/doc/markdown.js');
const html = require('../../tools/doc/html.js');
const path = require('path');
import * as html from '../../tools/doc/html.mjs';
import { replaceLinks } from '../../tools/doc/markdown.mjs';

module.paths.unshift(
path.join(__dirname, '..', '..', 'tools', 'doc', 'node_modules'));
const require = createRequire(new URL('../../tools/doc/', import.meta.url));
const unified = require('unified');
const markdown = require('remark-parse');
const remark2rehype = require('remark-rehype');
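`createRequire` is the usual way for an ES module to load CommonJS-only dependencies. The URL passed to it decides where resolution starts, which is why the test anchors it at `tools/doc/`: packages under `tools/doc/node_modules` (such as `unified` and `remark-parse`) then resolve without touching `module.paths`. A small sketch with a hypothetical package name:

```js
import { createRequire } from 'module';

// Module resolution starts from the directory encoded in this URL.
const require = createRequire(new URL('../../tools/doc/', import.meta.url));
const someCjsDep = require('some-cjs-package'); // hypothetical dependency name
```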
@@ -1,21 +1,13 @@
'use strict';
import * as common from '../common/index.mjs';
import * as fixtures from '../common/fixtures.mjs';

const common = require('../common');
// The doctool currently uses js-yaml from the tool/node_modules/eslint/ tree.
try {
require('../../tools/node_modules/eslint/node_modules/js-yaml');
} catch {
common.skip('missing js-yaml (eslint not present)');
}
import assert from 'assert';
import fs from 'fs';
import { createRequire } from 'module';

const assert = require('assert');
const fs = require('fs');
const path = require('path');
const fixtures = require('../common/fixtures');
const json = require('../../tools/doc/json.js');
import * as json from '../../tools/doc/json.mjs';

module.paths.unshift(
path.join(__dirname, '..', '..', 'tools', 'doc', 'node_modules'));
const require = createRequire(new URL('../../tools/doc/', import.meta.url));
const unified = require('unified');
const markdown = require('remark-parse');

@@ -1,15 +1,16 @@
'use strict';
import '../common/index.mjs';
import tmpdir from '../common/tmpdir.js';

require('../common');
const assert = require('assert');
const { spawnSync } = require('child_process');
const fs = require('fs');
const path = require('path');
const tmpdir = require('../common/tmpdir');
const util = require('util');
import assert from 'assert';
import { spawnSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import util from 'util';

const debuglog = util.debuglog('test');
const versionsTool = path.resolve(__dirname, '../../tools/doc/versions.js');
const versionsTool = fileURLToPath(
new URL('../../tools/doc/versions.mjs', import.meta.url));

// At the time of writing these are the minimum expected versions.
// New versions of Node.js do not have to be explicitly added here.
@@ -1,9 +1,8 @@
'use strict';
import '../common/index.mjs';

require('../common');
const assert = require('assert');
import assert from 'assert';

const { referenceToLocalMdFile } = require('../../tools/doc/markdown.js');
import { referenceToLocalMdFile } from '../../tools/doc/markdown.mjs';

{
const shouldBeSpotted = [
27 changes: 14 additions & 13 deletions test/doctool/test-make-doc.js → test/doctool/test-make-doc.mjs
@@ -1,20 +1,20 @@
'use strict';
const common = require('../common');
import * as common from '../common/index.mjs';

import assert from 'assert';
import fs from 'fs';
import path from 'path';

if (common.isWindows) {
common.skip('`make doc` does not run on Windows');
}

// This tests that `make doc` generates the documentation properly.
// Note that for this test to pass, `make doc` must be run first.

const assert = require('assert');
const fs = require('fs');
const path = require('path');

const apiPath = path.resolve(__dirname, '..', '..', 'out', 'doc', 'api');
const mdPath = path.resolve(__dirname, '..', '..', 'doc', 'api');
const allMD = fs.readdirSync(mdPath);
const allDocs = fs.readdirSync(apiPath);
const apiURL = new URL('../../out/doc/api/', import.meta.url);
const mdURL = new URL('../../doc/api/', import.meta.url);
const allMD = fs.readdirSync(mdURL);
const allDocs = fs.readdirSync(apiURL);
assert.ok(allDocs.includes('index.html'));

const actualDocs = allDocs.filter(
@@ -33,7 +33,7 @@ for (const name of actualDocs) {
);
}

const toc = fs.readFileSync(path.resolve(apiPath, 'index.html'), 'utf8');
const toc = fs.readFileSync(new URL('./index.html', apiURL), 'utf8');
const re = /href="([^/]+\.html)"/;
const globalRe = new RegExp(re, 'g');
const links = toc.match(globalRe);
@@ -56,8 +56,9 @@ for (const actualDoc of actualDocs) {
assert.ok(
expectedDocs.includes(actualDoc), `${actualDoc} does not match TOC`);

assert.ok(
fs.statSync(path.join(apiPath, actualDoc)).size !== 0,
assert.notStrictEqual(
fs.statSync(new URL(`./${actualDoc}`, apiURL)).size,
0,
`${actualDoc} is empty`
);
}
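A second pattern visible in this file: most `fs` APIs accept WHATWG `file:` URL objects directly, so values built with `new URL(..., import.meta.url)` can be passed straight to `readdirSync`, `statSync`, and `readFileSync` without converting them to path strings. A small sketch (directory and file names are illustrative):

```js
import fs from 'fs';

// fs APIs accept file: URLs as path arguments.
const dirURL = new URL('./', import.meta.url);
for (const name of fs.readdirSync(dirURL)) {
  const { size } = fs.statSync(new URL(`./${name}`, dirURL));
  console.log(name, size);
}
```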
2 changes: 1 addition & 1 deletion tools/doc/README.md
@@ -78,7 +78,7 @@ added: v0.10.0

* Returns: {SomeClass | null} The next `SomeClass` in line.

`SomeClass` must be registered in `tools/doc/type-parser.js`
`SomeClass` must be registered in `tools/doc/type-parser.mjs`
to be properly parsed in `{type}` fields.

### SomeClass.someProperty
59 changes: 25 additions & 34 deletions tools/doc/addon-verify.js → tools/doc/addon-verify.mjs
@@ -1,23 +1,21 @@
'use strict';

// doc/api/addons.md has a bunch of code. Extract it for verification
// that the C++ code compiles and the js code runs.
// Add .gyp files which will be used to compile the C++ code.
// Modify the require paths in the js code to pull from the build tree.
// Triggered from the build-addons target in the Makefile and vcbuild.bat.

const { mkdir, writeFile } = require('fs');
const { resolve } = require('path');
const vfile = require('to-vfile');
const unified = require('unified');
const remarkParse = require('remark-parse');
const gfm = require('remark-gfm');
import { mkdir, writeFile } from 'fs/promises';

import gfm from 'remark-gfm';
import remarkParse from 'remark-parse';
import { toVFile } from 'to-vfile';
import unified from 'unified';

const rootDir = resolve(__dirname, '..', '..');
const doc = resolve(rootDir, 'doc', 'api', 'addons.md');
const verifyDir = resolve(rootDir, 'test', 'addons');
const rootDir = new URL('../../', import.meta.url);
const doc = new URL('./doc/api/addons.md', rootDir);
const verifyDir = new URL('./test/addons/', rootDir);

const file = vfile.readSync(doc, 'utf8');
const file = toVFile.readSync(doc, 'utf8');
const tree = unified().use(remarkParse).use(gfm).parse(file);
const addons = {};
let id = 0;
@@ -26,7 +24,7 @@ let currentHeader;
const validNames = /^\/\/\s+(.*\.(?:cc|h|js))[\r\n]/;
tree.children.forEach((node) => {
if (node.type === 'heading') {
currentHeader = file.contents.slice(
currentHeader = file.value.slice(
node.children[0].position.start.offset,
node.position.end.offset);
addons[currentHeader] = { files: {} };
@@ -38,23 +36,24 @@ tree.children.forEach((node) => {
}
});

Object.keys(addons).forEach((header) => {
verifyFiles(addons[header].files, header);
});
await Promise.all(
Object.keys(addons).flatMap(
(header) => verifyFiles(addons[header].files, header)
));

function verifyFiles(files, blockName) {
const fileNames = Object.keys(files);

// Must have a .cc and a .js to be a valid test.
if (!fileNames.some((name) => name.endsWith('.cc')) ||
!fileNames.some((name) => name.endsWith('.js'))) {
return;
return [];
}

blockName = blockName.toLowerCase().replace(/\s/g, '_').replace(/\W/g, '');
const dir = resolve(
const dir = new URL(
`./${String(++id).padStart(2, '0')}_${blockName}/`,
verifyDir,
`${String(++id).padStart(2, '0')}_${blockName}`
);

files = fileNames.map((name) => {
@@ -68,14 +67,14 @@ ${files[name].replace(
`;
}
return {
path: resolve(dir, name),
name: name,
content: files[name]
content: files[name],
name,
url: new URL(`./${name}`, dir),
};
});

files.push({
path: resolve(dir, 'binding.gyp'),
url: new URL('./binding.gyp', dir),
content: JSON.stringify({
targets: [
{
@@ -87,16 +86,8 @@ })
})
});

mkdir(dir, () => {
// Ignore errors.
const dirCreation = mkdir(dir);

files.forEach(({ path, content }) => {
writeFile(path, content, (err) => {
if (err)
throw err;

console.log(`Wrote ${path}`);
});
});
});
return files.map(({ url, content }) =>
dirCreation.then(() => writeFile(url, content)));
}
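The rewritten helper above replaces callback-style `fs.mkdir`/`fs.writeFile` with their `fs/promises` counterparts and lets the top-level `await Promise.all(...)` wait for every write to land. A reduced sketch of that shape, under the assumption that the target directory may or may not already exist (paths and contents are illustrative):

```js
import { mkdir, writeFile } from 'fs/promises';

const dir = new URL('./generated-example/', import.meta.url); // illustrative path
// { recursive: true } keeps mkdir from rejecting if the directory already exists.
const dirCreation = mkdir(dir, { recursive: true });

const files = [
  { url: new URL('./a.txt', dir), content: 'first file\n' },
  { url: new URL('./b.txt', dir), content: 'second file\n' },
];

// Start each write once the directory exists, then wait for all of them.
await Promise.all(files.map(({ url, content }) =>
  dirCreation.then(() => writeFile(url, content))));
```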