diff --git a/apps/oxlint/src-js/generated/constants.ts b/apps/oxlint/src-js/generated/constants.ts index 913aa7ddac3ae..dacc195349cc6 100644 --- a/apps/oxlint/src-js/generated/constants.ts +++ b/apps/oxlint/src-js/generated/constants.ts @@ -81,6 +81,14 @@ export const COMMENT_SIZE = 16; */ export const COMMENT_KIND_OFFSET = 12; +/** + * Byte offset of the deserialized flag within each token/comment entry. + * + * Corresponds to `content` field of `Comment` struct, and unused bytes in `Token`. + * Initialized to 0 by Rust. JS side sets to 1 after deserialization. + */ +export const DESERIALIZED_FLAG_OFFSET = 15; + /** * Discriminant value for `CommentKind::Line`. */ diff --git a/apps/oxlint/src-js/plugins/comments.ts b/apps/oxlint/src-js/plugins/comments.ts index d936e7f6cb797..f8696f1d06cf1 100644 --- a/apps/oxlint/src-js/plugins/comments.ts +++ b/apps/oxlint/src-js/plugins/comments.ts @@ -10,8 +10,10 @@ import { COMMENT_KIND_OFFSET, COMMENT_LINE_KIND, DATA_POINTER_POS_32, + DESERIALIZED_FLAG_OFFSET, } from "../generated/constants.ts"; import { computeLoc } from "./location.ts"; +import { FLAG_NOT_DESERIALIZED, FLAG_DESERIALIZED } from "./tokens.ts"; import { debugAssert, debugAssertIsNonNull } from "../utils/asserts.ts"; import type { Location, Span } from "./location.ts"; @@ -31,9 +33,20 @@ export type { CommentType as Comment }; // Created lazily only when needed. export let comments: CommentType[] | null = null; +// Typed array views over the comments region of the buffer. +// These persist for the lifetime of the file (cleared in `resetComments`). +let commentsUint8: Uint8Array | null = null; +export let commentsUint32: Uint32Array | null = null; + +// Number of comments for the current file. +export let commentsLen = 0; + +// Whether all comments have been deserialized into `cachedComments`. +export let allCommentsDeserialized = false; + // Cached comment objects, reused across files to reduce GC pressure. 
// Comments are mutated in place during deserialization, then `comments` is set to a slice of this array. -const cachedComments: Comment[] = []; +export const cachedComments: Comment[] = []; // Comments array from previous file. // Reused for next file if next file has fewer comments than the previous file (by truncating to correct length). @@ -44,7 +57,14 @@ const commentsWithLoc: Comment[] = []; // Empty comments array. // Reused for all files which don't have any comments. Frozen to avoid rules mutating it. -const emptyComments: CommentType[] = Object.freeze([]) as unknown as CommentType[]; +const EMPTY_COMMENTS: CommentType[] = Object.freeze([]) as unknown as CommentType[]; + +// Empty typed arrays, reused for files with no comments. +const EMPTY_UINT8_ARRAY = new Uint8Array(0); +const EMPTY_UINT32_ARRAY = new Uint32Array(0); + +const COMMENT_SIZE_SHIFT = 4; // 1 << 4 == 16 bytes, the size of `Comment` in Rust +debugAssert(COMMENT_SIZE === 1 << COMMENT_SIZE_SHIFT); // Reset `#loc` field on a `Comment` class instance. let resetCommentLoc: (comment: Comment) => void; @@ -86,13 +106,67 @@ class Comment implements Span { Object.defineProperty(Comment.prototype, "loc", { enumerable: true }); /** - * Initialize comments for current file. - * - * Deserializes comments from the buffer using object pooling. - * If the program has a hashbang, sets first comment to a `Shebang` comment. + * Deserialize all comments and build the `comments` array. + * Called by `ast.comments` getter. */ export function initComments(): void { - debugAssert(comments === null, "Comments already initialized"); + debugAssert(comments === null, "Comments already deserialized"); + + if (!allCommentsDeserialized) deserializeComments(); + + // `initCommentsBuffer` (called by `deserializeComments`) sets `comments` for zero-comment files + if (comments !== null) return; + + // Create `comments` array as a slice of `cachedComments` array. 
+ // + // Use `slice` rather than copying comments one-by-one into a new array. + // V8 implements `slice` with a single `memcpy` of the backing store, which is faster + // than N individual `push` calls with bounds checking and potential resizing. + // + // If the comments array from previous file is longer than the current one, + // reuse it and truncate it to avoid the memcpy entirely. + // Assuming random distribution of number of comments in files, this cheaper branch should be hit on 50% of files. + if (previousComments.length >= commentsLen) { + previousComments.length = commentsLen; + comments = previousComments; + } else { + comments = previousComments = cachedComments.slice(0, commentsLen); + } +} + +/** + * Deserialize all comments into `cachedComments`. + * Does NOT build the `comments` array - use `initComments` for that. + */ +export function deserializeComments(): void { + debugAssert(!allCommentsDeserialized, "Comments already deserialized"); + + if (commentsUint32 === null) initCommentsBuffer(); + + for (let i = 0; i < commentsLen; i++) { + deserializeCommentIfNeeded(i); + } + + allCommentsDeserialized = true; + + debugCheckDeserializedComments(); +} + +/** + * Initialize typed array views over the comments region of the buffer. + * + * Populates `commentsUint8`, `commentsUint32`, and `commentsLen`, and grows `cachedComments` if needed. + * Does NOT deserialize comments - they are deserialized lazily via `deserializeCommentIfNeeded`. + * + * Exception: If the file has a hashbang, eagerly deserializes the first comment and sets its type to `Shebang`. + */ +export function initCommentsBuffer(): void { + debugAssert( + commentsUint8 === null && commentsUint32 === null, + "Comments buffer already initialized", + ); + + debugAssertIsNonNull(buffer); // We don't need to deserialize source text if there are no comments, so we could move this call to after // the `commentsLen === 0` check. 
However, various comments methods rely on that if `initComments` has been called, @@ -100,78 +174,109 @@ export function initComments(): void { // in all those methods, which can be called quite frequently. if (sourceText === null) initSourceText(); debugAssertIsNonNull(sourceText); - debugAssertIsNonNull(buffer); const { uint32 } = buffer; const programPos32 = uint32[DATA_POINTER_POS_32] >> 2; const commentsPos = uint32[programPos32 + (COMMENTS_OFFSET >> 2)]; - const commentsLen = uint32[programPos32 + (COMMENTS_LEN_OFFSET >> 2)]; + commentsLen = uint32[programPos32 + (COMMENTS_LEN_OFFSET >> 2)]; // Fast path for files with no comments if (commentsLen === 0) { - comments = emptyComments; + comments = EMPTY_COMMENTS; + commentsUint8 = EMPTY_UINT8_ARRAY; + commentsUint32 = EMPTY_UINT32_ARRAY; + allCommentsDeserialized = true; return; } + // Create typed array views over the comments region of the buffer. + // These are zero-copy views over the same underlying `ArrayBuffer`. + const arrayBuffer = buffer.buffer, + absolutePos = buffer.byteOffset + commentsPos; + commentsUint8 = new Uint8Array(arrayBuffer, absolutePos, commentsLen * COMMENT_SIZE); + commentsUint32 = new Uint32Array(arrayBuffer, absolutePos, commentsLen * (COMMENT_SIZE >> 2)); + // Grow cache if needed (one-time cost as cache warms up) while (cachedComments.length < commentsLen) { cachedComments.push(new Comment()); } - // Deserialize comments from buffer - for (let i = 0; i < commentsLen; i++) { - const comment = cachedComments[i]; + // If file has a hashbang, eagerly deserialize the first comment, and set its type to `Shebang`. + // We do this here instead of lazily when comment 0 is deserialized, to remove code + // from `deserializeCommentIfNeeded`, which can be called many times. + // Rust side adds hashbang comment to start of comments `Vec` as a `Line` comment. + // `commentsUint32[0]` is the start of the first comment. 
+ if (commentsUint32[0] === 0 && sourceText.startsWith("#!")) { + getComment(0).type = "Shebang"; + } - const pos = commentsPos + i * COMMENT_SIZE, - pos32 = pos >> 2; + // Check buffer data has valid ranges and ascending order + debugCheckValidRanges(); +} - const start = uint32[pos32]; - const end = uint32[pos32 + 1]; - const isBlock = buffer[pos + COMMENT_KIND_OFFSET] !== COMMENT_LINE_KIND; +/** + * Get comment at `index`, deserializing if needed. + * + * Caller must ensure `initCommentsBuffer()` has been called before calling this function. + * + * @param index - Comment index in the comments buffer + * @returns Deserialized comment + */ +export function getComment(index: number): CommentType { + const comment = deserializeCommentIfNeeded(index); + return comment === null ? cachedComments[index] : comment; +} - comment.type = isBlock ? "Block" : "Line"; - // Line comments: `// text` -> slice `start + 2..end` - // Block comments: `/* text */` -> slice `start + 2..end - 2` - comment.value = sourceText.slice(start + 2, end - (+isBlock << 1)); - comment.range[0] = comment.start = start; - comment.range[1] = comment.end = end; - } +/** + * Deserialize comment at `index` if not already deserialized. + * + * Caller must ensure `initCommentsBuffer()` has been called before calling this function. + * + * @param index - Comment index in the comments buffer + * @returns `Comment` object if newly deserialized, or `null` if already deserialized + */ +export function deserializeCommentIfNeeded(index: number): Comment | null { + const pos = index << COMMENT_SIZE_SHIFT; - // Set first comment as `Shebang` if file has hashbang. - // Rust side adds hashbang comment to start of comments `Vec` as a `Line` comment. - // `uint32[commentsPos >> 2]` is the start of the first comment. 
- if (uint32[commentsPos >> 2] === 0 && sourceText.startsWith("#!")) { - cachedComments[0].type = "Shebang"; - } + // Fast path: If already deserialized, exit + const flagPos = pos + DESERIALIZED_FLAG_OFFSET; + if (commentsUint8![flagPos] !== FLAG_NOT_DESERIALIZED) return null; - // Use `slice` rather than copying comments one-by-one into a new array. - // V8 implements `slice` with a single `memcpy` of the backing store, which is faster - // than N individual `push` calls with bounds checking and potential resizing. - // - // If the comments array from previous file is longer than the current one, - // reuse it and truncate it to avoid the memcpy entirely. - if (previousComments.length >= commentsLen) { - previousComments.length = commentsLen; - comments = previousComments; - } else { - comments = previousComments = cachedComments.slice(0, commentsLen); - } + // Mark comment as deserialized, so it won't be deserialized again + commentsUint8![flagPos] = FLAG_DESERIALIZED; + + // Deserialize comment into a cached `Comment` object + const comment = cachedComments[index]; + + const isBlock = commentsUint8![pos + COMMENT_KIND_OFFSET] !== COMMENT_LINE_KIND; - // Check `comments` have valid ranges and are in ascending order - debugCheckValidRanges(comments); + const pos32 = pos >> 2, + start = commentsUint32![pos32], + end = commentsUint32![pos32 + 1]; + + comment.type = isBlock ? "Block" : "Line"; + // Line comments: `// text` -> slice `start + 2..end` + // Block comments: `/* text */` -> slice `start + 2..end - 2` + comment.value = sourceText!.slice(start + 2, end - (+isBlock << 1)); + comment.range[0] = comment.start = start; + comment.range[1] = comment.end = end; + + return comment; } /** - * Check comments have valid ranges and are in ascending order. + * Check comments buffer has valid ranges and ascending order. * * Only runs in debug build (tests). In release build, this function is entirely removed by minifier. 
*/ -function debugCheckValidRanges(commentsArr: CommentType[]): void { +function debugCheckValidRanges(): void { if (!DEBUG) return; let lastEnd = 0; - for (const comment of commentsArr) { - const { start, end } = comment; + for (let i = 0; i < commentsLen; i++) { + const pos32 = i << 2; + const start = commentsUint32![pos32]; + const end = commentsUint32![pos32 + 1]; if (end <= start) throw new Error(`Invalid comment range: ${start}-${end}`); if (start < lastEnd) { throw new Error(`Overlapping comments: last end: ${lastEnd}, next start: ${start}`); @@ -184,6 +289,34 @@ function debugCheckValidRanges(commentsArr: CommentType[]): void { } } +/** + * Check all deserialized comments are in ascending order. + * + * Only runs in debug build (tests). In release build, this function is entirely removed by minifier. + */ +function debugCheckDeserializedComments(): void { + if (!DEBUG) return; + + let lastEnd = 0; + for (let i = 0; i < commentsLen; i++) { + const flagPos = (i << COMMENT_SIZE_SHIFT) + DESERIALIZED_FLAG_OFFSET; + if (commentsUint8![flagPos] !== FLAG_DESERIALIZED) { + throw new Error( + `Comment ${i} not marked as deserialized after \`deserializeComments()\` call`, + ); + } + + const { start, end } = cachedComments[i]; + if (end <= start) throw new Error(`Invalid deserialized comment range: ${start}-${end}`); + if (start < lastEnd) { + throw new Error( + `Deserialized comments not in order: last end: ${lastEnd}, next start: ${start}`, + ); + } + lastEnd = end; + } +} + /** * Reset comments after file has been linted. 
* @@ -197,4 +330,8 @@ export function resetComments(): void { commentsWithLoc.length = 0; comments = null; + commentsUint8 = null; + commentsUint32 = null; + commentsLen = 0; + allCommentsDeserialized = false; } diff --git a/apps/oxlint/src-js/plugins/comments_methods.ts b/apps/oxlint/src-js/plugins/comments_methods.ts index ffbff174fc314..92287a3eb6d1b 100644 --- a/apps/oxlint/src-js/plugins/comments_methods.ts +++ b/apps/oxlint/src-js/plugins/comments_methods.ts @@ -2,7 +2,15 @@ * `SourceCode` methods related to comments. */ -import { comments, initComments } from "./comments.ts"; +import { + cachedComments, + comments, + commentsUint32, + commentsLen, + initComments, + initCommentsBuffer, + deserializeCommentIfNeeded, +} from "./comments.ts"; import { sourceText } from "./source_code.ts"; import { firstTokenAtOrAfter } from "./tokens_methods.ts"; import { debugAssertIsNonNull } from "../utils/asserts.ts"; @@ -41,31 +49,40 @@ export function getAllComments(): Comment[] { * @returns Array of `Comment`s in occurrence order. */ export function getCommentsBefore(nodeOrToken: NodeOrToken): Comment[] { - if (comments === null) initComments(); - debugAssertIsNonNull(comments); + if (commentsUint32 === null) initCommentsBuffer(); + debugAssertIsNonNull(commentsUint32); debugAssertIsNonNull(sourceText); + // Early exit for files with no comments + if (commentsLen === 0) return []; + let targetStart = nodeOrToken.range[0]; // start // Binary search for first comment at or past `nodeOrToken`'s start. // Comments before this index are candidates to be included in returned array. 
- const sliceEnd = firstTokenAtOrAfter(comments, targetStart, 0); + const sliceEnd = firstTokenAtOrAfter(commentsUint32, targetStart, 0, commentsLen); - let sliceStart = comments.length; + let sliceStart = commentsLen; for (let i = sliceEnd - 1; i >= 0; i--) { - const comment = comments[i]; - const gap = sourceText.slice(comment.end, targetStart); + // Read `end` from buffer: u32 at offset 1 of each 4 x u32 entry + const commentEnd = commentsUint32[(i << 2) + 1]; + const gap = sourceText.slice(commentEnd, targetStart); // Ensure that there is nothing except whitespace between the end of the // current comment and the start of the next one as we iterate backwards if (WHITESPACE_ONLY_REGEXP.test(gap)) { sliceStart = i; - targetStart = comment.start; + // Read `start` from buffer + targetStart = commentsUint32[i << 2]; } else { break; } } - return comments.slice(sliceStart, sliceEnd); + // Deserialize only the comments we're returning + for (let i = sliceStart; i < sliceEnd; i++) { + deserializeCommentIfNeeded(i); + } + return cachedComments.slice(sliceStart, sliceEnd); } /** @@ -87,32 +104,39 @@ export function getCommentsBefore(nodeOrToken: NodeOrToken): Comment[] { * @returns Array of `Comment`s in occurrence order. */ export function getCommentsAfter(nodeOrToken: NodeOrToken): Comment[] { - if (comments === null) initComments(); - debugAssertIsNonNull(comments); + if (commentsUint32 === null) initCommentsBuffer(); + debugAssertIsNonNull(commentsUint32); debugAssertIsNonNull(sourceText); + // Early exit for files with no comments + if (commentsLen === 0) return []; + let targetEnd = nodeOrToken.range[1]; // end // Binary search for first comment at or past `nodeOrToken`'s end. // Comments from this index onwards are candidates to be included in returned array. 
- const sliceStart = firstTokenAtOrAfter(comments, targetEnd, 0); + const sliceStart = firstTokenAtOrAfter(commentsUint32, targetEnd, 0, commentsLen); - const commentsLength = comments.length; let sliceEnd = 0; - for (let i = sliceStart; i < commentsLength; i++) { + for (let i = sliceStart; i < commentsLen; i++) { // Ensure that there is nothing except whitespace between the // end of the previous comment and the start of the current one - const comment = comments[i]; - const gap = sourceText.slice(targetEnd, comment.start); + const commentStart = commentsUint32[i << 2]; + const gap = sourceText.slice(targetEnd, commentStart); if (WHITESPACE_ONLY_REGEXP.test(gap)) { sliceEnd = i + 1; - targetEnd = comment.end; + // Read `end` from buffer + targetEnd = commentsUint32[(i << 2) + 1]; } else { break; } } - return comments.slice(sliceStart, sliceEnd); + // Deserialize only the comments we're returning + for (let i = sliceStart; i < sliceEnd; i++) { + deserializeCommentIfNeeded(i); + } + return cachedComments.slice(sliceStart, sliceEnd); } /** @@ -121,20 +145,27 @@ export function getCommentsAfter(nodeOrToken: NodeOrToken): Comment[] { * @returns Array of `Comment`s in occurrence order. */ export function getCommentsInside(node: Node): Comment[] { - if (comments === null) initComments(); - debugAssertIsNonNull(comments); + if (commentsUint32 === null) initCommentsBuffer(); + debugAssertIsNonNull(commentsUint32); + + // Early exit for files with no comments + if (commentsLen === 0) return []; const { range } = node, rangeStart = range[0], rangeEnd = range[1]; // Binary search for first comment within `node`'s range - const sliceStart = firstTokenAtOrAfter(comments, rangeStart, 0); + const sliceStart = firstTokenAtOrAfter(commentsUint32, rangeStart, 0, commentsLen); // Binary search for first comment outside `node`'s range. // Its index is used as `sliceEnd`, which is exclusive of the slice. 
- const sliceEnd = firstTokenAtOrAfter(comments, rangeEnd, sliceStart); + const sliceEnd = firstTokenAtOrAfter(commentsUint32, rangeEnd, sliceStart, commentsLen); - return comments.slice(sliceStart, sliceEnd); + // Deserialize only the comments we're returning + for (let i = sliceStart; i < sliceEnd; i++) { + deserializeCommentIfNeeded(i); + } + return cachedComments.slice(sliceStart, sliceEnd); } /** @@ -147,16 +178,26 @@ export function commentsExistBetween( nodeOrToken1: NodeOrToken, nodeOrToken2: NodeOrToken, ): boolean { - if (comments === null) initComments(); - debugAssertIsNonNull(comments); + if (commentsUint32 === null) initCommentsBuffer(); + debugAssertIsNonNull(commentsUint32); + + // Early exit for files with no comments + if (commentsLen === 0) return false; // Find the first comment after `nodeOrToken1` ends. const betweenRangeStart = nodeOrToken1.range[1]; - const firstCommentBetween = firstTokenAtOrAfter(comments, betweenRangeStart, 0); - // Check if it ends before `nodeOrToken2` starts. + const firstCommentBetween = firstTokenAtOrAfter( + commentsUint32, + betweenRangeStart, + 0, + commentsLen, + ); + + // Check if its end is before `nodeOrToken2` starts. + // Read `end` from buffer: u32 at offset 1 of the entry. 
return ( - firstCommentBetween < comments.length && - comments[firstCommentBetween].end <= nodeOrToken2.range[0] + firstCommentBetween < commentsLen && + commentsUint32[(firstCommentBetween << 2) + 1] <= nodeOrToken2.range[0] ); } diff --git a/apps/oxlint/src-js/plugins/source_code.ts b/apps/oxlint/src-js/plugins/source_code.ts index 9dc10fb8f990d..93f33d1d50d67 100644 --- a/apps/oxlint/src-js/plugins/source_code.ts +++ b/apps/oxlint/src-js/plugins/source_code.ts @@ -19,8 +19,8 @@ import { getNodeLoc, initLines, lines, lineStartIndices, resetLines } from "./lo import { resetScopeManager, SCOPE_MANAGER } from "./scope.ts"; import * as scopeMethods from "./scope.ts"; import { resetTokens } from "./tokens.ts"; -import { tokens, tokensAndComments, initTokens, initTokensAndComments } from "./tokens.ts"; import * as tokenMethods from "./tokens_methods.ts"; +import { getTokensAndComments, resetTokensAndComments } from "./tokens_and_comments.ts"; import { debugAssertIsNonNull } from "../utils/asserts.ts"; import type { Program } from "../generated/types.d.ts"; @@ -140,6 +140,7 @@ export function resetSourceAndAst(): void { resetScopeManager(); resetTokens(); resetComments(); + resetTokensAndComments(); } /** @@ -247,12 +248,7 @@ export const SOURCE_CODE = Object.freeze({ */ // This property is present in ESLint's `SourceCode`, but is undocumented get tokensAndComments(): (Token | Comment)[] { - if (tokensAndComments === null) { - if (tokens === null) initTokens(); - initTokensAndComments(); - } - debugAssertIsNonNull(tokensAndComments); - return tokensAndComments; + return getTokensAndComments(); }, /** diff --git a/apps/oxlint/src-js/plugins/tokens.ts b/apps/oxlint/src-js/plugins/tokens.ts index 0f4bedb9f5e90..a3b1a7e0a7b48 100644 --- a/apps/oxlint/src-js/plugins/tokens.ts +++ b/apps/oxlint/src-js/plugins/tokens.ts @@ -3,12 +3,15 @@ */ import { buffer, initSourceText, sourceText } from "./source_code.ts"; -import { comments, initComments } from "./comments.ts"; import { 
computeLoc } from "./location.ts"; -import { TOKENS_OFFSET_POS_32, TOKENS_LEN_POS_32 } from "../generated/constants.ts"; +import { + COMMENT_SIZE, + DESERIALIZED_FLAG_OFFSET, + TOKENS_OFFSET_POS_32, + TOKENS_LEN_POS_32, +} from "../generated/constants.ts"; import { debugAssert, debugAssertIsNonNull } from "../utils/asserts.ts"; -import type { Comment } from "./comments.ts"; import type { Location, Span } from "./location.ts"; /** @@ -89,16 +92,24 @@ export interface TemplateToken extends BaseToken { type: "Template"; } -export type TokenOrComment = TokenType | Comment; - // Tokens for the current file. // Created lazily only when needed. export let tokens: TokenType[] | null = null; -export let tokensAndComments: TokenOrComment[] | null = null; + +// Typed array views over the tokens region of the buffer. +// These persist for the lifetime of the file (cleared in `resetTokens`). +let tokensUint8: Uint8Array | null = null; +export let tokensUint32: Uint32Array | null = null; + +// Number of tokens for the current file. +export let tokensLen = 0; + +// Whether all tokens have been deserialized into `cachedTokens`. +export let allTokensDeserialized = false; // Cached token objects, reused across files to reduce GC pressure. // Tokens are mutated in place during deserialization, then `tokens` is set to a slice of this array. -const cachedTokens: Token[] = []; +export const cachedTokens: Token[] = []; // Tokens array from previous file. // Reused for next file if next file has less tokens than the previous file (by truncating it to correct length). 
@@ -155,10 +166,6 @@ class Token { // Make `loc` property enumerable so that `for (const key in token) ...` includes `loc` in the keys it iterates over Object.defineProperty(Token.prototype, "loc", { enumerable: true }); -// Typed array views over the tokens region of the buffer -let tokensUint8: Uint8Array | null = null; -let tokensUint32: Uint32Array | null = null; - // `ESTreeKind` discriminants (set by Rust side) const PRIVATE_IDENTIFIER_KIND = 2; const REGEXP_KIND = 8; @@ -180,28 +187,89 @@ const TOKEN_TYPES: TokenType["type"][] = [ ]; // Details of Rust `Token` type -const TOKEN_SIZE_SHIFT = 4; // 1 << 4 == 16 bytes, the size of `Token` in Rust +export const TOKEN_SIZE = 16; +debugAssert(TOKEN_SIZE === COMMENT_SIZE, "Size of token, comment, and merged entry must be equal"); + +const TOKEN_SIZE_SHIFT = 4; +debugAssert(TOKEN_SIZE === 1 << TOKEN_SIZE_SHIFT); + const KIND_FIELD_OFFSET = 8; const IS_ESCAPED_FIELD_OFFSET = 10; +// Values for the "deserialized" flag byte in buffer. +// * `FLAG_DESERIALIZED` indicates the token/comment is already deserialized. +// * `FLAG_NOT_DESERIALIZED` indicates the token/comment is not yet deserialized. +// `Token` / `Comment` object may be uninitialized, or contain stale data. +export const FLAG_NOT_DESERIALIZED = 0; +export const FLAG_DESERIALIZED = 1; + /** - * Initialize tokens for current file. + * Deserialize all tokens and build the `tokens` array. + * Called by `ast.tokens` getter. */ -export function initTokens() { +export function initTokens(): void { debugAssert(tokens === null, "Tokens already initialized"); - // Deserialize tokens from buffer - if (sourceText === null) initSourceText(); - debugAssertIsNonNull(sourceText); + if (!allTokensDeserialized) deserializeTokens(); + + // Create `tokens` array as a slice of `cachedTokens` array. + // + // Use `slice` rather than copying tokens one-by-one into a new array. 
+ // V8 implements `slice` with a single `memcpy` of the backing store, which is faster + // than N individual `push` calls with bounds checking and potential resizing. + // + // If the tokens array from previous file is longer than the current one, + // reuse it and truncate it to avoid the memcpy entirely. + // Assuming random distribution of file sizes, this cheaper branch should be hit on 50% of files. + if (previousTokens.length >= tokensLen) { + previousTokens.length = tokensLen; + tokens = previousTokens as TokenType[]; + } else { + tokens = (previousTokens = cachedTokens.slice(0, tokensLen)) as TokenType[]; + } +} + +/** + * Deserialize all tokens into `cachedTokens`. + * Does NOT build the `tokens` array - use `initTokens` for that. + */ +export function deserializeTokens(): void { + debugAssert(!allTokensDeserialized, "Tokens already deserialized"); + + if (tokensUint32 === null) initTokensBuffer(); + + for (let i = 0; i < tokensLen; i++) { + deserializeTokenIfNeeded(i); + } + + allTokensDeserialized = true; + + debugCheckDeserializedTokens(); +} + +/** + * Initialize typed array views over the tokens region of the buffer. + * + * Populates `tokensUint8`, `tokensUint32`, and `tokensLen`, and grows `cachedTokens` if needed. + * Does NOT deserialize tokens - they are deserialized lazily via `deserializeTokenIfNeeded`. 
+ */ +export function initTokensBuffer(): void { + debugAssert(tokensUint8 === null && tokensUint32 === null, "Tokens buffer already initialized"); debugAssertIsNonNull(buffer); + // Various tokens methods rely on `sourceText` being initialized after `initTokensBuffer`, + // so we always initialize it here, even if there are no tokens (empty file) + if (sourceText === null) initSourceText(); + debugAssertIsNonNull(sourceText); + const { uint32 } = buffer; const tokensPos = uint32[TOKENS_OFFSET_POS_32]; - const tokensLen = uint32[TOKENS_LEN_POS_32]; + tokensLen = uint32[TOKENS_LEN_POS_32]; // Create typed array views over just the tokens region of the buffer. // These are zero-copy views over the same underlying `ArrayBuffer`. + // Views persist for the lifetime of the file (cleared in `resetTokens`). const arrayBuffer = buffer.buffer, absolutePos = buffer.byteOffset + tokensPos; tokensUint8 = new Uint8Array(arrayBuffer, absolutePos, tokensLen << TOKEN_SIZE_SHIFT); @@ -212,45 +280,50 @@ export function initTokens() { cachedTokens.push(new Token()); } - // Deserialize into cached token objects - for (let i = 0; i < tokensLen; i++) { - deserializeTokenInto(cachedTokens[i], i); - } - - tokensUint8 = null; - tokensUint32 = null; - - // Use `slice` rather than copying tokens one-by-one into a new array. - // V8 implements `slice` with a single `memcpy` of the backing store, which is faster - // than N individual `push` calls with bounds checking and potential resizing. - // - // If the tokens array from previous file is longer than the current one, - // reuse it and truncate it to avoid the memcpy entirely. - // Assuming random distribution of file sizes, this cheaper branch should be hit on 50% of files. 
- if (previousTokens.length >= tokensLen) { - previousTokens.length = tokensLen; - tokens = previousTokens as TokenType[]; - } else { - tokens = (previousTokens = cachedTokens.slice(0, tokensLen)) as TokenType[]; - } + // Check buffer data has valid ranges and ascending order + debugCheckValidRanges(); +} - // Check `tokens` have valid ranges and are in ascending order - debugCheckValidRanges(tokens, "token"); +/** + * Get token at `index`, deserializing if needed. + * + * Caller must ensure `initTokensBuffer()` has been called before calling this function. + * + * @param index - Token index in the tokens buffer + * @returns Deserialized token + */ +export function getToken(index: number): TokenType { + const token = deserializeTokenIfNeeded(index); + return (token === null ? cachedTokens[index] : token) as TokenType; } /** - * Deserialize token `i` from buffer into an existing token object. - * @param token - Token object to mutate - * @param index - Token index + * Deserialize token at `index` if not already deserialized. + * + * Caller must ensure `initTokensBuffer()` has been called before calling this function. 
+ * + * @param index - Token index in the tokens buffer + * @returns `Token` object if newly deserialized, or `null` if already deserialized */ -function deserializeTokenInto(token: Token, index: number): void { - const pos32 = index << 2; - const start = tokensUint32![pos32], - end = tokensUint32![pos32 + 1]; +export function deserializeTokenIfNeeded(index: number): Token | null { + const pos = index << TOKEN_SIZE_SHIFT; + + // Fast path: If already deserialized, exit + const flagPos = pos + DESERIALIZED_FLAG_OFFSET; + if (tokensUint8![flagPos] !== FLAG_NOT_DESERIALIZED) return null; + + // Mark token as deserialized, so it won't be deserialized again + tokensUint8![flagPos] = FLAG_DESERIALIZED; + + // Deserialize token into a cached `Token` object + const token = cachedTokens[index]; - const pos = pos32 << (TOKEN_SIZE_SHIFT - 2); const kind = tokensUint8![pos + KIND_FIELD_OFFSET]; + const pos32 = pos >> 2, + start = tokensUint32![pos32], + end = tokensUint32![pos32 + 1]; + // Get `value` as slice of source text `start..end`. // Slice `start + 1..end` for private identifiers, to strip leading `#`. let value = sourceText!.slice(start + +(kind === PRIVATE_IDENTIFIER_KIND), end); @@ -283,6 +356,8 @@ function deserializeTokenInto(token: Token, index: number): void { token.value = value; token.range[0] = token.start = start; token.range[1] = token.end = end; + + return token; } /** @@ -301,146 +376,50 @@ function unescapeIdentifier(name: string): string { } /** - * Check `tokens` have valid ranges and are in ascending order. + * Check tokens buffer has valid ranges and ascending order. * * Only runs in debug build (tests). In release build, this function is entirely removed by minifier. 
*/ -function debugCheckValidRanges(tokens: TokenOrComment[], description: string): void { +function debugCheckValidRanges(): void { if (!DEBUG) return; let lastEnd = 0; - for (const token of tokens) { - const { start, end } = token; - if (end <= start) throw new Error(`Invalid ${description} range: ${start}-${end}`); + for (let i = 0; i < tokensLen; i++) { + const pos32 = i << 2; + const start = tokensUint32![pos32]; + const end = tokensUint32![pos32 + 1]; + if (end <= start) throw new Error(`Invalid token range: ${start}-${end}`); if (start < lastEnd) { - throw new Error(`Overlapping ${description}s: last end: ${lastEnd}, next start: ${start}`); + throw new Error(`Overlapping tokens: last end: ${lastEnd}, next start: ${start}`); } lastEnd = end; } } /** - * Initialize `tokensAndComments`. - * - * Caller must ensure `tokens` is initialized before calling this function, - * by calling `initTokens()` if `tokens === null`. - */ -export function initTokensAndComments() { - debugAssertIsNonNull(tokens); - - // Ensure comments are initialized - if (comments === null) initComments(); - debugAssertIsNonNull(comments); - - // Fast paths for file with no comments, or file which is only comments - const commentsLength = comments.length; - if (commentsLength === 0) { - tokensAndComments = tokens; - return; - } - - const tokensLength = tokens.length; - if (tokensLength === 0) { - tokensAndComments = comments; - return; - } - - // File contains both tokens and comments. - // Fill `tokensAndComments` with the 2 arrays interleaved in source order. 
- tokensAndComments = []; - - let tokenIndex = 0, - commentIndex = 0, - token = tokens[0], - comment = comments[0], - tokenStart = token.start, - commentStart = comment.start; - - // Push any leading comments - while (commentStart < tokenStart) { - // Push current comment - tokensAndComments.push(comment); - - // If that was last comment, push all remaining tokens, and exit - if (++commentIndex === commentsLength) { - tokensAndComments.push(...tokens.slice(tokenIndex)); - debugCheckTokensAndComments(); - return; - } - - // Get next comment - comment = comments[commentIndex]; - commentStart = comment.start; - } - - // Push a run of tokens, then a run of comments, and so on, until all tokens and comments are exhausted - while (true) { - // There's at least 1 token and 1 comment remaining, and token is first. - // Push tokens until we reach the next comment or the end. - do { - // Push current token - tokensAndComments.push(token); - - // If that was last token, push all remaining comments, and exit - if (++tokenIndex === tokensLength) { - tokensAndComments.push(...comments.slice(commentIndex)); - debugCheckTokensAndComments(); - return; - } - - // Get next token - token = tokens[tokenIndex]; - tokenStart = token.start; - } while (tokenStart < commentStart); - - // There's at least 1 token and 1 comment remaining, and comment is first. - // Push comments until we reach the next token or the end. 
- do { - // Push current comment - tokensAndComments.push(comment); - - // If that was last comment, push all remaining tokens, and exit - if (++commentIndex === commentsLength) { - tokensAndComments.push(...tokens.slice(tokenIndex)); - debugCheckTokensAndComments(); - return; - } - - // Get next comment - comment = comments[commentIndex]; - commentStart = comment.start; - } while (commentStart < tokenStart); - } - - debugAssert(false, "End of `initTokensAndComments` should be unreachable"); -} - -/** - * Check `tokensAndComments` contains all tokens and comments, in ascending order. + * Check all deserialized tokens are in ascending order. * * Only runs in debug build (tests). In release build, this function is entirely removed by minifier. */ -function debugCheckTokensAndComments() { +function debugCheckDeserializedTokens(): void { if (!DEBUG) return; - debugAssertIsNonNull(tokens); - debugAssertIsNonNull(comments); - debugAssertIsNonNull(tokensAndComments); - - const expected = [...tokens, ...comments]; - expected.sort((a, b) => a.start - b.start); - - if (tokensAndComments.length !== expected.length) { - throw new Error("`tokensAndComments` has wrong length"); - } + let lastEnd = 0; + for (let i = 0; i < tokensLen; i++) { + const flagPos = (i << TOKEN_SIZE_SHIFT) + DESERIALIZED_FLAG_OFFSET; + if (tokensUint8![flagPos] !== FLAG_DESERIALIZED) { + throw new Error(`Token ${i} not marked as deserialized after \`deserializeTokens()\` call`); + } - for (let i = 0; i < tokensAndComments.length; i++) { - if (tokensAndComments[i] !== expected[i]) { - throw new Error("`tokensAndComments` is not correctly ordered"); + const { start, end } = cachedTokens[i]; + if (end <= start) throw new Error(`Invalid deserialized token range: ${start}-${end}`); + if (start < lastEnd) { + throw new Error( + `Deserialized tokens not in order: last end: ${lastEnd}, next start: ${start}`, + ); } + lastEnd = end; } - - debugCheckValidRanges(tokensAndComments, "token/comment"); } /** @@ -461,5 
+440,8 @@ export function resetTokens() { tokensWithRegex.length = 0; tokens = null; - tokensAndComments = null; + tokensUint8 = null; + tokensUint32 = null; + tokensLen = 0; + allTokensDeserialized = false; } diff --git a/apps/oxlint/src-js/plugins/tokens_and_comments.ts b/apps/oxlint/src-js/plugins/tokens_and_comments.ts new file mode 100644 index 0000000000000..39c9fbfeff80d --- /dev/null +++ b/apps/oxlint/src-js/plugins/tokens_and_comments.ts @@ -0,0 +1,376 @@ +/** + * Initialization and deserialization of merged tokens-and-comments array and buffer. + */ + +import { + allCommentsDeserialized, + cachedComments, + comments, + commentsUint32, + commentsLen, + deserializeComments, + getComment, + initComments, + initCommentsBuffer, +} from "./comments.ts"; +import { + allTokensDeserialized, + cachedTokens, + deserializeTokens, + initTokens, + getToken, + initTokensBuffer, + tokens, + tokensLen, + tokensUint32, +} from "./tokens.ts"; +import { COMMENT_SIZE } from "../generated/constants.ts"; +import { debugAssert, debugAssertIsNonNull } from "../utils/asserts.ts"; + +import type { Comment } from "./comments.ts"; +import type { Token } from "./tokens.ts"; + +export type TokenOrComment = Token | Comment; + +// `tokensAndCommentsUint32` is a buffer containing 16-byte entries, +// representing merged set of all tokens and comments, interleaved in source order. +// +// If they were Rust structs, they would be defined like this: +// +// ```rs +// #[repr(C)] +// struct MergedEntry { +// /// Start offset of the token/comment in source text +// start: u32, +// /// Index of the token/comment within the set of tokens or comments +// index: u32, +// /// Is this a token or a comment? +// type: MergedType, +// /// (4 bytes padding) +// } +// +// #[repr(u32)] +// enum MergedType { +// Token = 0, +// Comment = 1, +// } +// ``` +// +// These constants define the shape of the data stored in `tokensAndCommentsUint32` as per the above. 
+const MERGED_SIZE = 16;
+const MERGED_SIZE32_SHIFT = 2; // 4 x u32s per entry (16 bytes)
+const MERGED_SIZE32 = 1 << MERGED_SIZE32_SHIFT; // 4 x u32s per entry
+debugAssert(MERGED_SIZE === MERGED_SIZE32 * 4);
+debugAssert(MERGED_SIZE === COMMENT_SIZE, "Size of token, comment, and merged entry must be equal");
+
+const MERGED_ORIGINAL_INDEX_OFFSET32 = 1; // u32 index of the `index` field within an entry
+const MERGED_TYPE_OFFSET32 = 2; // u32 index of the `type` field within an entry
+
+// Type of merged entry.
+// "Poor man's enum" which optimizes better than a TS enum.
+type MergedType = typeof MERGED_TYPE_TOKEN | typeof MERGED_TYPE_COMMENT;
+const MERGED_TYPE_TOKEN = 0;
+const MERGED_TYPE_COMMENT = 1;
+
+// Cached `tokensAndComments` array, returned by `getTokensAndComments`.
+// Set to `null` on reset, rebuilt on next access.
+let tokensAndComments: TokenOrComment[] | null = null;
+
+// Reusable array for the merged case (when file has both tokens and comments).
+// Grows and shrinks as needed. Persists across files to avoid repeated allocation.
+let previousTokensAndComments: TokenOrComment[] = [];
+
+// Merged tokens-and-comments buffer (created lazily by `initTokensAndCommentsBuffer`).
+// Each entry is 4 x u32s: `{ start, index, type, padding }`.
+export let tokensAndCommentsUint32: Uint32Array | null = null;
+
+// Number of entries in the tokens-and-comments buffer.
+export let tokensAndCommentsLen = 0;
+
+// Backing buffer reused across files.
+// Grows when needed (doubled), never shrinks.
+// `tokensAndCommentsUint32` is a view over this buffer's prefix.
+let tokensAndCommentsBackingUint32 = new Uint32Array(0);
+
+// Minimum capacity (in `u32`s) of `tokensAndCommentsBackingUint32`, when not empty.
+// 256 elements = 1 KiB.
+const MERGED_BACKING_MIN_CAPACITY = 256;
+
+/**
+ * Initialize tokens-and-comments buffer.
+ *
+ * Creates a buffer containing tokens and comments interleaved in ascending order of `start`.
+ * + * Each token/comment in the input buffers is 16 bytes, with `start` as the first `u32`. + * + * `tokensAndCommentsUint32` contains 16-byte entries with the layout: + * `{ start: u32, index: u32, type: u32, 4 bytes padding }`. + * + * `index` is the index of the token/comment within its original buffer (in 16-byte units). + */ +export function initTokensAndCommentsBuffer(): void { + debugAssert(tokensAndCommentsUint32 === null, "`tokensAndComments` already initialized"); + + // Ensure tokens and comments are initialized + if (tokensUint32 === null) initTokensBuffer(); + debugAssertIsNonNull(tokensUint32); + + if (commentsUint32 === null) initCommentsBuffer(); + debugAssertIsNonNull(commentsUint32); + + tokensAndCommentsLen = tokensLen + commentsLen; + + // Reuse backing buffer across files. Grow if needed, never shrink. + // After warm-up over first few files, the buffer will be large enough to hold all tokens and comments + // for all files, so we avoid allocating a large buffer each time. + // `Uint32Array`s can't grow in place, so allocate a new one. + // First allocation uses minimum capacity. Subsequent growths double, to avoid frequent reallocations. + const requiredLen32 = tokensAndCommentsLen << MERGED_SIZE32_SHIFT; + const backingLen = tokensAndCommentsBackingUint32.length; + if (backingLen < requiredLen32) { + tokensAndCommentsBackingUint32 = new Uint32Array( + Math.max(requiredLen32, backingLen === 0 ? 
MERGED_BACKING_MIN_CAPACITY : backingLen << 1), + ); + } + + tokensAndCommentsUint32 = tokensAndCommentsBackingUint32; + + // Fast paths for files containing no comments, and no tokens (empty file except for comments) + if (commentsLen === 0) { + fillMergedEntries(MERGED_TYPE_TOKEN, tokensUint32, 0, 0, tokensLen); + } else if (tokensLen === 0) { + fillMergedEntries(MERGED_TYPE_COMMENT, commentsUint32, 0, 0, commentsLen); + } else { + mergeTokensAndComments(tokensUint32, commentsUint32); + } + + debugCheckMergedOrder(); +} + +/** + * Merge tokens and comments in ascending order of `start`. + * + * Uses two separate inner loops (one for token runs, one for comment runs) + * so the branch predictor sees a consistent "continue" pattern within each run, + * only mispredicting once at each run transition. + */ +function mergeTokensAndComments(tokensUint32: Uint32Array, commentsUint32: Uint32Array): void { + let tokenIndex = 0, + commentIndex = 0, + mergedPos32 = 0; + let tokenStart = tokensUint32[0], + commentStart = commentsUint32[0]; + + // Push any leading comments + while (commentStart < tokenStart) { + writeMergedEntry(MERGED_TYPE_COMMENT, mergedPos32, commentIndex, commentStart); + mergedPos32 += MERGED_SIZE32; + if (++commentIndex === commentsLen) { + fillMergedEntries(MERGED_TYPE_TOKEN, tokensUint32, mergedPos32, tokenIndex, tokensLen); + return; + } + commentStart = commentsUint32[commentIndex << MERGED_SIZE32_SHIFT]; + } + + // Alternate between runs of tokens and runs of comments + while (true) { + // Process run of tokens + do { + writeMergedEntry(MERGED_TYPE_TOKEN, mergedPos32, tokenIndex, tokenStart); + mergedPos32 += MERGED_SIZE32; + if (++tokenIndex === tokensLen) { + fillMergedEntries( + MERGED_TYPE_COMMENT, + commentsUint32, + mergedPos32, + commentIndex, + commentsLen, + ); + return; + } + tokenStart = tokensUint32[tokenIndex << MERGED_SIZE32_SHIFT]; + } while (tokenStart < commentStart); + + // Process run of comments + do { + 
writeMergedEntry(MERGED_TYPE_COMMENT, mergedPos32, commentIndex, commentStart);
+      mergedPos32 += MERGED_SIZE32;
+      if (++commentIndex === commentsLen) {
+        fillMergedEntries(MERGED_TYPE_TOKEN, tokensUint32, mergedPos32, tokenIndex, tokensLen);
+        return;
+      }
+      commentStart = commentsUint32[commentIndex << MERGED_SIZE32_SHIFT];
+    } while (commentStart < tokenStart);
+  }
+}
+
+/**
+ * Check that merged entries are in ascending order of `start`.
+ *
+ * Only runs in debug build (tests). In release build, this function is entirely removed by minifier.
+ */
+function debugCheckMergedOrder(): void {
+  if (!DEBUG) return;
+
+  let lastStart = -1;
+  for (let i = 0; i < tokensAndCommentsLen; i++) {
+    const start = tokensAndCommentsUint32![i << MERGED_SIZE32_SHIFT];
+    if (start <= lastStart) {
+      throw new Error(
+        `Merged tokens/comments not in order: entry ${i} start ${start} <= previous start ${lastStart}`,
+      );
+    }
+    lastStart = start;
+  }
+}
+
+/**
+ * Write a single entry to the merged buffer.
+ */
+function writeMergedEntry(
+  type: MergedType,
+  mergedPos32: number,
+  originalIndex: number,
+  start: number,
+): void {
+  tokensAndCommentsUint32![mergedPos32] = start;
+  tokensAndCommentsUint32![mergedPos32 + MERGED_ORIGINAL_INDEX_OFFSET32] = originalIndex;
+  tokensAndCommentsUint32![mergedPos32 + MERGED_TYPE_OFFSET32] = type;
+  // `tokensAndCommentsUint32![mergedPos32 + 3]` is padding, no need to write it
+}
+
+/**
+ * Fill output entries from a single source buffer (tokens or comments) sequentially.
+ * Used for fast paths and for appending remaining items after the merge loop.
+ */ +function fillMergedEntries( + type: MergedType, + srcUint32: Uint32Array, + mergedPos32: number, + srcIndex: number, + srcLen: number, +): void { + let srcPos32 = srcIndex << MERGED_SIZE32_SHIFT; + + for (; srcIndex < srcLen; srcIndex++) { + tokensAndCommentsUint32![mergedPos32] = srcUint32[srcPos32]; + tokensAndCommentsUint32![mergedPos32 + MERGED_ORIGINAL_INDEX_OFFSET32] = srcIndex; + tokensAndCommentsUint32![mergedPos32 + MERGED_TYPE_OFFSET32] = type; + mergedPos32 += MERGED_SIZE32; + srcPos32 += MERGED_SIZE32; + } +} + +/** + * Get token or comment from the merged buffer at `index`. + * Deserializes the underlying token/comment if needed. + * + * @param index - Index in the merged buffer + * @returns Deserialized token or comment + */ +export function getTokenOrComment(index: number): TokenOrComment { + const pos32 = index << MERGED_SIZE32_SHIFT; + const originalIndex = tokensAndCommentsUint32![pos32 + MERGED_ORIGINAL_INDEX_OFFSET32]; + + return tokensAndCommentsUint32![pos32 + MERGED_TYPE_OFFSET32] === MERGED_TYPE_TOKEN + ? getToken(originalIndex) + : getComment(originalIndex); +} + +/** + * Get the `end` value for an entry in the merged `tokensAndComments` buffer, + * by looking it up in the original tokens or comments buffer. + * + * @param entryIndex - Index in the merged buffer + * @returns The `end` offset of the token/comment in source text + */ +export function getTokenOrCommentEnd(entryIndex: number): number { + const pos32 = entryIndex << MERGED_SIZE32_SHIFT; + const originalIndex = tokensAndCommentsUint32![pos32 + MERGED_ORIGINAL_INDEX_OFFSET32]; + const originalEndPos32 = (originalIndex << MERGED_SIZE32_SHIFT) + 1; + + return tokensAndCommentsUint32![pos32 + MERGED_TYPE_OFFSET32] === MERGED_TYPE_TOKEN + ? tokensUint32![originalEndPos32] + : commentsUint32![originalEndPos32]; +} + +/** + * Get all tokens and comments in source order. + * + * Builds and caches the merged array on first call. + * Subsequent calls return the same cached array. 
+ * + * @returns Array of all tokens and comments, sorted by source position + */ +export function getTokensAndComments(): TokenOrComment[] { + // If `getTokensAndComments` has already been called, return same array again + if (tokensAndComments !== null) return tokensAndComments; + + // Init tokens and comments (to get lengths), but don't build `tokens`/`comments` arrays yet + if (tokensUint32 === null) initTokensBuffer(); + if (commentsUint32 === null) initCommentsBuffer(); + + // Fast path: No comments - build `tokens` array and return it directly + if (commentsLen === 0) { + if (tokens === null) initTokens(); + debugAssertIsNonNull(tokens); + return (tokensAndComments = tokens); + } + + // Fast path: No tokens - build `comments` array and return it directly + if (tokensLen === 0) { + if (comments === null) initComments(); + debugAssertIsNonNull(comments); + return (tokensAndComments = comments); + } + + // General case: Deserialize all entries into `cachedTokens` / `cachedComments`, + // but skip building the `tokens` and `comments` arrays (they aren't needed here) + if (!allTokensDeserialized) deserializeTokens(); + if (!allCommentsDeserialized) deserializeComments(); + + // Ensure merged buffer is built + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); + + // Since `deserializeTokens` and `deserializeComments` are called first, all entries are already deserialized. + // We just need to look up entries from `cachedTokens` and `cachedComments` by index. + // + // Reuse the array from the previous file to avoid allocation: + // - If big enough: Truncate to target length (V8 shrinks FixedArray in place, no realloc), + // then overwrite existing slots. + // - If too small: `new Array(n).fill(0)` to pre-allocate exact capacity as a PACKED array, + // then overwrite. This avoids copying stale elements (which growing the old array would do). 
+ if (previousTokensAndComments.length >= tokensAndCommentsLen) { + tokensAndComments = previousTokensAndComments; + tokensAndComments.length = tokensAndCommentsLen; + } else { + // `new Array(n)` creates a HOLEY array. `.fill(0)` fills all slots, making it PACKED. + // `0` is used because SMI 0 is all-zero bits, so V8 can use `memset(0)` which is ~10% faster + // than filling with a non-zero value like `null` (CPUs have dedicated fast-zeroing support). + // Write `null` into first entry to transition array to PACKED_ELEMENTS. + // oxlint-disable-next-line unicorn/no-new-array -- `Array.from` is 12x slower (benchmarked) + tokensAndComments = previousTokensAndComments = new Array(tokensAndCommentsLen).fill(0); + tokensAndComments[0] = null!; + } + + for (let i = 0; i < tokensAndCommentsLen; i++) { + const pos32 = i << MERGED_SIZE32_SHIFT; + const originalIndex = tokensAndCommentsUint32[pos32 + MERGED_ORIGINAL_INDEX_OFFSET32]; + tokensAndComments[i] = + tokensAndCommentsUint32[pos32 + MERGED_TYPE_OFFSET32] === MERGED_TYPE_TOKEN + ? (cachedTokens[originalIndex] as Token) + : cachedComments[originalIndex]; + } + + return tokensAndComments; +} + +/** + * Reset merged tokens-and-comments array and buffer after file has been linted. + */ +export function resetTokensAndComments() { + tokensAndComments = null; + tokensAndCommentsUint32 = null; + tokensAndCommentsLen = 0; +} diff --git a/apps/oxlint/src-js/plugins/tokens_methods.ts b/apps/oxlint/src-js/plugins/tokens_methods.ts index 95c742983e862..21bf31bb41482 100644 --- a/apps/oxlint/src-js/plugins/tokens_methods.ts +++ b/apps/oxlint/src-js/plugins/tokens_methods.ts @@ -2,11 +2,26 @@ * `SourceCode` methods related to tokens. 
*/ -import { tokens, tokensAndComments, initTokens, initTokensAndComments } from "./tokens.ts"; +import { + cachedTokens, + tokensUint32, + tokensLen, + initTokensBuffer, + getToken, + deserializeTokenIfNeeded, +} from "./tokens.ts"; +import { + tokensAndCommentsUint32, + tokensAndCommentsLen, + getTokenOrComment, + getTokenOrCommentEnd, + initTokensAndCommentsBuffer, +} from "./tokens_and_comments.ts"; import { debugAssertIsNonNull } from "../utils/asserts.ts"; import type { Node, NodeOrToken } from "./types.ts"; -import type { Token, TokenOrComment } from "./tokens.ts"; +import type { Token } from "./tokens.ts"; +import type { TokenOrComment } from "./tokens_and_comments.ts"; /** * Options for various `SourceCode` methods e.g. `getFirstToken`. @@ -87,9 +102,6 @@ export function getTokens[]; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Maximum number of tokens to return let count = typeof countOptions === "object" && countOptions !== null ? countOptions.count : null; @@ -111,21 +123,19 @@ export function getTokens 0; i++) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) { allTokens.push(token); count--; @@ -184,9 +192,6 @@ export function getFirstToken | null; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Number of tokens at the beginning of the given node to skip let skip = typeof skipOptions === "number" @@ -203,21 +208,19 @@ export function getFirstToken= tokensLength) return null; - - const token = tokenList[skipTo]; - if (token.start >= rangeEnd) return null; - return token as Result; + if (skipTo >= len) return null; + if (entryStart(skipTo, uint32) >= rangeEnd) return null; + return getEntry(skipTo, includeComments) as Result; } if (typeof skip !== "number") { - for (let i = startIndex; i < tokensLength; i++) { - const token = tokenList[i]; - if (token.start >= rangeEnd) return null; // Token is outside the node + for (let i = startIndex; i < len; i++) { 
+ if (entryStart(i, uint32) >= rangeEnd) return null; + const token = getEntry(i, includeComments); if (filter(token)) return token as Result; } } else { - for (let i = startIndex; i < tokensLength; i++) { - const token = tokenList[i]; - if (token.start >= rangeEnd) return null; // Token is outside the node + for (let i = startIndex; i < len; i++) { + if (entryStart(i, uint32) >= rangeEnd) return null; + const token = getEntry(i, includeComments); if (filter(token)) { if (skip <= 0) return token as Result; skip--; @@ -274,9 +273,6 @@ export function getFirstTokens[]; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - const count = typeof countOptions === "number" ? countOptions @@ -291,19 +287,19 @@ export function getFirstTokens | null; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Number of tokens at the end of the given node to skip let skip = typeof skipOptions === "number" @@ -369,21 +368,19 @@ export function getLastToken= 0; i--) { - const token = tokenList[i]; - if (token.start < rangeStart) return null; + if (entryStart(i, uint32) < rangeStart) return null; + const token = getEntry(i, includeComments); if (filter(token)) return token as Result; } } else { for (let i = lastTokenIndex; i >= 0; i--) { - const token = tokenList[i]; - if (token.start < rangeStart) return null; + if (entryStart(i, uint32) < rangeStart) return null; + const token = getEntry(i, includeComments); if (filter(token)) { if (skip <= 0) return token as Result; skip--; @@ -438,9 +433,6 @@ export function getLastTokens[]; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Maximum number of tokens to return const count = typeof countOptions === "number" @@ -457,21 +449,19 @@ export function getLastTokens= sliceStart && lastTokens.length < count; i--) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) lastTokens.unshift(token); } } @@ -520,9 +516,6 @@ export function 
getTokenBefore | null; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Number of tokens preceding the given node to skip let skip = typeof skipOptions === "number" @@ -538,44 +531,41 @@ export function getTokenBefore= 0) { - const token = tokenList[beforeIndex]; + const token = getEntry(beforeIndex, includeComments); if (filter(token)) return token as Result; beforeIndex--; } } else { while (beforeIndex >= 0) { - const token = tokenList[beforeIndex]; + const token = getEntry(beforeIndex, includeComments); if (filter(token)) { if (skip <= 0) return token as Result; skip--; @@ -621,9 +611,6 @@ export function getTokensBefore< // so we use `Result` alias + casts on return statements type Result = TokenResult[]; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Maximum number of tokens to return const count = typeof countOptions === "number" @@ -640,44 +627,42 @@ export function getTokensBefore< ? countOptions.filter : null; - // Whether to return comment tokens - const includeComments = - typeof countOptions === "object" && - countOptions !== null && - "includeComments" in countOptions && - countOptions.includeComments; - - // Source array of tokens to search in - let tokenList: TokenOrComment[]; - if (includeComments) { - if (tokensAndComments === null) initTokensAndComments(); - debugAssertIsNonNull(tokensAndComments); - tokenList = tokensAndComments; + const includeComments = getIncludeComments(countOptions); + + let uint32: Uint32Array, len: number; + if (includeComments === false) { + if (tokensUint32 === null) initTokensBuffer(); + debugAssertIsNonNull(tokensUint32); + uint32 = tokensUint32; + len = tokensLen; } else { - tokenList = tokens; + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); + uint32 = tokensAndCommentsUint32; + len = tokensAndCommentsLen; } const targetStart = nodeOrToken.range[0]; // Binary search for first token past 
`nodeOrToken`'s start - const sliceEnd = firstTokenAtOrAfter(tokenList, targetStart, 0); + const sliceEnd = firstTokenAtOrAfter(uint32, targetStart, 0, len); // Fast path for the common case if (typeof filter !== "function") { - if (typeof count !== "number") return tokenList.slice(0, sliceEnd) as Result; - return tokenList.slice(sliceEnd - count, sliceEnd) as Result; + if (typeof count !== "number") return collectEntries(0, sliceEnd, includeComments) as Result; + return collectEntries(Math.max(0, sliceEnd - count), sliceEnd, includeComments) as Result; } const tokensBefore: TokenOrComment[] = []; if (typeof count !== "number") { for (let i = 0; i < sliceEnd; i++) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) tokensBefore.push(token); } } else { // Count is the number of preceding tokens, so we iterate in reverse for (let i = sliceEnd - 1; i >= 0 && tokensBefore.length < count; i--) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) tokensBefore.unshift(token); } } @@ -700,9 +685,6 @@ export function getTokenAfter | null; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Number of tokens following the given node to skip let skip = typeof skipOptions === "number" @@ -718,44 +700,41 @@ export function getTokenAfter= tokensLength) return null; - return tokenList[skipTo] as Result; + if (skipTo >= len) return null; + return getEntry(skipTo, includeComments) as Result; } if (typeof skip !== "number") { - for (let i = startIndex; i < tokensLength; i++) { - const token = tokenList[i]; + for (let i = startIndex; i < len; i++) { + const token = getEntry(i, includeComments); if (filter(token)) return token as Result; } } else { - for (let i = startIndex; i < tokensLength; i++) { - const token = tokenList[i]; + for (let i = startIndex; i < len; i++) { + const token = getEntry(i, includeComments); if (filter(token)) { if (skip <= 0) return token as Result; 
skip--; @@ -801,9 +780,6 @@ export function getTokensAfter[]; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - const count = typeof countOptions === "number" ? countOptions @@ -818,41 +794,43 @@ export function getTokensAfter[]; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - const count = typeof countOptions === "object" && countOptions !== null ? countOptions.count : null; @@ -895,19 +870,19 @@ export function getTokensBetween< ? countOptions.filter : null; - const includeComments = - typeof countOptions === "object" && - countOptions !== null && - "includeComments" in countOptions && - countOptions.includeComments; - - let tokenList: TokenOrComment[]; - if (includeComments) { - if (tokensAndComments === null) initTokensAndComments(); - debugAssertIsNonNull(tokensAndComments); - tokenList = tokensAndComments; + const includeComments = getIncludeComments(countOptions); + + let uint32: Uint32Array, len: number; + if (includeComments === false) { + if (tokensUint32 === null) initTokensBuffer(); + debugAssertIsNonNull(tokensUint32); + uint32 = tokensUint32; + len = tokensLen; } else { - tokenList = tokens; + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); + uint32 = tokensAndCommentsUint32; + len = tokensAndCommentsLen; } // This range is not invariant over node order. 
@@ -917,28 +892,34 @@ export function getTokensBetween< rangeEnd = right.range[0]; // Binary search for first token past "between" range start - let sliceStart = firstTokenAtOrAfter(tokenList, rangeStart, 0); + let sliceStart = firstTokenAtOrAfter(uint32, rangeStart, 0, len); // Binary search for first token past "between" range end - let sliceEnd = firstTokenAtOrAfter(tokenList, rangeEnd, sliceStart); + let sliceEnd = firstTokenAtOrAfter(uint32, rangeEnd, sliceStart, len); // Apply padding sliceStart = Math.max(0, sliceStart - padding); - sliceEnd += padding; + sliceEnd = Math.min(sliceEnd + padding, len); if (typeof filter !== "function") { - if (typeof count !== "number") return tokenList.slice(sliceStart, sliceEnd) as Result; - return tokenList.slice(sliceStart, Math.min(sliceStart + count, sliceEnd)) as Result; + if (typeof count !== "number") { + return collectEntries(sliceStart, sliceEnd, includeComments) as Result; + } + return collectEntries( + sliceStart, + Math.min(sliceStart + count, sliceEnd), + includeComments, + ) as Result; } const tokensBetween: TokenOrComment[] = []; if (typeof count !== "number") { for (let i = sliceStart; i < sliceEnd; i++) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) tokensBetween.push(token); } } else { for (let i = sliceStart; i < sliceEnd && tokensBetween.length < count; i++) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) tokensBetween.push(token); } } @@ -961,9 +942,6 @@ export function getFirstTokenBetween< // so we use `Result` alias + casts on return statements type Result = TokenResult | null; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Number of tokens at the beginning of the "between" range to skip let skip = typeof skipOptions === "number" @@ -979,19 +957,19 @@ export function getFirstTokenBetween< ? 
skipOptions.filter : null; - const includeComments = - typeof skipOptions === "object" && - skipOptions !== null && - "includeComments" in skipOptions && - skipOptions.includeComments; - - let tokenList: TokenOrComment[]; - if (includeComments) { - if (tokensAndComments === null) initTokensAndComments(); - debugAssertIsNonNull(tokensAndComments); - tokenList = tokensAndComments; + const includeComments = getIncludeComments(skipOptions); + + let uint32: Uint32Array, len: number; + if (includeComments === false) { + if (tokensUint32 === null) initTokensBuffer(); + debugAssertIsNonNull(tokensUint32); + uint32 = tokensUint32; + len = tokensLen; } else { - tokenList = tokens; + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); + uint32 = tokensAndCommentsUint32; + len = tokensAndCommentsLen; } // This range is not invariant over node order. @@ -1000,30 +978,26 @@ export function getFirstTokenBetween< const rangeStart = left.range[1], rangeEnd = right.range[0]; - const tokensLength = tokenList.length; - // Binary search for token immediately following `left` - const firstTokenIndex = firstTokenAtOrAfter(tokenList, rangeStart, 0); + const firstTokenIndex = firstTokenAtOrAfter(uint32, rangeStart, 0, len); if (typeof filter !== "function") { const skipTo = firstTokenIndex + (skip ?? 
0); - // Avoid indexing out of bounds - if (skipTo >= tokensLength) return null; - const token = tokenList[skipTo]; - if (token.start >= rangeEnd) return null; - return token as Result; + if (skipTo >= len) return null; + if (entryStart(skipTo, uint32) >= rangeEnd) return null; + return getEntry(skipTo, includeComments) as Result; } if (typeof skip !== "number") { - for (let i = firstTokenIndex; i < tokensLength; i++) { - const token = tokenList[i]; - if (token.start >= rangeEnd) return null; + for (let i = firstTokenIndex; i < len; i++) { + if (entryStart(i, uint32) >= rangeEnd) return null; + const token = getEntry(i, includeComments); if (filter(token)) return token as Result; } } else { - for (let i = firstTokenIndex; i < tokensLength; i++) { - const token = tokenList[i]; - if (token.start >= rangeEnd) return null; + for (let i = firstTokenIndex; i < len; i++) { + if (entryStart(i, uint32) >= rangeEnd) return null; + const token = getEntry(i, includeComments); if (filter(token)) { if (skip <= 0) return token as Result; skip--; @@ -1050,9 +1024,6 @@ export function getFirstTokensBetween< // so we use `Result` alias + casts on return statements type Result = TokenResult[]; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - const count = typeof countOptions === "number" ? countOptions @@ -1067,19 +1038,19 @@ export function getFirstTokensBetween< ? 
countOptions.filter : null; - const includeComments = - typeof countOptions === "object" && - countOptions !== null && - "includeComments" in countOptions && - countOptions.includeComments; - - let tokenList: TokenOrComment[]; - if (includeComments) { - if (tokensAndComments === null) initTokensAndComments(); - debugAssertIsNonNull(tokensAndComments); - tokenList = tokensAndComments; + const includeComments = getIncludeComments(countOptions); + + let uint32: Uint32Array, len: number; + if (includeComments === false) { + if (tokensUint32 === null) initTokensBuffer(); + debugAssertIsNonNull(tokensUint32); + uint32 = tokensUint32; + len = tokensLen; } else { - tokenList = tokens; + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); + uint32 = tokensAndCommentsUint32; + len = tokensAndCommentsLen; } // This range is not invariant over node order. @@ -1089,24 +1060,30 @@ export function getFirstTokensBetween< rangeEnd = right.range[0]; // Find the first token after `left` - const sliceStart = firstTokenAtOrAfter(tokenList, rangeStart, 0); + const sliceStart = firstTokenAtOrAfter(uint32, rangeStart, 0, len); // Find the first token at or after `right` - const sliceEnd = firstTokenAtOrAfter(tokenList, rangeEnd, sliceStart); + const sliceEnd = firstTokenAtOrAfter(uint32, rangeEnd, sliceStart, len); if (typeof filter !== "function") { - if (typeof count !== "number") return tokenList.slice(sliceStart, sliceEnd) as Result; - return tokenList.slice(sliceStart, Math.min(sliceStart + count, sliceEnd)) as Result; + if (typeof count !== "number") { + return collectEntries(sliceStart, sliceEnd, includeComments) as Result; + } + return collectEntries( + sliceStart, + Math.min(sliceStart + count, sliceEnd), + includeComments, + ) as Result; } const firstTokens: TokenOrComment[] = []; if (typeof count !== "number") { for (let i = sliceStart; i < sliceEnd; i++) { - const token = tokenList[i]; + const token = 
getEntry(i, includeComments); if (filter(token)) firstTokens.push(token); } } else { for (let i = sliceStart; i < sliceEnd && firstTokens.length < count; i++) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) firstTokens.push(token); } } @@ -1129,9 +1106,6 @@ export function getLastTokenBetween< // so we use `Result` alias + casts on return statements type Result = TokenResult | null; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - // Number of tokens at the end of the "between" range to skip let skip = typeof skipOptions === "number" @@ -1147,19 +1121,19 @@ export function getLastTokenBetween< ? skipOptions.filter : null; - const includeComments = - typeof skipOptions === "object" && - skipOptions !== null && - "includeComments" in skipOptions && - skipOptions.includeComments; - - let tokenList: TokenOrComment[]; - if (includeComments) { - if (tokensAndComments === null) initTokensAndComments(); - debugAssertIsNonNull(tokensAndComments); - tokenList = tokensAndComments; + const includeComments = getIncludeComments(skipOptions); + + let uint32: Uint32Array, len: number; + if (includeComments === false) { + if (tokensUint32 === null) initTokensBuffer(); + debugAssertIsNonNull(tokensUint32); + uint32 = tokensUint32; + len = tokensLen; } else { - tokenList = tokens; + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); + uint32 = tokensAndCommentsUint32; + len = tokensAndCommentsLen; } // This range is not invariant over node order. @@ -1169,29 +1143,27 @@ export function getLastTokenBetween< rangeEnd = right.range[0]; // Binary search for token immediately preceding `right`. - // The found token may be within the left node if there are no tokens between the nodes. - const lastTokenIndex = firstTokenAtOrAfter(tokenList, rangeEnd, 0) - 1; + // The found token may be within the left node if there are no entries between the nodes. 
+ const lastTokenIndex = firstTokenAtOrAfter(uint32, rangeEnd, 0, len) - 1; // Fast path for the common case if (typeof filter !== "function") { const skipTo = lastTokenIndex - (skip ?? 0); - // Avoid indexing out of bounds if (skipTo < 0) return null; - const token = tokenList[skipTo]; - if (token.start < rangeStart) return null; - return token as Result; + if (entryStart(skipTo, uint32) < rangeStart) return null; + return getEntry(skipTo, includeComments) as Result; } if (typeof skip !== "number") { for (let i = lastTokenIndex; i >= 0; i--) { - const token = tokenList[i]; - if (token.start < rangeStart) return null; + if (entryStart(i, uint32) < rangeStart) return null; + const token = getEntry(i, includeComments); if (filter(token)) return token as Result; } } else { for (let i = lastTokenIndex; i >= 0; i--) { - const token = tokenList[i]; - if (token.start < rangeStart) return null; + if (entryStart(i, uint32) < rangeStart) return null; + const token = getEntry(i, includeComments); if (filter(token)) { if (skip <= 0) return token as Result; skip--; @@ -1218,9 +1190,6 @@ export function getLastTokensBetween< // so we use `Result` alias + casts on return statements type Result = TokenResult[]; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - const count = typeof countOptions === "number" ? countOptions @@ -1235,19 +1204,19 @@ export function getLastTokensBetween< ? 
countOptions.filter : null; - const includeComments = - typeof countOptions === "object" && - countOptions !== null && - "includeComments" in countOptions && - countOptions.includeComments; - - let tokenList: TokenOrComment[]; - if (includeComments) { - if (tokensAndComments === null) initTokensAndComments(); - debugAssertIsNonNull(tokensAndComments); - tokenList = tokensAndComments; + const includeComments = getIncludeComments(countOptions); + + let uint32: Uint32Array, len: number; + if (includeComments === false) { + if (tokensUint32 === null) initTokensBuffer(); + debugAssertIsNonNull(tokensUint32); + uint32 = tokensUint32; + len = tokensLen; } else { - tokenList = tokens; + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); + uint32 = tokensAndCommentsUint32; + len = tokensAndCommentsLen; } // This range is not invariant over node order. @@ -1257,26 +1226,32 @@ export function getLastTokensBetween< rangeEnd = right.range[0]; // Binary search for first token past "between" range start - const sliceStart = firstTokenAtOrAfter(tokenList, rangeStart, 0); + const sliceStart = firstTokenAtOrAfter(uint32, rangeStart, 0, len); // Binary search for first token past "between" range end - const sliceEnd = firstTokenAtOrAfter(tokenList, rangeEnd, sliceStart); + const sliceEnd = firstTokenAtOrAfter(uint32, rangeEnd, sliceStart, len); // Fast path for the common case if (typeof filter !== "function") { - if (typeof count !== "number") return tokenList.slice(sliceStart, sliceEnd) as Result; - return tokenList.slice(Math.max(sliceStart, sliceEnd - count), sliceEnd) as Result; + if (typeof count !== "number") { + return collectEntries(sliceStart, sliceEnd, includeComments) as Result; + } + return collectEntries( + Math.max(sliceStart, sliceEnd - count), + sliceEnd, + includeComments, + ) as Result; } const tokensBetween: TokenOrComment[] = []; if (typeof count !== "number") { for (let i = sliceStart; i < 
sliceEnd; i++) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) tokensBetween.push(token); } } else { // Count is the number of preceding tokens, so we iterate in reverse for (let i = sliceEnd - 1; i >= sliceStart && tokensBetween.length < count; i--) { - const token = tokenList[i]; + const token = getEntry(i, includeComments); if (filter(token)) tokensBetween.unshift(token); } } @@ -1285,47 +1260,47 @@ export function getLastTokensBetween< /** * Get the token starting at the specified index. - * @param index - Index of the start of the token's range. + * @param offset - Start offset of the token. * @param rangeOptions - Options object. * @returns `Token` (or `Token | Comment` if `includeComments` is `true`), or `null` if none found. */ export function getTokenByRangeStart( - index: number, + offset: number, rangeOptions?: Options, ): TokenResult | null { // TypeScript cannot verify conditional return types within the function body, // so we use `Result` alias + casts on return statements type Result = TokenResult | null; - if (tokens === null) initTokens(); - debugAssertIsNonNull(tokens); - const includeComments = typeof rangeOptions === "object" && rangeOptions !== null && "includeComments" in rangeOptions && - rangeOptions.includeComments; - - let tokenList: TokenOrComment[]; - if (includeComments) { - if (tokensAndComments === null) initTokensAndComments(); - debugAssertIsNonNull(tokensAndComments); - tokenList = tokensAndComments; + !!rangeOptions.includeComments; + + let uint32: Uint32Array, len: number; + if (includeComments === false) { + if (tokensUint32 === null) initTokensBuffer(); + debugAssertIsNonNull(tokensUint32); + uint32 = tokensUint32; + len = tokensLen; } else { - tokenList = tokens; + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); + uint32 = tokensAndCommentsUint32; + len = tokensAndCommentsLen; } // Binary search for token 
starting at the given index - for (let lo = 0, hi = tokenList.length; lo < hi; ) { - const mid = (lo + hi) >> 1; - const token = tokenList[mid], - tokenStart = token.start; - if (tokenStart < index) { + for (let lo = 0, hi = len; lo < hi; ) { + const mid = (lo + hi) >>> 1; + const tokenStart = uint32[mid << 2]; + if (tokenStart < offset) { lo = mid + 1; - } else if (tokenStart > index) { + } else if (tokenStart > offset) { hi = mid; } else { - return token as Result; + return getEntry(mid, includeComments) as Result; } } @@ -1349,11 +1324,8 @@ const JSX_WHITESPACE_REGEXP = /\s/u; * any of the tokens found between the two given nodes or tokens. */ export function isSpaceBetween(first: NodeOrToken, second: NodeOrToken): boolean { - if (tokensAndComments === null) { - if (tokens === null) initTokens(); - initTokensAndComments(); - } - debugAssertIsNonNull(tokensAndComments); + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); const range1 = first.range, range2 = second.range; @@ -1385,21 +1357,15 @@ export function isSpaceBetween(first: NodeOrToken, second: NodeOrToken): boolean // Binary search for the first token past `rangeStart`. // Unless `first` and `second` are adjacent or overlapping, - // the token will be the first token between the two nodes. - let tokenBetweenIndex = firstTokenAtOrAfter(tokensAndComments, rangeStart, 0); - - const tokensAndCommentsLength = tokensAndComments.length; - for ( - let lastTokenEnd = rangeStart; - tokenBetweenIndex < tokensAndCommentsLength; - tokenBetweenIndex++ - ) { - const token = tokensAndComments[tokenBetweenIndex], - tokenStart = token.start; + // the token will be the first token/comment between the two nodes. 
+ let index = firstTokenAtOrAfter(tokensAndCommentsUint32, rangeStart, 0, tokensAndCommentsLen); + + for (let lastTokenEnd = rangeStart; index < tokensAndCommentsLen; index++) { + const tokenStart = tokensAndCommentsUint32[index << 2]; // The first token of the later node should undergo the check in the second branch if (tokenStart > rangeEnd) break; if (tokenStart !== lastTokenEnd) return true; - lastTokenEnd = token.end; + lastTokenEnd = entryEnd(index, true); } return false; @@ -1426,11 +1392,8 @@ export function isSpaceBetween(first: NodeOrToken, second: NodeOrToken): boolean * any of the tokens found between the two given nodes or tokens. */ export function isSpaceBetweenTokens(first: NodeOrToken, second: NodeOrToken): boolean { - if (tokensAndComments === null) { - if (tokens === null) initTokens(); - initTokensAndComments(); - } - debugAssertIsNonNull(tokensAndComments); + if (tokensAndCommentsUint32 === null) initTokensAndCommentsBuffer(); + debugAssertIsNonNull(tokensAndCommentsUint32); const range1 = first.range, range2 = second.range; @@ -1450,20 +1413,17 @@ export function isSpaceBetweenTokens(first: NodeOrToken, second: NodeOrToken): b // Binary search for the first token past `rangeStart`. // Unless `first` and `second` are adjacent or overlapping, - // the token will be the first token between the two nodes. - let tokenBetweenIndex = firstTokenAtOrAfter(tokensAndComments, rangeStart, 0); - - const tokensAndCommentsLength = tokensAndComments.length; - for ( - let lastTokenEnd = rangeStart; - tokenBetweenIndex < tokensAndCommentsLength; - tokenBetweenIndex++ - ) { - const token = tokensAndComments[tokenBetweenIndex], - tokenStart = token.start; + // the token will be the first token/comment between the two nodes. 
+ let index = firstTokenAtOrAfter(tokensAndCommentsUint32, rangeStart, 0, tokensAndCommentsLen); + + for (let lastTokenEnd = rangeStart; index < tokensAndCommentsLen; index++) { + const tokenStart = tokensAndCommentsUint32[index << 2]; // The first token of the later node should undergo the check in the second branch if (tokenStart > rangeEnd) break; + + // Deserialize to check type/value for JSXText whitespace detection + const token = getTokenOrComment(index); if ( tokenStart !== lastTokenEnd || (token.type === "JSXText" && JSX_WHITESPACE_REGEXP.test(token.value)) @@ -1477,48 +1437,119 @@ export function isSpaceBetweenTokens(first: NodeOrToken, second: NodeOrToken): b } /** - * Find the index of the first token in `tokens` whose `start` is >= `offset`, via binary search. - * - * Searched range starts at `startIndex` and ends at `tokens.length`. + * Extract `includeComments` boolean from options. * - * Returns `tokens.length` if all tokens have `start` < `offset`. + * @param options - Options object, number, function, or nullish + * @returns `true` if `options` has `includeComments: true` + */ +function getIncludeComments( + options: SkipOptions | CountOptions | number | FilterFn | null | undefined, +): boolean { + return ( + typeof options === "object" && + options !== null && + "includeComments" in options && + !!options.includeComments + ); +} + +/** + * Get a token at `index`, deserializing if needed. + * For `includeComments` mode, gets from the merged buffer instead. * - * IMPORTANT - * --------- + * @param index - Entry index in the tokens or merged buffer + * @param includeComments - Whether to use the merged tokens-and-comments buffer + * @returns Deserialized token or comment + */ +function getEntry(index: number, includeComments: boolean): TokenOrComment { + return includeComments === true ? getTokenOrComment(index) : getToken(index); +} + +/** + * Get `start` offset of token/comment at `index` from the given buffer. 
* - * This function is inlined into all call sites by a TSDown plugin, to avoid the overhead of function calls - * (see `tsdown_plugins/inline_search.ts`). + * @param index - Entry index + * @param uint32 - The `Uint32Array` buffer (tokens or merged) + * @returns Start offset in source text + */ +function entryStart(index: number, uint32: Uint32Array): number { + return uint32[index << 2]; +} + +/** + * Get `end` offset of token/comment at `index`. + * For tokens-only, reads from `tokensUint32`. For `includeComments`, looks up from the original buffer. * - * For the plugin to work, the following conditions must be met: + * @param index - Entry index in the tokens or merged buffer + * @param includeComments - Whether to use the merged tokens-and-comments buffer + * @returns End offset in source text + */ +function entryEnd(index: number, includeComments: boolean): number { + return includeComments === true ? getTokenOrCommentEnd(index) : tokensUint32![(index << 2) + 1]; +} + +/** + * Collect tokens/comments from `startIndex` (inclusive) to `endIndex` (exclusive) into an array. + * Deserializes each token/comment on demand. * - * 1. All call sites must follow this pattern: - * * `const result = firstTokenAtOrAfter(a, b, c);` or `let result = firstTokenAtOrAfter(a, b, c);` - * * `result` can be any variable name. - * * `a`, `b`, and `c` can be any variables, or literals (e.g. `0`). - * * Optionally, the call expression can be the left side of a binary expression - * e.g. `const result = firstTokenAtOrAfter(a, b, c) - 1;`. + * For tokens-only mode, batch-deserializes then slices `cachedTokens`. + * For `includeComments` mode, builds the array entry by entry from the merged buffer. * - * 2. If renaming this function, the TSDown plugin must be updated to match. 
+ * @param startIndex - First entry index (inclusive) + * @param endIndex - Last entry index (exclusive) + * @param includeComments - Whether to use the merged tokens-and-comments buffer + * @returns Array of tokens (and optionally comments) + */ +function collectEntries( + startIndex: number, + endIndex: number, + includeComments: boolean, +): TokenOrComment[] { + if (includeComments === false) { + // Batch-deserialize tokens in range, then slice from `cachedTokens` array + for (let i = startIndex; i < endIndex; i++) { + deserializeTokenIfNeeded(i); + } + return cachedTokens!.slice(startIndex, endIndex) as TokenOrComment[]; + } + + const len = endIndex - startIndex; + if (len === 0) return []; + + // Pre-allocate with correct size. Write `null` into first entry to transition to PACKED_ELEMENTS before the loop. + // oxlint-disable-next-line unicorn/no-new-array + const tokensAndCommentsSubset: TokenOrComment[] = new Array(len).fill(0); + tokensAndCommentsSubset[0] = null!; + let i = 0; + do { + tokensAndCommentsSubset[i] = getTokenOrComment(startIndex + i); + } while (++i < len); + return tokensAndCommentsSubset; +} + +/** + * Find the index of the first entry in a `Uint32Array` buffer whose `start` is >= `offset`, via binary search. * - * 3. The function body is inlined except for the final `return` statement. - * If altering this function's body, ensure it does not define any vars at top-level of the function, - * as they could conflict with other vars in the call site's scope. + * Each entry occupies 4 x u32s (16 bytes), with `start` as the first u32. + * Searched range starts at `startIndex` and ends at `length`. * - * If any calls cannot be inlined, it will produce an error at build time. + * Returns `length` if all entries have `start` < `offset`. 
* - * @param tokens - Sorted array of tokens/comments + * @param u32 - Uint32Array buffer (tokens, comments, or tokensAndComments) * @param offset - Source offset to search for - * @param startIndex - Starting index for the search - * @returns Index of first token with `start >= offset` + * @param startIndex - Starting entry index for the search + * @param length - Total number of entries in the buffer + * @returns Index of first entry with `start >= offset` */ export function firstTokenAtOrAfter( - tokens: TokenOrComment[], + uint32: Uint32Array, offset: number, startIndex: number, + length: number, ): number { - for (let endIndex = tokens.length; startIndex < endIndex; ) { - const mid = (startIndex + endIndex) >> 1; - if (tokens[mid].start < offset) { + for (let endIndex = length; startIndex < endIndex; ) { + const mid = (startIndex + endIndex) >>> 1; + if (uint32[mid << 2] < offset) { startIndex = mid + 1; } else { endIndex = mid; diff --git a/apps/oxlint/src/js_plugins/parse.rs b/apps/oxlint/src/js_plugins/parse.rs index 8578081b6770d..d1c5333547d7c 100644 --- a/apps/oxlint/src/js_plugins/parse.rs +++ b/apps/oxlint/src/js_plugins/parse.rs @@ -7,7 +7,7 @@ use napi::bindgen_prelude::Uint8Array; use napi_derive::napi; use oxc_allocator::Allocator; -use oxc_ast::ast::{Comment, CommentKind}; +use oxc_ast::ast::{Comment, CommentContent, CommentKind}; use oxc_ast_visit::utf8_to_utf16::Utf8ToUtf16; use oxc_estree_tokens::{ESTreeTokenOptionsJS, update_tokens}; use oxc_linter::RawTransferMetadata2 as RawTransferMetadata; @@ -215,7 +215,7 @@ unsafe fn parse_raw_impl( ); } - // Convert spans to UTF-16. + // Create span converter. // If source starts with BOM, create converter which ignores the BOM. 
let span_converter = if has_bom { #[expect(clippy::cast_possible_truncation)] @@ -224,10 +224,25 @@ unsafe fn parse_raw_impl( Utf8ToUtf16::new(source_text) }; + // Convert token spans to UTF-16 and update token kinds update_tokens(&mut tokens, program, &span_converter, ESTreeTokenOptionsJS); + // Convert AST spans to UTF-16 span_converter.convert_program(program); - span_converter.convert_comments(&mut program.comments); + + // Convert comment spans to UTF-16. + // Also set the `content` field (byte 15) of each comment to `None` (0). + // JS side uses this byte as a "deserialized" flag for tracking lazy deserialization. + if let Some(mut converter) = span_converter.converter() { + for comment in &mut program.comments { + converter.convert_span(&mut comment.span); + comment.content = CommentContent::None; + } + } else { + for comment in &mut program.comments { + comment.content = CommentContent::None; + } + } let tokens_offset = tokens.as_ptr() as u32; #[expect(clippy::cast_possible_truncation)] diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/.oxlintrc.json b/apps/oxlint/test/fixtures/tokens_and_comments_order/.oxlintrc.json new file mode 100644 index 0000000000000..aac83e9231eb5 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/.oxlintrc.json @@ -0,0 +1,7 @@ +{ + "jsPlugins": ["./plugin.ts"], + "categories": { "correctness": "off" }, + "rules": { + "tokens-and-comments-order-plugin/tokens-and-comments-order": "error" + } +} diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/001.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/001.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/001.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/002.js 
b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/002.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/002.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/003.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/003.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/003.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/004.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/004.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/004.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/005.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/005.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/005.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/006.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/006.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/006.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git 
a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/007.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/007.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/007.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/008.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/008.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/008.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/009.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/009.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/009.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/010.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/010.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/010.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/011.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/011.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/011.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff 
--git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/012.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/012.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/012.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/013.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/013.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/013.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/014.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/014.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/014.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/015.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/015.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/015.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/016.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/016.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/016.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment 
diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/017.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/017.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/017.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/018.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/018.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/018.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/019.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/019.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/019.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/020.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/020.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/020.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/021.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/021.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/021.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing 
comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/022.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/022.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/022.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/023.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/023.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/023.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/024.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/024.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/024.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/025.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/025.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/025.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/026.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/026.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/026.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// 
Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/027.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/027.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/027.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/028.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/028.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/028.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/029.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/029.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/029.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/030.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/030.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/030.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/031.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/031.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/031.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 
2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/032.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/032.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/032.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/033.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/033.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/033.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/034.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/034.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/034.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/035.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/035.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/035.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/036.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/036.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/036.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; 
+let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/037.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/037.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/037.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/038.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/038.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/038.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/039.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/039.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/039.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/040.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/040.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/040.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/041.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/041.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/041.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline 
*/ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/042.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/042.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/042.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/043.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/043.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/043.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/044.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/044.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/044.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/045.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/045.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/045.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/046.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/046.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/046.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* 
inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/047.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/047.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/047.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/048.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/048.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/048.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/049.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/049.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/049.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/050.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/050.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/050.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/051.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/051.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/051.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x 
= /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/052.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/052.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/052.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/053.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/053.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/053.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/054.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/054.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/054.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/055.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/055.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/055.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/056.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/056.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/056.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment 
+let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/057.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/057.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/057.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/058.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/058.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/058.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/059.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/059.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/059.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/060.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/060.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/060.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/061.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/061.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/061.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading 
comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/062.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/062.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/062.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/063.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/063.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/063.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/064.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/064.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/064.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/065.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/065.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/065.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/066.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/066.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/066.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// 
Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/067.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/067.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/067.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/068.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/068.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/068.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/069.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/069.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/069.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/070.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/070.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/070.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/071.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/071.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/071.js @@ -0,0 +1,5 @@ +#!/usr/bin/env 
node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/072.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/072.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/072.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/073.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/073.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/073.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/074.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/074.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/074.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/075.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/075.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/075.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/076.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/076.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/076.js @@ -0,0 +1,5 @@ 
+#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/077.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/077.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/077.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/078.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/078.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/078.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/079.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/079.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/079.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/080.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/080.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/080.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/081.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/081.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/081.js @@ -0,0 
+1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/082.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/082.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/082.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/083.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/083.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/083.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/084.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/084.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/084.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/085.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/085.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/085.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/086.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/086.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/086.js @@ 
-0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/087.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/087.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/087.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/088.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/088.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/088.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/089.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/089.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/089.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/090.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/090.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/090.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/091.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/091.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ 
b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/091.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/092.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/092.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/092.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/093.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/093.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/093.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/094.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/094.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/094.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/095.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/095.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/095.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/096.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/096.js new file mode 100644 index 0000000000000..576a729dc4203 --- 
/dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/096.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/097.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/097.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/097.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/098.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/098.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/098.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/099.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/099.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/099.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/100.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/100.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/100.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/101.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/101.js new file mode 100644 index 
0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/101.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/102.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/102.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/102.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/103.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/103.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/103.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/104.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/104.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/104.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/105.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/105.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/105.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/106.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/106.js new file mode 100644 
index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/106.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/107.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/107.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/107.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/108.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/108.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/108.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/109.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/109.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/109.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/110.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/110.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/110.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/111.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/111.js new file mode 
100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/111.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/112.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/112.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/112.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/113.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/113.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/113.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/114.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/114.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/114.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/115.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/115.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/115.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/116.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/116.js new file 
mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/116.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/117.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/117.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/117.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/118.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/118.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/118.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/119.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/119.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/119.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/files/120.js b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/120.js new file mode 100644 index 0000000000000..576a729dc4203 --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/files/120.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/output.snap.md b/apps/oxlint/test/fixtures/tokens_and_comments_order/output.snap.md 
new file mode 100644 index 0000000000000..8a178806f27ea --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/output.snap.md @@ -0,0 +1,1332 @@ +# Exit code +1 + +# stdout +``` + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 1: + | [tokens, comments, tokensAndComments, lastToken, lastComment] + ,-[files/001.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 2: + | [comments, tokens, tokensAndComments, lastToken, lastComment] + ,-[files/002.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 3: + | [tokensAndComments, tokens, comments, lastToken, lastComment] + ,-[files/003.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 4: + | [lastToken, tokens, comments, tokensAndComments, lastComment] + ,-[files/004.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 5: + | [lastComment, tokens, comments, tokensAndComments, lastToken] + ,-[files/005.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 6: + | [tokens, tokensAndComments, comments, lastToken, lastComment] + ,-[files/006.js:1:1] + 1 | 
,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 7: + | [comments, tokensAndComments, tokens, lastToken, lastComment] + ,-[files/007.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 8: + | [tokensAndComments, comments, tokens, lastToken, lastComment] + ,-[files/008.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 9: + | [lastToken, comments, tokens, tokensAndComments, lastComment] + ,-[files/009.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 10: + | [lastComment, comments, tokens, tokensAndComments, lastToken] + ,-[files/010.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 11: + | [tokens, lastToken, comments, tokensAndComments, lastComment] + ,-[files/011.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 12: + | [comments, lastToken, tokens, tokensAndComments, lastComment] + ,-[files/012.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 
3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 13: + | [tokensAndComments, lastToken, tokens, comments, lastComment] + ,-[files/013.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 14: + | [lastToken, tokensAndComments, tokens, comments, lastComment] + ,-[files/014.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 15: + | [lastComment, tokensAndComments, tokens, comments, lastToken] + ,-[files/015.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 16: + | [tokens, lastComment, comments, tokensAndComments, lastToken] + ,-[files/016.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 17: + | [comments, lastComment, tokens, tokensAndComments, lastToken] + ,-[files/017.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 18: + | [tokensAndComments, lastComment, tokens, comments, lastToken] + ,-[files/018.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; 
+ 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 19: + | [lastToken, lastComment, tokens, comments, tokensAndComments] + ,-[files/019.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 20: + | [lastComment, lastToken, tokens, comments, tokensAndComments] + ,-[files/020.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 21: + | [tokens, comments, lastToken, tokensAndComments, lastComment] + ,-[files/021.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 22: + | [comments, tokens, lastToken, tokensAndComments, lastComment] + ,-[files/022.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 23: + | [tokensAndComments, tokens, lastToken, comments, lastComment] + ,-[files/023.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 24: + | [lastToken, tokens, tokensAndComments, comments, lastComment] + ,-[files/024.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 25: + | [lastComment, tokens, tokensAndComments, comments, lastToken] + ,-[files/025.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 26: + | [tokens, tokensAndComments, lastToken, comments, lastComment] + ,-[files/026.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 27: + | [comments, tokensAndComments, lastToken, tokens, lastComment] + ,-[files/027.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 28: + | [tokensAndComments, comments, lastToken, tokens, lastComment] + ,-[files/028.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 29: + | [lastToken, comments, tokensAndComments, tokens, lastComment] + ,-[files/029.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 30: + | [lastComment, comments, tokensAndComments, tokens, lastToken] + ,-[files/030.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 31: + | [tokens, lastToken, tokensAndComments, comments, lastComment] + ,-[files/031.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 32: + | [comments, lastToken, tokensAndComments, tokens, lastComment] + ,-[files/032.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 33: + | [tokensAndComments, lastToken, comments, tokens, lastComment] + ,-[files/033.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 34: + | [lastToken, tokensAndComments, comments, tokens, lastComment] + ,-[files/034.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 35: + | [lastComment, tokensAndComments, comments, tokens, lastToken] + ,-[files/035.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 36: + | [tokens, lastComment, tokensAndComments, comments, lastToken] + ,-[files/036.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 37: + | [comments, lastComment, tokensAndComments, tokens, lastToken] + ,-[files/037.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 38: + | [tokensAndComments, lastComment, comments, tokens, lastToken] + ,-[files/038.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 39: + | [lastToken, lastComment, comments, tokens, tokensAndComments] + ,-[files/039.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 40: + | [lastComment, lastToken, comments, tokens, tokensAndComments] + ,-[files/040.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 41: + | [tokens, comments, lastComment, tokensAndComments, lastToken] + ,-[files/041.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 42: + | [comments, tokens, lastComment, tokensAndComments, lastToken] + ,-[files/042.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 43: + | [tokensAndComments, tokens, lastComment, comments, lastToken] + ,-[files/043.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 44: + | [lastToken, tokens, lastComment, comments, tokensAndComments] + ,-[files/044.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 45: + | [lastComment, tokens, lastToken, comments, tokensAndComments] + ,-[files/045.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 46: + | [tokens, tokensAndComments, lastComment, comments, lastToken] + ,-[files/046.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 47: + | [comments, tokensAndComments, lastComment, tokens, lastToken] + ,-[files/047.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 48: + | [tokensAndComments, comments, lastComment, tokens, lastToken] + ,-[files/048.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 49: + | [lastToken, comments, lastComment, tokens, tokensAndComments] + ,-[files/049.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 50: + | [lastComment, comments, lastToken, tokens, tokensAndComments] + ,-[files/050.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 51: + | [tokens, lastToken, lastComment, comments, tokensAndComments] + ,-[files/051.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 52: + | [comments, lastToken, lastComment, tokens, tokensAndComments] + ,-[files/052.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 53: + | [tokensAndComments, lastToken, lastComment, tokens, comments] + ,-[files/053.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 54: + | [lastToken, tokensAndComments, lastComment, tokens, comments] + ,-[files/054.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 55: + | [lastComment, tokensAndComments, lastToken, tokens, comments] + ,-[files/055.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 56: + | [tokens, lastComment, lastToken, comments, tokensAndComments] + ,-[files/056.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 57: + | [comments, lastComment, lastToken, tokens, tokensAndComments] + ,-[files/057.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 58: + | [tokensAndComments, lastComment, lastToken, tokens, comments] + ,-[files/058.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 59: + | [lastToken, lastComment, tokensAndComments, tokens, comments] + ,-[files/059.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 60: + | [lastComment, lastToken, tokensAndComments, tokens, comments] + ,-[files/060.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 61: + | [tokens, comments, tokensAndComments, lastComment, lastToken] + ,-[files/061.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 62: + | [comments, tokens, tokensAndComments, lastComment, lastToken] + ,-[files/062.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 63: + | [tokensAndComments, tokens, comments, lastComment, lastToken] + ,-[files/063.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 64: + | [lastToken, tokens, comments, lastComment, tokensAndComments] + ,-[files/064.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 65: + | [lastComment, tokens, comments, lastToken, tokensAndComments] + ,-[files/065.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 66: + | [tokens, tokensAndComments, comments, lastComment, lastToken] + ,-[files/066.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 67: + | [comments, tokensAndComments, tokens, lastComment, lastToken] + ,-[files/067.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 68: + | [tokensAndComments, comments, tokens, lastComment, lastToken] + ,-[files/068.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 69: + | [lastToken, comments, tokens, lastComment, tokensAndComments] + ,-[files/069.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 70: + | [lastComment, comments, tokens, lastToken, tokensAndComments] + ,-[files/070.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 71: + | [tokens, lastToken, comments, lastComment, tokensAndComments] + ,-[files/071.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 72: + | [comments, lastToken, tokens, lastComment, tokensAndComments] + ,-[files/072.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 73: + | [tokensAndComments, lastToken, tokens, lastComment, comments] + ,-[files/073.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 74: + | [lastToken, tokensAndComments, tokens, lastComment, comments] + ,-[files/074.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 75: + | [lastComment, tokensAndComments, tokens, lastToken, comments] + ,-[files/075.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 76: + | [tokens, lastComment, comments, lastToken, tokensAndComments] + ,-[files/076.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 77: + | [comments, lastComment, tokens, lastToken, tokensAndComments] + ,-[files/077.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 78: + | [tokensAndComments, lastComment, tokens, lastToken, comments] + ,-[files/078.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 79: + | [lastToken, lastComment, tokens, tokensAndComments, comments] + ,-[files/079.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 80: + | [lastComment, lastToken, tokens, tokensAndComments, comments] + ,-[files/080.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 81: + | [tokens, comments, lastToken, lastComment, tokensAndComments] + ,-[files/081.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 82: + | [comments, tokens, lastToken, lastComment, tokensAndComments] + ,-[files/082.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 83: + | [tokensAndComments, tokens, lastToken, lastComment, comments] + ,-[files/083.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 84: + | [lastToken, tokens, tokensAndComments, lastComment, comments] + ,-[files/084.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 85: + | [lastComment, tokens, tokensAndComments, lastToken, comments] + ,-[files/085.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 86: + | [tokens, tokensAndComments, lastToken, lastComment, comments] + ,-[files/086.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 87: + | [comments, tokensAndComments, lastToken, lastComment, tokens] + ,-[files/087.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 88: + | [tokensAndComments, comments, lastToken, lastComment, tokens] + ,-[files/088.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 89: + | [lastToken, comments, tokensAndComments, lastComment, tokens] + ,-[files/089.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 90: + | [lastComment, comments, tokensAndComments, lastToken, tokens] + ,-[files/090.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 91: + | [tokens, lastToken, tokensAndComments, lastComment, comments] + ,-[files/091.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 92: + | [comments, lastToken, tokensAndComments, lastComment, tokens] + ,-[files/092.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 93: + | [tokensAndComments, lastToken, comments, lastComment, tokens] + ,-[files/093.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 94: + | [lastToken, tokensAndComments, comments, lastComment, tokens] + ,-[files/094.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 95: + | [lastComment, tokensAndComments, comments, lastToken, tokens] + ,-[files/095.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 96: + | [tokens, lastComment, tokensAndComments, lastToken, comments] + ,-[files/096.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 97: + | [comments, lastComment, tokensAndComments, lastToken, tokens] + ,-[files/097.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 98: + | [tokensAndComments, lastComment, comments, lastToken, tokens] + ,-[files/098.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 99: + | [lastToken, lastComment, comments, tokensAndComments, tokens] + ,-[files/099.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 100: + | [lastComment, lastToken, comments, tokensAndComments, tokens] + ,-[files/100.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 101: + | [tokens, comments, lastComment, lastToken, tokensAndComments] + ,-[files/101.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 102: + | [comments, tokens, lastComment, lastToken, tokensAndComments] + ,-[files/102.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 103: + | [tokensAndComments, tokens, lastComment, lastToken, comments] + ,-[files/103.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 104: + | [lastToken, tokens, lastComment, tokensAndComments, comments] + ,-[files/104.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 105: + | [lastComment, tokens, lastToken, tokensAndComments, comments] + ,-[files/105.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 106: + | [tokens, tokensAndComments, lastComment, lastToken, comments] + ,-[files/106.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 107: + | [comments, tokensAndComments, lastComment, lastToken, tokens] + ,-[files/107.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 108: + | [tokensAndComments, comments, lastComment, lastToken, tokens] + ,-[files/108.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 109: + | [lastToken, comments, lastComment, tokensAndComments, tokens] + ,-[files/109.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 110: + | [lastComment, comments, lastToken, tokensAndComments, tokens] + ,-[files/110.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 111: + | [tokens, lastToken, lastComment, tokensAndComments, comments] + ,-[files/111.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 112: + | [comments, lastToken, lastComment, tokensAndComments, tokens] + ,-[files/112.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 113: + | [tokensAndComments, lastToken, lastComment, comments, tokens] + ,-[files/113.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 114: + | [lastToken, tokensAndComments, lastComment, comments, tokens] + ,-[files/114.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x 
tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 115: + | [lastComment, tokensAndComments, lastToken, comments, tokens] + ,-[files/115.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 116: + | [tokens, lastComment, lastToken, tokensAndComments, comments] + ,-[files/116.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 117: + | [comments, lastComment, lastToken, tokensAndComments, tokens] + ,-[files/117.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 118: + | [tokensAndComments, lastComment, lastToken, comments, tokens] + ,-[files/118.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 119: + | [lastToken, lastComment, tokensAndComments, comments, tokens] + ,-[files/119.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + + x tokens-and-comments-order-plugin(tokens-and-comments-order): OK + | Permutation 120: + | [lastComment, lastToken, tokensAndComments, comments, tokens] + ,-[files/120.js:1:1] + 1 | ,-> #!/usr/bin/env node + 2 | | // Leading comment + 3 | | let x = /* inline */ 1; + 4 | | let y = 2; + 5 | `-> // Trailing comment + `---- + +Found 0 warnings and 120 errors. 
+Finished in Xms on 120 files with 1 rules using X threads. +``` + +# stderr +``` +``` diff --git a/apps/oxlint/test/fixtures/tokens_and_comments_order/plugin.ts b/apps/oxlint/test/fixtures/tokens_and_comments_order/plugin.ts new file mode 100644 index 0000000000000..a3ce4f6eccb1a --- /dev/null +++ b/apps/oxlint/test/fixtures/tokens_and_comments_order/plugin.ts @@ -0,0 +1,302 @@ +/** + * Test that tokens/comments initialization works correctly regardless of the order + * in which APIs are accessed. + * + * Each file in `files/` has the same content (hashbang + comments + tokens). + * Files are named `001.js` through `120.js` (5! = 120 permutations). + * The filename number determines the order in which 5 operations are performed. + * + * After performing all operations, we verify: + * 1. Each operation returns correct results. + * 2. Object identity is preserved - the same token/comment objects are returned by all methods that return them. + */ +import assert from "node:assert"; + +import type { Plugin, Rule, SourceCode, Token, Comment } from "#oxlint/plugins"; + +const SOURCE_TEXT = `#!/usr/bin/env node +// Leading comment +let x = /* inline */ 1; +let y = 2; +// Trailing comment +`; + +// Expected snapshot strings for each operation. +// Each line is a JSON-serialized `{type, value, range}` for one token/comment. 
+// prettier-ignore +const TOKENS_SNAPSHOT = [ + '{"type":"Keyword","value":"let","range":[39,42]}', + '{"type":"Identifier","value":"x","range":[43,44]}', + '{"type":"Punctuator","value":"=","range":[45,46]}', + '{"type":"Numeric","value":"1","range":[60,61]}', + '{"type":"Punctuator","value":";","range":[61,62]}', + '{"type":"Keyword","value":"let","range":[63,66]}', + '{"type":"Identifier","value":"y","range":[67,68]}', + '{"type":"Punctuator","value":"=","range":[69,70]}', + '{"type":"Numeric","value":"2","range":[71,72]}', + '{"type":"Punctuator","value":";","range":[72,73]}', +].join("\n"); + +// prettier-ignore +const COMMENTS_SNAPSHOT = [ + '{"type":"Shebang","value":"/usr/bin/env node","range":[0,19]}', + '{"type":"Line","value":" Leading comment","range":[20,38]}', + '{"type":"Block","value":" inline ","range":[47,59]}', + '{"type":"Line","value":" Trailing comment","range":[74,93]}', +].join("\n"); + +// prettier-ignore +const TOKENS_AND_COMMENTS_SNAPSHOT = [ + '{"type":"Shebang","value":"/usr/bin/env node","range":[0,19]}', + '{"type":"Line","value":" Leading comment","range":[20,38]}', + '{"type":"Keyword","value":"let","range":[39,42]}', + '{"type":"Identifier","value":"x","range":[43,44]}', + '{"type":"Punctuator","value":"=","range":[45,46]}', + '{"type":"Block","value":" inline ","range":[47,59]}', + '{"type":"Numeric","value":"1","range":[60,61]}', + '{"type":"Punctuator","value":";","range":[61,62]}', + '{"type":"Keyword","value":"let","range":[63,66]}', + '{"type":"Identifier","value":"y","range":[67,68]}', + '{"type":"Punctuator","value":"=","range":[69,70]}', + '{"type":"Numeric","value":"2","range":[71,72]}', + '{"type":"Punctuator","value":";","range":[72,73]}', + '{"type":"Line","value":" Trailing comment","range":[74,93]}', +].join("\n"); + +const LAST_TOKEN_SNAPSHOT = '{"type":"Punctuator","value":";","range":[72,73]}'; + +const LAST_COMMENT_SNAPSHOT = '{"type":"Line","value":" Trailing comment","range":[74,93]}'; + +// A node-like object 
covering the entire source text, used for token retrieval methods +const PROGRAM_NODE = { + type: "Program" as const, + start: 0, + end: SOURCE_TEXT.length, + range: [0, SOURCE_TEXT.length] as [number, number], + loc: { + start: { line: 1, column: 0 }, + end: { line: SOURCE_TEXT.split("\n").length, column: 0 }, + }, +}; + +// Operation functions. +// Each returns a value that's stored and later checked for cross-consistency. + +interface Results { + tokens: Token[] | null; + comments: Comment[] | null; + tokensAndComments: (Token | Comment)[] | null; + lastToken: Token | null; + lastComment: Token | Comment | null; +} + +type OpName = keyof Results; + +const OP_NAMES: OpName[] = ["tokens", "comments", "tokensAndComments", "lastToken", "lastComment"]; + +function runOp(op: OpName, sourceCode: SourceCode): Results[OpName] { + switch (op) { + case "tokens": + return opTokens(sourceCode); + case "comments": + return opComments(sourceCode); + case "tokensAndComments": + return opTokensAndComments(sourceCode); + case "lastToken": + return opLastToken(sourceCode); + case "lastComment": + return opLastComment(sourceCode); + } +} + +function opTokens(sourceCode: SourceCode): Token[] { + const { tokens } = sourceCode.ast; + + assert.equal(snapAll(tokens), TOKENS_SNAPSHOT); + + // Calling again should return same array + assert(sourceCode.ast.tokens === tokens, "`ast.tokens` getter should return cached array"); + + return tokens; +} + +function opComments(sourceCode: SourceCode): Comment[] { + const comments = sourceCode.getAllComments(); + + assert.equal(snapAll(comments), COMMENTS_SNAPSHOT); + + // Calling again should return same array + assert(sourceCode.getAllComments() === comments, "`getAllComments()` should return cached array"); + + return comments; +} + +function opTokensAndComments(sourceCode: SourceCode): (Token | Comment)[] { + const { tokensAndComments } = sourceCode; + + assert.equal(snapAll(tokensAndComments), TOKENS_AND_COMMENTS_SNAPSHOT); + + // Calling 
again should return same array + assert( + sourceCode.tokensAndComments === tokensAndComments, + "`tokensAndComments` getter should return cached array", + ); + + return tokensAndComments; +} + +function opLastToken(sourceCode: SourceCode): Token { + const token = sourceCode.getLastToken(PROGRAM_NODE); + assert(token !== null, "`getLastToken()` should return a token"); + + assert.equal(snap(token), LAST_TOKEN_SNAPSHOT); + + return token; +} + +function opLastComment(sourceCode: SourceCode): Token | Comment { + const comment = sourceCode.getLastToken(PROGRAM_NODE, { includeComments: true }); + assert(comment !== null, "`getLastToken({ includeComments: true })` should return a comment"); + + assert.equal(snap(comment), LAST_COMMENT_SNAPSHOT); + + return comment; +} + +// ---- Rule ---- + +const rule: Rule = { + create(context) { + const { sourceCode } = context; + const { filename } = context; + + // Parse the number from the filename (e.g. "001.js" -> 0, "120.js" -> 119) + const match = filename.match(/(\d+)\.js$/); + if (!match) throw new Error(`Unexpected filename: ${filename}`); + const permIndex = parseInt(match[1], 10) - 1; // 0-based + + // Determine the order of operations for this file + const ops = nthPermutation(permIndex, OP_NAMES); + + // Run each operation in the determined order + const results: Results = { + tokens: null, + comments: null, + tokensAndComments: null, + lastToken: null, + lastComment: null, + }; + + for (const op of ops) { + results[op] = runOp(op, sourceCode) as never; + } + + // Verify source text. + // Done AFTER all operations to ensure token/comment methods initialize source text themselves. 
+ assert.equal(sourceCode.text, SOURCE_TEXT, "Source text should match"); + + // ---- Verify object identity across results ---- + + const { tokens, comments, tokensAndComments, lastToken, lastComment } = results; + + // `lastToken` should be the same object as the last element of `tokens` + assert.strictEqual( + lastToken, + tokens!.at(-1), + "`lastToken` should be same object as `tokens[-1]`", + ); + + // `lastComment` should be the same object as the last element of `comments` + assert.strictEqual( + lastComment, + comments!.at(-1), + "`lastComment` should be same object as `comments[-1]`", + ); + + // Every token in `tokens` should appear in `tokensAndComments` (same object) + for (const token of tokens!) { + assert( + tokensAndComments!.includes(token), + `Token "${token.value}" at ${token.start} should be in \`tokensAndComments\` (same object)`, + ); + } + + // Every comment in `comments` should appear in `tokensAndComments` (same object) + for (const comment of comments!) { + assert( + tokensAndComments!.includes(comment), + `Comment at ${comment.start} should be in \`tokensAndComments\` (same object)`, + ); + } + + // Total count should match + assert.equal( + tokensAndComments!.length, + tokens!.length + comments!.length, + "`tokensAndComments` length should equal `tokens` + `comments` combined lengths", + ); + + // `lastToken` should be in `tokensAndComments` + assert( + tokensAndComments!.includes(lastToken!), + "`lastToken` should be in `tokensAndComments` (same object)", + ); + + // `lastComment` should be in `tokensAndComments` + assert( + tokensAndComments!.includes(lastComment!), + "`lastComment` should be in `tokensAndComments` (same object)", + ); + + context.report({ + message: `OK\nPermutation ${permIndex + 1}:\n[${ops.join(", ")}]`, + node: PROGRAM_NODE, + }); + + return {}; + }, +}; + +const plugin: Plugin = { + meta: { name: "tokens-and-comments-order-plugin" }, + rules: { "tokens-and-comments-order": rule }, +}; + +export default plugin; + 
+// ---- Helpers ---- + +/** + * Decode a 0-based permutation index into a permutation of `items`. + * Uses the factorial number system (Lehmer code). + * + * @param n - Permutation index (0 to items.length! - 1) + * @param items - Array of items to permute + * @returns Permuted copy of `items` + */ +function nthPermutation<T>(n: number, items: T[]): T[] { + const remaining = [...items]; + const result: T[] = []; + for (let i = remaining.length; i > 0; i--) { + const index = n % i; + result.push(remaining[index]); + remaining.splice(index, 1); + n = (n - index) / i; + } + return result; +} + +/** + * Format a token/comment for snapshot comparison. + * Only includes `type`, `value`, and `range` - excludes `loc` for brevity. + */ +function snap(entry: Token | Comment): string { + return JSON.stringify({ type: entry.type, value: entry.value, range: entry.range }); +} + +/** + * Format an array of tokens/comments for snapshot comparison. + */ +function snapAll(entries: (Token | Comment)[]): string { + return entries.map(snap).join("\n"); +} diff --git a/apps/oxlint/test/tokens.test-d.ts b/apps/oxlint/test/tokens.test-d.ts index 07d491f8af928..742e8542d4d15 100644 --- a/apps/oxlint/test/tokens.test-d.ts +++ b/apps/oxlint/test/tokens.test-d.ts @@ -35,7 +35,8 @@ import { } from "../src-js/plugins/tokens_methods.ts"; import type { Node } from "../src-js/plugins/types.ts"; -import type { Token, TokenOrComment } from "../src-js/plugins/tokens.ts"; +import type { Token } from "../src-js/plugins/tokens.ts"; +import type { TokenOrComment } from "../src-js/plugins/tokens_and_comments.ts"; import type { CountOptions, SkipOptions } from "../src-js/plugins/tokens_methods.ts"; type IsExact<T, U> = [T] extends [U] ? ([U] extends [T] ? 
true : false) : false; diff --git a/apps/oxlint/test/tokens.test.ts b/apps/oxlint/test/tokens.test.ts index 719060c5e6cf1..6893079b8bda4 100644 --- a/apps/oxlint/test/tokens.test.ts +++ b/apps/oxlint/test/tokens.test.ts @@ -31,7 +31,7 @@ import { parse as parseRaw } from "../src-js/package/parse.ts"; import { debugAssertIsNonNull } from "../src-js/utils/asserts.ts"; import type { Node } from "../src-js/plugins/types.ts"; -import type { TokenOrComment } from "../src-js/plugins/tokens.ts"; +import type { TokenOrComment } from "../src-js/plugins/tokens_and_comments.ts"; import type { BinaryExpression } from "../src-js/generated/types.d.ts"; // Source text used for most tests diff --git a/apps/oxlint/tsdown_plugins/inline_search.ts b/apps/oxlint/tsdown_plugins/inline_search.ts index e9e2cb4068343..4de3e0fcd9d17 100644 --- a/apps/oxlint/tsdown_plugins/inline_search.ts +++ b/apps/oxlint/tsdown_plugins/inline_search.ts @@ -32,13 +32,13 @@ const { fnParams, returnParamIndex, fnBodySource } = extractInlinedFunction( * * ```ts * // Original code - * const index = firstTokenAtOrAfter(tokenList, rangeStart, searchFromIndex); + * const index = firstTokenAtOrAfter(uint32, rangeStart, searchFromIndex, length); * * // After transform * let index = searchFromIndex; - * for (let endIndex = tokenList.length; index < endIndex; ) { + * for (let endIndex = length; index < endIndex; ) { * const mid = (index + endIndex) >> 1; - * if (tokenList[mid].start < rangeStart) { + * if (uint32[mid << 2] < rangeStart) { * index = mid + 1; * } else { * endIndex = mid; diff --git a/crates/oxc_linter/src/lib.rs b/crates/oxc_linter/src/lib.rs index adca9e8426d67..2402aaf14183a 100644 --- a/crates/oxc_linter/src/lib.rs +++ b/crates/oxc_linter/src/lib.rs @@ -16,7 +16,7 @@ use std::{ use oxc_allocator::{Allocator, AllocatorPool, CloneIn, TakeIn, Vec as ArenaVec}; use oxc_ast::{ - ast::{Comment, CommentKind, Program}, + ast::{Comment, CommentContent, CommentKind, Program}, ast_kind::AST_TYPE_MAX, }; use 
oxc_ast_macros::ast; @@ -611,7 +611,7 @@ impl Linter { program.source_text = source_text; } - // Convert spans to UTF-16. + // Create span converter. // If source starts with BOM, create converter which ignores the BOM. let span_converter = if has_bom { #[expect(clippy::cast_possible_truncation)] @@ -620,7 +620,7 @@ impl Linter { Utf8ToUtf16::new(source_text) }; - // Convert tokens for raw transfer + // Convert token spans to UTF-16 and update token kinds #[expect(clippy::if_not_else, clippy::cast_possible_truncation)] let (tokens_offset, tokens_len) = if !tokens.is_empty() { update_tokens(tokens, program, &span_converter, ESTreeTokenOptionsJS); @@ -629,8 +629,22 @@ impl Linter { (0, 0) }; + // Convert AST spans to UTF-16 span_converter.convert_program(program); - span_converter.convert_comments(&mut program.comments); + + // Convert comment spans to UTF-16. + // Also set the `content` field (byte 15) of each comment to `None` (0). + // JS side uses this byte as a "deserialized" flag for tracking lazy deserialization. + if let Some(mut converter) = span_converter.converter() { + for comment in &mut program.comments { + converter.convert_span(&mut comment.span); + comment.content = CommentContent::None; + } + } else { + for comment in &mut program.comments { + comment.content = CommentContent::None; + } + } // Get offset of `Program` within buffer (bottom 32 bits of pointer) let program_offset = ptr::from_ref(program) as u32; diff --git a/napi/parser/src-js/generated/constants.js b/napi/parser/src-js/generated/constants.js index 913aa7ddac3ae..dacc195349cc6 100644 --- a/napi/parser/src-js/generated/constants.js +++ b/napi/parser/src-js/generated/constants.js @@ -81,6 +81,14 @@ export const COMMENT_SIZE = 16; */ export const COMMENT_KIND_OFFSET = 12; +/** + * Byte offset of the deserialized flag within each token/comment entry. + * + * Corresponds to `content` field of `Comment` struct, and unused bytes in `Token`. + * Initialized to 0 by Rust. 
JS side sets to 1 after deserialization. + */ +export const DESERIALIZED_FLAG_OFFSET = 15; + /** * Discriminant value for `CommentKind::Line`. */ diff --git a/tasks/ast_tools/src/generators/raw_transfer.rs b/tasks/ast_tools/src/generators/raw_transfer.rs index 72468a97be270..6ca8b61a3faa4 100644 --- a/tasks/ast_tools/src/generators/raw_transfer.rs +++ b/tasks/ast_tools/src/generators/raw_transfer.rs @@ -1317,6 +1317,9 @@ struct Constants { comment_size: u32, /// Offset of `kind` field within `Comment` struct comment_kind_offset: u32, + /// Offset of `content` field within `Comment` struct. + /// JS side uses this byte as a "deserialized" flag for lazy deserialization of tokens/comments. + deserialized_flag_offset: u32, /// Discriminant value for `CommentKind::Line` comment_line_kind: u8, /// Size of `RawTransferData` in bytes @@ -1341,6 +1344,7 @@ fn generate_constants(consts: Constants) -> (String, TokenStream) { comments_len_offset, comment_size, comment_kind_offset, + deserialized_flag_offset, comment_line_kind, raw_metadata_size, } = consts; @@ -1431,6 +1435,14 @@ fn generate_constants(consts: Constants) -> (String, TokenStream) { */ export const COMMENT_KIND_OFFSET = {comment_kind_offset}; + /** + * Byte offset of the deserialized flag within each token/comment entry. + * + * Corresponds to `content` field of `Comment` struct, and unused bytes in `Token`. + * Initialized to 0 by Rust. JS side sets to 1 after deserialization. + */ + export const DESERIALIZED_FLAG_OFFSET = {deserialized_flag_offset}; + /** * Discriminant value for `CommentKind::Line`. 
*/ @@ -1532,6 +1544,7 @@ fn get_constants(schema: &Schema) -> Constants { let comment_struct = schema.type_by_name("Comment").as_struct().unwrap(); let comment_size = comment_struct.layout_64().size; let comment_kind_offset = comment_struct.field_by_name("kind").offset_64(); + let deserialized_flag_offset = comment_struct.field_by_name("content").offset_64(); let comment_kind_enum = schema.type_by_name("CommentKind").as_enum().unwrap(); let comment_line_kind = @@ -1553,6 +1566,7 @@ fn get_constants(schema: &Schema) -> Constants { comments_len_offset, comment_size, comment_kind_offset, + deserialized_flag_offset, comment_line_kind, raw_metadata_size, }