diff --git a/src/Features/Lsif/Generator/Generator.cs b/src/Features/Lsif/Generator/Generator.cs index 354e412cf2d71..f885d8521c6d4 100644 --- a/src/Features/Lsif/Generator/Generator.cs +++ b/src/Features/Lsif/Generator/Generator.cs @@ -466,19 +466,22 @@ private static async Task GenerateSemanticTokensAsync( IdFactory idFactory, LsifDocument documentVertex) { + var cancellationToken = CancellationToken.None; + // Compute colorization data. // // Unlike the mainline LSP scenario, where we control both the syntactic colorizer (in-proc syntax tagger) // and the semantic colorizer (LSP semantic tokens) LSIF is more likely to be consumed by clients which may // have different syntactic classification behavior than us, resulting in missing colors. To avoid this, we // include syntax tokens in the generated data. + var text = await document.GetTextAsync(cancellationToken); var data = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( // Just get the pure-lsp semantic tokens here. document, - spans: [], + spans: [text.Lines.GetLinePositionSpan(new TextSpan(0, text.Length))], supportsVisualStudioExtensions: true, options: Classification.ClassificationOptions.Default, - cancellationToken: CancellationToken.None); + cancellationToken); var semanticTokensResult = new SemanticTokensResult(new SemanticTokens { Data = data }, idFactory); var semanticTokensEdge = Edge.Create(Methods.TextDocumentSemanticTokensFullName, documentVertex.GetId(), semanticTokensResult.GetId(), idFactory); diff --git a/src/LanguageServer/Protocol/ExternalAccess/Razor/SemanticTokensRangesHandler.cs b/src/LanguageServer/Protocol/ExternalAccess/Razor/SemanticTokensRangesHandler.cs index c6efcb1143b5c..d329f0ebc21b0 100644 --- a/src/LanguageServer/Protocol/ExternalAccess/Razor/SemanticTokensRangesHandler.cs +++ b/src/LanguageServer/Protocol/ExternalAccess/Razor/SemanticTokensRangesHandler.cs @@ -12,24 +12,20 @@ namespace Microsoft.CodeAnalysis.LanguageServer.ExternalAccess.Razor; 
[Method(SemanticRangesMethodName)] -internal class SemanticTokensRangesHandler : ILspServiceDocumentRequestHandler +internal sealed class SemanticTokensRangesHandler( + IGlobalOptionService globalOptions, + SemanticTokensRefreshQueue semanticTokensRefreshQueue) + : ILspServiceDocumentRequestHandler { public const string SemanticRangesMethodName = "roslyn/semanticTokenRanges"; - private readonly IGlobalOptionService _globalOptions; - private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue; + + private readonly IGlobalOptionService _globalOptions = globalOptions; + private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue = semanticTokensRefreshQueue; public bool MutatesSolutionState => false; public bool RequiresLSPSolution => true; - public SemanticTokensRangesHandler( - IGlobalOptionService globalOptions, - SemanticTokensRefreshQueue semanticTokensRefreshQueue) - { - _globalOptions = globalOptions; - _semanticTokenRefreshQueue = semanticTokensRefreshQueue; - } - public TextDocumentIdentifier GetTextDocumentIdentifier(SemanticTokensRangesParams request) { Contract.ThrowIfNull(request.TextDocument); @@ -37,15 +33,14 @@ public TextDocumentIdentifier GetTextDocumentIdentifier(SemanticTokensRangesPara } public async Task HandleRequestAsync( - SemanticTokensRangesParams request, - RequestContext context, - CancellationToken cancellationToken) + SemanticTokensRangesParams request, + RequestContext context, + CancellationToken cancellationToken) { Contract.ThrowIfNull(request.TextDocument, "TextDocument is null."); - if (request.Ranges.Length == 0) - return new SemanticTokens { Data = [] }; - var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync(_globalOptions, _semanticTokenRefreshQueue, request.Ranges, context, cancellationToken).ConfigureAwait(false); + var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync( + _globalOptions, _semanticTokenRefreshQueue, request.Ranges, context, 
cancellationToken).ConfigureAwait(false); return new SemanticTokens { Data = tokensData }; } } diff --git a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandler.cs b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandler.cs index 36242ddf9e6b1..35c8400eec316 100644 --- a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandler.cs +++ b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensFullHandler.cs @@ -34,8 +34,10 @@ public TextDocumentIdentifier GetTextDocumentIdentifier(LSP.SemanticTokensFullPa CancellationToken cancellationToken) { Contract.ThrowIfNull(request.TextDocument); - // Passing an empty array of ranges will cause the helper to return tokens for the entire document. - var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync(_globalOptions, _semanticTokenRefreshQueue, ranges: [], context, cancellationToken).ConfigureAwait(false); + + // Passing a null array of ranges will cause the helper to return tokens for the entire document. 
+ var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync( + _globalOptions, _semanticTokenRefreshQueue, ranges: null, context, cancellationToken).ConfigureAwait(false); return new LSP.SemanticTokens { Data = tokensData }; } } diff --git a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs index 62e5f9f0470b9..8b290487620c1 100644 --- a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs +++ b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs @@ -17,367 +17,359 @@ using Microsoft.CodeAnalysis.Text; using LSP = Roslyn.LanguageServer.Protocol; -namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens +namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens; + +internal static class SemanticTokensHelpers { - internal static class SemanticTokensHelpers + private static readonly ObjectPool> s_tokenListPool = new(() => new List(capacity: 1000)); + + /// The ranges to get semantic tokens for. If null then the entire document will be + /// processed. + internal static async Task HandleRequestHelperAsync( + IGlobalOptionService globalOptions, + SemanticTokensRefreshQueue semanticTokensRefreshQueue, + LSP.Range[]? 
ranges, + RequestContext context, + CancellationToken cancellationToken) { - private static readonly ObjectPool> s_tokenListPool = new ObjectPool>(() => new List(capacity: 1000)); - - internal static async Task HandleRequestHelperAsync( - IGlobalOptionService globalOptions, - SemanticTokensRefreshQueue semanticTokensRefreshQueue, - LSP.Range[] ranges, - RequestContext context, - CancellationToken cancellationToken) - { - var contextDocument = context.GetRequiredDocument(); - var project = contextDocument.Project; - var options = globalOptions.GetClassificationOptions(project.Language); - var supportsVisualStudioExtensions = context.GetRequiredClientCapabilities().HasVisualStudioLspCapability(); - - var spans = new FixedSizeArrayBuilder(ranges.Length); - foreach (var range in ranges) - spans.Add(ProtocolConversions.RangeToLinePositionSpan(range)); - - var tokensData = await HandleRequestHelperAsync(contextDocument, spans.MoveToImmutable(), supportsVisualStudioExtensions, options, cancellationToken).ConfigureAwait(false); - - // The above call to get semantic tokens may be inaccurate (because we use frozen partial semantics). Kick - // off a request to ensure that the OOP side gets a fully up to compilation for this project. Once it does - // we can optionally choose to notify our caller to do a refresh if we computed a compilation for a new - // solution snapshot. - await semanticTokensRefreshQueue.TryEnqueueRefreshComputationAsync(project, cancellationToken).ConfigureAwait(false); - return tokensData; - } + var contextDocument = context.GetRequiredDocument(); - public static async Task HandleRequestHelperAsync(Document document, ImmutableArray spans, bool supportsVisualStudioExtensions, ClassificationOptions options, CancellationToken cancellationToken) - { - // If the full compilation is not yet available, we'll try getting a partial one. It may contain inaccurate - // results but will speed up how quickly we can respond to the client's request. 
- document = document.WithFrozenPartialSemantics(cancellationToken); - options = options with { FrozenPartialSemantics = true }; - - // The results from the range handler should not be cached since we don't want to cache - // partial token results. In addition, a range request is only ever called with a whole - // document request, so caching range results is unnecessary since the whole document - // handler will cache the results anyway. - return await ComputeSemanticTokensDataAsync( - document, - spans, - supportsVisualStudioExtensions, - options, - cancellationToken).ConfigureAwait(false); - } + // If the client didn't provide any ranges, we'll just return the entire document. + var text = await contextDocument.GetTextAsync(cancellationToken).ConfigureAwait(false); + ranges ??= [ProtocolConversions.TextSpanToRange(new TextSpan(0, text.Length), text)]; - /// - /// Returns the semantic tokens data for a given document with an optional ranges. - /// - /// Spans to compute tokens for. If empty, the whole document will be used. - public static async Task ComputeSemanticTokensDataAsync( - Document document, - ImmutableArray spans, - bool supportsVisualStudioExtensions, - ClassificationOptions options, - CancellationToken cancellationToken) - { - var tokenTypesToIndex = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeToIndex; - var root = await document.GetRequiredSyntaxRootAsync(cancellationToken).ConfigureAwait(false); - var text = await document.GetValueTextAsync(cancellationToken).ConfigureAwait(false); - using var _1 = Classifier.GetPooledList(out var classifiedSpans); - using var _2 = Classifier.GetPooledList(out var updatedClassifiedSpans); - - // We either calculate the tokens for the full document span, or the user - // can pass in a range from the full document if they wish. 
- ImmutableArray textSpans; - if (spans.Length == 0) - { - textSpans = [root.FullSpan]; - } - else - { - var textSpansBuilder = new FixedSizeArrayBuilder(spans.Length); - foreach (var span in spans) - textSpansBuilder.Add(text.Lines.GetTextSpan(span)); + var project = contextDocument.Project; + var options = globalOptions.GetClassificationOptions(project.Language); + var supportsVisualStudioExtensions = context.GetRequiredClientCapabilities().HasVisualStudioLspCapability(); - textSpans = textSpansBuilder.MoveToImmutable(); - } + var spans = new FixedSizeArrayBuilder(ranges.Length); + foreach (var range in ranges) + spans.Add(ProtocolConversions.RangeToLinePositionSpan(range)); - await GetClassifiedSpansForDocumentAsync( - classifiedSpans, document, textSpans, options, cancellationToken).ConfigureAwait(false); + var tokensData = await HandleRequestHelperAsync(contextDocument, spans.MoveToImmutable(), supportsVisualStudioExtensions, options, cancellationToken).ConfigureAwait(false); - // Classified spans are not guaranteed to be returned in a certain order so we sort them to be safe. - classifiedSpans.Sort(ClassifiedSpanComparer.Instance); + // The above call to get semantic tokens may be inaccurate (because we use frozen partial semantics). Kick + // off a request to ensure that the OOP side gets a fully up to compilation for this project. Once it does + // we can optionally choose to notify our caller to do a refresh if we computed a compilation for a new + // solution snapshot. + await semanticTokensRefreshQueue.TryEnqueueRefreshComputationAsync(project, cancellationToken).ConfigureAwait(false); + return tokensData; + } - // Multi-line tokens are not supported by VS (tracked by https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1265495). - // Roslyn's classifier however can return multi-line classified spans, so we must break these up into single-line spans. 
- ConvertMultiLineToSingleLineSpans(text, classifiedSpans, updatedClassifiedSpans); + public static async Task HandleRequestHelperAsync( + Document document, ImmutableArray spans, bool supportsVisualStudioExtensions, ClassificationOptions options, CancellationToken cancellationToken) + { + // If the full compilation is not yet available, we'll try getting a partial one. It may contain inaccurate + // results but will speed up how quickly we can respond to the client's request. + document = document.WithFrozenPartialSemantics(cancellationToken); + options = options with { FrozenPartialSemantics = true }; + + // The results from the range handler should not be cached since we don't want to cache + // partial token results. In addition, a range request is only ever called with a whole + // document request, so caching range results is unnecessary since the whole document + // handler will cache the results anyway. + return await ComputeSemanticTokensDataAsync( + document, + spans, + supportsVisualStudioExtensions, + options, + cancellationToken).ConfigureAwait(false); + } - // TO-DO: We should implement support for streaming if LSP adds support for it: - // https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1276300 - return ComputeTokens(text.Lines, updatedClassifiedSpans, supportsVisualStudioExtensions, tokenTypesToIndex); - } + /// + /// Returns the semantic tokens data for a given document with an optional ranges. + /// + /// Spans to compute tokens for. 
+ public static async Task ComputeSemanticTokensDataAsync( + Document document, + ImmutableArray spans, + bool supportsVisualStudioExtensions, + ClassificationOptions options, + CancellationToken cancellationToken) + { + var tokenTypesToIndex = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeToIndex; + var root = await document.GetRequiredSyntaxRootAsync(cancellationToken).ConfigureAwait(false); + var text = await document.GetValueTextAsync(cancellationToken).ConfigureAwait(false); + using var _1 = Classifier.GetPooledList(out var classifiedSpans); + using var _2 = Classifier.GetPooledList(out var updatedClassifiedSpans); + + var textSpans = spans.SelectAsArray(static (span, text) => text.Lines.GetTextSpan(span), text); + await GetClassifiedSpansForDocumentAsync( + classifiedSpans, document, textSpans, options, cancellationToken).ConfigureAwait(false); + + // Classified spans are not guaranteed to be returned in a certain order so we sort them to be safe. + classifiedSpans.Sort(ClassifiedSpanComparer.Instance); + + // Multi-line tokens are not supported by VS (tracked by https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1265495). + // Roslyn's classifier however can return multi-line classified spans, so we must break these up into single-line spans. 
+ ConvertMultiLineToSingleLineSpans(text, classifiedSpans, updatedClassifiedSpans); + + // TO-DO: We should implement support for streaming if LSP adds support for it: + // https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1276300 + return ComputeTokens(text.Lines, updatedClassifiedSpans, supportsVisualStudioExtensions, tokenTypesToIndex); + } - private static async Task GetClassifiedSpansForDocumentAsync( - SegmentedList classifiedSpans, - Document document, - ImmutableArray textSpans, - ClassificationOptions options, - CancellationToken cancellationToken) - { - var classificationService = document.GetRequiredLanguageService(); + private static async Task GetClassifiedSpansForDocumentAsync( + SegmentedList classifiedSpans, + Document document, + ImmutableArray textSpans, + ClassificationOptions options, + CancellationToken cancellationToken) + { + var classificationService = document.GetRequiredLanguageService(); + + // We always return both syntactic and semantic classifications. If there is a syntactic classifier running on the client + // then the semantic token classifications will override them. + + // `includeAdditiveSpans` will add token modifiers such as 'static', which we want to include in LSP. + var spans = await ClassifierHelper.GetClassifiedSpansAsync( + document, textSpans, options, includeAdditiveSpans: true, cancellationToken).ConfigureAwait(false); + + // The spans returned to us may include some empty spans, which we don't care about. We also don't care + // about the 'text' classification. It's added for everything between real classifications (including + // whitespace), and just means 'don't classify this'. No need for us to actually include that in + // semantic tokens as it just wastes space in the result. + var nonEmptySpans = spans.Where(s => !s.TextSpan.IsEmpty && s.ClassificationType != ClassificationTypeNames.Text); + classifiedSpans.AddRange(nonEmptySpans); + } - // We always return both syntactic and semantic classifications. 
If there is a syntactic classifier running on the client - // then the semantic token classifications will override them. + private static void ConvertMultiLineToSingleLineSpans(SourceText text, SegmentedList classifiedSpans, SegmentedList updatedClassifiedSpans) + { - // `includeAdditiveSpans` will add token modifiers such as 'static', which we want to include in LSP. - var spans = await ClassifierHelper.GetClassifiedSpansAsync( - document, textSpans, options, includeAdditiveSpans: true, cancellationToken).ConfigureAwait(false); + for (var spanIndex = 0; spanIndex < classifiedSpans.Count; spanIndex++) + { + var span = classifiedSpans[spanIndex]; + text.GetLinesAndOffsets(span.TextSpan, out var startLine, out var startOffset, out var endLine, out var endOffSet); - // The spans returned to us may include some empty spans, which we don't care about. We also don't care - // about the 'text' classification. It's added for everything between real classifications (including - // whitespace), and just means 'don't classify this'. No need for us to actually include that in - // semantic tokens as it just wastes space in the result. - var nonEmptySpans = spans.Where(s => !s.TextSpan.IsEmpty && s.ClassificationType != ClassificationTypeNames.Text); - classifiedSpans.AddRange(nonEmptySpans); + // If the start and end of the classified span are not on the same line, we're dealing with a multi-line span. + // Since VS doesn't support multi-line spans/tokens, we need to break the span up into single-line spans. + if (startLine != endLine) + { + ConvertToSingleLineSpan( + text, classifiedSpans, updatedClassifiedSpans, ref spanIndex, span.ClassificationType, + startLine, startOffset, endLine, endOffSet); + } + else + { + // This is already a single-line span, so no modification is necessary. 
+ updatedClassifiedSpans.Add(span); + } } - private static void ConvertMultiLineToSingleLineSpans(SourceText text, SegmentedList classifiedSpans, SegmentedList updatedClassifiedSpans) + static void ConvertToSingleLineSpan( + SourceText text, + SegmentedList originalClassifiedSpans, + SegmentedList updatedClassifiedSpans, + ref int spanIndex, + string classificationType, + int startLine, + int startOffset, + int endLine, + int endOffSet) { + var numLinesInSpan = endLine - startLine + 1; + Contract.ThrowIfTrue(numLinesInSpan < 1); - for (var spanIndex = 0; spanIndex < classifiedSpans.Count; spanIndex++) + for (var currentLine = 0; currentLine < numLinesInSpan; currentLine++) { - var span = classifiedSpans[spanIndex]; - text.GetLinesAndOffsets(span.TextSpan, out var startLine, out var startOffset, out var endLine, out var endOffSet); + TextSpan textSpan; + var line = text.Lines[startLine + currentLine]; - // If the start and end of the classified span are not on the same line, we're dealing with a multi-line span. - // Since VS doesn't support multi-line spans/tokens, we need to break the span up into single-line spans. - if (startLine != endLine) + // Case 1: First line of span + if (currentLine == 0) + { + var absoluteStart = line.Start + startOffset; + + // This start could be past the regular end of the line if it's within the newline character if we have a CRLF newline. In that case, just skip emitting a span for the LF. + // One example where this could happen is an embedded regular expression that we're classifying; regular expression comments contained within a multi-line string + // contain the carriage return but not the linefeed, so the linefeed could be the start of the next classification. 
+ textSpan = TextSpan.FromBounds(Math.Min(absoluteStart, line.End), line.End); + } + // Case 2: Any of the span's middle lines + else if (currentLine != numLinesInSpan - 1) { - ConvertToSingleLineSpan( - text, classifiedSpans, updatedClassifiedSpans, ref spanIndex, span.ClassificationType, - startLine, startOffset, endLine, endOffSet); + textSpan = line.Span; } + // Case 3: Last line of span else { - // This is already a single-line span, so no modification is necessary. - updatedClassifiedSpans.Add(span); + textSpan = new TextSpan(line.Start, endOffSet); } - } - static void ConvertToSingleLineSpan( - SourceText text, - SegmentedList originalClassifiedSpans, - SegmentedList updatedClassifiedSpans, - ref int spanIndex, - string classificationType, - int startLine, - int startOffset, - int endLine, - int endOffSet) - { - var numLinesInSpan = endLine - startLine + 1; - Contract.ThrowIfTrue(numLinesInSpan < 1); + // Omit 0-length spans created in this fashion. + if (textSpan.Length > 0) + { + var updatedClassifiedSpan = new ClassifiedSpan(textSpan, classificationType); + updatedClassifiedSpans.Add(updatedClassifiedSpan); + } - for (var currentLine = 0; currentLine < numLinesInSpan; currentLine++) + // Since spans are expected to be ordered, when breaking up a multi-line span, we may have to insert + // other spans in-between. For example, we may encounter this case when breaking up a multi-line verbatim + // string literal containing escape characters: + // var x = @"one "" + // two"; + // The check below ensures we correctly return the spans in the correct order, i.e. 'one', '""', 'two'. 
+ while (spanIndex + 1 < originalClassifiedSpans.Count && + textSpan.Contains(originalClassifiedSpans[spanIndex + 1].TextSpan)) { - TextSpan textSpan; - var line = text.Lines[startLine + currentLine]; - - // Case 1: First line of span - if (currentLine == 0) - { - var absoluteStart = line.Start + startOffset; - - // This start could be past the regular end of the line if it's within the newline character if we have a CRLF newline. In that case, just skip emitting a span for the LF. - // One example where this could happen is an embedded regular expression that we're classifying; regular expression comments contained within a multi-line string - // contain the carriage return but not the linefeed, so the linefeed could be the start of the next classification. - textSpan = TextSpan.FromBounds(Math.Min(absoluteStart, line.End), line.End); - } - // Case 2: Any of the span's middle lines - else if (currentLine != numLinesInSpan - 1) - { - textSpan = line.Span; - } - // Case 3: Last line of span - else - { - textSpan = new TextSpan(line.Start, endOffSet); - } - - // Omit 0-length spans created in this fashion. - if (textSpan.Length > 0) - { - var updatedClassifiedSpan = new ClassifiedSpan(textSpan, classificationType); - updatedClassifiedSpans.Add(updatedClassifiedSpan); - } - - // Since spans are expected to be ordered, when breaking up a multi-line span, we may have to insert - // other spans in-between. For example, we may encounter this case when breaking up a multi-line verbatim - // string literal containing escape characters: - // var x = @"one "" - // two"; - // The check below ensures we correctly return the spans in the correct order, i.e. 'one', '""', 'two'. 
- while (spanIndex + 1 < originalClassifiedSpans.Count && - textSpan.Contains(originalClassifiedSpans[spanIndex + 1].TextSpan)) - { - updatedClassifiedSpans.Add(originalClassifiedSpans[spanIndex + 1]); - spanIndex++; - } + updatedClassifiedSpans.Add(originalClassifiedSpans[spanIndex + 1]); + spanIndex++; } } } + } - private static int[] ComputeTokens( - TextLineCollection lines, - SegmentedList classifiedSpans, - bool supportsVisualStudioExtensions, - IReadOnlyDictionary tokenTypesToIndex) - { - // We keep track of the last line number and last start character since tokens are - // reported relative to each other. - var lastLineNumber = 0; - var lastStartCharacter = 0; - - var tokenTypeMap = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeMap; + private static int[] ComputeTokens( + TextLineCollection lines, + SegmentedList classifiedSpans, + bool supportsVisualStudioExtensions, + IReadOnlyDictionary tokenTypesToIndex) + { + // We keep track of the last line number and last start character since tokens are + // reported relative to each other. 
+ var lastLineNumber = 0; + var lastStartCharacter = 0; - using var pooledData = s_tokenListPool.GetPooledObject(); - var data = pooledData.Object; + var tokenTypeMap = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeMap; - // Items in the pool may not have been cleared - data.Clear(); + using var pooledData = s_tokenListPool.GetPooledObject(); + var data = pooledData.Object; - for (var currentClassifiedSpanIndex = 0; currentClassifiedSpanIndex < classifiedSpans.Count; currentClassifiedSpanIndex++) - { - currentClassifiedSpanIndex = ComputeNextToken( - lines, ref lastLineNumber, ref lastStartCharacter, classifiedSpans, - currentClassifiedSpanIndex, tokenTypeMap, tokenTypesToIndex, - out var deltaLine, out var startCharacterDelta, out var tokenLength, - out var tokenType, out var tokenModifiers); - - data.Add(deltaLine); - data.Add(startCharacterDelta); - data.Add(tokenLength); - data.Add(tokenType); - data.Add(tokenModifiers); - } + // Items in the pool may not have been cleared + data.Clear(); - return [.. 
data]; + for (var currentClassifiedSpanIndex = 0; currentClassifiedSpanIndex < classifiedSpans.Count; currentClassifiedSpanIndex++) + { + currentClassifiedSpanIndex = ComputeNextToken( + lines, ref lastLineNumber, ref lastStartCharacter, classifiedSpans, + currentClassifiedSpanIndex, tokenTypeMap, tokenTypesToIndex, + out var deltaLine, out var startCharacterDelta, out var tokenLength, + out var tokenType, out var tokenModifiers); + + data.Add(deltaLine); + data.Add(startCharacterDelta); + data.Add(tokenLength); + data.Add(tokenType); + data.Add(tokenModifiers); } - private static int ComputeNextToken( - TextLineCollection lines, - ref int lastLineNumber, - ref int lastStartCharacter, - SegmentedList classifiedSpans, - int currentClassifiedSpanIndex, - IReadOnlyDictionary tokenTypeMap, - IReadOnlyDictionary tokenTypesToIndex, - out int deltaLineOut, - out int startCharacterDeltaOut, - out int tokenLengthOut, - out int tokenTypeOut, - out int tokenModifiersOut) + return [.. data]; + } + + private static int ComputeNextToken( + TextLineCollection lines, + ref int lastLineNumber, + ref int lastStartCharacter, + SegmentedList classifiedSpans, + int currentClassifiedSpanIndex, + IReadOnlyDictionary tokenTypeMap, + IReadOnlyDictionary tokenTypesToIndex, + out int deltaLineOut, + out int startCharacterDeltaOut, + out int tokenLengthOut, + out int tokenTypeOut, + out int tokenModifiersOut) + { + // Each semantic token is represented in LSP by five numbers: + // 1. Token line number delta, relative to the previous token + // 2. Token start character delta, relative to the previous token + // 3. Token length + // 4. Token type (index) - looked up in SemanticTokensLegend.tokenTypes + // 5. 
Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers + + var classifiedSpan = classifiedSpans[currentClassifiedSpanIndex]; + var originalTextSpan = classifiedSpan.TextSpan; + var linePosition = lines.GetLinePositionSpan(originalTextSpan).Start; + var lineNumber = linePosition.Line; + + // 1. Token line number delta, relative to the previous token + var deltaLine = lineNumber - lastLineNumber; + Contract.ThrowIfTrue(deltaLine < 0, $"deltaLine is less than 0: {deltaLine}"); + + // 2. Token start character delta, relative to the previous token + // (Relative to 0 or the previous token’s start if they're on the same line) + var deltaStartCharacter = linePosition.Character; + if (lastLineNumber == lineNumber) { - // Each semantic token is represented in LSP by five numbers: - // 1. Token line number delta, relative to the previous token - // 2. Token start character delta, relative to the previous token - // 3. Token length - // 4. Token type (index) - looked up in SemanticTokensLegend.tokenTypes - // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers - - var classifiedSpan = classifiedSpans[currentClassifiedSpanIndex]; - var originalTextSpan = classifiedSpan.TextSpan; - var linePosition = lines.GetLinePositionSpan(originalTextSpan).Start; - var lineNumber = linePosition.Line; - - // 1. Token line number delta, relative to the previous token - var deltaLine = lineNumber - lastLineNumber; - Contract.ThrowIfTrue(deltaLine < 0, $"deltaLine is less than 0: {deltaLine}"); - - // 2. 
Token start character delta, relative to the previous token - // (Relative to 0 or the previous token’s start if they're on the same line) - var deltaStartCharacter = linePosition.Character; - if (lastLineNumber == lineNumber) - { - deltaStartCharacter -= lastStartCharacter; - } + deltaStartCharacter -= lastStartCharacter; + } - lastLineNumber = lineNumber; - lastStartCharacter = linePosition.Character; + lastLineNumber = lineNumber; + lastStartCharacter = linePosition.Character; - // 3. Token length - var tokenLength = originalTextSpan.Length; - Contract.ThrowIfFalse(tokenLength > 0); + // 3. Token length + var tokenLength = originalTextSpan.Length; + Contract.ThrowIfFalse(tokenLength > 0); - // We currently only have one modifier (static). The logic below will need to change in the future if other - // modifiers are added in the future. - var modifierBits = TokenModifiers.None; - var tokenTypeIndex = 0; + // We currently only have one modifier (static). The logic below will need to change in the future if other + // modifiers are added in the future. + var modifierBits = TokenModifiers.None; + var tokenTypeIndex = 0; - // Classified spans with the same text span should be combined into one token. - while (classifiedSpans[currentClassifiedSpanIndex].TextSpan == originalTextSpan) + // Classified spans with the same text span should be combined into one token. + while (classifiedSpans[currentClassifiedSpanIndex].TextSpan == originalTextSpan) + { + var classificationType = classifiedSpans[currentClassifiedSpanIndex].ClassificationType; + if (classificationType == ClassificationTypeNames.StaticSymbol) { - var classificationType = classifiedSpans[currentClassifiedSpanIndex].ClassificationType; - if (classificationType == ClassificationTypeNames.StaticSymbol) - { - // 4. 
Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers - modifierBits |= TokenModifiers.Static; - } - else if (classificationType == ClassificationTypeNames.ReassignedVariable) - { - // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers - modifierBits |= TokenModifiers.ReassignedVariable; - } - else if (classificationType == ClassificationTypeNames.ObsoleteSymbol) - { - // 6. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers - modifierBits |= TokenModifiers.Deprecated; - } - else if (classificationType == ClassificationTypeNames.TestCode) - { - // Skip additive types that are not being converted to token modifiers. - } - else - { - // 7. Token type - looked up in SemanticTokensLegend.tokenTypes (language server defined mapping - // from integer to LSP token types). - tokenTypeIndex = GetTokenTypeIndex(classificationType); - } - - // Break out of the loop if we have no more classified spans left, or if the next classified span has - // a different text span than our current text span. - if (currentClassifiedSpanIndex + 1 >= classifiedSpans.Count || classifiedSpans[currentClassifiedSpanIndex + 1].TextSpan != originalTextSpan) - { - break; - } + // 4. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers + modifierBits |= TokenModifiers.Static; + } + else if (classificationType == ClassificationTypeNames.ReassignedVariable) + { + // 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers + modifierBits |= TokenModifiers.ReassignedVariable; + } + else if (classificationType == ClassificationTypeNames.ObsoleteSymbol) + { + // 6. 
Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers + modifierBits |= TokenModifiers.Deprecated; + } + else if (classificationType == ClassificationTypeNames.TestCode) + { + // Skip additive types that are not being converted to token modifiers. + } + else + { + // 7. Token type - looked up in SemanticTokensLegend.tokenTypes (language server defined mapping + // from integer to LSP token types). + tokenTypeIndex = GetTokenTypeIndex(classificationType); + } - currentClassifiedSpanIndex++; + // Break out of the loop if we have no more classified spans left, or if the next classified span has + // a different text span than our current text span. + if (currentClassifiedSpanIndex + 1 >= classifiedSpans.Count || classifiedSpans[currentClassifiedSpanIndex + 1].TextSpan != originalTextSpan) + { + break; } - deltaLineOut = deltaLine; - startCharacterDeltaOut = deltaStartCharacter; - tokenLengthOut = tokenLength; - tokenTypeOut = tokenTypeIndex; - tokenModifiersOut = (int)modifierBits; + currentClassifiedSpanIndex++; + } - return currentClassifiedSpanIndex; + deltaLineOut = deltaLine; + startCharacterDeltaOut = deltaStartCharacter; + tokenLengthOut = tokenLength; + tokenTypeOut = tokenTypeIndex; + tokenModifiersOut = (int)modifierBits; - int GetTokenTypeIndex(string classificationType) - { - if (!tokenTypeMap.TryGetValue(classificationType, out var tokenTypeStr)) - { - tokenTypeStr = classificationType; - } + return currentClassifiedSpanIndex; - Contract.ThrowIfFalse(tokenTypesToIndex.TryGetValue(tokenTypeStr, out var tokenTypeIndex), "No matching token type index found."); - return tokenTypeIndex; + int GetTokenTypeIndex(string classificationType) + { + if (!tokenTypeMap.TryGetValue(classificationType, out var tokenTypeStr)) + { + tokenTypeStr = classificationType; } + + Contract.ThrowIfFalse(tokenTypesToIndex.TryGetValue(tokenTypeStr, out var tokenTypeIndex), "No matching token type index found."); + return tokenTypeIndex; } + } - 
private class ClassifiedSpanComparer : IComparer - { - public static readonly ClassifiedSpanComparer Instance = new(); + private class ClassifiedSpanComparer : IComparer + { + public static readonly ClassifiedSpanComparer Instance = new(); - public int Compare(ClassifiedSpan x, ClassifiedSpan y) => x.TextSpan.CompareTo(y.TextSpan); - } + public int Compare(ClassifiedSpan x, ClassifiedSpan y) => x.TextSpan.CompareTo(y.TextSpan); } } diff --git a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensRangeHandler.cs b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensRangeHandler.cs index 9173376a56c1b..4aa2df67382c2 100644 --- a/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensRangeHandler.cs +++ b/src/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensRangeHandler.cs @@ -8,40 +8,34 @@ using Roslyn.LanguageServer.Protocol; using LSP = Roslyn.LanguageServer.Protocol; -namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens +namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens; + +[Method(Methods.TextDocumentSemanticTokensRangeName)] +internal sealed class SemanticTokensRangeHandler( + IGlobalOptionService globalOptions, + SemanticTokensRefreshQueue semanticTokensRefreshQueue) : ILspServiceDocumentRequestHandler { - [Method(Methods.TextDocumentSemanticTokensRangeName)] - internal class SemanticTokensRangeHandler : ILspServiceDocumentRequestHandler - { - private readonly IGlobalOptionService _globalOptions; - private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue; + private readonly IGlobalOptionService _globalOptions = globalOptions; + private readonly SemanticTokensRefreshQueue _semanticTokenRefreshQueue = semanticTokensRefreshQueue; - public bool MutatesSolutionState => false; - public bool RequiresLSPSolution => true; + public bool MutatesSolutionState => false; + public bool RequiresLSPSolution => true; - public SemanticTokensRangeHandler( - IGlobalOptionService 
globalOptions, - SemanticTokensRefreshQueue semanticTokensRefreshQueue) - { - _globalOptions = globalOptions; - _semanticTokenRefreshQueue = semanticTokensRefreshQueue; - } + public TextDocumentIdentifier GetTextDocumentIdentifier(SemanticTokensRangeParams request) + { + Contract.ThrowIfNull(request.TextDocument); + return request.TextDocument; + } - public TextDocumentIdentifier GetTextDocumentIdentifier(LSP.SemanticTokensRangeParams request) - { - Contract.ThrowIfNull(request.TextDocument); - return request.TextDocument; - } + public async Task HandleRequestAsync( + SemanticTokensRangeParams request, + RequestContext context, + CancellationToken cancellationToken) + { + Contract.ThrowIfNull(request.TextDocument, "TextDocument is null."); - public async Task HandleRequestAsync( - SemanticTokensRangeParams request, - RequestContext context, - CancellationToken cancellationToken) - { - Contract.ThrowIfNull(request.TextDocument, "TextDocument is null."); - var ranges = new[] { request.Range }; - var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync(_globalOptions, _semanticTokenRefreshQueue, ranges, context, cancellationToken).ConfigureAwait(false); - return new LSP.SemanticTokens { Data = tokensData }; - } + var tokensData = await SemanticTokensHelpers.HandleRequestHelperAsync( + _globalOptions, _semanticTokenRefreshQueue, [request.Range], context, cancellationToken).ConfigureAwait(false); + return new LSP.SemanticTokens { Data = tokensData }; } } diff --git a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensFullTests.cs b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensFullTests.cs index 3270b63e4e603..8d9496ebf7700 100644 --- a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensFullTests.cs +++ b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensFullTests.cs @@ -2,13 +2,10 @@ // The .NET Foundation licenses this file to you under the MIT license. 
// See the LICENSE file in the project root for more information. -using System.Collections.Immutable; using System.Linq; -using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.Classification; using Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens; -using Microsoft.CodeAnalysis.Text; using Roslyn.LanguageServer.Protocol; using Roslyn.Test.Utilities; using Xunit; @@ -25,9 +22,11 @@ public sealed class SemanticTokensFullTests(ITestOutputHelper testOutputHelper) public async Task TestGetSemanticTokensFull_FullDocAsync(bool mutatingLspWorkspace, bool isVS) { var markup = -@"{|caret:|}// Comment -static class C { } -"; + """ + {|caret:|}// Comment + static class C { } + + """; await using var testLspServer = await CreateTestLspServerAsync( markup, mutatingLspWorkspace, GetCapabilities(isVS)); diff --git a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangeTests.cs b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangeTests.cs index 34ff01d65aa4e..029e55d893df5 100644 --- a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangeTests.cs +++ b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangeTests.cs @@ -17,581 +17,592 @@ #pragma warning disable format // We want to force explicit column spacing within the collection literals in this file, so we disable formatting. 
-namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens +namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens; + +public sealed class SemanticTokensRangeTests(ITestOutputHelper testOutputHelper) + : AbstractSemanticTokensTests(testOutputHelper) { - public class SemanticTokensRangeTests : AbstractSemanticTokensTests + [Theory, CombinatorialData] + public async Task TestGetSemanticTokensRange_FullDocAsync(bool mutatingLspWorkspace, bool isVS) { - public SemanticTokensRangeTests(ITestOutputHelper testOutputHelper) : base(testOutputHelper) + var markup = + """ + {|caret:|}// Comment + static class C { } + + """; + await using var testLspServer = await CreateTestLspServerAsync( + markup, mutatingLspWorkspace, GetCapabilities(isVS)); + + var range = new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) }; + var results = await RunGetSemanticTokensRangeAsync(testLspServer, testLspServer.GetLocations("caret").First(), range); + + var expectedResults = new LSP.SemanticTokens(); + var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); + if (isVS) { + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment' + 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' + 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; } - - [Theory, CombinatorialData] - public async Task TestGetSemanticTokensRange_FullDocAsync(bool mutatingLspWorkspace, bool isVS) + else { - var markup = -@"{|caret:|}// Comment -static class C { } -"; - await using var testLspServer = await CreateTestLspServerAsync( - markup, mutatingLspWorkspace, GetCapabilities(isVS)); - - var 
range = new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) }; - var results = await RunGetSemanticTokensRangeAsync(testLspServer, testLspServer.GetLocations("caret").First(), range); - - var expectedResults = new LSP.SemanticTokens(); - var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); - if (isVS) - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment' - 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' - 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; - } - else + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment' + 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' + 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; + } + + await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false); + AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data)); + } + + [Theory, CombinatorialData] + public async Task TestGetSemanticTokensRanges_ComputesTokensWithMultipleRanges(bool mutatingLspWorkspace, bool isVS) + { + // Razor docs should be returning semantic + syntactic results. 
+ var markup = + """ + {|caret:|}// + #pragma warning disable 1591 + namespace Razor { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment' - 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' - 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; + #line hidden + public class Template + { + #pragma warning disable 219 + private void __RazorDirectiveTokenHelpers__() { + ((global::System.Action)(() => { + #nullable restore + #line 1 "test.cshtml" + var z = 1; + + #line default + #line hidden + #nullable disable + } + ))(); + } + #pragma warning restore 219 + #pragma warning disable 0414 + private static object __o = null; + #pragma warning restore 0414 + #pragma warning disable 1998 + public async override global::System.Threading.Tasks.Task ExecuteAsync() + { + #nullable restore + #line 2 "test.cshtml" + var x = + + #line default + #line hidden + #nullable disable + } + #pragma warning restore 1998 + } } + #pragma warning restore 1591 - await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false); - AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data)); - } + """; + await using var testLspServer = await CreateTestLspServerAsync( + markup, mutatingLspWorkspace, GetCapabilities(isVS)); + + var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); + ImmutableArray spans = [ + new LinePositionSpan(new LinePosition(12, 0), new LinePosition(13, 0)), + new LinePositionSpan(new LinePosition(29, 0), new LinePosition(30, 
0)), + ]; + + var options = ClassificationOptions.Default; + var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( + document, spans, isVS, options, CancellationToken.None); - [Theory, CombinatorialData] - public async Task TestGetSemanticTokensRanges_ComputesTokensWithMultipleRanges(bool mutatingLspWorkspace, bool isVS) + var expectedResults = new LSP.SemanticTokens(); + var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); + if (isVS) { - // Razor docs should be returning semantic + syntactic results. - var markup = -@"{|caret:|}// -#pragma warning disable 1591 -namespace Razor -{ - #line hidden - public class Template - { - #pragma warning disable 219 - private void __RazorDirectiveTokenHelpers__() { - ((global::System.Action)(() => { -#nullable restore -#line 1 ""test.cshtml"" -var z = 1; - -#line default -#line hidden -#nullable disable - } - ))(); + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 12, 0, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'z' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Number], 0, // '1' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' + 17, 3, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + ]; } - #pragma warning restore 219 - #pragma warning disable 0414 - private static object __o = null; - #pragma warning restore 0414 - #pragma warning disable 1998 - public async override global::System.Threading.Tasks.Task ExecuteAsync() + else { -#nullable restore -#line 2 ""test.cshtml"" - var x = + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 12, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' + 0, 
4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'z' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Number], 0, // '1' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' + 17, 3, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + ]; + } -#line default -#line hidden -#nullable disable + await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); + AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); + } + + [Theory, CombinatorialData] + public async Task TestGetSemanticTokensRange_PartialDocAsync(bool mutatingLspWorkspace, bool isVS) + { + // Razor docs should be returning semantic + syntactic results. 
+ var markup = + """ + {|caret:|}// Comment + static class C { } + + """; + await using var testLspServer = await CreateTestLspServerAsync( + markup, mutatingLspWorkspace, GetCapabilities(isVS)); + + var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); + ImmutableArray spans = [new LinePositionSpan(new LinePosition(1, 0), new LinePosition(2, 0))]; + var options = ClassificationOptions.Default; + var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( + document, spans, isVS, options, CancellationToken.None); + + var expectedResults = new LSP.SemanticTokens(); + var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); + if (isVS) + { + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' + 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; } - #pragma warning restore 1998 + else + { + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' + 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; + } + + await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); + AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); } -} -#pragma warning restore 1591 -"; - await 
using var testLspServer = await CreateTestLspServerAsync( - markup, mutatingLspWorkspace, GetCapabilities(isVS)); - - var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); - ImmutableArray spans = [ - new LinePositionSpan(new LinePosition(12, 0), new LinePosition(13, 0)), - new LinePositionSpan(new LinePosition(29, 0), new LinePosition(30, 0)), + + [Theory, CombinatorialData] + public async Task TestGetSemanticTokensRange_MultiLineComment_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS) + { + // Testing as a Razor doc so we get both syntactic + semantic results; otherwise the results would be empty. + var markup = + """ + {|caret:|}class C { /* one + + two + three */ } + + """; + await using var testLspServer = await CreateTestLspServerAsync( + markup, mutatingLspWorkspace, GetCapabilities(isVS)); + + var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); + ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(4, 0))]; + var options = ClassificationOptions.Default; + var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( + document, spans, isVS, options, CancellationToken.None); + + var expectedResults = new LSP.SemanticTokens(); + var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); + if (isVS) + { + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 0, 2, 6, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '/* one' + 2, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'two' + 1, 0, 8, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'three */' + 0, 9, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; + } + else + { + expectedResults.Data = + 
[ + // Line | Char | Len | Token type | Modifier + 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 0, 2, 6, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '/* one' + 2, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'two' + 1, 0, 8, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'three */' + 0, 9, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' ]; + } - var options = ClassificationOptions.Default; - var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( - document, spans, isVS, options, CancellationToken.None); + await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); + AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); + } - var expectedResults = new LSP.SemanticTokens(); - var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); - if (isVS) - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 12, 0, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'z' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Number], 0, // '1' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' - 17, 3, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - ]; - } - else + [Theory, CombinatorialData] + public async Task TestGetSemanticTokensRange_StringLiteral_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS) + { + var markup = + """ + 
{|caret:|}class C { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 12, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'z' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Number], 0, // '1' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' - 17, 3, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - ]; + void M() + { + var x = @"one + two "" + three"; + } } - await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); - AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); - } + """; - [Theory, CombinatorialData] - public async Task TestGetSemanticTokensRange_PartialDocAsync(bool mutatingLspWorkspace, bool isVS) - { - // Razor docs should be returning semantic + syntactic results. 
- var markup = -@"{|caret:|}// Comment -static class C { } -"; - await using var testLspServer = await CreateTestLspServerAsync( - markup, mutatingLspWorkspace, GetCapabilities(isVS)); - - var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); - ImmutableArray spans = [new LinePositionSpan(new LinePosition(1, 0), new LinePosition(2, 0))]; - var options = ClassificationOptions.Default; - var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( - document, spans, isVS, options, CancellationToken.None); - - var expectedResults = new LSP.SemanticTokens(); - var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); - if (isVS) - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' - 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; - } - else - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' - 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; - } + await using var testLspServer = await CreateTestLspServerAsync( + markup, mutatingLspWorkspace, GetCapabilities(isVS)); - await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); - AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), 
ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); - } + var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); + ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))]; + var options = ClassificationOptions.Default; + var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( + document, spans, isVS, options, CancellationToken.None); - [Theory, CombinatorialData] - public async Task TestGetSemanticTokensRange_MultiLineComment_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS) + var expectedResults = new LSP.SemanticTokens(); + var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); + if (isVS) { - // Testing as a Razor doc so we get both syntactic + semantic results; otherwise the results would be empty. - var markup = -@"{|caret:|}class C { /* one - -two -three */ } -"; - await using var testLspServer = await CreateTestLspServerAsync( - markup, mutatingLspWorkspace, GetCapabilities(isVS)); - - var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); - ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(4, 0))]; - var options = ClassificationOptions.Default; - var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( - document, spans, isVS, options, CancellationToken.None); - - var expectedResults = new LSP.SemanticTokens(); - var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); - if (isVS) - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 0, 2, 6, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '/* one' - 2, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'two' - 1, 0, 8, 
tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'three */' - 0, 9, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; - } - else - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 0, 2, 6, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '/* one' - 2, 0, 3, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'two' - 1, 0, 8, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // 'three */' - 0, 9, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; - } - - await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); - AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C' + 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 1, 4, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' + 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' + 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 1, 8, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + 0, 2, 5, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"one' + 1, 0, 4, 
tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'two ' + 0, 4, 2, tokenTypeToIndex[ClassificationTypeNames.StringEscapeCharacter], 0, // '""' + 1, 0, 6, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'three"' + 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' + 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; } - - [Theory, CombinatorialData] - public async Task TestGetSemanticTokensRange_StringLiteral_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS) + else { - var markup = -@"{|caret:|}class C -{ - void M() - { - var x = @""one -two """" -three""; - } -} -"; + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C' + 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' + 1, 4, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' + 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' + 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' + 1, 8, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + 0, 2, 5, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '@"one' + 1, 0, 4, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // 'two ' + 0, 4, 2, tokenTypeToIndex[CustomLspSemanticTokenNames.StringEscapeCharacter], 0, // '""' + 1, 0, 6, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // 'three"' + 0, 
6, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' + 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; + } - await using var testLspServer = await CreateTestLspServerAsync( - markup, mutatingLspWorkspace, GetCapabilities(isVS)); + await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); + AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); + } - var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); - ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))]; - var options = ClassificationOptions.Default; - var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( - document, spans, isVS, options, CancellationToken.None); + [Theory, CombinatorialData] + public async Task TestGetSemanticTokensRange_Regex_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS) + { + var markup = + """ + {|caret:|}using System.Text.RegularExpressions; - var expectedResults = new LSP.SemanticTokens(); - var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); - if (isVS) - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C' - 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 1, 4, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' - 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' - 1, 4, 1, 
tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 1, 8, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - 0, 2, 5, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"one' - 1, 0, 4, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'two ' - 0, 4, 2, tokenTypeToIndex[ClassificationTypeNames.StringEscapeCharacter], 0, // '""' - 1, 0, 6, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // 'three"' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' - 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; - } - else + class C { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C' - 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' - 1, 4, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' - 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' - 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' - 1, 8, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - 0, 2, 5, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '@"one' - 1, 0, 4, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // 'two ' - 0, 4, 2, 
tokenTypeToIndex[CustomLspSemanticTokenNames.StringEscapeCharacter], 0, // '""' - 1, 0, 6, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // 'three"' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' - 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; + void M() + { + var x = new Regex("(abc)*"); + } } - await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); - AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); - } + """; - [Theory, CombinatorialData] - public async Task TestGetSemanticTokensRange_Regex_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS) + await using var testLspServer = await CreateTestLspServerAsync( + markup, mutatingLspWorkspace, GetCapabilities(isVS)); + + var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); + ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))]; + var options = ClassificationOptions.Default; + var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( + document, spans, isVS, options, CancellationToken.None); + + var expectedResults = new LSP.SemanticTokens(); + var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); + if (isVS) { - var markup = -@"{|caret:|}using System.Text.RegularExpressions; + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using' + 0, 6, 6, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'System' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' 
+ 0, 1, 4, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'Text' + 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' + 0, 1, 18, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'RegularExpressions' + 0, 18, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' + 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C' + 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' + 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' + 1, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 1, 2, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new' + 0, 4, 5, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'Regex' + 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' + 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // '(' + 0, 1, 3, tokenTypeToIndex[ClassificationTypeNames.RegexText], 0, // 'abc' + 0, 3, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // ')' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*' + 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' + 1, 4, 1, 
tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // } + 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // } + ]; + } + else + { + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using' + 0, 6, 6, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'System' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' + 0, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'Text' + 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' + 0, 1, 18, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'RegularExpressions' + 0, 18, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';' + 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C' + 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' + 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' + 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' + 1, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' + 1, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new' + 0, 4, 5, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'Regex' + 0, 5, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' + 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // '(' + 0, 1, 3, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexText], 0, // 'abc' + 
0, 3, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // ')' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexQuantifier], 0, // '*' + 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';' + 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // } + 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // } + ]; + } -class C -{ - void M() - { - var x = new Regex(""(abc)*""); + await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); + AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); } -} -"; - await using var testLspServer = await CreateTestLspServerAsync( - markup, mutatingLspWorkspace, GetCapabilities(isVS)); - - var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); - ImmutableArray spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))]; - var options = ClassificationOptions.Default; - var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( - document, spans, isVS, options, CancellationToken.None); + [Theory, CombinatorialData] + [WorkItem("https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1710519")] + public async Task TestGetSemanticTokensRange_RegexWithComment_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS) + { + var markup = + """ + {|caret:|}using System.Text.RegularExpressions; - var expectedResults = new LSP.SemanticTokens(); - var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); - if (isVS) - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using' - 0, 6, 6, 
tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'System' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' - 0, 1, 4, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'Text' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' - 0, 1, 18, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'RegularExpressions' - 0, 18, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' - 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C' - 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' - 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' - 1, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 1, 2, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new' - 0, 4, 5, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'Regex' - 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' - 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // '(' - 0, 1, 3, tokenTypeToIndex[ClassificationTypeNames.RegexText], 0, // 'abc' - 0, 3, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // ')' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*' - 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"' - 0, 1, 1, 
tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' - 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // } - 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // } - ]; - } - else + class C { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using' - 0, 6, 6, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'System' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' - 0, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'Text' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' - 0, 1, 18, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'RegularExpressions' - 0, 18, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';' - 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C' - 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' - 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' - 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' - 1, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' - 1, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new' - 0, 4, 5, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'Regex' - 0, 5, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' - 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"' - 0, 
1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // '(' - 0, 1, 3, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexText], 0, // 'abc' - 0, 3, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // ')' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexQuantifier], 0, // '*' - 0, 1, 1, tokenTypeToIndex[SemanticTokenTypes.String], 0, // '"' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';' - 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // } - 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // } - ]; + void M() + { + var x = new Regex(@"(abc)* #comment + ", RegexOptions.IgnorePatternWhitespace); + } } - await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); - AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); - } + """; - [Theory, CombinatorialData] - [WorkItem("https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1710519")] - public async Task TestGetSemanticTokensRange_RegexWithComment_IncludeSyntacticClassificationsAsync(bool mutatingLspWorkspace, bool isVS) - { - var markup = -@"{|caret:|}using System.Text.RegularExpressions; + await using var testLspServer = await CreateTestLspServerAsync( + markup, mutatingLspWorkspace, GetCapabilities(isVS)); -class C -{ - void M() - { - var x = new Regex(@""(abc)* #comment - "", RegexOptions.IgnorePatternWhitespace); - } -} -"; - - await using var testLspServer = await CreateTestLspServerAsync( - markup, mutatingLspWorkspace, GetCapabilities(isVS)); + var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); + var text = await document.GetTextAsync(); + var options = ClassificationOptions.Default; + var results = await 
SemanticTokensHelpers.ComputeSemanticTokensDataAsync( + document, spans: [text.Lines.GetLinePositionSpan(new(0, text.Length))], isVS, options: options, cancellationToken: CancellationToken.None); - var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First(); - var options = ClassificationOptions.Default; - var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync( - document, spans: [], isVS, options: options, cancellationToken: CancellationToken.None); + var expectedResults = new LSP.SemanticTokens(); - var expectedResults = new LSP.SemanticTokens(); + var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); + if (isVS) + { + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using' + 0, 6, 6, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'System' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' + 0, 1, 4, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'Text' + 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' 
+ 0, 1, 18, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'RegularExpressions' + 0, 18, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' + 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C' + 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' + 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' + 1, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 1, 2, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new' + 0, 4, 5, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'Regex' + 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' + 0, 1, 2, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // '(' + 0, 1, 3, tokenTypeToIndex[ClassificationTypeNames.RegexText], 0, // 'abc' + 0, 3, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // ')' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // ' ' + 0, 1, 9, tokenTypeToIndex[ClassificationTypeNames.RegexComment], 0, // '#comment' + 1, 0, 27, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '"' + 0, 27, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ',' + 0, 2, 12, 
tokenTypeToIndex[ClassificationTypeNames.EnumName], 0, // 'RegexOptions' + 0, 12, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' + 0, 1, 23, tokenTypeToIndex[ClassificationTypeNames.EnumMemberName], 0, // 'IgnorePatternWhitespace' + 0, 23, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' + 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' + 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // } + 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // } + ]; + } + else + { + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using' + 0, 6, 6, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'System' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' + 0, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'Text' + 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' + 0, 1, 18, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'RegularExpressions' + 0, 18, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';' + 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C' + 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' + 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' + 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' + 1, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' + 1, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' + 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x' + 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' + 0, 2, 3, 
tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new' + 0, 4, 5, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'Regex' + 0, 5, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' + 0, 1, 2, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '@"' + 0, 2, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // '(' + 0, 1, 3, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexText], 0, // 'abc' + 0, 3, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // ')' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexQuantifier], 0, // '*' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // ' ' + 0, 1, 9, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexComment], 0, // '#comment' + 1, 0, 27, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '"' + 0, 27, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ',' + 0, 2, 12, tokenTypeToIndex[SemanticTokenTypes.Enum], 0, // 'RegexOptions' + 0, 12, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' + 0, 1, 23, tokenTypeToIndex[SemanticTokenTypes.EnumMember], 0, // 'IgnorePatternWhitespace' + 0, 23, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' + 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';' + 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // } + 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // } + ]; + } - var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); - if (isVS) - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using' - 0, 6, 6, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'System' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' 
- 0, 1, 4, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'Text' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' - 0, 1, 18, tokenTypeToIndex[ClassificationTypeNames.NamespaceName], 0, // 'RegularExpressions' - 0, 18, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' - 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'C' - 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' - 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.MethodName], 0, // 'M' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' - 1, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 1, 2, 3, tokenTypeToIndex[ClassificationTypeNames.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[ClassificationTypeNames.LocalName], 0, // 'x' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new' - 0, 4, 5, tokenTypeToIndex[ClassificationTypeNames.ClassName], 0, // 'Regex' - 0, 5, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '(' - 0, 1, 2, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '@"' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // '(' - 0, 1, 3, tokenTypeToIndex[ClassificationTypeNames.RegexText], 0, // 'abc' - 0, 3, 1, tokenTypeToIndex[ClassificationTypeNames.RegexGrouping], 0, // ')' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.RegexQuantifier], 0, // '*' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // ' ' - 0, 1, 9, tokenTypeToIndex[ClassificationTypeNames.RegexComment], 0, // '#comment' - 1, 0, 27, 
tokenTypeToIndex[ClassificationTypeNames.VerbatimStringLiteral], 0, // '"' - 0, 27, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ',' - 0, 2, 12, tokenTypeToIndex[ClassificationTypeNames.EnumName], 0, // 'RegexOptions' - 0, 12, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' - 0, 1, 23, tokenTypeToIndex[ClassificationTypeNames.EnumMemberName], 0, // 'IgnorePatternWhitespace' - 0, 23, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ')' - 0, 1, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // ';' - 1, 4, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // } - 1, 0, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // } - ]; - } - else - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'using' - 0, 6, 6, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'System' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' - 0, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'Text' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' 
- 0, 1, 18, tokenTypeToIndex[SemanticTokenTypes.Namespace], 0, // 'RegularExpressions' - 0, 18, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';' - 2, 0, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'C' - 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' - 1, 1, 4, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'void' - 0, 5, 1, tokenTypeToIndex[SemanticTokenTypes.Method], 0, // 'M' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' - 1, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '{' - 1, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'var' - 0, 4, 1, tokenTypeToIndex[SemanticTokenTypes.Variable], 0, // 'x' - 0, 2, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '=' - 0, 2, 3, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'new' - 0, 4, 5, tokenTypeToIndex[SemanticTokenTypes.Class], 0, // 'Regex' - 0, 5, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // '(' - 0, 1, 2, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '@"' - 0, 2, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // '(' - 0, 1, 3, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexText], 0, // 'abc' - 0, 3, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexGrouping], 0, // ')' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexQuantifier], 0, // '*' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // ' ' - 0, 1, 9, tokenTypeToIndex[CustomLspSemanticTokenNames.RegexComment], 0, // '#comment' - 1, 0, 27, tokenTypeToIndex[CustomLspSemanticTokenNames.StringVerbatim], 0, // '"' - 0, 27, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ',' - 0, 2, 12, tokenTypeToIndex[SemanticTokenTypes.Enum], 0, // 
'RegexOptions' - 0, 12, 1, tokenTypeToIndex[SemanticTokenTypes.Operator], 0, // '.' - 0, 1, 23, tokenTypeToIndex[SemanticTokenTypes.EnumMember], 0, // 'IgnorePatternWhitespace' - 0, 23, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ')' - 0, 1, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // ';' - 1, 4, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // } - 1, 0, 1, tokenTypeToIndex[CustomLspSemanticTokenNames.Punctuation], 0, // } - ]; - } + await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); + AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); + } - await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results).ConfigureAwait(false); - AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results)); - } + [Theory, CombinatorialData] + public void TestGetSemanticTokensRange_AssertCustomTokenTypes(bool isVS) + { + var capabilities = GetCapabilities(isVS); + var schema = SemanticTokensSchema.GetSchema(capabilities.HasVisualStudioLspCapability()); - [Theory, CombinatorialData] - public void TestGetSemanticTokensRange_AssertCustomTokenTypes(bool isVS) + var expectedNames = ClassificationTypeNames.AllTypeNames.Where(s => !ClassificationTypeNames.AdditiveTypeNames.Contains(s)); + foreach (var expectedClassificationName in expectedNames) { - var capabilities = GetCapabilities(isVS); - var schema = SemanticTokensSchema.GetSchema(capabilities.HasVisualStudioLspCapability()); + // Assert that the classification type name exists and is mapped to a semantic token name. 
+ Assert.True(schema.TokenTypeMap.ContainsKey(expectedClassificationName), $"Missing token type for {expectedClassificationName}."); - var expectedNames = ClassificationTypeNames.AllTypeNames.Where(s => !ClassificationTypeNames.AdditiveTypeNames.Contains(s)); - foreach (var expectedClassificationName in expectedNames) - { - // Assert that the classification type name exists and is mapped to a semantic token name. - Assert.True(schema.TokenTypeMap.ContainsKey(expectedClassificationName), $"Missing token type for {expectedClassificationName}."); - - var tokenName = schema.TokenTypeMap[expectedClassificationName]; - Assert.True(schema.AllTokenTypes.Contains(tokenName)); - } + var tokenName = schema.TokenTypeMap[expectedClassificationName]; + Assert.True(schema.AllTokenTypes.Contains(tokenName)); } } } diff --git a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangesTests.cs b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangesTests.cs index 3b7b832625688..a2cea4ca9be85 100644 --- a/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangesTests.cs +++ b/src/LanguageServer/ProtocolUnitTests/SemanticTokens/SemanticTokensRangesTests.cs @@ -12,60 +12,57 @@ using Xunit.Abstractions; using LSP = Roslyn.LanguageServer.Protocol; -namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens +namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.SemanticTokens; + +public sealed class SemanticTokensRangesTests(ITestOutputHelper testOutputHelper) : AbstractSemanticTokensTests(testOutputHelper) { - public class SemanticTokensRangesTests : AbstractSemanticTokensTests + [Theory, CombinatorialData] + public async Task TestGetSemanticTokensRanges_FullDocAsync(bool mutatingLspWorkspace, bool isVS) { - public SemanticTokensRangesTests(ITestOutputHelper testOutputHelper) : base(testOutputHelper) - { - } + var markup = + """ + {|caret:|}// Comment + static class C { } - [Theory, CombinatorialData] - public async Task 
TestGetSemanticTokensRanges_FullDocAsync(bool mutatingLspWorkspace, bool isVS) - { - var markup = -@"{|caret:|}// Comment -static class C { } -"; - await using var testLspServer = await CreateTestLspServerAsync( - markup, mutatingLspWorkspace, GetCapabilities(isVS)); + """; + await using var testLspServer = await CreateTestLspServerAsync( + markup, mutatingLspWorkspace, GetCapabilities(isVS)); - var ranges = new[] { new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) } }; - var results = await RunGetSemanticTokensRangesAsync(testLspServer, testLspServer.GetLocations("caret").First(), ranges); + var ranges = new[] { new LSP.Range { Start = new Position(0, 0), End = new Position(2, 0) } }; + var results = await RunGetSemanticTokensRangesAsync(testLspServer, testLspServer.GetLocations("caret").First(), ranges); - var expectedResults = new LSP.SemanticTokens(); - var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); - if (isVS) - { - expectedResults.Data = + var expectedResults = new LSP.SemanticTokens(); + var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer); + if (isVS) + { + expectedResults.Data = #pragma warning disable format // Force explicit column spacing. 
- [ - // Line | Char | Len | Token type | Modifier - 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment' - 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' - 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; - } - else - { - expectedResults.Data = - [ - // Line | Char | Len | Token type | Modifier - 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment' - 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' - 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' - 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' - 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' - ]; - } + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment' + 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' + 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[ClassificationTypeNames.ClassName], (int)TokenModifiers.Static, // 'C' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; + } + else + { + expectedResults.Data = + [ + // Line | Char | Len | Token type | Modifier + 0, 0, 10, tokenTypeToIndex[SemanticTokenTypes.Comment], 0, // '// Comment' + 1, 0, 6, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'static' + 0, 7, 5, tokenTypeToIndex[SemanticTokenTypes.Keyword], 0, // 'class' + 0, 6, 1, tokenTypeToIndex[SemanticTokenTypes.Class], 
(int)TokenModifiers.Static, // 'C' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '{' + 0, 2, 1, tokenTypeToIndex[ClassificationTypeNames.Punctuation], 0, // '}' + ]; + } #pragma warning restore format - await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false); - AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data)); - } + await VerifyBasicInvariantsAndNoMultiLineTokens(testLspServer, results.Data).ConfigureAwait(false); + AssertEx.Equal(ConvertToReadableFormat(testLspServer.ClientCapabilities, expectedResults.Data), ConvertToReadableFormat(testLspServer.ClientCapabilities, results.Data)); } }