From 1f9cc10960874059e5edd05b5d2ddcf785500224 Mon Sep 17 00:00:00 2001
From: Nikola Jichev
Date: Mon, 20 May 2024 19:53:15 +0300
Subject: [PATCH] feat: add full semantic token support

---
 lib/next_ls.ex                               |  13 +++
 lib/next_ls/helpers/ast_helpers/variables.ex |  59 +++++-----
 lib/next_ls/semantic_tokens.ex               | 112 +++++++++++++++++++
 test/next_ls/semantic_tokens_test.exs        |  34 ++++++
 4 files changed, 186 insertions(+), 32 deletions(-)
 create mode 100644 lib/next_ls/semantic_tokens.ex
 create mode 100644 test/next_ls/semantic_tokens_test.exs

diff --git a/lib/next_ls.ex b/lib/next_ls.ex
index f77c6440..e21e7e7a 100644
--- a/lib/next_ls.ex
+++ b/lib/next_ls.ex
@@ -24,6 +24,7 @@ defmodule NextLS do
   alias GenLSP.Requests.TextDocumentFormatting
   alias GenLSP.Requests.TextDocumentHover
   alias GenLSP.Requests.TextDocumentReferences
+  alias GenLSP.Requests.TextDocumentSemanticTokensFull
   alias GenLSP.Requests.WorkspaceApplyEdit
   alias GenLSP.Requests.WorkspaceSymbol
   alias GenLSP.Structures.ApplyWorkspaceEditParams
@@ -158,6 +159,12 @@ defmodule NextLS do
             nil
           end,
         document_formatting_provider: true,
+        semantic_tokens_provider: %GenLSP.Structures.SemanticTokensRegistrationOptions{
+          document_selector: [%{language: "elixir"}],
+          legend: NextLS.SemanticTokens.legend(),
+          range: false,
+          full: true
+        },
         execute_command_provider: %GenLSP.Structures.ExecuteCommandOptions{
           commands: [
             "to-pipe",
@@ -365,6 +372,12 @@ defmodule NextLS do
     {:reply, locations, lsp}
   end
 
+  def handle_request(%TextDocumentSemanticTokensFull{params: %{text_document: %{uri: uri}}}, lsp) do
+    document = lsp.assigns.documents[uri]
+
+    {:reply, NextLS.SemanticTokens.new(document), lsp}
+  end
+
   def handle_request(%TextDocumentHover{params: %{position: position, text_document: %{uri: uri}}}, lsp) do
     file = URI.parse(uri).path
     line = position.line + 1
diff --git a/lib/next_ls/helpers/ast_helpers/variables.ex b/lib/next_ls/helpers/ast_helpers/variables.ex
index 9e2b8b43..fd86a8d2 100644
--- a/lib/next_ls/helpers/ast_helpers/variables.ex
+++ b/lib/next_ls/helpers/ast_helpers/variables.ex
@@ -42,44 +42,39 @@ defmodule NextLS.ASTHelpers.Variables do
   #   end
   # end
 
-  @spec list_variable_references(String.t(), {integer(), integer()}) :: [{atom(), {Range.t(), Range.t()}}]
-  def list_variable_references(file, position) do
+  @spec list_variable_references(Path.t() | Macro.t(), {integer(), integer()}) :: [{atom(), {Range.t(), Range.t()}}]
+  def list_variable_references(file, position) when is_binary(file) or is_list(file) do
     file = File.read!(file)
 
     case NextLS.Parser.parse(file, columns: true) do
-      {:ok, ast} ->
-        {_ast, %{vars: vars}} =
-          Macro.traverse(
-            ast,
-            %{vars: [], symbols: %{}, sym_ranges: [], scope: []},
-            &prewalk/2,
-            &postwalk/2
-          )
-
-        symbol =
-          Enum.find_value(vars, fn %{name: name, sym_range: range, ref_range: ref_range} ->
-            if position_in_range?(position, ref_range), do: {name, range}, else: nil
-          end)
-
-        position =
-          case symbol do
-            nil -> position
-            {_, {line.._//_, column.._//_}} -> {line, column}
-          end
-
-        Enum.reduce(vars, [], fn val, acc ->
-          if position_in_range?(position, val.sym_range) do
-            [{val.name, val.ref_range} | acc]
-          else
-            acc
-          end
-        end)
-
-      _error ->
-        []
+      {:ok, ast} -> list_variable_references(ast, position)
+      _error -> []
     end
   end
+  def list_variable_references(ast, position) do
+    {_ast, %{vars: vars}} =
+      Macro.traverse(
+        ast,
+        %{vars: [], symbols: %{}, sym_ranges: [], scope: []},
+        &prewalk/2,
+        &postwalk/2
+      )
+
+    position =
+      Enum.find_value(vars, position, fn
+        %{sym_range: {line.._//_, column.._//_}, ref_range: ref_range} ->
+          if position_in_range?(position, ref_range), do: {line, column}
+      end)
+
+    Enum.reduce(vars, [], fn val, acc ->
+      if position_in_range?(position, val.sym_range) do
+        [{val.name, val.ref_range} | acc]
+      else
+        acc
+      end
+    end)
+  end
 
   # search symbols in function and macro definition args and increase scope
   defp prewalk({operation, meta, [args | _]} = ast, acc) when operation in @defs_with_args do
     acc = increase_scope_nesting(acc, meta[:line])
diff --git a/lib/next_ls/semantic_tokens.ex b/lib/next_ls/semantic_tokens.ex
new file mode 100644
index 00000000..54666976
--- /dev/null
+++ b/lib/next_ls/semantic_tokens.ex
@@ -0,0 +1,112 @@
+defmodule NextLS.SemanticTokens do
+  @moduledoc false
+
+  # alias GenLSP.Enumerations.SemanticTokenModifiers
+  alias GenLSP.Enumerations.SemanticTokenTypes
+  alias GenLSP.Structures.SemanticTokens
+  alias GenLSP.Structures.SemanticTokensLegend
+
+  @token_types %{
+    SemanticTokenTypes.parameter() => 0
+  }
+  def legend do
+    %SemanticTokensLegend{
+      token_types: Map.keys(@token_types),
+      token_modifiers: []
+    }
+  end
+
+  def new(document) do
+    code = Enum.join(document, "\n")
+
+    {:ok, ast} = parse(code)
+
+    result =
+      code
+      |> String.to_charlist()
+      |> :spitfire_tokenizer.tokenize(1, 1, [])
+
+    case result do
+      {:ok, _, _, _, tokens} ->
+        data = build_response(tokens, ast)
+        %SemanticTokens{data: data}
+
+      {:error, message} ->
+        %GenLSP.ErrorResponse{code: GenLSP.Enumerations.ErrorCodes.parse_error(), message: inspect(message)}
+    end
+  end
+
+  defp parse(code) do
+    code
+    |> Spitfire.parse(literal_encoder: &{:ok, {:__block__, &2, [&1]}})
+    |> case do
+      {:error, ast, _errors} ->
+        {:ok, ast}
+
+      other ->
+        other
+    end
+  end
+
+  defp build_response(tokens, ast) do
+    do_build_response(tokens, ast, [])
+  end
+
+  defp do_build_response([], _ast, acc), do: acc |> Enum.sort_by(&{&1.line, &1.col}) |> build_deltas()
+  # TODO: this should be made to work with macros such as `test "it works", %{foo: foo} do ...`
+  defp do_build_response([{:identifier, _, definition}, {:paren_identifier, _, _}, {:"(", _} | rest], ast, acc)
+       when definition in [:def, :defp, :defmacro, :defmacrop] do
+    {parameters, rest} = take_parameters(rest, ast)
+    do_build_response(rest, ast, parameters ++ acc)
+  end
+
+  defp do_build_response([_h | tail], ast, acc), do: do_build_response(tail, ast, acc)
+
+  defp take_parameters(rest, ast) do
+    {identifiers, rest} =
+      Enum.split_while(rest, fn
+        {:")", _} -> false
+        _ -> true
+      end)
+
+    parameters =
+      identifiers
+      |> Enum.filter(&match?({:identifier, _, _}, &1))
+      |> Enum.reduce([], fn {:identifier, {line, col, name}, _}, acc ->
+        var_refs = NextLS.ASTHelpers.Variables.list_variable_references(ast, {line, col})
+
+        parameters =
+          Enum.map(var_refs, fn {_name, {line.._line_end//_, col..col_end//_}} ->
+            {line, col, col_end - col + 1}
+          end)
+
+        [{line, col, length(name)} | parameters] ++ acc
+      end)
+      |> Enum.map(fn {line, col, length} ->
+        make_token(line, col, length, SemanticTokenTypes.parameter())
+      end)
+
+    {parameters, rest}
+  end
+
+  defp make_token(line, col, length, type, modifiers \\ []) do
+    %{line: line - 1, col: col - 1, length: length, type: type, modifiers: modifiers}
+  end
+
+  defp build_deltas([]), do: []
+
+  defp build_deltas([first | _] = tokens) do
+    modifiers = 0
+
+    encoded_tokens =
+      tokens
+      |> Enum.chunk_every(2, 1, :discard)
+      |> Enum.flat_map(fn [previous, current] ->
+        delta_line = current.line - previous.line
+        delta_char = if delta_line == 0, do: current.col - previous.col, else: current.col
+        [delta_line, delta_char, current.length, @token_types[current.type], modifiers]
+      end)
+
+    [first.line, first.col, first.length, @token_types[first.type], modifiers] ++ encoded_tokens
+  end
+end
diff --git a/test/next_ls/semantic_tokens_test.exs b/test/next_ls/semantic_tokens_test.exs
new file mode 100644
index 00000000..a55d24fa
--- /dev/null
+++ b/test/next_ls/semantic_tokens_test.exs
@@ -0,0 +1,34 @@
+defmodule NextLS.SemanticTokensTest do
+  use ExUnit.Case, async: true
+
+  alias NextLS.SemanticTokens
+
+  describe "parameters" do
+    test "it returns an encoding of the parameters" do
+      code =
+        String.split(
+          """
+          defmodule TestSemanticTokens do
+            def hello(var) do
+              "Hello " <> var
+            end
+          end
+          """,
+          "\n"
+        )
+
+      tokens = SemanticTokens.new(code)
+
+      data = Enum.chunk_every(tokens.data, 5)
+      modifier = 0
+      parameter = 0
+
+      # the first `var` is on line 1, char 12 (zero-based)
+
+      assert [
+               [1, 12, 3, parameter, modifier],
+               [1, 15, 3, parameter, modifier]
+             ] == data
+    end
+  end
+end
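
A note on the wire format: `data` is the LSP relative semantic-token encoding, five integers per token (delta line, delta start character, length, token type index into the legend, modifiers bitmask). The sketch below shows how a client recovers absolute positions from what `build_deltas/1` emits; the `SemanticTokenDecoding` module is illustrative only and not part of this patch.

    defmodule SemanticTokenDecoding do
      # Inverse of build_deltas/1: walks the flat [deltaLine, deltaStartChar,
      # length, tokenType, tokenModifiers, ...] array and rebuilds absolute
      # zero-based {line, character, length, token_type} tuples.
      def decode(data) do
        data
        |> Enum.chunk_every(5)
        |> Enum.map_reduce({0, 0}, fn [dl, dc, len, type, _mods], {line, char} ->
          line = line + dl
          # the start character is a delta only when the token shares a line
          # with the previous token; otherwise it is absolute
          char = if dl == 0, do: char + dc, else: dc
          {{line, char, len, type}, {line, char}}
        end)
        |> elem(0)
      end
    end

    # The fixture in semantic_tokens_test.exs round-trips as expected:
    #   SemanticTokenDecoding.decode([1, 12, 3, 0, 0, 1, 15, 3, 0, 0])
    #   #=> [{1, 12, 3, 0}, {2, 15, 3, 0}]
    # i.e. the `var` parameter definition on line 1 and its reference on
    # line 2 (both zero-based).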