Commit

feat: add full semantic token support
NJichev committed May 20, 2024
1 parent 59e57ce commit 1f9cc10
Showing 4 changed files with 186 additions and 32 deletions.
13 changes: 13 additions & 0 deletions lib/next_ls.ex
@@ -24,6 +24,7 @@ defmodule NextLS do
   alias GenLSP.Requests.TextDocumentFormatting
   alias GenLSP.Requests.TextDocumentHover
   alias GenLSP.Requests.TextDocumentReferences
+  alias GenLSP.Requests.TextDocumentSemanticTokensFull
   alias GenLSP.Requests.WorkspaceApplyEdit
   alias GenLSP.Requests.WorkspaceSymbol
   alias GenLSP.Structures.ApplyWorkspaceEditParams
@@ -158,6 +159,12 @@ defmodule NextLS do
             nil
           end,
         document_formatting_provider: true,
+        semantic_tokens_provider: %GenLSP.Structures.SemanticTokensRegistrationOptions{
+          document_selector: [%{language: "elixir"}],
+          legend: NextLS.SemanticTokens.legend(),
+          range: true,
+          full: %{delta: true}
+        },
         execute_command_provider: %GenLSP.Structures.ExecuteCommandOptions{
           commands: [
             "to-pipe",
@@ -365,6 +372,12 @@ defmodule NextLS do
     {:reply, locations, lsp}
   end
 
+  def handle_request(%TextDocumentSemanticTokensFull{params: %{text_document: %{uri: uri}}}, lsp) do
+    document = lsp.assigns.documents[uri]
+
+    {:reply, document |> NextLS.SemanticTokens.new() |> IO.inspect(label: "REPLY"), lsp}
+  end
+
   def handle_request(%TextDocumentHover{params: %{position: position, text_document: %{uri: uri}}}, lsp) do
     file = URI.parse(uri).path
     line = position.line + 1
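Note: the registration above advertises `range: true` and `full: %{delta: true}`, but this commit only wires up a handler for the full request (`textDocument/semanticTokens/full`). For context, a successful reply carries a flat list of integers, five per token. A sketch of the reply shape (not part of the commit), with values borrowed from the test added below:

    # Reply for a document where the parameter `var` in `def hello(var)`
    # has one reference on the following line:
    %GenLSP.Structures.SemanticTokens{
      data: [
        1, 12, 3, 0, 0,  # line +1, char 12, length 3, type 0 (parameter), no modifiers
        1, 15, 3, 0, 0   # one line further down, char 15, length 3
      ]
    }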
59 changes: 27 additions & 32 deletions lib/next_ls/helpers/ast_helpers/variables.ex
@@ -42,44 +42,39 @@ defmodule NextLS.ASTHelpers.Variables do
   #     end
   #   end
 
-  @spec list_variable_references(String.t(), {integer(), integer()}) :: [{atom(), {Range.t(), Range.t()}}]
-  def list_variable_references(file, position) do
+  @spec list_variable_references(Path.t() | Macro.t(), {integer(), integer()}) :: [{atom(), {Range.t(), Range.t()}}]
+  def list_variable_references(file, position) when is_binary(file) or is_list(file) do
     file = File.read!(file)
 
     case NextLS.Parser.parse(file, columns: true) do
-      {:ok, ast} ->
-        {_ast, %{vars: vars}} =
-          Macro.traverse(
-            ast,
-            %{vars: [], symbols: %{}, sym_ranges: [], scope: []},
-            &prewalk/2,
-            &postwalk/2
-          )
-
-        symbol =
-          Enum.find_value(vars, fn %{name: name, sym_range: range, ref_range: ref_range} ->
-            if position_in_range?(position, ref_range), do: {name, range}, else: nil
-          end)
-
-        position =
-          case symbol do
-            nil -> position
-            {_, {line.._//_, column.._//_}} -> {line, column}
-          end
-
-        Enum.reduce(vars, [], fn val, acc ->
-          if position_in_range?(position, val.sym_range) do
-            [{val.name, val.ref_range} | acc]
-          else
-            acc
-          end
-        end)
-
-      _error ->
-        []
+      {:ok, ast} -> list_variable_references(ast, position)
+      _error -> []
     end
   end
 
+  def list_variable_references(ast, position) do
+    {_ast, %{vars: vars}} =
+      Macro.traverse(
+        ast,
+        %{vars: [], symbols: %{}, sym_ranges: [], scope: []},
+        &prewalk/2,
+        &postwalk/2
+      )
+
+    position =
+      Enum.find_value(vars, position, fn %{sym_range: {line.._//_, column.._//_}, ref_range: ref_range} ->
+        if position_in_range?(position, ref_range), do: {line, column}
+      end)
+
+    Enum.reduce(vars, [], fn val, acc ->
+      if position_in_range?(position, val.sym_range) do
+        [{val.name, val.ref_range} | acc]
+      else
+        acc
+      end
+    end)
+  end
+
   # search symbols in function and macro definition args and increase scope
   defp prewalk({operation, meta, [args | _]} = ast, acc) when operation in @defs_with_args do
     acc = increase_scope_nesting(acc, meta[:line])
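A usage sketch (not from this commit; the path and position are illustrative): splitting the function into a file clause and an AST clause lets callers that already hold a parsed AST, such as `NextLS.SemanticTokens`, skip the `File.read!`/re-parse round trip:

    # Equivalent to list_variable_references("lib/my_app.ex", {10, 5}),
    # minus reading and parsing the file a second time:
    {:ok, ast} = NextLS.Parser.parse(File.read!("lib/my_app.ex"), columns: true)
    refs = NextLS.ASTHelpers.Variables.list_variable_references(ast, {10, 5})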
112 changes: 112 additions & 0 deletions lib/next_ls/semantic_tokens.ex
@@ -0,0 +1,112 @@
defmodule NextLS.SemanticTokens do
  @moduledoc false

  # alias GenLSP.Enumerations.SemanticTokenModifiers
  alias GenLSP.Enumerations.SemanticTokenTypes
  alias GenLSP.Structures.SemanticTokens
  alias GenLSP.Structures.SemanticTokensLegend

  @token_types %{
    SemanticTokenTypes.parameter() => 0
  }

  def legend do
    %SemanticTokensLegend{
      token_types: Map.keys(@token_types),
      token_modifiers: []
    }
  end

  def new(document) do
    code = Enum.join(document, "\n")

    {:ok, ast} = parse(code)

    result =
      code
      |> String.to_charlist()
      |> :spitfire_tokenizer.tokenize(1, 1, [])

    case result do
      {:ok, _, _, _, tokens} ->
        data = build_response(tokens, ast)
        %SemanticTokens{data: data}

      {:error, message} ->
        # CI note: GitHub Actions / dialyzer warns on this clause (line 34), pattern_match: "The pattern can never match the type"
        %GenLSP.ErrorResponse{code: GenLSP.Enumerations.ErrorCodes.parse_error(), message: inspect(message)}
    end
  end

  defp parse(code) do
    code
    |> Spitfire.parse(literal_encoder: &{:ok, {:__block__, &2, [&1]}})
    |> case do
      {:error, ast, _errors} ->
        {:ok, ast}

      other ->
        other
    end
  end

  defp build_response(tokens, ast) do
    do_build_response(tokens, ast, [])
  end

  defp do_build_response([], _ast, acc), do: acc |> Enum.sort_by(&{&1.line, &1.col}) |> build_deltas()

  # TODO: this should be made to work with macros such as `test "it works", %{foo: foo} do ...`
  defp do_build_response([{:identifier, _, definition}, {:paren_identifier, _, _}, {:"(", _} | rest], ast, acc)
       when definition in [:def, :defp, :defmacro, :defmacrop] do
    {parameters, rest} = take_parameters(rest, ast)
    do_build_response(rest, ast, parameters ++ acc)
  end

  defp do_build_response([_h | tail], ast, acc), do: do_build_response(tail, ast, acc)

  defp take_parameters(rest, ast) do
    {identifiers, rest} =
      Enum.split_while(rest, fn
        {:")", _} -> false
        _ -> true
      end)

    parameters =
      identifiers
      |> Enum.filter(&match?({:identifier, _, _}, &1))
      |> Enum.reduce([], fn {:identifier, {line, col, name}, _}, acc ->
        var_refs = NextLS.ASTHelpers.Variables.list_variable_references(ast, {line, col})

        parameters =
          Enum.map(var_refs, fn {_name, {line.._line_end//_, col..col_end//_}} ->
            {line, col, col_end - col + 1}
          end)

        [{line, col, length(name)} | parameters] ++ acc
      end)
      |> Enum.map(fn {line, col, length} ->
        make_token(line, col, length, SemanticTokenTypes.parameter())
      end)

    {parameters, rest}
  end

  defp make_token(line, col, length, type, modifiers \\ []) do
    %{line: line - 1, col: col - 1, length: length, type: type, modifiers: modifiers}
  end

  defp build_deltas([]), do: []

  defp build_deltas([first | _] = tokens) do
    modifiers = 0

    encoded_tokens =
      tokens
      |> Enum.chunk_every(2, 1, :discard)
      |> Enum.flat_map(fn [previous, current] ->
        delta_line = current.line - previous.line
        delta_char = if delta_line == 0, do: current.col - previous.col, else: current.col
        [delta_line, delta_char, current.length, @token_types[current.type], modifiers]
      end)

    [first.line, first.col, first.length, @token_types[first.type], modifiers] ++ encoded_tokens
  end
end
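A worked example of the delta encoding (not part of the commit): `build_deltas/1` emits the first token at its absolute 0-indexed position and every later token relative to its predecessor as `[delta_line, delta_start_char, length, type, modifiers]`, where the character value is a delta only when the token stays on the same line. Assuming `def add(a, b)` on the first line of a file:

    tokens = [
      %{line: 0, col: 8, length: 1, type: SemanticTokenTypes.parameter(), modifiers: []},  # `a`
      %{line: 0, col: 11, length: 1, type: SemanticTokenTypes.parameter(), modifiers: []}  # `b`
    ]

    # build_deltas(tokens)
    # => [0, 8, 1, 0, 0,   absolute: line 0, char 8
    #     0, 3, 1, 0, 0]   same line, so the char delta is 11 - 8 = 3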
34 changes: 34 additions & 0 deletions test/next_ls/semantic_tokens_test.exs
@@ -0,0 +1,34 @@
defmodule NextLS.SemanticTokensTest do
  use ExUnit.Case, async: true

  alias NextLS.SemanticTokens

  describe "parameters" do
    test "it returns an encoding of the parameters" do
      code =
        String.split(
          """
          defmodule TestSemanticTokens do
            def hello(var) do
              "Hello " <> var
            end
          end
          """,
          "\n"
        )

      tokens = SemanticTokens.new(code)

      data = Enum.chunk_every(tokens.data, 5)
      modifier = 0
      parameter = 0

      # var 1 is on line 1, char 12

      assert [
               [1, 12, 3, parameter, modifier],
               [1, 15, 3, parameter, modifier]
             ] == data
    end
  end
end
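To make the asserted numbers easier to read, here is a small decoding sketch (not part of the commit) that folds the deltas back into absolute 0-indexed `{line, start_char, length}` triples:

    decode = fn data ->
      data
      |> Enum.chunk_every(5)
      |> Enum.map_reduce({0, 0}, fn [dl, dc, len, _type, _mod], {line, char} ->
        line = line + dl
        char = if dl == 0, do: char + dc, else: dc
        {{line, char, len}, {line, char}}
      end)
      |> elem(0)
    end

    decode.([1, 12, 3, 0, 0, 1, 15, 3, 0, 0])
    # => [{1, 12, 3}, {2, 15, 3}]
    # i.e. the `var` in the parameter list, then its reference one line below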
