diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md
index ab36997c7016..c5be8fb6ae0f 100644
--- a/book/src/generated/typable-cmd.md
+++ b/book/src/generated/typable-cmd.md
@@ -50,6 +50,7 @@
 | `:lsp-workspace-command` | Open workspace command picker |
 | `:lsp-restart` | Restarts the Language Server that is in use by the current doc |
 | `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. |
+| `:semantic-tokens` | Display the semantic tokens, primarily for theming and development. |
 | `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. |
 | `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. |
 | `:debug-eval` | Evaluate expression in current debug context. |
diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs
index 3f88b3523f80..6a23d7758bb7 100644
--- a/helix-lsp/src/client.rs
+++ b/helix-lsp/src/client.rs
@@ -10,13 +10,13 @@ use lsp::PositionEncodingKind;
 use lsp_types as lsp;
 use serde::Deserialize;
 use serde_json::Value;
-use std::collections::HashMap;
 use std::future::Future;
 use std::process::Stdio;
 use std::sync::{
     atomic::{AtomicU64, Ordering},
-    Arc,
+    Arc, RwLock,
 };
+use std::{collections::HashMap, sync::RwLockReadGuard};
 use tokio::{
     io::{BufReader, BufWriter},
     process::{Child, Command},
@@ -38,6 +38,9 @@ pub struct Client {
     root_uri: Option<lsp::Url>,
     workspace_folders: Vec<lsp::WorkspaceFolder>,
     req_timeout: u64,
+
+    pub(crate) semantic_token_types_legend: RwLock<Vec<Arc<String>>>,
+    pub(crate) semantic_token_modifiers_legend: RwLock<Vec<Arc<String>>>,
 }
 
 impl Client {
@@ -110,6 +113,9 @@ impl Client {
             root_path,
             root_uri,
             workspace_folders,
+
+            semantic_token_types_legend: Default::default(),
+            semantic_token_modifiers_legend: Default::default(),
         };
 
         Ok((client, server_rx, initialize_notify))
@@ -161,6 +167,14 @@ impl Client {
             .unwrap_or_default()
     }
 
+    pub fn types_legend(&self) -> RwLockReadGuard<'_, Vec<Arc<String>>> {
+        self.semantic_token_types_legend.read().unwrap()
+    }
+
+    pub fn modifiers_legend(&self) -> RwLockReadGuard<'_, Vec<Arc<String>>> {
+        self.semantic_token_modifiers_legend.read().unwrap()
+    }
+
     pub fn config(&self) -> Option<&Value> {
         self.config.as_ref()
     }
@@ -315,6 +329,9 @@ impl Client {
                 execute_command: Some(lsp::DynamicRegistrationClientCapabilities {
                     dynamic_registration: Some(false),
                 }),
+                semantic_tokens: Some(lsp::SemanticTokensWorkspaceClientCapabilities {
+                    refresh_support: Some(false),
+                }),
                 ..Default::default()
             }),
             text_document: Some(lsp::TextDocumentClientCapabilities {
@@ -386,6 +403,22 @@ impl Client {
                 publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities {
                     ..Default::default()
                 }),
+                semantic_tokens: Some(lsp::SemanticTokensClientCapabilities {
+                    dynamic_registration: Some(true),
+                    requests: lsp::SemanticTokensClientCapabilitiesRequests {
+                        range: Some(true),
+                        full: Some(lsp::SemanticTokensFullOptions::Bool(false)),
+                    },
+                    // Don't specify defaults for these: Helix will receive the server's
+                    // legend in its response and report back that it supports the same tokens.
+                    token_types: Vec::new(),
+                    token_modifiers: Vec::new(),
+                    overlapping_token_support: Some(true),
+                    multiline_token_support: Some(true),
+                    server_cancel_support: Some(true),
+                    augments_syntax_tokens: Some(true),
+                    ..Default::default()
+                }),
                 ..Default::default()
             }),
             window: Some(lsp::WindowClientCapabilities {
@@ -702,6 +735,37 @@ impl Client {
         Some(self.call::<lsp::request::ResolveCompletionItem>(completion_item))
     }
 
+    pub fn text_document_semantic_tokens(
+        &self,
+        text_document: lsp::TextDocumentIdentifier,
+        range: lsp::Range,
+        work_done_token: Option<lsp::ProgressToken>,
+    ) -> Option<impl Future<Output = Result<Value>>> {
+        let capabilities = self.capabilities.get().unwrap();
+
+        let support_range = match capabilities.semantic_tokens_provider.as_ref()? {
+            lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(opt) => opt.range?,
+            lsp::SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(opt) => {
+                opt.semantic_tokens_options.range?
+            }
+        };
+
+        if !support_range {
+            return None;
+        }
+
+        Some(self.call::<lsp::request::SemanticTokensRangeRequest>(
+            lsp::SemanticTokensRangeParams {
+                work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token },
+                partial_result_params: lsp::PartialResultParams {
+                    partial_result_token: None,
+                },
+                text_document,
+                range,
+            },
+        ))
+    }
+
     pub fn text_document_signature_help(
         &self,
         text_document: lsp::TextDocumentIdentifier,
diff --git a/helix-lsp/src/lib.rs b/helix-lsp/src/lib.rs
index 341d4a547b35..a948fca7d5fe 100644
--- a/helix-lsp/src/lib.rs
+++ b/helix-lsp/src/lib.rs
@@ -621,9 +621,46 @@ fn start_client(
         })
         .await;
 
-        if let Err(e) = value {
-            log::error!("failed to initialize language server: {}", e);
-            return;
-        }
+        let capabilities = match value {
+            Ok(value) => value,
+            Err(e) => {
+                log::error!("failed to initialize language server: {}", e);
+                return;
+            }
+        };
+
+        // Compute the legends for semantic tokens and put them in `Arc`s; that way, when
+        // documents use them the cost is negligible: reasonable servers will send at most a
+        // few dozen strings, but documents can reference each string hundreds of times.
+        let legends = match capabilities.semantic_tokens_provider.as_ref() {
+            Some(lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(opt)) => {
+                Some(&opt.legend)
+            }
+            Some(lsp::SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(opt)) => {
+                Some(&opt.semantic_tokens_options.legend)
+            }
+            _ => None,
+        };
+
+        // We assume the names contain no spaces or invalid TOML, else themes won't be able to
+        // reference them. This has not been a problem yet; if it becomes one, we will need to
+        // implement fixes here.
+        if let Some(legends) = legends {
+            let make_style_name = |v| Arc::new(format!("semantic.{v}"));
+
+            let mut types = _client.semantic_token_types_legend.write().unwrap();
+            types.clear();
+
+            for ty in &legends.token_types {
+                types.push(make_style_name(ty.as_str()));
+            }
+
+            let mut modifiers = _client.semantic_token_modifiers_legend.write().unwrap();
+            modifiers.clear();
+
+            for mo in &legends.token_modifiers {
+                modifiers.push(make_style_name(mo.as_str()));
+            }
+        }
 
         // next up, notify
diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs
index fb55ca2a8e72..c795b6f10f57 100644
--- a/helix-term/src/commands.rs
+++ b/helix-term/src/commands.rs
@@ -110,17 +110,7 @@ impl<'a> Context<'a> {
         T: for<'de> serde::Deserialize<'de> + Send + 'static,
         F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static,
     {
-        let callback = Box::pin(async move {
-            let json = call.await?;
-            let response = serde_json::from_value(json)?;
-            let call: job::Callback = Callback::EditorCompositor(Box::new(
-                move |editor: &mut Editor, compositor: &mut Compositor| {
-                    callback(editor, compositor, response)
-                },
-            ));
-            Ok(call)
-        });
-        self.jobs.callback(callback);
+        self.jobs.callback(make_job_callback(call, callback));
     }
 
     /// Returns 1 if no explicit count was provided
@@ -130,6 +120,27 @@ impl<'a> Context<'a> {
     }
 }
 
+#[inline]
+fn make_job_callback<T, F>(
+    call: impl Future<Output = helix_lsp::Result<serde_json::Value>> + 'static + Send,
+    callback: F,
+) -> std::pin::Pin<Box<impl Future<Output = anyhow::Result<job::Callback>>>>
+where
+    T: for<'de> serde::Deserialize<'de> + Send + 'static,
+    F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static,
+{
+    Box::pin(async move {
+        let json = call.await?;
+        let response = serde_json::from_value(json)?;
+        let call: job::Callback = Callback::EditorCompositor(Box::new(
+            move |editor: &mut Editor, compositor: &mut Compositor| {
+                callback(editor, compositor, response)
+            },
+        ));
+        Ok(call)
+    })
+}
+
 use helix_view::{align_view, Align};
 
 /// A MappableCommand is either a static command like "jump_view_up" or a Typable command like
diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs
index 3b94c9bd5558..fcd0609f4467 100644
--- a/helix-term/src/commands/lsp.rs
+++ b/helix-term/src/commands/lsp.rs
@@ -26,6 +26,9 @@ use crate::{
     },
 };
 
+mod semantic_tokens;
+pub use semantic_tokens::*;
+
 use std::{
     borrow::Cow, cmp::Ordering, collections::BTreeMap, fmt::Write, path::PathBuf, sync::Arc,
 };
diff --git a/helix-term/src/commands/lsp/semantic_tokens.rs b/helix-term/src/commands/lsp/semantic_tokens.rs
new file mode 100644
index 000000000000..61f20f403887
--- /dev/null
+++ b/helix-term/src/commands/lsp/semantic_tokens.rs
@@ -0,0 +1,194 @@
+//! Semantic tokens computation for documents.
+//!
+//! The tokens are then used in highlighting.
+
+use std::future::Future;
+use std::sync::Arc;
+
+use helix_lsp::lsp;
+use helix_view::document::DocumentSemanticTokens;
+use helix_view::editor::Editor;
+use helix_view::{Document, View};
+
+pub fn compute_semantic_tokens_for_all_views(editor: &mut Editor, jobs: &mut crate::job::Jobs) {
+    if !editor.config().lsp.enable_semantic_tokens_highlighting {
+        return;
+    }
+
+    for (view, _) in editor.tree.views() {
+        let doc = match editor.documents.get(&view.doc) {
+            Some(doc) => doc,
+            None => continue,
+        };
+        if let Some(callback) = compute_semantic_tokens_for_view(view, doc) {
+            jobs.callback(callback);
+        }
+    }
+}
+
+pub(crate) fn compute_semantic_tokens_for_view(
+    view: &View,
+    doc: &Document,
+) -> Option<std::pin::Pin<Box<impl Future<Output = anyhow::Result<crate::job::Callback>>>>> {
+    let language_server = doc.language_server()?;
+    let capabilities = language_server.capabilities();
+
+    let view_id = view.id;
+    let doc_id = view.doc;
+
+    let lsp_support_ranges = match capabilities.semantic_tokens_provider.as_ref()? {
+        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(opt) => opt.range?,
+        lsp::SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(opt) => {
+            opt.semantic_tokens_options.range?
+        }
+    };
+
+    if !lsp_support_ranges {
+        return None;
+    }
+
+    let doc_text = doc.text();
+    let len_lines = doc_text.len_lines();
+
+    // Compute ~3 times the current view height of semantic tokens: that way some scrolling
+    // will not show half the view with tokens and half without, while still being faster
+    // than computing the tokens for the full file (which could be dozens of times longer
+    // than the view).
+    let view_height = view.inner_height();
+    let first_visible_line = doc_text.char_to_line(view.offset.anchor);
+    let first_line = first_visible_line.saturating_sub(view_height);
+    let last_line = first_visible_line
+        .saturating_add(view_height.saturating_mul(2))
+        .min(len_lines);
+
+    if !doc.semantic_tokens_outdated
+        && doc.semantic_tokens(view_id).map_or(false, |dst| {
+            dst.first_line == first_line && dst.last_line == last_line
+        })
+    {
+        return None;
+    }
+
+    let doc_slice = doc_text.slice(..);
+    let first_char_in_range = doc_slice.line_to_char(first_line);
+    let last_char_in_range = doc_slice.line_to_char(last_line);
+
+    let range = helix_lsp::util::range_to_lsp_range(
+        doc_text,
+        helix_core::Range::new(first_char_in_range, last_char_in_range),
+        language_server.offset_encoding(),
+    );
+
+    let future = language_server.text_document_semantic_tokens(doc.identifier(), range, None)?;
+
+    let callback = super::super::make_job_callback(
+        future,
+        move |editor, _compositor, response: Option<lsp::SemanticTokensRangeResult>| {
+            // The config was modified or the window was closed while the request was in flight
+            if !editor.config().lsp.enable_semantic_tokens_highlighting
+                || editor.tree.try_get(view_id).is_none()
+            {
+                return;
+            }
+
+            // Add annotations to the relevant document, not the current one (it may have
+            // changed in between)
+            let doc = match editor.documents.get_mut(&doc_id) {
+                Some(doc) => doc,
+                None => return,
+            };
+
+            let mut dst = DocumentSemanticTokens {
+                first_line,
+                last_line,
+                tokens: Vec::new(),
+            };
+
+            // The immutable borrow of `doc` inside this block conflicts with the
+            // `set_semantic_tokens` call at the end
+            {
+                let (ls, data) = match (doc.language_server(), response) {
+                    (
+                        Some(ls),
+                        Some(
+                            lsp::SemanticTokensRangeResult::Tokens(lsp::SemanticTokens {
+                                data,
+                                ..
+ }) + | lsp::SemanticTokensRangeResult::Partial( + lsp::SemanticTokensPartialResult { data }, + ), + ), + ) if !data.is_empty() => (ls, data), + _ => { + doc.set_semantic_tokens( + view_id, + DocumentSemanticTokens { + first_line, + last_line, + tokens: Vec::new(), + }, + ); + doc.semantic_tokens_outdated = false; + return; + } + }; + + let offset_encoding = ls.offset_encoding(); + let types_legend = ls.types_legend(); + let modifiers_legend = ls.modifiers_legend(); + + let doc_text = doc.text(); + + let mut line = 0_u32; + let mut character = 0; + + for token in data { + line = line.saturating_add(token.delta_line); + character = if token.delta_line > 0 { + token.delta_start + } else { + character.saturating_add(token.delta_start) + }; + + let start = lsp::Position { line, character }; + let end = lsp::Position { + line, + character: character.saturating_add(token.length), + }; + + let range = match helix_lsp::util::lsp_range_to_range( + doc_text, + lsp::Range { start, end }, + offset_encoding, + ) { + Some(r) => r, + None => continue, + }; + + let token_type = match types_legend.get(token.token_type as usize) { + Some(ty) => Arc::clone(ty), + None => continue, + }; + + let mut tokens_for_range = + Vec::with_capacity(token.token_modifiers_bitset.count_ones() as usize + 1); + tokens_for_range.push(token_type); + + for i in 0..u32::BITS { + let mask = 1 << i; + + if token.token_modifiers_bitset & mask != 0 { + if let Some(mo) = modifiers_legend.get(i as usize) { + tokens_for_range.push(Arc::clone(mo)); + } + } + } + + dst.tokens.push((range, tokens_for_range)); + } + } + + doc.set_semantic_tokens(view_id, dst); + }, + ); + + Some(callback) +} diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index b0fd18a76b79..a44db962b522 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1387,6 +1387,66 @@ fn tree_sitter_scopes( Ok(()) } +fn semantic_tokens( + cx: &mut compositor::Context, + _args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + if !cx.editor.config().lsp.enable_semantic_tokens_highlighting { + cx.editor + .set_error("Semantic tokens are disabled in the configuration"); + return Ok(()); + } + + let (view, doc) = current!(cx.editor); + + let tokens = match doc.semantic_tokens(view.id) { + Some(dst) => &dst.tokens, + None => { + cx.editor + .set_status("No semantic tokens for this view or document"); + return Ok(()); + } + }; + + let text = doc.text().slice(..); + let pos = doc.selection(view.id).primary().cursor(text); + + let tokens = tokens + .iter() + .filter_map(|(r, s)| (r.anchor <= pos && r.head > pos).then_some(s)) + .collect::>(); + + let contents = if tokens.is_empty() { + cx.editor + .set_status("No semantic tokens for element under cursor"); + return Ok(()); + } else if tokens.len() == 1 { + format!("```json\n{:?}\n```", tokens[0]) + } else { + format!("```json\n{:?}\n```", tokens) + }; + + let callback = async move { + let call: job::Callback = Callback::EditorCompositor(Box::new( + move |editor: &mut Editor, compositor: &mut Compositor| { + let contents = ui::Markdown::new(contents, editor.syn_loader.clone()); + let popup = Popup::new("hover", contents).auto_close(true); + compositor.replace_or_push("hover", popup); + }, + )); + Ok(call) + }; + + cx.jobs.callback(callback); + + Ok(()) +} + fn vsplit( cx: &mut compositor::Context, args: &[Cow], @@ -2355,7 +2415,14 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ doc: "Display 
         doc: "Display tree sitter scopes, primarily for theming and development.",
         fun: tree_sitter_scopes,
         completer: None,
-    },
+    },
+    TypableCommand {
+        name: "semantic-tokens",
+        aliases: &[],
+        doc: "Display the semantic tokens, primarily for theming and development.",
+        fun: semantic_tokens,
+        completer: None,
+    },
     TypableCommand {
         name: "debug-start",
         aliases: &["dbg"],
diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs
index 59f371bda5dc..86c23b909c66 100644
--- a/helix-term/src/ui/editor.rs
+++ b/helix-term/src/ui/editor.rs
@@ -135,6 +135,18 @@ impl EditorView {
         let mut highlights =
             Self::doc_syntax_highlights(doc, view.offset.anchor, inner.height, theme);
 
+        if editor.config().lsp.enable_semantic_tokens_highlighting {
+            let semantic_highlights = Self::doc_semantic_highlights(
+                doc,
+                view.id,
+                view.offset.anchor,
+                inner.height,
+                theme,
+            );
+            if !semantic_highlights.is_empty() {
+                highlights = Box::new(syntax::merge(highlights, semantic_highlights))
+            }
+        }
         let overlay_highlights = Self::overlay_syntax_highlights(
             doc,
             view.offset.anchor,
@@ -340,6 +352,57 @@ impl EditorView {
         }
     }
 
+    pub fn doc_semantic_highlights(
+        doc: &Document,
+        view_id: helix_view::ViewId,
+        anchor: usize,
+        height: u16,
+        theme: &Theme,
+    ) -> Vec<(usize, std::ops::Range<usize>)> {
+        // If there is no scope that uses semantic highlighting, no need to do more work
+        if !theme.scopes().iter().any(|s| s.starts_with("semantic")) {
+            return Vec::new();
+        }
+        // Same if the semantic tokens for the view are empty/absent
+        let semantic_ranges = match doc.semantic_tokens(view_id) {
+            Some(sr) if !sr.tokens.is_empty() => &sr.tokens,
+            _ => return Vec::new(),
+        };
+
+        let text = doc.text().slice(..);
+        let row = text.char_to_line(anchor.min(text.len_chars()));
+
+        let range = {
+            // Calculate viewport byte ranges:
+            // Saturating subs to make it inclusive zero indexing.
+            let last_line = text.len_lines().saturating_sub(1);
+            let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line);
+            let start = text.line_to_byte(row.min(last_line));
+            let end = text.line_to_byte(last_visible_line + 1);
+
+            start..end
+        };
+
+        let mut result = Vec::new();
+
+        for (sem_range, semantic_styles) in semantic_ranges {
+            // Don't send highlights for tokens that are outside the visible range
+            if range.start > sem_range.anchor || range.end < sem_range.head {
+                continue;
+            }
+
+            let sem_range = sem_range.anchor..sem_range.head;
+
+            for sem_style in semantic_styles {
+                if let Some(idx) = theme.find_scope_index(sem_style.as_str()) {
+                    result.push((idx, sem_range.clone()));
+                }
+            }
+        }
+
+        result
+    }
+
     /// Get highlight spans for document diagnostics
     pub fn doc_diagnostics_highlights(
         doc: &Document,
@@ -978,6 +1041,8 @@ impl EditorView {
     }
 
     pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult {
+        commands::compute_semantic_tokens_for_all_views(cx.editor, cx.jobs);
+
         if let Some(completion) = &mut self.completion {
             return if completion.ensure_item_resolved(cx) {
                 EventResult::Consumed(None)
diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs
index 579c6725063b..011b966e972b 100644
--- a/helix-view/src/document.rs
+++ b/helix-view/src/document.rs
@@ -150,6 +150,17 @@ pub struct Document {
     language_server: Option<Arc<helix_lsp::Client>>,
 
     diff_handle: Option<DiffHandle>,
+
+    pub(crate) semantic_tokens: HashMap<ViewId, DocumentSemanticTokens>,
+    pub semantic_tokens_outdated: bool,
+}
+
+#[derive(Debug, Clone)]
+pub struct DocumentSemanticTokens {
+    pub first_line: usize,
+    pub last_line: usize,
+
+    pub tokens: Vec<(Range, Vec<Arc<String>>)>,
 }
 
 use std::{fmt, mem};
@@ -172,6 +183,8 @@ impl fmt::Debug for Document {
             .field("version", &self.version)
             .field("modified_since_accessed", &self.modified_since_accessed)
             .field("diagnostics", &self.diagnostics)
+            .field("semantic_tokens_outdated", &self.semantic_tokens_outdated)
+            .field("semantic_tokens", &self.semantic_tokens)
             // .field("language_server", &self.language_server)
             .finish()
     }
@@ -395,6 +408,8 @@ impl Document {
             modified_since_accessed: false,
             language_server: None,
             diff_handle: None,
+            semantic_tokens: HashMap::default(),
+            semantic_tokens_outdated: true,
             config,
         }
     }
@@ -809,6 +824,7 @@ impl Document {
     /// Remove a view's selection from this document.
     pub fn remove_view(&mut self, view_id: ViewId) {
         self.selections.remove(&view_id);
+        self.semantic_tokens.remove(&view_id);
     }
 
     /// Apply a [`Transaction`] to the [`Document`] to change its text.
@@ -872,6 +888,15 @@ impl Document {
         self.diagnostics
             .sort_unstable_by_key(|diagnostic| diagnostic.range);
 
+        self.semantic_tokens_outdated = true;
+        for sem_ranges in self.semantic_tokens.values_mut() {
+            let changes = transaction.changes();
+            for (range, _) in &mut sem_ranges.tokens {
+                range.anchor = changes.map_pos(range.anchor, helix_core::Assoc::After);
+                range.head = changes.map_pos(range.head, helix_core::Assoc::After);
+            }
+        }
+
         // emit lsp notification
         if let Some(language_server) = self.language_server() {
             let notify = language_server.text_document_did_change(
@@ -1204,6 +1229,21 @@ impl Document {
         )
     }
 
+    #[inline]
+    pub fn semantic_tokens(&self, view_id: ViewId) -> Option<&DocumentSemanticTokens> {
+        self.semantic_tokens.get(&view_id)
+    }
+
+    #[inline]
+    pub fn set_semantic_tokens(&mut self, view_id: ViewId, dst: DocumentSemanticTokens) {
+        self.semantic_tokens.insert(view_id, dst);
+    }
+
+    #[inline]
+    pub fn reset_all_semantic_tokens(&mut self) {
+        self.semantic_tokens = Default::default();
+    }
+
     #[inline]
     pub fn diagnostics(&self) -> &[Diagnostic] {
         &self.diagnostics
diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs
index 50da3ddeac2d..dc33f58aa940 100644
--- a/helix-view/src/editor.rs
+++ b/helix-view/src/editor.rs
@@ -377,6 +377,8 @@ pub struct LspConfig {
     pub auto_signature_help: bool,
     /// Display docs under signature help popup
    pub display_signature_help_docs: bool,
+    /// Enable semantic tokens highlighting
+    pub enable_semantic_tokens_highlighting: bool,
 }
 
 impl Default for LspConfig {
@@ -386,6 +388,7 @@ impl Default for LspConfig {
             display_messages: false,
             auto_signature_help: true,
             display_signature_help_docs: true,
+            enable_semantic_tokens_highlighting: false,
         }
    }
 }
@@ -1146,6 +1149,13 @@ impl Editor {
 
     fn _refresh(&mut self) {
         let config = self.config();
+
+        if !config.lsp.enable_semantic_tokens_highlighting {
+            for doc in self.documents_mut() {
+                doc.reset_all_semantic_tokens();
+            }
+        }
+
         for (view, _) in self.tree.views_mut() {
             let doc = doc_mut!(self, &view.doc);
             view.sync_changes(doc);
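
---

For reviewers trying the patch: a minimal sketch of the configuration involved, assuming the kebab-case serde convention used by the other `LspConfig` fields. The feature is off by default (see `LspConfig::default` above) and would be enabled in `config.toml`:

```toml
[editor.lsp]
enable-semantic-tokens-highlighting = true
```

Theme scopes are built as `semantic.<legend entry>` by `make_style_name`, so the exact keys depend on the server's legend; `variable` and `readonly` below are hypothetical entries for illustration only:

```toml
# theme.toml: style a semantic token type and a semantic token modifier
"semantic.variable" = { fg = "cyan" }
"semantic.readonly" = { modifiers = ["italic"] }
```

With both in place, `:semantic-tokens` shows the scopes resolved for the token under the cursor, which is how these names are meant to be discovered when writing a theme.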