diff --git a/book/src/keymap.md b/book/src/keymap.md
index 0f41b3247f3d5..903a02543f08a 100644
--- a/book/src/keymap.md
+++ b/book/src/keymap.md
@@ -12,6 +12,7 @@
 - [Match mode](#match-mode)
 - [Window mode](#window-mode)
 - [Space mode](#space-mode)
+ - [Comment mode](#comment-mode)
 - [Popup](#popup)
 - [Unimpaired](#unimpaired)
 - [Insert mode](#insert-mode)
@@ -289,6 +290,9 @@ This layer is a kludge of mappings, mostly pickers.
 | `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` |
 | `'` | Open last fuzzy picker | `last_picker` |
 | `w` | Enter [window mode](#window-mode) | N/A |
+| `c` | Comment/uncomment selections | `toggle_comments` |
+| `C` | Block comment/uncomment selections | `toggle_block_comments` |
+| `Alt-c` | Line comment/uncomment selections | `toggle_line_comments` |
 | `p` | Paste system clipboard after selections | `paste_clipboard_after` |
 | `P` | Paste system clipboard before selections | `paste_clipboard_before` |
 | `y` | Yank selections to clipboard | `yank_to_clipboard` |
diff --git a/book/src/languages.md b/book/src/languages.md
index 632a9146cb814..773a6df42923a 100644
--- a/book/src/languages.md
+++ b/book/src/languages.md
@@ -42,7 +42,7 @@ name = "mylang"
 scope = "source.mylang"
 injection-regex = "mylang"
 file-types = ["mylang", "myl"]
-comment-token = "#"
+comment-tokens = "#"
 indent = { tab-width = 2, unit = " " }
 formatter = { command = "mylang-formatter" , args = ["--stdin"] }
 language-servers = [ "mylang-lsp" ]
@@ -61,7 +61,8 @@ These configuration keys are available:
 | `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
 | `auto-format` | Whether to autoformat this language when saving |
 | `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
-| `comment-token` | The token to use as a comment-token |
+| `comment-tokens` | The tokens to use as comment tokens, either a single token `"//"` or an array `["//", "///", "//!"]` (the first token is used when commenting). Also configurable as `comment-token` for backwards compatibility |
+| `block-comment-tokens` | The start and end tokens for a multiline comment, either a single table or an array of tables of the form `{ start = "/*", end = "*/" }`. The first pair is used when commenting; any pair in the array can be uncommented |
 | `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) |
 | `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) |
 | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
diff --git a/helix-core/src/comment.rs b/helix-core/src/comment.rs
index 9c7e50f335b1a..7af38e2589cbb 100644
--- a/helix-core/src/comment.rs
+++ b/helix-core/src/comment.rs
@@ -1,8 +1,11 @@
 //! This module contains the functionality to toggle comments on lines over the selection
 //! using the comment character defined in the user's `languages.toml`
 
+use smallvec::SmallVec;
+
 use crate::{
-    find_first_non_whitespace_char, Change, Rope, RopeSlice, Selection, Tendril, Transaction,
+    find_first_non_whitespace_char, syntax::BlockCommentToken, Change, Range, Rope, RopeSlice,
+    Selection, Tendril, Transaction,
 };
 use std::borrow::Cow;
 
@@ -94,6 +97,226 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st
     Transaction::change(doc, changes.into_iter())
 }
 
+fn find_last_non_whitespace_char(text: RopeSlice) -> Option<usize> {
+    text.chars_at(text.len_chars())
+        .reversed()
+        .position(|ch| !ch.is_whitespace())
+        .map(|pos| text.len_chars() - pos - 1)
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum CommentChange {
+    Commented {
+        range: Range,
+        start_pos: usize,
+        end_pos: usize,
+        start_margin: bool,
+        end_margin: bool,
+        start_token: String,
+        end_token: String,
+    },
+    Uncommented {
+        range: Range,
+        start_pos: usize,
+        end_pos: usize,
+        start_token: String,
+        end_token: String,
+    },
+    Whitespace {
+        range: Range,
+    },
+}
+
+pub fn find_block_comments(
+    tokens: &[BlockCommentToken],
+    text: RopeSlice,
+    selection: &Selection,
+) -> (bool, Vec<CommentChange>) {
+    let mut commented = true;
+    let mut only_whitespace = true;
+    let mut comment_changes = Vec::with_capacity(selection.len());
+    let default_tokens = tokens.first().cloned().unwrap_or_default();
+    let mut start_token = default_tokens.start.clone();
+    let mut end_token = default_tokens.end.clone();
+
+    let mut tokens = tokens.to_vec();
+    // sort the tokens by length, so longer tokens will match first
+    tokens.sort_by(|a, b| {
+        if a.start.len() == b.start.len() {
+            b.end.len().cmp(&a.end.len())
+        } else {
+            b.start.len().cmp(&a.start.len())
+        }
+    });
+    for range in selection {
+        let selection_slice = range.slice(text);
+        if let (Some(start_pos), Some(end_pos)) = (
+            find_first_non_whitespace_char(selection_slice),
+            find_last_non_whitespace_char(selection_slice),
+        ) {
+            let mut line_commented = false;
+            let mut after_start = 0;
+            let mut before_end = 0;
+            let len = (end_pos + 1) - start_pos;
+
+            for BlockCommentToken { start, end } in &tokens {
+                let start_len = start.chars().count();
+                let end_len = end.chars().count();
+                after_start = start_pos + start_len;
+                before_end = end_pos.saturating_sub(end_len);
+
+                if len >= start_len + end_len {
+                    let start_fragment = selection_slice.slice(start_pos..after_start);
+                    let end_fragment = selection_slice.slice(before_end + 1..end_pos + 1);
+
+                    // block commented with these tokens
+                    if start_fragment == start.as_str() && end_fragment == end.as_str() {
+                        start_token = start.to_string();
+                        end_token = end.to_string();
+                        line_commented = true;
+                        break;
+                    }
+                }
+            }
+
+            if !line_commented {
+                comment_changes.push(CommentChange::Uncommented {
+                    range: *range,
+                    start_pos,
+                    end_pos,
+                    start_token: default_tokens.start.clone(),
+                    end_token: default_tokens.end.clone(),
+                });
+                commented = false;
+            } else {
+                comment_changes.push(CommentChange::Commented {
+                    range: *range,
+                    start_pos,
+                    end_pos,
+                    start_margin: selection_slice
+                        .get_char(after_start)
+                        .map_or(false, |c| c == ' '),
+                    end_margin: after_start != before_end
+                        && selection_slice
+                            .get_char(before_end)
+                            .map_or(false, |c| c == ' '),
+                    start_token: start_token.to_string(),
+                    end_token: end_token.to_string(),
+                });
+            }
+            only_whitespace = false;
+        } else {
+            comment_changes.push(CommentChange::Whitespace { range: *range });
+        }
+    }
+    if only_whitespace {
+        commented = false;
+    }
+    (commented, comment_changes)
+}
+
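(Editorial aside, not part of the patch.) The sketch below exercises the two functions above from a standalone crate that is assumed to depend on `helix-core` as modified by this diff; the token pairs mirror the Rust entry added to `languages.toml` further down. It illustrates the longest-token-first sort: a `/** ... */` selection is reported as commented with the `/**` pair, while toggling an uncommented selection wraps it with the first configured pair.

```rust
// Illustrative sketch only; assumes a binary crate with `helix-core` (this
// branch) as a dependency. Paths match the public items shown in this diff.
use helix_core::comment::{find_block_comments, toggle_block_comments, CommentChange};
use helix_core::syntax::BlockCommentToken;
use helix_core::{Rope, Selection};

fn token(start: &str, end: &str) -> BlockCommentToken {
    BlockCommentToken { start: start.into(), end: end.into() }
}

fn main() {
    // Same three pairs as the Rust entry in languages.toml; the first pair is
    // the one inserted when commenting.
    let tokens = [token("/*", "*/"), token("/**", "*/"), token("/*!", "*/")];

    // A doc-style block comment: `find_block_comments` tries the longer `/**`
    // start token before `/*`, so the selection is classified as Commented
    // with start_token == "/**".
    let doc = Rope::from("/** docs */");
    let selection = Selection::single(0, doc.len_chars());
    let (commented, changes) = find_block_comments(&tokens, doc.slice(..), &selection);
    assert!(commented);
    assert!(matches!(
        &changes[0],
        CommentChange::Commented { start_token, .. } if start_token == "/**"
    ));

    // Toggling an uncommented selection wraps it with the first configured pair.
    let mut plain = Rope::from("fn main() {}");
    let selection = Selection::single(0, plain.len_chars());
    let transaction = toggle_block_comments(&plain, &selection, &tokens);
    transaction.apply(&mut plain);
    assert_eq!(plain, "/* fn main() {} */");
}
```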
+#[must_use]
+pub fn create_block_comment_transaction(
+    doc: &Rope,
+    selection: &Selection,
+    commented: bool,
+    comment_changes: Vec<CommentChange>,
+) -> (Transaction, SmallVec<[Range; 1]>) {
+    let mut changes: Vec<Change> = Vec::with_capacity(selection.len() * 2);
+    let mut ranges: SmallVec<[Range; 1]> = SmallVec::with_capacity(selection.len());
+    let mut offs = 0;
+    for change in comment_changes {
+        if commented {
+            if let CommentChange::Commented {
+                range,
+                start_pos,
+                end_pos,
+                start_token,
+                end_token,
+                start_margin,
+                end_margin,
+            } = change
+            {
+                let from = range.from();
+                changes.push((
+                    from + start_pos,
+                    from + start_pos + start_token.len() + start_margin as usize,
+                    None,
+                ));
+                changes.push((
+                    from + end_pos - end_token.len() - end_margin as usize + 1,
+                    from + end_pos + 1,
+                    None,
+                ));
+            }
+        } else {
+            // uncommented so manually map ranges through changes
+            match change {
+                CommentChange::Uncommented {
+                    range,
+                    start_pos,
+                    end_pos,
+                    start_token,
+                    end_token,
+                } => {
+                    let from = range.from();
+                    changes.push((
+                        from + start_pos,
+                        from + start_pos,
+                        Some(Tendril::from(format!("{} ", start_token))),
+                    ));
+                    changes.push((
+                        from + end_pos + 1,
+                        from + end_pos + 1,
+                        Some(Tendril::from(format!(" {}", end_token))),
+                    ));
+
+                    let offset = start_token.chars().count() + end_token.chars().count() + 2;
+                    ranges.push(
+                        Range::new(from + offs, from + offs + end_pos + 1 + offset)
+                            .with_direction(range.direction()),
+                    );
+                    offs += offset;
+                }
+                CommentChange::Commented { range, .. } | CommentChange::Whitespace { range } => {
+                    ranges.push(Range::new(range.from() + offs, range.to() + offs));
+                }
+            }
+        }
+    }
+    (Transaction::change(doc, changes.into_iter()), ranges)
+}
+
+#[must_use]
+pub fn toggle_block_comments(
+    doc: &Rope,
+    selection: &Selection,
+    tokens: &[BlockCommentToken],
+) -> Transaction {
+    let text = doc.slice(..);
+    let (commented, comment_changes) = find_block_comments(tokens, text, selection);
+    let (mut transaction, ranges) =
+        create_block_comment_transaction(doc, selection, commented, comment_changes);
+    if !commented {
+        transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index()));
+    }
+    transaction
+}
+
+pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selection {
+    let mut ranges = SmallVec::new();
+    for range in selection.ranges() {
+        let (line_start, line_end) = range.line_range(text.slice(..));
+        let mut pos = text.line_to_char(line_start);
+        for line in text.slice(pos..text.line_to_char(line_end + 1)).lines() {
+            let start = pos;
+            pos += line.len_chars();
+            ranges.push(Range::new(start, pos));
+        }
+    }
+    Selection::new(ranges, 0)
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
@@ -149,4 +372,49 @@ mod test {
         // TODO: account for uncommenting with uneven comment indentation
     }
+
+    #[test]
+    fn test_find_block_comments() {
+        // three lines 5 characters.
+        let mut doc = Rope::from("1\n2\n3");
+        // select whole document
+        let selection = Selection::single(0, doc.len_chars());
+
+        let text = doc.slice(..);
+
+        let res = find_block_comments(&[BlockCommentToken::default()], text, &selection);
+
+        assert_eq!(
+            res,
+            (
+                false,
+                vec![CommentChange::Uncommented {
+                    range: Range::new(0, 5),
+                    start_pos: 0,
+                    end_pos: 4,
+                    start_token: "/*".to_string(),
+                    end_token: "*/".to_string(),
+                }]
+            )
+        );
+
+        // comment
+        let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
+        transaction.apply(&mut doc);
+
+        assert_eq!(doc, "/* 1\n2\n3 */");
+
+        // uncomment
+        let selection = Selection::single(0, doc.len_chars());
+        let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
+        transaction.apply(&mut doc);
+        assert_eq!(doc, "1\n2\n3");
+
+        // don't panic when there is just a space in comment
+        doc = Rope::from("/* */");
+        let selection = Selection::single(0, doc.len_chars());
+        let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
+        transaction.apply(&mut doc);
+        assert_eq!(doc, "");
+    }
 }
diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs
index 8c7fc4e15c613..522b5ac13eb39 100644
--- a/helix-core/src/syntax.rs
+++ b/helix-core/src/syntax.rs
@@ -103,7 +103,19 @@ pub struct LanguageConfiguration {
     pub shebangs: Vec<String>, // interpreter(s) associated with language
     #[serde(default)]
     pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
-    pub comment_token: Option<String>,
+    #[serde(
+        default,
+        skip_serializing,
+        deserialize_with = "from_comment_tokens",
+        alias = "comment-token"
+    )]
+    pub comment_tokens: Option<Vec<String>>,
+    #[serde(
+        default,
+        skip_serializing,
+        deserialize_with = "from_block_comment_tokens"
+    )]
+    pub block_comment_tokens: Option<Vec<BlockCommentToken>>,
     pub text_width: Option<usize>,
     pub soft_wrap: Option<SoftWrap>,
 
@@ -229,6 +241,59 @@ impl<'de> Deserialize<'de> for FileType {
     }
 }
 
+fn from_comment_tokens<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum CommentTokens {
+        Multiple(Vec<String>),
+        Single(String),
+    }
+    Ok(
+        Option::<CommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
+            CommentTokens::Single(val) => vec![val],
+            CommentTokens::Multiple(vals) => vals,
+        }),
+    )
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct BlockCommentToken {
+    pub start: String,
+    pub end: String,
+}
+
+impl Default for BlockCommentToken {
+    fn default() -> Self {
+        BlockCommentToken {
+            start: "/*".to_string(),
+            end: "*/".to_string(),
+        }
+    }
+}
+
+fn from_block_comment_tokens<'de, D>(
+    deserializer: D,
+) -> Result<Option<Vec<BlockCommentToken>>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum BlockCommentTokens {
+        Multiple(Vec<BlockCommentToken>),
+        Single(BlockCommentToken),
+    }
+    Ok(
+        Option::<BlockCommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
+            BlockCommentTokens::Single(val) => vec![val],
+            BlockCommentTokens::Multiple(vals) => vals,
+        }),
+    )
+}
+
 #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
 #[serde(rename_all = "kebab-case")]
 pub enum LanguageServerFeature {
diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs
index cd053266c2e0a..e94a744283976 100644
--- a/helix-term/src/commands.rs
+++ b/helix-term/src/commands.rs
@@ -23,7 +23,7 @@ use helix_core::{
     regex::{self, Regex, RegexBuilder},
     search::{self, CharMatcher},
     selection, shellwords, surround,
-    syntax::LanguageServerFeature,
+    
syntax::{BlockCommentToken, LanguageServerFeature}, text_annotations::TextAnnotations, textobject, tree_sitter::Node, @@ -413,6 +413,8 @@ impl MappableCommand { completion, "Invoke completion popup", hover, "Show docs for item under cursor", toggle_comments, "Comment/uncomment selections", + toggle_line_comments, "Line comment/uncomment selections", + toggle_block_comments, "Block comment/uncomment selections", rotate_selections_forward, "Rotate selections forward", rotate_selections_backward, "Rotate selections backward", rotate_selection_contents_forward, "Rotate selection contents forward", @@ -4658,18 +4660,124 @@ pub fn completion(cx: &mut Context) { } // comments -fn toggle_comments(cx: &mut Context) { +type CommentTransactionFn = fn( + line_token: Option<&str>, + block_tokens: Option<&[BlockCommentToken]>, + doc: &Rope, + selection: &Selection, +) -> Transaction; + +fn toggle_comments_impl(cx: &mut Context, comment_transaction: CommentTransactionFn) { let (view, doc) = current!(cx.editor); - let token = doc + let line_token: Option<&str> = doc + .language_config() + .and_then(|lc| lc.comment_tokens.as_ref()) + .and_then(|tc| tc.first()) + .map(|tc| tc.as_str()); + let block_tokens: Option<&[BlockCommentToken]> = doc .language_config() - .and_then(|lc| lc.comment_token.as_ref()) - .map(|tc| tc.as_ref()); - let transaction = comment::toggle_line_comments(doc.text(), doc.selection(view.id), token); + .and_then(|lc| lc.block_comment_tokens.as_ref()) + .map(|tc| &tc[..]); + + let transaction = + comment_transaction(line_token, block_tokens, doc.text(), doc.selection(view.id)); doc.apply(&transaction, view.id); exit_select_mode(cx); } +/// commenting behavior: +/// 1. only line comment tokens -> line comment +/// 2. each line block commented -> uncomment all lines +/// 3. whole selection block commented -> uncomment selection +/// 4. all lines not commented and block tokens -> comment uncommented lines +/// 5. 
no comment tokens and not block commented -> line comment +fn toggle_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + let text = doc.slice(..); + + // only have line comment tokens + if line_token.is_some() && block_tokens.is_none() { + return comment::toggle_line_comments(doc, selection, line_token); + } + + let split_lines = comment::split_lines_of_selection(text, selection); + + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + + let (line_commented, line_comment_changes) = + comment::find_block_comments(block_comment_tokens, text, &split_lines); + + // block commented by line would also be block commented so check this first + if line_commented { + return comment::create_block_comment_transaction( + doc, + &split_lines, + line_commented, + line_comment_changes, + ) + .0; + } + + let (block_commented, comment_changes) = + comment::find_block_comments(block_comment_tokens, text, selection); + + // check if selection has block comments + if block_commented { + return comment::create_block_comment_transaction( + doc, + selection, + block_commented, + comment_changes, + ) + .0; + } + + // not commented and only have block comment tokens + if line_token.is_none() && block_tokens.is_some() { + return comment::create_block_comment_transaction( + doc, + &split_lines, + line_commented, + line_comment_changes, + ) + .0; + } + + // not block commented at all and don't have any tokens + comment::toggle_line_comments(doc, selection, line_token) + }) +} + +fn toggle_line_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + if line_token.is_none() && block_tokens.is_some() { + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + comment::toggle_block_comments( + doc, + &comment::split_lines_of_selection(doc.slice(..), selection), + block_comment_tokens, + ) + } else { + comment::toggle_line_comments(doc, selection, line_token) + } + }); +} + +fn toggle_block_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + if line_token.is_some() && block_tokens.is_none() { + comment::toggle_line_comments(doc, selection, line_token) + } else { + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + comment::toggle_block_comments(doc, selection, block_comment_tokens) + } + }); +} + fn rotate_selections(cx: &mut Context, direction: Direction) { let count = cx.count(); let (view, doc) = current!(cx.editor); diff --git a/helix-term/src/keymap/default.rs b/helix-term/src/keymap/default.rs index 763ed4ae71ce5..3171cb9d7bdd7 100644 --- a/helix-term/src/keymap/default.rs +++ b/helix-term/src/keymap/default.rs @@ -276,6 +276,9 @@ pub fn default() -> HashMap { "k" => hover, "r" => rename_symbol, "h" => select_references_to_symbol_under_cursor, + "c" => toggle_comments, + "C" => toggle_block_comments, + "A-c" => toggle_line_comments, "?" 
=> command_palette, }, "z" => { "View" diff --git a/languages.toml b/languages.toml index dd849c477f3a2..92fa6254c105f 100644 --- a/languages.toml +++ b/languages.toml @@ -187,7 +187,12 @@ injection-regex = "rust" file-types = ["rs"] roots = ["Cargo.toml", "Cargo.lock"] auto-format = true -comment-token = "//" +comment-tokens = ["//", "///", "//!"] +block-comment-tokens = [ + { start = "/*", end = "*/" }, + { start = "/**", end = "*/" }, + { start = "/*!", end = "*/" }, +] language-servers = [ "rust-analyzer" ] indent = { tab-width = 4, unit = " " } @@ -278,6 +283,7 @@ injection-regex = "protobuf" file-types = ["proto"] language-servers = [ "bufls", "pbkit" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ -319,6 +325,7 @@ injection-regex = "mint" file-types = ["mint"] shebangs = [] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "mint" ] indent = { tab-width = 2, unit = " " } @@ -376,6 +383,7 @@ scope = "source.c" injection-regex = "c" file-types = ["c"] # TODO: ["h"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "clangd" ] indent = { tab-width = 2, unit = " " } @@ -412,6 +420,7 @@ scope = "source.cpp" injection-regex = "cpp" file-types = ["cc", "hh", "c++", "cpp", "hpp", "h", "ipp", "tpp", "cxx", "hxx", "ixx", "txx", "ino", "C", "H", "cu", "cuh", "cppm", "h++", "ii", "inl", { suffix = ".hpp.in" }, { suffix = ".h.in" }] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "clangd" ] indent = { tab-width = 2, unit = " " } @@ -459,6 +468,7 @@ injection-regex = "c-?sharp" file-types = ["cs", "csx", "cake"] roots = ["sln", "csproj"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = "\t" } language-servers = [ "omnisharp" ] @@ -493,6 +503,7 @@ file-types = ["go"] roots = ["go.work", "go.mod"] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "gopls", "golangci-lint-lsp" ] # TODO: gopls needs utf-8 offsets? 
indent = { tab-width = 4, unit = "\t" } @@ -558,6 +569,7 @@ scope = "source.gotmpl" injection-regex = "gotmpl" file-types = ["gotmpl"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "gopls" ] indent = { tab-width = 2, unit = " " } @@ -587,6 +599,7 @@ language-id = "javascript" file-types = ["js", "mjs", "cjs", "rules", "es6", "pac", "jakefile"] shebangs = ["node"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "typescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -613,6 +626,7 @@ injection-regex = "jsx" language-id = "javascriptreact" file-types = ["jsx"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "typescript-language-server" ] indent = { tab-width = 2, unit = " " } grammar = "javascript" @@ -624,6 +638,8 @@ injection-regex = "(ts|typescript)" file-types = ["ts", "mts", "cts"] language-id = "typescript" shebangs = ["deno", "ts-node"] +comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "typescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -637,6 +653,8 @@ scope = "source.tsx" injection-regex = "(tsx)" # |typescript language-id = "typescriptreact" file-types = ["tsx"] +comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "typescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -649,6 +667,7 @@ name = "css" scope = "source.css" injection-regex = "css" file-types = ["css", "scss"] +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "vscode-css-language-server" ] auto-format = true indent = { tab-width = 2, unit = " " } @@ -662,6 +681,7 @@ name = "scss" scope = "source.scss" injection-regex = "scss" file-types = ["scss"] +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "vscode-css-language-server" ] auto-format = true indent = { tab-width = 2, unit = " " } @@ -675,6 +695,7 @@ name = "html" scope = "text.html.basic" injection-regex = "html" file-types = ["html", "htm", "shtml", "xhtml", "xht", "jsp", "asp", "aspx", "jshtm", "volt", "rhtml"] +block-comment-tokens = { start = "" } language-servers = [ "vscode-html-language-server" ] auto-format = true indent = { tab-width = 2, unit = " " } @@ -843,6 +864,7 @@ injection-regex = "php" file-types = ["php", "inc", "php4", "php5", "phtml", "ctp"] shebangs = ["php"] roots = ["composer.json", "index.php"] +comment-token = "//" language-servers = [ "intelephense" ] indent = { tab-width = 4, unit = " " } @@ -855,6 +877,7 @@ name = "twig" scope = "source.twig" injection-regex = "twig" file-types = ["twig"] +block-comment-tokens = { start = "{#", end = "#}" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ -908,6 +931,7 @@ injection-regex = "lean" file-types = ["lean"] roots = [ "lakefile.lean" ] comment-token = "--" +block-comment-tokens = { start = "/-", end = "-/" } language-servers = [ "lean" ] indent = { tab-width = 2, unit = " " } @@ -934,6 +958,7 @@ file-types = ["jl"] shebangs = ["julia"] roots = ["Manifest.toml", "Project.toml"] comment-token = "#" +block-comment-tokens = { start = "#=", end = "=#" } language-servers = [ "julia" ] indent = { tab-width = 4, unit = " " } @@ -984,6 +1009,7 @@ scope = "source.ocaml" injection-regex = "ocaml" file-types = ["ml"] shebangs = ["ocaml", "ocamlrun", "ocamlscript"] +block-comment-tokens = { start = "(*", end = "*)" } comment-token = "(**)" language-servers = [ "ocamllsp" ] 
indent = { tab-width = 2, unit = " " } @@ -1003,6 +1029,7 @@ name = "ocaml-interface" scope = "source.ocaml.interface" file-types = ["mli"] shebangs = [] +block-comment-tokens = { start = "(*", end = "*)" } comment-token = "(**)" language-servers = [ "ocamllsp" ] indent = { tab-width = 2, unit = " " } @@ -1025,6 +1052,7 @@ file-types = ["lua"] shebangs = ["lua"] roots = [".luarc.json", ".luacheckrc", ".stylua.toml", "selene.toml", ".git"] comment-token = "--" +block-comment-tokens = { start = "--[[", end = "--]]" } indent = { tab-width = 2, unit = " " } language-servers = [ "lua-language-server" ] @@ -1050,6 +1078,7 @@ scope = "source.vue" injection-regex = "vue" file-types = ["vue"] roots = ["package.json"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } language-servers = [ "vuels" ] @@ -1077,6 +1106,7 @@ injection-regex = "haskell" file-types = ["hs", "hs-boot"] roots = ["Setup.hs", "stack.yaml", "cabal.project"] comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } language-servers = [ "haskell-language-server" ] indent = { tab-width = 2, unit = " " } @@ -1102,6 +1132,7 @@ injection-regex = "purescript" file-types = ["purs"] roots = ["spago.dhall", "bower.json"] comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } language-servers = [ "purescript-language-server" ] indent = { tab-width = 2, unit = " " } auto-format = true @@ -1156,6 +1187,7 @@ scope = "source.prolog" file-types = ["pl", "prolog"] shebangs = ["swipl"] comment-token = "%" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "swipl" ] [[language]] @@ -1175,6 +1207,7 @@ name = "cmake" scope = "source.cmake" file-types = ["cmake", "CMakeLists.txt"] comment-token = "#" +block-comment-tokens = { start = "#[[", end = "]]" } indent = { tab-width = 2, unit = " " } language-servers = [ "cmake-language-server" ] injection-regex = "cmake" @@ -1201,6 +1234,7 @@ name = "glsl" scope = "source.glsl" file-types = ["glsl", "vert", "tesc", "tese", "geom", "frag", "comp" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } injection-regex = "glsl" @@ -1238,6 +1272,7 @@ file-types = ["rkt", "rktd", "rktl", "scrbl"] shebangs = ["racket"] comment-token = ";" indent = { tab-width = 2, unit = " " } +block-comment-tokens = { start = "#|", end = "|#" } language-servers = [ "racket" ] grammar = "scheme" @@ -1272,6 +1307,7 @@ name = "wgsl" scope = "source.wgsl" file-types = ["wgsl"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "wgsl_analyzer" ] indent = { tab-width = 4, unit = " " } @@ -1318,6 +1354,7 @@ name = "tablegen" scope = "source.tablegen" file-types = ["td"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } injection-regex = "tablegen" @@ -1333,6 +1370,7 @@ file-types = ["md", "markdown", "PULLREQ_EDITMSG", "mkd", "mdwn", "mdown", "mark roots = [".marksman.toml"] language-servers = [ "marksman" ] indent = { tab-width = 2, unit = " " } +block-comment-tokens = { start = "" } [[grammar]] name = "markdown" @@ -1356,6 +1394,7 @@ file-types = ["dart"] roots = ["pubspec.yaml"] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "dart" ] indent = { tab-width = 2, unit = " " } @@ -1369,6 +1408,7 @@ scope = "source.scala" roots = ["build.sbt", "build.sc", "build.gradle", "build.gradle.kts", "pom.xml", ".scala-build"] 
file-types = ["scala", "sbt", "sc"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "metals" ] @@ -1479,6 +1519,8 @@ scope = "source.graphql" injection-regex = "graphql" file-types = ["gql", "graphql", "graphqls"] language-servers = [ "graphql-language-service" ] +comment-token = "#" +block-comment-tokens = { start = "\"\"\"", end = "\"\"\"" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ -1493,6 +1535,7 @@ file-types = ["elm"] roots = ["elm.json"] auto-format = true comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } language-servers = [ "elm-language-server" ] indent = { tab-width = 4, unit = " " } @@ -1505,6 +1548,7 @@ name = "iex" scope = "source.iex" injection-regex = "iex" file-types = ["iex"] +comment-token = "#" [[grammar]] name = "iex" @@ -1518,6 +1562,7 @@ file-types = ["res"] roots = ["bsconfig.json"] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "rescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -1554,6 +1599,7 @@ scope = "source.kotlin" file-types = ["kt", "kts"] roots = ["settings.gradle", "settings.gradle.kts"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } language-servers = [ "kotlin-language-server" ] @@ -1568,6 +1614,7 @@ injection-regex = "(hcl|tf|nomad)" language-id = "terraform" file-types = ["hcl", "tf", "nomad"] comment-token = "#" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "terraform-ls" ] auto-format = true @@ -1582,6 +1629,7 @@ scope = "source.tfvars" language-id = "terraform-vars" file-types = ["tfvars"] comment-token = "#" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "terraform-ls" ] auto-format = true @@ -1604,6 +1652,7 @@ scope = "source.sol" injection-regex = "(sol|solidity)" file-types = ["sol"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } language-servers = [ "solc" ] @@ -1632,6 +1681,7 @@ scope = "source.ron" injection-regex = "ron" file-types = ["ron"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } [[grammar]] @@ -1672,6 +1722,7 @@ injection-regex = "(r|R)md" file-types = ["rmd", "Rmd"] indent = { tab-width = 2, unit = " " } grammar = "markdown" +block-comment-tokens = { start = "" } language-servers = [ "r" ] [[language]] @@ -1681,6 +1732,7 @@ injection-regex = "swift" file-types = ["swift"] roots = [ "Package.swift" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } auto-format = true language-servers = [ "sourcekit-lsp" ] @@ -1693,6 +1745,7 @@ name = "erb" scope = "text.html.erb" injection-regex = "erb" file-types = ["erb"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } grammar = "embedded-template" @@ -1701,6 +1754,7 @@ name = "ejs" scope = "text.html.ejs" injection-regex = "ejs" file-types = ["ejs"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } grammar = "embedded-template" @@ -1714,6 +1768,7 @@ scope = "source.eex" injection-regex = "eex" file-types = ["eex"] roots = ["mix.exs", "mix.lock"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ -1726,6 +1781,7 @@ scope = "source.heex" 
injection-regex = "heex" file-types = ["heex"] roots = ["mix.exs", "mix.lock"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } language-servers = [ "elixir-ls" ] @@ -1738,6 +1794,7 @@ name = "sql" scope = "source.sql" file-types = ["sql", "dsql"] comment-token = "--" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } injection-regex = "sql" @@ -1796,6 +1853,7 @@ scope = "source.vala" injection-regex = "vala" file-types = ["vala", "vapi"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "vala-language-server" ] @@ -1821,6 +1879,7 @@ scope = "source.devicetree" injection-regex = "(dtsi?|devicetree|fdt)" file-types = ["dts", "dtsi"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = "\t" } [[grammar]] @@ -1859,6 +1918,7 @@ file-types = ["odin"] roots = ["ols.json"] language-servers = [ "ols" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = "\t" } [[grammar]] @@ -1909,6 +1969,7 @@ roots = ["v.mod"] language-servers = [ "vlang-language-server" ] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = "\t" } [[grammar]] @@ -1920,6 +1981,7 @@ name = "verilog" scope = "source.verilog" file-types = ["v", "vh", "sv", "svh"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "svlangserver" ] indent = { tab-width = 2, unit = " " } injection-regex = "verilog" @@ -1956,6 +2018,7 @@ scope = "source.openscad" injection-regex = "openscad" file-types = ["scad"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "openscad-lsp" ] indent = { tab-width = 2, unit = "\t" } @@ -2020,6 +2083,7 @@ injection-regex = "idr" file-types = ["idr"] shebangs = [] comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } indent = { tab-width = 2, unit = " " } language-servers = [ "idris2-lsp" ] @@ -2055,6 +2119,7 @@ scope = "source.dot" injection-regex = "dot" file-types = ["dot"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } language-servers = [ "dot-language-server" ] @@ -2084,6 +2149,7 @@ scope = "source.slint" injection-regex = "slint" file-types = ["slint"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } language-servers = [ "slint-lsp" ] @@ -2133,6 +2199,7 @@ scope = "source.pascal" injection-regex = "pascal" file-types = ["pas", "pp", "inc", "lpr", "lfm"] comment-token = "//" +block-comment-tokens = { start = "{", end = "}" } indent = { tab-width = 2, unit = " " } language-servers = [ "pasls" ] @@ -2145,7 +2212,7 @@ name = "sml" scope = "source.sml" injection-regex = "sml" file-types = ["sml"] -comment-token = "(*" +block-comment-tokens = { start = "(*", end = "*)" } [[grammar]] name = "sml" @@ -2157,6 +2224,7 @@ scope = "source.jsonnet" file-types = ["libsonnet", "jsonnet"] roots = ["jsonnetfile.json"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "jsonnet-language-server" ] @@ -2169,6 +2237,7 @@ name = "astro" scope = "source.astro" injection-regex = "astro" file-types = ["astro"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } 
[[grammar]] @@ -2192,6 +2261,7 @@ source = { git = "https://github.com/vito/tree-sitter-bass", rev = "501133e260d7 name = "wat" scope = "source.wat" comment-token = ";;" +block-comment-tokens = { start = "(;", end = ";)" } file-types = ["wat"] [[grammar]] @@ -2202,6 +2272,7 @@ source = { git = "https://github.com/wasm-lsp/tree-sitter-wasm", rev = "2ca28a9f name = "wast" scope = "source.wast" comment-token = ";;" +block-comment-tokens = { start = "(;", end = ";)" } file-types = ["wast"] [[grammar]] @@ -2213,6 +2284,7 @@ name = "d" scope = "source.d" file-types = [ "d", "dd" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } injection-regex = "d" indent = { tab-width = 4, unit = " "} language-servers = [ "serve-d" ] @@ -2239,6 +2311,7 @@ name = "kdl" scope = "source.kdl" file-types = ["kdl"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } injection-regex = "kdl" [[grammar]] @@ -2306,6 +2379,7 @@ file-types = [ "xoml", "musicxml" ] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } [language.auto-pairs] @@ -2345,6 +2419,7 @@ scope = "source.wit" injection-regex = "wit" file-types = ["wit"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } [language.auto-pairs] @@ -2409,6 +2484,7 @@ scope = "source.bicep" file-types = ["bicep"] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " "} language-servers = [ "bicep-langserver" ] @@ -2421,6 +2497,8 @@ name = "qml" scope = "source.qml" file-types = ["qml"] language-servers = [ "qmlls" ] +comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } grammar = "qmljs" @@ -2460,6 +2538,7 @@ injection-regex = "pony" roots = ["corral.json", "lock.json"] indent = { tab-width = 2, unit = " " } comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } [[grammar]] name = "ponylang" @@ -2471,6 +2550,7 @@ scope = "source.dhall" injection-regex = "dhall" file-types = ["dhall"] comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } indent = { tab-width = 2, unit = " " } language-servers = [ "dhall-lsp-server" ] formatter = { command = "dhall" , args = ["format"] } @@ -2494,6 +2574,7 @@ scope = "source.msbuild" injection-regex = "msbuild" file-types = ["proj", "vbproj", "csproj", "fsproj", "targets", "props"] indent = { tab-width = 2, unit = " " } +block-comment-tokens = { start = "" } grammar = "xml" [language.auto-pairs] @@ -2540,7 +2621,7 @@ scope = "source.tal" injection-regex = "tal" file-types = ["tal"] auto-format = false -comment-token = "(" +block-comment-tokens = { start = "(", end = ")" } [[grammar]] name = "uxntal" @@ -2674,6 +2755,7 @@ injection-regex = "nim" file-types = ["nim", "nims", "nimble"] shebangs = [] comment-token = "#" +block-comment-tokens = { start = "#[", end = "]#" } indent = { tab-width = 2, unit = " " } language-servers = [ "nimlangserver" ] @@ -2712,6 +2794,7 @@ source = { git = "https://github.com/pfeiferj/tree-sitter-hurl", rev = "264c4206 [[language]] name = "markdoc" scope = "text.markdoc" +block-comment-tokens = { start = "" } file-types = ["mdoc"] language-servers = [ "markdoc-ls" ] @@ -2763,6 +2846,7 @@ scope = "source.blueprint" injection-regex = "blueprint" file-types = ["blp"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "blueprint-compiler" ] indent = { tab-width = 4, unit = " 
" } @@ -2815,6 +2899,7 @@ name = "webc" scope = "text.html.webc" injection-regex = "webc" file-types = ["webc"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } grammar = "html"