From fc1e9a6ff936f9ceb7bb85c810130f49b42300c9 Mon Sep 17 00:00:00 2001 From: Diego Pontoriero <74719+diegs@users.noreply.github.com> Date: Thu, 4 May 2023 18:02:19 -0700 Subject: [PATCH 001/571] languages: add build.gradle.kts to java and scala roots (#6970) Gradle scripts written in kotlin use a .kts extension: https://docs.gradle.org/current/userguide/kotlin_dsl.html#sec:scripts --- languages.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/languages.toml b/languages.toml index 81ba471e3c40..108e91a1c23c 100644 --- a/languages.toml +++ b/languages.toml @@ -731,7 +731,7 @@ name = "java" scope = "source.java" injection-regex = "java" file-types = ["java"] -roots = ["pom.xml", "build.gradle"] +roots = ["pom.xml", "build.gradle", "build.gradle.kts"] language-server = { command = "jdtls" } indent = { tab-width = 4, unit = " " } @@ -1159,7 +1159,7 @@ source = { git = "https://github.com/UserNobody14/tree-sitter-dart", rev = "2d7f [[language]] name = "scala" scope = "source.scala" -roots = ["build.sbt", "build.sc", "build.gradle", "pom.xml", ".scala-build"] +roots = ["build.sbt", "build.sc", "build.gradle", "build.gradle.kts", "pom.xml", ".scala-build"] file-types = ["scala", "sbt", "sc"] comment-token = "//" indent = { tab-width = 2, unit = " " } From 5ae30f1993180b2c043d13f83e16f8647f7ff129 Mon Sep 17 00:00:00 2001 From: Dave Powers Date: Fri, 5 May 2023 10:43:56 -0400 Subject: [PATCH 002/571] Fix keymap select / extend mode anchor link (#6974) --- book/src/keymap.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/book/src/keymap.md b/book/src/keymap.md index 648dcfa968bc..f530bf6c5d42 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -15,7 +15,7 @@ - [Popup](#popup) - [Unimpaired](#unimpaired) - [Insert mode](#insert-mode) -- [Select / extend mode](#select-extend-mode) +- [Select / extend mode](#select--extend-mode) - [Picker](#picker) - [Prompt](#prompt) From 3a9e77934bfd1332b9f971b365883777e3f44b8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrius=20Puk=C5=A1ta?= <54238857+yjhn@users.noreply.github.com> Date: Mon, 8 May 2023 17:11:23 +0000 Subject: [PATCH 003/571] Treat .sty and .cls files as latex (#6986) --- languages.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/languages.toml b/languages.toml index 108e91a1c23c..ea992394b0cc 100644 --- a/languages.toml +++ b/languages.toml @@ -654,7 +654,7 @@ source = { git = "https://github.com/gbprod/tree-sitter-twig", rev = "807b293fec name = "latex" scope = "source.tex" injection-regex = "tex" -file-types = ["tex"] +file-types = ["tex", "sty", "cls"] roots = [] comment-token = "%" language-server = { command = "texlab" } From 3fb0562e7b6eab72bba5b445703d392edd8eff3b Mon Sep 17 00:00:00 2001 From: Chirikumbrah <78883260+Chirikumbrah@users.noreply.github.com> Date: Mon, 8 May 2023 20:12:15 +0300 Subject: [PATCH 004/571] Update dracula menu colors (#6987) --- runtime/themes/dracula.toml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/runtime/themes/dracula.toml b/runtime/themes/dracula.toml index eb46d6d848ac..1ec5b4fe2765 100644 --- a/runtime/themes/dracula.toml +++ b/runtime/themes/dracula.toml @@ -73,11 +73,11 @@ "ui.highlight.frameline" = { fg = "background", bg = "red" } "ui.linenr" = { fg = "comment" } "ui.linenr.selected" = { fg = "foreground" } -"ui.menu" = { fg = "background", bg = "purple" } -"ui.menu.selected" = { fg = "background", bg = "green", modifiers = ["dim"] } -"ui.menu.scroll" = { fg = "background", bg = "purple" } 
+"ui.menu" = { fg = "foreground", bg = "current_line" } +"ui.menu.selected" = { fg = "current_line", bg = "purple", modifiers = ["dim"] } +"ui.menu.scroll" = { fg = "foreground", bg = "current_line" } "ui.popup" = { fg = "foreground", bg = "black" } -"ui.selection.primary" = { bg = "selection_primary" } +"ui.selection.primary" = { bg = "current_line" } "ui.selection" = { bg = "selection" } "ui.statusline" = { fg = "foreground", bg = "darker" } "ui.statusline.inactive" = { fg = "comment", bg = "darker" } @@ -87,8 +87,8 @@ "ui.text" = { fg = "foreground" } "ui.text.focus" = { fg = "cyan" } "ui.window" = { fg = "foreground" } -"ui.virtual.whitespace" = { fg = "subtle" } -"ui.virtual.wrap" = { fg = "subtle" } +"ui.virtual.whitespace" = { fg = "current_line" } +"ui.virtual.wrap" = { fg = "current_line" } "ui.virtual.ruler" = { bg = "black" } "ui.virtual.inlay-hint" = { fg = "cyan" } "ui.virtual.inlay-hint.parameter" = { fg = "cyan", modifiers = ["italic", "dim"] } @@ -122,13 +122,12 @@ darker = "#222430" black = "#191A21" grey = "#666771" comment = "#6272A4" -selection_primary = "#44475a" +current_line = "#44475a" selection = "#363848" -subtle = "#424450" red = "#ff5555" orange = "#ffb86c" yellow = "#f1fa8c" green = "#50fa7b" purple = "#BD93F9" cyan = "#8be9fd" -pink = "#ff79c6" +pink = "#ff79c6" From 301d45b34b181ecd4f2cb916b572f13665fcdf8a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 May 2023 09:29:43 +0900 Subject: [PATCH 005/571] build(deps): bump termini from 0.1.4 to 1.0.0 (#7001) Bumps [termini](https://github.com/pascalkuthe/termini) from 0.1.4 to 1.0.0. - [Release notes](https://github.com/pascalkuthe/termini/releases) - [Commits](https://github.com/pascalkuthe/termini/compare/v0.1.4...v1.0.0) --- updated-dependencies: - dependency-name: termini dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 38 +++----------------------------------- helix-tui/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 369daed784a1..a074695feb08 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -289,27 +289,6 @@ dependencies = [ "syn 2.0.15", ] -[[package]] -name = "dirs-next" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" -dependencies = [ - "cfg-if", - "dirs-sys-next", -] - -[[package]] -name = "dirs-sys-next" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" -dependencies = [ - "libc", - "redox_users", - "winapi", -] - [[package]] name = "dunce" version = "1.0.4" @@ -1744,17 +1723,6 @@ dependencies = [ "bitflags 1.3.2", ] -[[package]] -name = "redox_users" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" -dependencies = [ - "getrandom", - "redox_syscall 0.2.16", - "thiserror", -] - [[package]] name = "regex" version = "1.8.1" @@ -2049,11 +2017,11 @@ dependencies = [ [[package]] name = "termini" -version = "0.1.4" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c0f7ecb9c2a380d2686a747e4fc574043712326e8d39fbd220ab3bd29768a12" +checksum = "2ad441d87dd98bc5eeb31cf2fb7e4839968763006b478efb38668a3bf9da0d59" dependencies = [ - "dirs-next", + "home", ] [[package]] diff --git a/helix-tui/Cargo.toml b/helix-tui/Cargo.toml index 7356692984f6..6cd031f89c46 100644 --- a/helix-tui/Cargo.toml +++ b/helix-tui/Cargo.toml @@ -20,7 +20,7 @@ bitflags = "2.2" cassowary = "0.3" unicode-segmentation = "1.10" crossterm = { version = "0.26", optional = true } -termini = "0.1" +termini = "1.0" serde = { version = "1", "optional" = true, features = ["derive"]} once_cell = "1.17" log = "~0.4" From 9cca80bd535976642f34c711895dd1d9ee94d8aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 May 2023 09:30:11 +0900 Subject: [PATCH 006/571] build(deps): bump libc from 0.2.142 to 0.2.144 (#7000) Bumps [libc](https://github.com/rust-lang/libc) from 0.2.142 to 0.2.144. - [Release notes](https://github.com/rust-lang/libc/releases) - [Commits](https://github.com/rust-lang/libc/compare/0.2.142...0.2.144) --- updated-dependencies: - dependency-name: libc dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- helix-term/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a074695feb08..8cda8054f382 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1445,9 +1445,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.142" +version = "0.2.144" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a987beff54b60ffa6d51982e1aa1146bc42f19bd26be28b0586f252fccf5317" +checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" [[package]] name = "libloading" diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index 7fb6b890a0da..f7496087a68e 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -68,7 +68,7 @@ grep-searcher = "0.1.11" [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] } -libc = "0.2.142" +libc = "0.2.144" [build-dependencies] helix-loader = { version = "0.6", path = "../helix-loader" } From 7089bc20f06ceb517bf8af59d7d4620390d903ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 May 2023 09:30:35 +0900 Subject: [PATCH 007/571] build(deps): bump serde from 1.0.160 to 1.0.162 (#7002) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.160 to 1.0.162. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.160...1.0.162) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8cda8054f382..fecc6124ebe8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1805,18 +1805,18 @@ checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" [[package]] name = "serde" -version = "1.0.160" +version = "1.0.162" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" +checksum = "71b2f6e1ab5c2b98c05f0f35b236b22e8df7ead6ffbf51d7808da7f8817e7ab6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.160" +version = "1.0.162" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" +checksum = "a2a0814352fd64b58489904a44ea8d90cb1a91dcb6b4f5ebabc32c8318e93cb6" dependencies = [ "proc-macro2", "quote", From 69332ae3b2fe712019d4caa181bec9946611d7a6 Mon Sep 17 00:00:00 2001 From: David Else <12832280+David-Else@users.noreply.github.com> Date: Tue, 9 May 2023 13:38:36 +0000 Subject: [PATCH 008/571] Add Flathub as third party repository (#6994) --- book/src/install.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/book/src/install.md b/book/src/install.md index 169e6e0b6996..2f770b1d60c2 100644 --- a/book/src/install.md +++ b/book/src/install.md @@ -8,6 +8,7 @@ - [Fedora/RHEL](#fedorarhel) - [Arch Linux community](#arch-linux-community) - [NixOS](#nixos) + - [Flatpak](#flatpak) - [AppImage](#appimage) - [macOS](#macos) - [Homebrew Core](#homebrew-core) @@ -18,6 +19,9 @@ - [MSYS2](#msys2) - [Building from source](#building-from-source) - [Configuring Helix's runtime files](#configuring-helixs-runtime-files) + - [Linux and macOS](#linux-and-macos) + - [Windows](#windows) + - [Multiple runtime directories](#multiple-runtime-directories) - [Validating the installation](#validating-the-installation) - [Configure the desktop shortcut](#configure-the-desktop-shortcut) @@ -88,6 +92,15 @@ If you are using a version of Nix without flakes enabled, [install Cachix CLI](https://docs.cachix.org/installation) and use `cachix use helix` to configure Nix to use cached outputs when possible. +### Flatpak + +Helix is available on [Flathub](https://flathub.org/en-GB/apps/com.helix_editor.Helix): + +```sh +flatpak install flathub com.helix_editor.Helix +flatpak run com.helix_editor.Helix +``` + ### AppImage Install Helix using the Linux [AppImage](https://appimage.org/) format. 
From 8424f387b582ed793663f95ee570e709dff20dd6 Mon Sep 17 00:00:00 2001 From: Ollie Charles Date: Tue, 9 May 2023 20:31:22 +0100 Subject: [PATCH 009/571] Remove `tree-sitter-cabal` (#6996) --- book/src/generated/lang-support.md | 2 +- languages.toml | 4 ---- runtime/queries/cabal/highlights.scm | 15 --------------- 3 files changed, 1 insertion(+), 20 deletions(-) delete mode 100644 runtime/queries/cabal/highlights.scm diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 0dbbd69d2530..d5cf8f2db5f9 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -9,7 +9,7 @@ | bicep | ✓ | | | `bicep-langserver` | | c | ✓ | ✓ | ✓ | `clangd` | | c-sharp | ✓ | ✓ | | `OmniSharp` | -| cabal | ✓ | | | | +| cabal | | | | | | cairo | ✓ | | | | | capnp | ✓ | | ✓ | | | clojure | ✓ | | | `clojure-lsp` | diff --git a/languages.toml b/languages.toml index ea992394b0cc..175037be0878 100644 --- a/languages.toml +++ b/languages.toml @@ -2467,10 +2467,6 @@ roots = ["cabal.project", "Setup.hs"] indent = { tab-width = 2, unit = " " } comment-token = "--" -[[grammar]] -name = "cabal" -source = { git = "https://gitlab.com/magus/tree-sitter-cabal", rev = "7d5fa6887ae05a0b06d046f1e754c197c8ad869b" } - [[language]] name = "hurl" scope = "source.hurl" diff --git a/runtime/queries/cabal/highlights.scm b/runtime/queries/cabal/highlights.scm deleted file mode 100644 index d6b9f4627054..000000000000 --- a/runtime/queries/cabal/highlights.scm +++ /dev/null @@ -1,15 +0,0 @@ -(comment) @comment - -[ - "cabal-version" - (field_name) -] @type - -(section_name) @type - -[ - (section_type) - "if" - "elseif" - "else" -] @keyword From 92c328c088ae818338237d7f11644ba079c54648 Mon Sep 17 00:00:00 2001 From: Kitsu Date: Tue, 9 May 2023 17:21:29 -0300 Subject: [PATCH 010/571] Add wbc and wbc! commands (#6947) --- book/src/generated/typable-cmd.md | 4 ++- helix-term/src/commands/typed.rs | 46 ++++++++++++++++++++++++++++++- 2 files changed, 48 insertions(+), 2 deletions(-) diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index ae28a9ba02c2..a775c655589c 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -12,7 +12,9 @@ | `:buffer-next`, `:bn`, `:bnext` | Goto next buffer. | | `:buffer-previous`, `:bp`, `:bprev` | Goto previous buffer. | | `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) | -| `:write!`, `:w!` | Force write changes to disk creating necessary subdirectories. Accepts an optional path (:write some/path.txt) | +| `:write!`, `:w!` | Force write changes to disk creating necessary subdirectories. Accepts an optional path (:write! some/path.txt) | +| `:write-buffer-close`, `:wbc` | Write changes to disk and closes the buffer. Accepts an optional path (:write-buffer-close some/path.txt) | +| `:write-buffer-close!`, `:wbc!` | Force write changes to disk creating necessary subdirectories and closes the buffer. Accepts an optional path (:write-buffer-close! some/path.txt) | | `:new`, `:n` | Create a new scratch buffer. | | `:format`, `:fmt` | Format the file using the LSP formatter. | | `:indent-style` | Set the indentation style for editing. ('t' for tabs or 1-8 for number of spaces.) 
| diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index fe92798baae6..16ee83d76568 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -382,6 +382,36 @@ fn force_write( write_impl(cx, args.first(), true) } +fn write_buffer_close( + cx: &mut compositor::Context, + args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + write_impl(cx, args.first(), false)?; + + let document_ids = buffer_gather_paths_impl(cx.editor, args); + buffer_close_by_ids_impl(cx, &document_ids, false) +} + +fn force_write_buffer_close( + cx: &mut compositor::Context, + args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + write_impl(cx, args.first(), true)?; + + let document_ids = buffer_gather_paths_impl(cx.editor, args); + buffer_close_by_ids_impl(cx, &document_ids, false) +} + fn new_file( cx: &mut compositor::Context, _args: &[Cow], @@ -2287,10 +2317,24 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "write!", aliases: &["w!"], - doc: "Force write changes to disk creating necessary subdirectories. Accepts an optional path (:write some/path.txt)", + doc: "Force write changes to disk creating necessary subdirectories. Accepts an optional path (:write! some/path.txt)", fun: force_write, signature: CommandSignature::positional(&[completers::filename]), }, + TypableCommand { + name: "write-buffer-close", + aliases: &["wbc"], + doc: "Write changes to disk and closes the buffer. Accepts an optional path (:write-buffer-close some/path.txt)", + fun: write_buffer_close, + signature: CommandSignature::positional(&[completers::filename]), + }, + TypableCommand { + name: "write-buffer-close!", + aliases: &["wbc!"], + doc: "Force write changes to disk creating necessary subdirectories and closes the buffer. Accepts an optional path (:write-buffer-close! some/path.txt)", + fun: force_write_buffer_close, + signature: CommandSignature::positional(&[completers::filename]), + }, TypableCommand { name: "new", aliases: &["n"], From 87da58578fdcacfe1d2de3d96bf47621c47c15c0 Mon Sep 17 00:00:00 2001 From: John Kelly Date: Wed, 10 May 2023 20:35:43 +0100 Subject: [PATCH 011/571] Add better error message for file save failure due to parent not existing (#7014) Fixes #5229 --- helix-view/src/document.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 11b9ef0e620e..27f69d508c6e 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -808,7 +808,7 @@ impl Document { if force { std::fs::DirBuilder::new().recursive(true).create(parent)?; } else { - bail!("can't save file, parent directory does not exist"); + bail!("can't save file, parent directory does not exist (use :w! 
to create it)"); } } } From 1e5997dc98ecd82b09ccee9fbe8d5350fd333fad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bla=C5=BE=20Hrastnik?= Date: Thu, 11 May 2023 11:30:46 +0900 Subject: [PATCH 012/571] nix: Update flake dependencies --- flake.lock | 85 +++++++++++++++++++++++++++++++++--------------------- 1 file changed, 52 insertions(+), 33 deletions(-) diff --git a/flake.lock b/flake.lock index d33c404ef39a..8046f35907c8 100644 --- a/flake.lock +++ b/flake.lock @@ -3,15 +3,16 @@ "crane": { "flake": false, "locked": { - "lastModified": 1670900067, - "narHash": "sha256-VXVa+KBfukhmWizaiGiHRVX/fuk66P8dgSFfkVN4/MY=", + "lastModified": 1681175776, + "narHash": "sha256-7SsUy9114fryHAZ8p1L6G6YSu7jjz55FddEwa2U8XZc=", "owner": "ipetkov", "repo": "crane", - "rev": "59b31b41a589c0a65e4a1f86b0e5eac68081468b", + "rev": "445a3d222947632b5593112bb817850e8a9cf737", "type": "github" }, "original": { "owner": "ipetkov", + "ref": "v0.12.1", "repo": "crane", "type": "github" } @@ -62,11 +63,11 @@ ] }, "locked": { - "lastModified": 1680258209, - "narHash": "sha256-lEo50RXI/17/a9aCIun8Hz62ZJ5JM5RGeTgclIP+Lgc=", + "lastModified": 1683212002, + "narHash": "sha256-EObtqyQsv9v+inieRY5cvyCMCUI5zuU5qu+1axlJCPM=", "owner": "nix-community", "repo": "dream2nix", - "rev": "6f512b5a220fdb26bd3c659f7b55e4f052ec8b35", + "rev": "fbfb09d2ab5ff761d822dd40b4a1def81651d096", "type": "github" }, "original": { @@ -94,11 +95,11 @@ ] }, "locked": { - "lastModified": 1680172861, - "narHash": "sha256-QMyI338xRxaHFDlCXdLCtgelGQX2PdlagZALky4ZXJ8=", + "lastModified": 1680698112, + "narHash": "sha256-FgnobN/DvCjEsc0UAZEAdPLkL4IZi2ZMnu2K2bUaElc=", "owner": "davhau", "repo": "drv-parts", - "rev": "ced8a52f62b0a94244713df2225c05c85b416110", + "rev": "e8c2ec1157dc1edb002989669a0dbd935f430201", "type": "github" }, "original": { @@ -124,12 +125,15 @@ } }, "flake-utils": { + "inputs": { + "systems": "systems" + }, "locked": { - "lastModified": 1659877975, - "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", + "lastModified": 1681202837, + "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=", "owner": "numtide", "repo": "flake-utils", - "rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", + "rev": "cfacdce06f30d2b68473a46042957675eebb3401", "type": "github" }, "original": { @@ -141,11 +145,11 @@ "mk-naked-shell": { "flake": false, "locked": { - "lastModified": 1676572903, - "narHash": "sha256-oQoDHHUTxNVSURfkFcYLuAK+btjs30T4rbEUtCUyKy8=", + "lastModified": 1681286841, + "narHash": "sha256-3XlJrwlR0nBiREnuogoa5i1b4+w/XPe0z8bbrJASw0g=", "owner": "yusdacra", "repo": "mk-naked-shell", - "rev": "aeca9f8aa592f5e8f71f407d081cb26fd30c5a57", + "rev": "7612f828dd6f22b7fb332cc69440e839d7ffe6bd", "type": "github" }, "original": { @@ -167,11 +171,11 @@ ] }, "locked": { - "lastModified": 1680329418, - "narHash": "sha256-+KN0eQLSZvL1J0kDO8/fxv0UCHTyZCADLmpIfeeiSGo=", + "lastModified": 1683699050, + "narHash": "sha256-UWKQpzVcSshB+sU2O8CCHjOSTQrNS7Kk9V3+UeBsJpg=", "owner": "yusdacra", "repo": "nix-cargo-integration", - "rev": "98c1d2ff5155f0fee5d290f6b982cb990839d540", + "rev": "ed27173cd1b223f598343ea3c15aacb1d140feac", "type": "github" }, "original": { @@ -182,11 +186,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1680213900, - "narHash": "sha256-cIDr5WZIj3EkKyCgj/6j3HBH4Jj1W296z7HTcWj1aMA=", + "lastModified": 1683408522, + "narHash": "sha256-9kcPh6Uxo17a3kK3XCHhcWiV1Yu1kYj22RHiymUhMkU=", "owner": "nixos", "repo": "nixpkgs", - "rev": "e3652e0735fbec227f342712f180f4f21f0594f2", + "rev": "897876e4c484f1e8f92009fd11b7d988a121a4e7", 
"type": "github" }, "original": { @@ -199,11 +203,11 @@ "nixpkgs-lib": { "locked": { "dir": "lib", - "lastModified": 1678375444, - "narHash": "sha256-XIgHfGvjFvZQ8hrkfocanCDxMefc/77rXeHvYdzBMc8=", + "lastModified": 1682879489, + "narHash": "sha256-sASwo8gBt7JDnOOstnps90K1wxmVfyhsTPPNTGBPjjg=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "130fa0baaa2b93ec45523fdcde942f6844ee9f6e", + "rev": "da45bf6ec7bbcc5d1e14d3795c025199f28e0de0", "type": "github" }, "original": { @@ -237,11 +241,11 @@ ] }, "locked": { - "lastModified": 1679737941, - "narHash": "sha256-srSD9CwsVPnUMsIZ7Kt/UegkKUEBcTyU1Rev7mO45S0=", + "lastModified": 1683560683, + "narHash": "sha256-XAygPMN5Xnk/W2c1aW0jyEa6lfMDZWlQgiNtmHXytPc=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "3502ee99d6dade045bdeaf7b0cd8ec703484c25c", + "rev": "006c75898cf814ef9497252b022e91c946ba8e17", "type": "github" }, "original": { @@ -255,11 +259,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1679737941, - "narHash": "sha256-srSD9CwsVPnUMsIZ7Kt/UegkKUEBcTyU1Rev7mO45S0=", + "lastModified": 1683560683, + "narHash": "sha256-XAygPMN5Xnk/W2c1aW0jyEa6lfMDZWlQgiNtmHXytPc=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "3502ee99d6dade045bdeaf7b0cd8ec703484c25c", + "rev": "006c75898cf814ef9497252b022e91c946ba8e17", "type": "github" }, "original": { @@ -284,11 +288,11 @@ ] }, "locked": { - "lastModified": 1680315536, - "narHash": "sha256-0AsBuKssJMbcRcw4HJQwJsUHhZxR5+gaf6xPQayhR44=", + "lastModified": 1683771545, + "narHash": "sha256-we0GYcKTo2jRQGmUGrzQ9VH0OYAUsJMCsK8UkF+vZUA=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "5c8c151bdd639074a0051325c16df1a64ee23497", + "rev": "c57e210faf68e5d5386f18f1b17ad8365d25e4ed", "type": "github" }, "original": { @@ -296,6 +300,21 @@ "repo": "rust-overlay", "type": "github" } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } } }, "root": "root", From 3b8c15618f51889ffd2f2f4be32f8404c1517956 Mon Sep 17 00:00:00 2001 From: ZJPzjp Date: Thu, 11 May 2023 13:44:52 +0800 Subject: [PATCH 013/571] Fix warnings from clippy (#7013) * Fix warnings from clippy * revert MAIN_SEPARATOR_STR --- helix-core/src/surround.rs | 7 +------ helix-core/src/syntax.rs | 9 ++++++--- helix-loader/src/lib.rs | 36 ++++++++++++++++++------------------ helix-term/src/ui/editor.rs | 2 +- helix-tui/src/buffer.rs | 2 +- helix-view/src/clipboard.rs | 2 +- helix-view/src/tree.rs | 3 +-- 7 files changed, 29 insertions(+), 32 deletions(-) diff --git a/helix-core/src/surround.rs b/helix-core/src/surround.rs index f430aee8a152..b96cce5a0664 100644 --- a/helix-core/src/surround.rs +++ b/helix-core/src/surround.rs @@ -397,15 +397,10 @@ mod test { let selections: SmallVec<[Range; 1]> = spec .match_indices('^') - .into_iter() .map(|(i, _)| Range::point(i)) .collect(); - let expectations: Vec = spec - .match_indices('_') - .into_iter() - .map(|(i, _)| i) - .collect(); + let expectations: Vec = spec.match_indices('_').map(|(i, _)| i).collect(); (rope, Selection::new(selections, 0), expectations) } diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index 6514b40f5966..005e985de021 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -187,9 +187,12 @@ impl<'de> Deserialize<'de> for FileType { M: 
serde::de::MapAccess<'de>, { match map.next_entry::()? { - Some((key, suffix)) if key == "suffix" => Ok(FileType::Suffix( - suffix.replace('/', &std::path::MAIN_SEPARATOR.to_string()), - )), + Some((key, suffix)) if key == "suffix" => Ok(FileType::Suffix({ + // FIXME: use `suffix.replace('/', std::path::MAIN_SEPARATOR_STR)` + // if MSRV is updated to 1.68 + let mut seperator = [0; 1]; + suffix.replace('/', std::path::MAIN_SEPARATOR.encode_utf8(&mut seperator)) + })), Some((key, _value)) => Err(serde::de::Error::custom(format!( "unknown key in `file-types` list: {}", key diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs index 6c7169758df0..ad4ad899db67 100644 --- a/helix-loader/src/lib.rs +++ b/helix-loader/src/lib.rs @@ -209,6 +209,24 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi } } +/// Finds the current workspace folder. +/// Used as a ceiling dir for LSP root resolution, the filepicker and potentially as a future filewatching root +/// +/// This function starts searching the FS upward from the CWD +/// and returns the first directory that contains either `.git` or `.helix`. +/// If no workspace was found returns (CWD, true). +/// Otherwise (workspace, false) is returned +pub fn find_workspace() -> (PathBuf, bool) { + let current_dir = std::env::current_dir().expect("unable to determine current directory"); + for ancestor in current_dir.ancestors() { + if ancestor.join(".git").exists() || ancestor.join(".helix").exists() { + return (ancestor.to_owned(), false); + } + } + + (current_dir, true) +} + #[cfg(test)] mod merge_toml_tests { use std::str; @@ -281,21 +299,3 @@ mod merge_toml_tests { ) } } - -/// Finds the current workspace folder. -/// Used as a ceiling dir for LSP root resolution, the filepicker and potentially as a future filewatching root -/// -/// This function starts searching the FS upward from the CWD -/// and returns the first directory that contains either `.git` or `.helix`. -/// If no workspace was found returns (CWD, true). -/// Otherwise (workspace, false) is returned -pub fn find_workspace() -> (PathBuf, bool) { - let current_dir = std::env::current_dir().expect("unable to determine current directory"); - for ancestor in current_dir.ancestors() { - if ancestor.join(".git").exists() || ancestor.join(".helix").exists() { - return (ancestor.to_owned(), false); - } - } - - (current_dir, true) -} diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index fd8e8fb21b47..1ecbc8cc1c89 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -103,7 +103,7 @@ impl EditorView { // Set DAP highlights, if needed. 
if let Some(frame) = editor.current_stack_frame() { - let dap_line = frame.line.saturating_sub(1) as usize; + let dap_line = frame.line.saturating_sub(1); let style = theme.get("ui.highlight.frameline"); let line_decoration = move |renderer: &mut TextRenderer, pos: LinePos| { if pos.doc_line != dap_line { diff --git a/helix-tui/src/buffer.rs b/helix-tui/src/buffer.rs index 8e0b0adf9f43..93e9fcf9b2c6 100644 --- a/helix-tui/src/buffer.rs +++ b/helix-tui/src/buffer.rs @@ -442,7 +442,7 @@ impl Buffer { let mut x_offset = x as usize; let max_offset = min(self.area.right(), width.saturating_add(x)); let mut start_index = self.index_of(x, y); - let mut index = self.index_of(max_offset as u16, y); + let mut index = self.index_of(max_offset, y); let content_width = spans.width(); let truncated = content_width > width as usize; diff --git a/helix-view/src/clipboard.rs b/helix-view/src/clipboard.rs index d43d632a9be7..d639902f7e99 100644 --- a/helix-view/src/clipboard.rs +++ b/helix-view/src/clipboard.rs @@ -68,7 +68,7 @@ macro_rules! command_provider { #[cfg(windows)] pub fn get_clipboard_provider() -> Box { - Box::new(provider::WindowsProvider::default()) + Box::::default() } #[cfg(target_os = "macos")] diff --git a/helix-view/src/tree.rs b/helix-view/src/tree.rs index e8afd2045685..4c9eba0fd125 100644 --- a/helix-view/src/tree.rs +++ b/helix-view/src/tree.rs @@ -728,12 +728,11 @@ mod test { tree.focus = l0; let view = View::new(DocumentId::default(), GutterConfig::default()); tree.split(view, Layout::Vertical); - let l2 = tree.focus; // Tree in test // | L0 | L2 | | // | L1 | R0 | - tree.focus = l2; + let l2 = tree.focus; assert_eq!(Some(l0), tree.find_split_in_direction(l2, Direction::Left)); assert_eq!(Some(l1), tree.find_split_in_direction(l2, Direction::Down)); assert_eq!(Some(r0), tree.find_split_in_direction(l2, Direction::Right)); From 5938e2c0dc9465229758d1c2997a4857555d34cc Mon Sep 17 00:00:00 2001 From: Vitalii Solodilov Date: Fri, 12 May 2023 04:29:06 +0300 Subject: [PATCH 014/571] fix: update upstream tree-sitter-dockerfile (#6895) * fix: update upstream tree-sitter-dockerfile Fixes: #6797 * fix: review * fix: review --- languages.toml | 2 +- runtime/queries/dockerfile/highlights.scm | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/languages.toml b/languages.toml index 175037be0878..8d86afe4ef81 100644 --- a/languages.toml +++ b/languages.toml @@ -1182,7 +1182,7 @@ language-server = { command = "docker-langserver", args = ["--stdio"] } [[grammar]] name = "dockerfile" -source = { git = "https://github.com/camdencheek/tree-sitter-dockerfile", rev = "7af32bc04a66ab196f5b9f92ac471f29372ae2ce" } +source = { git = "https://github.com/camdencheek/tree-sitter-dockerfile", rev = "8ee3a0f7587b2bd8c45c8cb7d28bd414604aec62" } [[language]] name = "git-commit" diff --git a/runtime/queries/dockerfile/highlights.scm b/runtime/queries/dockerfile/highlights.scm index 5a945fb9bf3d..cdbf807c2046 100644 --- a/runtime/queries/dockerfile/highlights.scm +++ b/runtime/queries/dockerfile/highlights.scm @@ -48,4 +48,7 @@ ((variable) @constant (#match? 
@constant "^[A-Z][A-Z_0-9]*$")) - +[ + (param) + (mount_param) +] @constant From 06d7f5d100fdcc99f4cdfda879898b2d488d8d7c Mon Sep 17 00:00:00 2001 From: lefp <70862148+lefp@users.noreply.github.com> Date: Fri, 12 May 2023 10:53:07 -0400 Subject: [PATCH 015/571] Add comment injections for Odin (#7027) --- runtime/queries/odin/injections.scm | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 runtime/queries/odin/injections.scm diff --git a/runtime/queries/odin/injections.scm b/runtime/queries/odin/injections.scm new file mode 100644 index 000000000000..2f0e58eb6431 --- /dev/null +++ b/runtime/queries/odin/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) From d5fe08ddb8bf1408a1a92d39a63b51cd16d83255 Mon Sep 17 00:00:00 2001 From: A-Walrus <58790821+A-Walrus@users.noreply.github.com> Date: Sat, 13 May 2023 18:44:25 +0300 Subject: [PATCH 016/571] Replace DAP vars popup, instead of adding new (#7034) --- helix-term/src/commands.rs | 7 +++++++ helix-term/src/commands/dap.rs | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 882a8a1dcc4a..7d86bc0b33bf 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -97,6 +97,13 @@ impl<'a> Context<'a> { })); } + /// Call `replace_or_push` on the Compositor + pub fn replace_or_push_layer(&mut self, id: &'static str, component: T) { + self.callback = Some(Box::new(move |compositor: &mut Compositor, _| { + compositor.replace_or_push(id, component); + })); + } + #[inline] pub fn on_next_key( &mut self, diff --git a/helix-term/src/commands/dap.rs b/helix-term/src/commands/dap.rs index 8efdc9cfa516..84794bedfce9 100644 --- a/helix-term/src/commands/dap.rs +++ b/helix-term/src/commands/dap.rs @@ -580,7 +580,7 @@ pub fn dap_variables(cx: &mut Context) { let contents = Text::from(tui::text::Text::from(variables)); let popup = Popup::new("dap-variables", contents); - cx.push_layer(Box::new(popup)); + cx.replace_or_push_layer("dap-variables", popup); } pub fn dap_terminate(cx: &mut Context) { From b50032ee8540ca68dfc96a71923a27145b305405 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 May 2023 10:26:05 +0900 Subject: [PATCH 017/571] build(deps): bump tokio from 1.28.0 to 1.28.1 (#7057) Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.28.0 to 1.28.1. - [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.28.0...tokio-1.28.1) --- updated-dependencies: - dependency-name: tokio dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fecc6124ebe8..c86f4010bfbc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2120,9 +2120,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.28.0" +version = "1.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f" +checksum = "0aa32867d44e6f2ce3385e89dceb990188b8bb0fb25b0cf576647a6f98ac5105" dependencies = [ "autocfg", "bytes", From 4b9dba8217083f501bdbd2de1c5632e67145a78a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 May 2023 10:26:16 +0900 Subject: [PATCH 018/571] build(deps): bump serde from 1.0.162 to 1.0.163 (#7056) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.162 to 1.0.163. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.162...v1.0.163) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c86f4010bfbc..e84dd6ba7373 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1805,18 +1805,18 @@ checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" [[package]] name = "serde" -version = "1.0.162" +version = "1.0.163" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71b2f6e1ab5c2b98c05f0f35b236b22e8df7ead6ffbf51d7808da7f8817e7ab6" +checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.162" +version = "1.0.163" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2a0814352fd64b58489904a44ea8d90cb1a91dcb6b4f5ebabc32c8318e93cb6" +checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" dependencies = [ "proc-macro2", "quote", From e9efcebdb0437fde28aec95f2a790cc4cf73f0b4 Mon Sep 17 00:00:00 2001 From: gibbz00 Date: Tue, 16 May 2023 06:01:27 +0200 Subject: [PATCH 019/571] languages.toml: recognize `geojson` files. (#7054) --- languages.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/languages.toml b/languages.toml index 8d86afe4ef81..019c9d668c3a 100644 --- a/languages.toml +++ b/languages.toml @@ -160,7 +160,7 @@ indent = { tab-width = 2, unit = " " } name = "json" scope = "source.json" injection-regex = "json" -file-types = ["json", "jsonc", "arb", "ipynb"] +file-types = ["json", "jsonc", "arb", "ipynb", "geojson"] roots = [] language-server = { command = "vscode-json-language-server", args = ["--stdio"] } auto-format = true From 6f135e58a3e2897dc6566c26e3cea952f09380aa Mon Sep 17 00:00:00 2001 From: gibbz00 Date: Tue, 16 May 2023 06:02:22 +0200 Subject: [PATCH 020/571] languages.toml: recognize `gml` files. 
(#7055) --- languages.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/languages.toml b/languages.toml index 019c9d668c3a..6516cc37d773 100644 --- a/languages.toml +++ b/languages.toml @@ -2085,7 +2085,7 @@ source = { git = "https://github.com/Unoqwy/tree-sitter-kdl", rev = "e1cd292c6d1 name = "xml" scope = "source.xml" injection-regex = "xml" -file-types = ["xml", "mobileconfig", "plist", "xib", "storyboard", "svg", "xsd"] +file-types = ["xml", "mobileconfig", "plist", "xib", "storyboard", "svg", "xsd", "gml"] indent = { tab-width = 2, unit = " " } roots = [] From 91da0dc172dde1a972be7708188a134db70562c3 Mon Sep 17 00:00:00 2001 From: Jan Scheer Date: Thu, 18 May 2023 03:16:25 +0200 Subject: [PATCH 021/571] Update nightfox theme (#7061) * theme: nightfox - fix subselection highlighting This fixes an issue with subselect highlighting on the same line as reported here: https://github.com/helix-editor/helix/discussions/5158 * theme: nightfox - update bufferline colors This uses `ui.bufferline` to make it easier to distinguish between (in-)active tabs/buffers. --- runtime/themes/nightfox.toml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/runtime/themes/nightfox.toml b/runtime/themes/nightfox.toml index fad56d189570..069b32ab4d33 100644 --- a/runtime/themes/nightfox.toml +++ b/runtime/themes/nightfox.toml @@ -19,8 +19,8 @@ "ui.cursor.primary" = { bg = "fg1", fg = "bg1" } # The primary cursor when there are multiple (shift-c). "ui.cursor.match" = { fg = "yellow", modifiers = ["bold"] } # The matching parentheses of that under the cursor. -"ui.selection" = { bg = "bg3" } # All currently selected text. -"ui.selection.primary" = { bg = "bg4" } # The primary selection when there are multiple. +"ui.selection" = { bg = "bg4" } # All currently selected text. +"ui.selection.primary" = { bg = "sel1" } # The primary selection when there are multiple. "ui.cursorline.primary" = { bg = "bg3" } # The line of the primary cursor (if cursorline is enabled) # "ui.cursorline.secondary" = { } # The lines of any other cursors (if cursorline is enabled) # "ui.cursorcolumn.primary" = { } # The column of the primary cursor (if cursorcolumn is enabled) @@ -41,6 +41,10 @@ "ui.statusline.insert" = { bg = "green", fg = "bg0", modifiers = ["bold"] } # Statusline mode during insert mode (only if editor.color-modes is enabled) "ui.statusline.select" = { bg = "magenta", fg = "bg0", modifiers = ["bold"] } # Statusline mode during select mode (only if editor.color-modes is enabled) +"ui.bufferline" = { fg = "fg3", bg = "bg2", underline = { style = "line" } } +"ui.bufferline.active" = { fg = "fg2", bg = "bg4" } +"ui.bufferline.background" = { bg = "bg0" } + "ui.help" = { bg = "sel0", fg = "fg1" } # Description box for commands. "ui.menu" = { bg = "sel0", fg = "fg1" } # Code and command completion menus. 
From 5406e9f629313221c8ae97583393dfd6221d3dfc Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Tue, 4 Apr 2023 23:34:47 +0200 Subject: [PATCH 022/571] correctly handle completion rerequest --- helix-term/src/commands.rs | 9 +++-- helix-term/src/ui/completion.rs | 46 ++++++++++++++++---------- helix-term/src/ui/editor.rs | 57 +++++++++++++++++++++++--------- helix-view/src/document.rs | 58 +++++++++++++++++++++++---------- helix-view/src/editor.rs | 13 +++++--- 5 files changed, 127 insertions(+), 56 deletions(-) diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 7d86bc0b33bf..8d70cd9e5437 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -33,7 +33,7 @@ use helix_core::{ use helix_view::{ clipboard::ClipboardType, document::{FormatterError, Mode, SCRATCH_BUFFER_NAME}, - editor::{Action, Motion}, + editor::{Action, CompleteAction, Motion}, info::Info, input::KeyEvent, keyboard::KeyCode, @@ -4254,7 +4254,12 @@ pub fn completion(cx: &mut Context) { iter.reverse(); let offset = iter.take_while(|ch| chars::char_is_word(*ch)).count(); let start_offset = cursor.saturating_sub(offset); - let savepoint = doc.savepoint(view); + let savepoint = if let Some(CompleteAction::Selected { savepoint }) = &cx.editor.last_completion + { + savepoint.clone() + } else { + doc.savepoint(view) + }; let trigger_doc = doc.id(); let trigger_view = view.id; diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index bc216509f3ca..dd21be03f7c3 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -209,14 +209,27 @@ impl Completion { let (view, doc) = current!(editor); - // if more text was entered, remove it - doc.restore(view, &savepoint); - match event { - PromptEvent::Abort => { - editor.last_completion = None; - } + PromptEvent::Abort => {} PromptEvent::Update => { + // Update creates "ghost" transactiosn which are not send to the + // lsp server to avoid messing up rerequesting completions. Once a + // completion has been selected (with) tab it's always accepted whenever anything + // is typed. The only way to avoid that is to explicitly abort the completion + // with esc/c-c. This will remove the "ghost" transaction. + // + // The ghost transaction is modeled with a transaction that is not send to the LS. + // (apply_temporary) and a savepoint. It's extremly important this savepoint is restored + // (also without sending the transaction to the LS) *before any further transaction is applied*. + // Otherwise incremental sync breaks (since the state of the LS doesn't match the state the transaction + // is applied to). 
+ if editor.last_completion.is_none() { + editor.last_completion = Some(CompleteAction::Selected { + savepoint: doc.savepoint(view), + }) + } + // if more text was entered, remove it + doc.restore(view, &savepoint, false); // always present here let item = item.unwrap(); @@ -229,19 +242,20 @@ impl Completion { true, replace_mode, ); - - // initialize a savepoint - doc.apply(&transaction, view.id); - - editor.last_completion = Some(CompleteAction { - trigger_offset, - changes: completion_changes(&transaction, trigger_offset), - }); + doc.apply_temporary(&transaction, view.id); } PromptEvent::Validate => { + if let Some(CompleteAction::Selected { savepoint }) = + editor.last_completion.take() + { + doc.restore(view, &savepoint, false); + } // always present here let item = item.unwrap(); + + // if more text was entered, remove it + doc.restore(view, &savepoint, true); let transaction = item_to_transaction( doc, view.id, @@ -251,10 +265,9 @@ impl Completion { false, replace_mode, ); - doc.apply(&transaction, view.id); - editor.last_completion = Some(CompleteAction { + editor.last_completion = Some(CompleteAction::Applied { trigger_offset, changes: completion_changes(&transaction, trigger_offset), }); @@ -270,7 +283,6 @@ impl Completion { } else { Self::resolve_completion_item(doc, item.clone()) }; - if let Some(additional_edits) = resolved_item .as_ref() .and_then(|item| item.additional_text_edits.as_ref()) diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index 1ecbc8cc1c89..f0989fa887ab 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -19,7 +19,7 @@ use helix_core::{ syntax::{self, HighlightEvent}, text_annotations::TextAnnotations, unicode::width::UnicodeWidthStr, - visual_offset_from_block, Position, Range, Selection, Transaction, + visual_offset_from_block, Change, Position, Range, Selection, Transaction, }; use helix_view::{ document::{Mode, SavePoint, SCRATCH_BUFFER_NAME}, @@ -48,7 +48,10 @@ pub struct EditorView { #[derive(Debug, Clone)] pub enum InsertEvent { Key(KeyEvent), - CompletionApply(CompleteAction), + CompletionApply { + trigger_offset: usize, + changes: Vec, + }, TriggerCompletion, RequestCompletion, } @@ -813,7 +816,7 @@ impl EditorView { } (Mode::Insert, Mode::Normal) => { // if exiting insert mode, remove completion - self.completion = None; + self.clear_completion(cxt.editor); cxt.editor.completion_request_handle = None; // TODO: Use an on_mode_change hook to remove signature help @@ -891,22 +894,25 @@ impl EditorView { for key in self.last_insert.1.clone() { match key { InsertEvent::Key(key) => self.insert_mode(cxt, key), - InsertEvent::CompletionApply(compl) => { + InsertEvent::CompletionApply { + trigger_offset, + changes, + } => { let (view, doc) = current!(cxt.editor); if let Some(last_savepoint) = last_savepoint.as_deref() { - doc.restore(view, last_savepoint); + doc.restore(view, last_savepoint, true); } let text = doc.text().slice(..); let cursor = doc.selection(view.id).primary().cursor(text); let shift_position = - |pos: usize| -> usize { pos + cursor - compl.trigger_offset }; + |pos: usize| -> usize { pos + cursor - trigger_offset }; let tx = Transaction::change( doc.text(), - compl.changes.iter().cloned().map(|(start, end, t)| { + changes.iter().cloned().map(|(start, end, t)| { (shift_position(start), shift_position(end), t) }), ); @@ -979,6 +985,21 @@ impl EditorView { pub fn clear_completion(&mut self, editor: &mut Editor) { self.completion = None; + if let Some(last_completion) = 
editor.last_completion.take() { + match last_completion { + CompleteAction::Applied { + trigger_offset, + changes, + } => self.last_insert.1.push(InsertEvent::CompletionApply { + trigger_offset, + changes, + }), + CompleteAction::Selected { savepoint } => { + let (view, doc) = current!(editor); + doc.restore(view, &savepoint, false); + } + } + } // Clear any savepoints editor.clear_idle_timer(); // don't retrigger @@ -1265,12 +1286,22 @@ impl Component for EditorView { jobs: cx.jobs, scroll: None, }; - completion.handle_event(event, &mut cx) - }; - if let EventResult::Consumed(callback) = res { - consumed = true; + if let EventResult::Consumed(callback) = + completion.handle_event(event, &mut cx) + { + consumed = true; + Some(callback) + } else if let EventResult::Consumed(callback) = + completion.handle_event(&Event::Key(key!(Enter)), &mut cx) + { + Some(callback) + } else { + None + } + }; + if let Some(callback) = res { if callback.is_some() { // assume close_fn self.clear_completion(cx.editor); @@ -1286,10 +1317,6 @@ impl Component for EditorView { // if completion didn't take the event, we pass it onto commands if !consumed { - if let Some(compl) = cx.editor.last_completion.take() { - self.last_insert.1.push(InsertEvent::CompletionApply(compl)); - } - self.insert_mode(&mut cx, key); // record last_insert key diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 27f69d508c6e..4948befddb96 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1034,7 +1034,12 @@ impl Document { } /// Apply a [`Transaction`] to the [`Document`] to change its text. - fn apply_impl(&mut self, transaction: &Transaction, view_id: ViewId) -> bool { + fn apply_impl( + &mut self, + transaction: &Transaction, + view_id: ViewId, + emit_lsp_notification: bool, + ) -> bool { use helix_core::Assoc; let old_doc = self.text().clone(); @@ -1130,25 +1135,31 @@ impl Document { apply_inlay_hint_changes(padding_after_inlay_hints); } - // emit lsp notification - if let Some(language_server) = self.language_server() { - let notify = language_server.text_document_did_change( - self.versioned_identifier(), - &old_doc, - self.text(), - changes, - ); + if emit_lsp_notification { + // emit lsp notification + if let Some(language_server) = self.language_server() { + let notify = language_server.text_document_did_change( + self.versioned_identifier(), + &old_doc, + self.text(), + changes, + ); - if let Some(notify) = notify { - tokio::spawn(notify); + if let Some(notify) = notify { + tokio::spawn(notify); + } } } } success } - /// Apply a [`Transaction`] to the [`Document`] to change its text. - pub fn apply(&mut self, transaction: &Transaction, view_id: ViewId) -> bool { + fn apply_inner( + &mut self, + transaction: &Transaction, + view_id: ViewId, + emit_lsp_notification: bool, + ) -> bool { // store the state just before any changes are made. This allows us to undo to the // state just before a transaction was applied. if self.changes.is_empty() && !transaction.changes().is_empty() { @@ -1158,7 +1169,7 @@ impl Document { }); } - let success = self.apply_impl(transaction, view_id); + let success = self.apply_impl(transaction, view_id, emit_lsp_notification); if !transaction.changes().is_empty() { // Compose this transaction with the previous one @@ -1168,12 +1179,23 @@ impl Document { } success } + /// Apply a [`Transaction`] to the [`Document`] to change its text. 
+ pub fn apply(&mut self, transaction: &Transaction, view_id: ViewId) -> bool { + self.apply_inner(transaction, view_id, true) + } + + /// Apply a [`Transaction`] to the [`Document`] to change its text. + /// without notifying the language servers. This is useful for temporary transactions + /// that must not influence the server. + pub fn apply_temporary(&mut self, transaction: &Transaction, view_id: ViewId) -> bool { + self.apply_inner(transaction, view_id, false) + } fn undo_redo_impl(&mut self, view: &mut View, undo: bool) -> bool { let mut history = self.history.take(); let txn = if undo { history.undo() } else { history.redo() }; let success = if let Some(txn) = txn { - self.apply_impl(txn, view.id) + self.apply_impl(txn, view.id, true) } else { false }; @@ -1213,7 +1235,7 @@ impl Document { savepoint } - pub fn restore(&mut self, view: &mut View, savepoint: &SavePoint) { + pub fn restore(&mut self, view: &mut View, savepoint: &SavePoint, emit_lsp_notification: bool) { assert_eq!( savepoint.view, view.id, "Savepoint must not be used with a different view!" @@ -1228,7 +1250,7 @@ impl Document { let savepoint_ref = self.savepoints.remove(savepoint_idx); let mut revert = savepoint.revert.lock(); - self.apply(&revert, view.id); + self.apply_inner(&revert, view.id, emit_lsp_notification); *revert = Transaction::new(self.text()).with_selection(self.selection(view.id).clone()); self.savepoints.push(savepoint_ref) } @@ -1241,7 +1263,7 @@ impl Document { }; let mut success = false; for txn in txns { - if self.apply_impl(&txn, view.id) { + if self.apply_impl(&txn, view.id, true) { success = true; } } diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 8e4dab414c99..43227c5f138d 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -1,7 +1,7 @@ use crate::{ align_view, clipboard::{get_clipboard_provider, ClipboardProvider}, - document::{DocumentSavedEventFuture, DocumentSavedEventResult, Mode}, + document::{DocumentSavedEventFuture, DocumentSavedEventResult, Mode, SavePoint}, graphics::{CursorKind, Rect}, info::Info, input::KeyEvent, @@ -906,9 +906,14 @@ enum ThemeAction { } #[derive(Debug, Clone)] -pub struct CompleteAction { - pub trigger_offset: usize, - pub changes: Vec, +pub enum CompleteAction { + Applied { + trigger_offset: usize, + changes: Vec, + }, + /// A savepoint of the currently active completion. 
The completion + /// MUST be restored before sending any event to the LSP + Selected { savepoint: Arc }, } #[derive(Debug, Copy, Clone)] From 30ff7f8db29d5fc84191268b94ec971e7ca5da5d Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Tue, 4 Apr 2023 23:35:31 +0200 Subject: [PATCH 023/571] resolve completions before applying transactions --- helix-term/src/ui/completion.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index dd21be03f7c3..7ec4f5775725 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -253,6 +253,17 @@ impl Completion { // always present here let item = item.unwrap(); + // apply additional edits, mostly used to auto import unqualified types + let resolved_item = if item + .additional_text_edits + .as_ref() + .map(|edits| !edits.is_empty()) + .unwrap_or(false) + { + None + } else { + Self::resolve_completion_item(doc, item.clone()) + }; // if more text was entered, remove it doc.restore(view, &savepoint, true); @@ -272,17 +283,6 @@ impl Completion { changes: completion_changes(&transaction, trigger_offset), }); - // apply additional edits, mostly used to auto import unqualified types - let resolved_item = if item - .additional_text_edits - .as_ref() - .map(|edits| !edits.is_empty()) - .unwrap_or(false) - { - None - } else { - Self::resolve_completion_item(doc, item.clone()) - }; if let Some(additional_edits) = resolved_item .as_ref() .and_then(|item| item.additional_text_edits.as_ref()) From 9c558fc4705934097b5f20b100462fc1fa4f50e1 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Wed, 5 Apr 2023 01:38:17 +0200 Subject: [PATCH 024/571] ensure correct trigger/start completion offset When re requesting a completion that already has a selected item we reuse that selections savepoint. 
However, the selection has likely changed since that savepoint which requires us to use the selection from that savepoint --- helix-term/src/commands.rs | 19 ++++++++++--------- helix-view/src/document.rs | 14 ++++++++++++++ 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 8d70cd9e5437..80774ceae4cb 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -4210,16 +4210,23 @@ pub fn completion(cx: &mut Context) { let (view, doc) = current!(cx.editor); + let savepoint = if let Some(CompleteAction::Selected { savepoint }) = &cx.editor.last_completion + { + savepoint.clone() + } else { + doc.savepoint(view) + }; + let language_server = match doc.language_server() { Some(language_server) => language_server, None => return, }; let offset_encoding = language_server.offset_encoding(); - let text = doc.text().slice(..); - let cursor = doc.selection(view.id).primary().cursor(text); + let text = savepoint.text.clone(); + let cursor = savepoint.cursor(); - let pos = pos_to_lsp_pos(doc.text(), cursor, offset_encoding); + let pos = pos_to_lsp_pos(&text, cursor, offset_encoding); let future = match language_server.completion(doc.identifier(), pos, None) { Some(future) => future, @@ -4254,12 +4261,6 @@ pub fn completion(cx: &mut Context) { iter.reverse(); let offset = iter.take_while(|ch| chars::char_is_word(*ch)).count(); let start_offset = cursor.saturating_sub(offset); - let savepoint = if let Some(CompleteAction::Selected { savepoint }) = &cx.editor.last_completion - { - savepoint.clone() - } else { - doc.savepoint(view) - }; let trigger_doc = doc.id(); let trigger_view = view.id; diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 4948befddb96..e467efd39f8c 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -114,6 +114,19 @@ pub struct SavePoint { /// The view this savepoint is associated with pub view: ViewId, revert: Mutex, + pub text: Rope, +} + +impl SavePoint { + pub fn cursor(&self) -> usize { + // we always create transactions with selections + self.revert + .lock() + .selection() + .unwrap() + .primary() + .cursor(self.text.slice(..)) + } } pub struct Document { @@ -1230,6 +1243,7 @@ impl Document { let savepoint = Arc::new(SavePoint { view: view.id, revert: Mutex::new(revert), + text: self.text.clone(), }); self.savepoints.push(Arc::downgrade(&savepoint)); savepoint From bcb8c3d34d87d97d01d37a2dda839197f2a375d8 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Wed, 5 Apr 2023 20:24:49 +0200 Subject: [PATCH 025/571] deduplicate savepoints --- helix-view/src/document.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index e467efd39f8c..4d8e61e1db44 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1240,6 +1240,22 @@ impl Document { /// the state it had when this function was called. 
pub fn savepoint(&mut self, view: &View) -> Arc { let revert = Transaction::new(self.text()).with_selection(self.selection(view.id).clone()); + // check if there is already an existing (identical) savepoint around + if let Some(savepoint) = self + .savepoints + .iter() + .rev() + .find_map(|savepoint| savepoint.upgrade()) + { + let transaction = savepoint.revert.lock(); + if savepoint.view == view.id + && transaction.changes().is_empty() + && transaction.selection() == revert.selection() + { + drop(transaction); + return savepoint; + } + } let savepoint = Arc::new(SavePoint { view: view.id, revert: Mutex::new(revert), From 28b730381cc93d06e5a090b8734505edbebb1af4 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Wed, 5 Apr 2023 20:25:28 +0200 Subject: [PATCH 026/571] only resolve completion items once --- helix-term/src/ui/completion.rs | 97 +++++++++++++++++++-------------- 1 file changed, 55 insertions(+), 42 deletions(-) diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 7ec4f5775725..c736f043fd89 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -16,7 +16,6 @@ use crate::commands; use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent}; use helix_lsp::{lsp, util}; -use lsp::CompletionItem; impl menu::Item for CompletionItem { type Data = (); @@ -26,28 +25,29 @@ impl menu::Item for CompletionItem { #[inline] fn filter_text(&self, _data: &Self::Data) -> Cow { - self.filter_text + self.item + .filter_text .as_ref() - .unwrap_or(&self.label) + .unwrap_or(&self.item.label) .as_str() .into() } fn format(&self, _data: &Self::Data) -> menu::Row { - let deprecated = self.deprecated.unwrap_or_default() - || self.tags.as_ref().map_or(false, |tags| { + let deprecated = self.item.deprecated.unwrap_or_default() + || self.item.tags.as_ref().map_or(false, |tags| { tags.contains(&lsp::CompletionItemTag::DEPRECATED) }); menu::Row::new(vec![ menu::Cell::from(Span::styled( - self.label.as_str(), + self.item.label.as_str(), if deprecated { Style::default().add_modifier(Modifier::CROSSED_OUT) } else { Style::default() }, )), - menu::Cell::from(match self.kind { + menu::Cell::from(match self.item.kind { Some(lsp::CompletionItemKind::TEXT) => "text", Some(lsp::CompletionItemKind::METHOD) => "method", Some(lsp::CompletionItemKind::FUNCTION) => "function", @@ -88,6 +88,12 @@ impl menu::Item for CompletionItem { } } +#[derive(Debug, PartialEq, Default, Clone)] +struct CompletionItem { + item: lsp::CompletionItem, + resolved: bool, +} + /// Wraps a Menu. 
pub struct Completion { popup: Popup>, @@ -103,7 +109,7 @@ impl Completion { pub fn new( editor: &Editor, savepoint: Arc, - mut items: Vec, + mut items: Vec, offset_encoding: helix_lsp::OffsetEncoding, start_offset: usize, trigger_offset: usize, @@ -111,6 +117,13 @@ impl Completion { let replace_mode = editor.config().completion_replace; // Sort completion items according to their preselect status (given by the LSP server) items.sort_by_key(|item| !item.preselect.unwrap_or(false)); + let items = items + .into_iter() + .map(|item| CompletionItem { + item, + resolved: false, + }) + .collect(); // Then create the menu let menu = Menu::new(items, (), move |editor: &mut Editor, item, event| { @@ -128,7 +141,7 @@ impl Completion { let text = doc.text().slice(..); let primary_cursor = selection.primary().cursor(text); - let (edit_offset, new_text) = if let Some(edit) = &item.text_edit { + let (edit_offset, new_text) = if let Some(edit) = &item.item.text_edit { let edit = match edit { lsp::CompletionTextEdit::Edit(edit) => edit.clone(), lsp::CompletionTextEdit::InsertAndReplace(item) => { @@ -151,9 +164,10 @@ impl Completion { (Some((start_offset, end_offset)), edit.new_text) } else { let new_text = item + .item .insert_text .clone() - .unwrap_or_else(|| item.label.clone()); + .unwrap_or_else(|| item.item.label.clone()); // check that we are still at the correct savepoint // we can still generate a transaction regardless but if the // document changed (and not just the selection) then we will @@ -162,9 +176,9 @@ impl Completion { (None, new_text) }; - if matches!(item.kind, Some(lsp::CompletionItemKind::SNIPPET)) + if matches!(item.item.kind, Some(lsp::CompletionItemKind::SNIPPET)) || matches!( - item.insert_text_format, + item.item.insert_text_format, Some(lsp::InsertTextFormat::SNIPPET) ) { @@ -251,26 +265,22 @@ impl Completion { doc.restore(view, &savepoint, false); } // always present here - let item = item.unwrap(); - - // apply additional edits, mostly used to auto import unqualified types - let resolved_item = if item - .additional_text_edits - .as_ref() - .map(|edits| !edits.is_empty()) - .unwrap_or(false) - { - None - } else { - Self::resolve_completion_item(doc, item.clone()) + let mut item = item.unwrap().clone(); + + // resolve item if not yet resolved + if !item.resolved { + if let Some(resolved) = + Self::resolve_completion_item(doc, item.item.clone()) + { + item.item = resolved; + } }; - // if more text was entered, remove it doc.restore(view, &savepoint, true); let transaction = item_to_transaction( doc, view.id, - item, + &item, offset_encoding, trigger_offset, false, @@ -283,15 +293,12 @@ impl Completion { changes: completion_changes(&transaction, trigger_offset), }); - if let Some(additional_edits) = resolved_item - .as_ref() - .and_then(|item| item.additional_text_edits.as_ref()) - .or(item.additional_text_edits.as_ref()) - { + // TOOD: add additional _edits to completion_changes? 
+ if let Some(additional_edits) = item.item.additional_text_edits { if !additional_edits.is_empty() { let transaction = util::generate_transaction_from_edits( doc.text(), - additional_edits.clone(), + additional_edits, offset_encoding, // TODO: should probably transcode in Client ); doc.apply(&transaction, view.id); @@ -318,7 +325,7 @@ impl Completion { fn resolve_completion_item( doc: &Document, completion_item: lsp::CompletionItem, - ) -> Option { + ) -> Option { let language_server = doc.language_server()?; let future = language_server.resolve_completion_item(completion_item)?; @@ -371,7 +378,7 @@ impl Completion { self.popup.contents().is_empty() } - fn replace_item(&mut self, old_item: lsp::CompletionItem, new_item: lsp::CompletionItem) { + fn replace_item(&mut self, old_item: CompletionItem, new_item: CompletionItem) { self.popup.contents_mut().replace_option(old_item, new_item); } @@ -387,7 +394,7 @@ impl Completion { // > The returned completion item should have the documentation property filled in. // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion let current_item = match self.popup.contents().selection() { - Some(item) if item.documentation.is_none() => item.clone(), + Some(item) if !item.resolved => item.clone(), _ => return false, }; @@ -397,7 +404,7 @@ impl Completion { }; // This method should not block the compositor so we handle the response asynchronously. - let future = match language_server.resolve_completion_item(current_item.clone()) { + let future = match language_server.resolve_completion_item(current_item.item.clone()) { Some(future) => future, None => return false, }; @@ -415,7 +422,13 @@ impl Completion { .unwrap() .completion { - completion.replace_item(current_item, resolved_item); + completion.replace_item( + current_item, + CompletionItem { + item: resolved_item, + resolved: true, + }, + ); } }, ); @@ -469,25 +482,25 @@ impl Component for Completion { Markdown::new(md, cx.editor.syn_loader.clone()) }; - let mut markdown_doc = match &option.documentation { + let mut markdown_doc = match &option.item.documentation { Some(lsp::Documentation::String(contents)) | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind: lsp::MarkupKind::PlainText, value: contents, })) => { // TODO: convert to wrapped text - markdowned(language, option.detail.as_deref(), Some(contents)) + markdowned(language, option.item.detail.as_deref(), Some(contents)) } Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind: lsp::MarkupKind::Markdown, value: contents, })) => { // TODO: set language based on doc scope - markdowned(language, option.detail.as_deref(), Some(contents)) + markdowned(language, option.item.detail.as_deref(), Some(contents)) } - None if option.detail.is_some() => { + None if option.item.detail.is_some() => { // TODO: set language based on doc scope - markdowned(language, option.detail.as_deref(), None) + markdowned(language, option.item.detail.as_deref(), None) } None => return, }; From 6842fd4c36c5855023b007a36b0b5c8bd965d8de Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Fri, 7 Apr 2023 14:50:47 +0200 Subject: [PATCH 027/571] clarify comments about completion savepoints Co-authored-by: Michael Davis --- helix-term/src/ui/completion.rs | 14 +++++++------- helix-view/src/document.rs | 2 +- helix-view/src/editor.rs | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index c736f043fd89..c5c405801f28 
100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -226,14 +226,14 @@ impl Completion { match event { PromptEvent::Abort => {} PromptEvent::Update => { - // Update creates "ghost" transactiosn which are not send to the - // lsp server to avoid messing up rerequesting completions. Once a - // completion has been selected (with) tab it's always accepted whenever anything + // Update creates "ghost" transactions which are not sent to the + // lsp server to avoid messing up re-requesting completions. Once a + // completion has been selected (with tab, c-n or c-p) it's always accepted whenever anything // is typed. The only way to avoid that is to explicitly abort the completion - // with esc/c-c. This will remove the "ghost" transaction. + // with c-c. This will remove the "ghost" transaction. // - // The ghost transaction is modeled with a transaction that is not send to the LS. - // (apply_temporary) and a savepoint. It's extremly important this savepoint is restored + // The ghost transaction is modeled with a transaction that is not sent to the LS. + // (apply_temporary) and a savepoint. It's extremely important this savepoint is restored // (also without sending the transaction to the LS) *before any further transaction is applied*. // Otherwise incremental sync breaks (since the state of the LS doesn't match the state the transaction // is applied to). @@ -293,7 +293,7 @@ impl Completion { changes: completion_changes(&transaction, trigger_offset), }); - // TOOD: add additional _edits to completion_changes? + // TODO: add additional _edits to completion_changes? if let Some(additional_edits) = item.item.additional_text_edits { if !additional_edits.is_empty() { let transaction = util::generate_transaction_from_edits( diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 4d8e61e1db44..e29e52cc5c69 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1197,7 +1197,7 @@ impl Document { self.apply_inner(transaction, view_id, true) } - /// Apply a [`Transaction`] to the [`Document`] to change its text. + /// Apply a [`Transaction`] to the [`Document`] to change its text /// without notifying the language servers. This is useful for temporary transactions /// that must not influence the server. pub fn apply_temporary(&mut self, transaction: &Transaction, view_id: ViewId) -> bool { diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 43227c5f138d..9546d460bf83 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -911,7 +911,7 @@ pub enum CompleteAction { trigger_offset: usize, changes: Vec, }, - /// A savepoint of the currently active completion. The completion + /// A savepoint of the currently selected completion. The savepoint /// MUST be restored before sending any event to the LSP Selected { savepoint: Arc }, } From f8225ed9219f23cf04bd378ec43e1e1a1059a0ed Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Thu, 16 Feb 2023 22:15:06 +0100 Subject: [PATCH 028/571] fix panic when deleting overlapping ranges Some deletion operations (especially those that use indentation) can generate overlapping deletion ranges when using multiple cursors. To fix that problem a new `Transaction::delete` and `Transaction:delete_by_selection` function were added. These functions merge overlapping deletion ranges instead of generating an invalid transaction. This merging of changes is only possible for deletions and not for other changes and therefore require its own function. 
The function has been used in all commands that currently delete text by using `Transaction::change_by_selection`. --- helix-core/src/lib.rs | 2 +- helix-core/src/transaction.rs | 46 +++++++++++++++++++++++++++++++ helix-term/src/commands.rs | 37 ++++++++----------------- helix-term/tests/test/commands.rs | 44 +++++++++++++++++++++++++++++ 4 files changed, 102 insertions(+), 27 deletions(-) diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index b67e2c8a38e2..14abf0162079 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -67,4 +67,4 @@ pub use syntax::Syntax; pub use diagnostic::Diagnostic; pub use line_ending::{LineEnding, DEFAULT_LINE_ENDING}; -pub use transaction::{Assoc, Change, ChangeSet, Operation, Transaction}; +pub use transaction::{Assoc, Change, ChangeSet, Deletion, Operation, Transaction}; diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs index d8e581aae12f..06efe259f708 100644 --- a/helix-core/src/transaction.rs +++ b/helix-core/src/transaction.rs @@ -5,6 +5,7 @@ use std::borrow::Cow; /// (from, to, replacement) pub type Change = (usize, usize, Option); +pub type Deletion = (usize, usize); // TODO: pub(crate) #[derive(Debug, Clone, PartialEq, Eq)] @@ -534,6 +535,41 @@ impl Transaction { Self::from(changeset) } + /// Generate a transaction from a set of potentially overlapping deletions + /// by merging overlapping deletions together. + pub fn delete(doc: &Rope, deletions: I) -> Self + where + I: Iterator, + { + let len = doc.len_chars(); + + let (lower, upper) = deletions.size_hint(); + let size = upper.unwrap_or(lower); + let mut changeset = ChangeSet::with_capacity(2 * size + 1); // rough estimate + + let mut last = 0; + for (mut from, to) in deletions { + if last > to { + continue; + } + if last > from { + from = last + } + debug_assert!( + from <= to, + "Edit end must end before it starts (should {from} <= {to})" + ); + // Retain from last "to" to current "from" + changeset.retain(from - last); + changeset.delete(to - from); + last = to; + } + + changeset.retain(len - last); + + Self::from(changeset) + } + /// Generate a transaction with a change per selection range. pub fn change_by_selection(doc: &Rope, selection: &Selection, f: F) -> Self where @@ -580,6 +616,16 @@ impl Transaction { ) } + /// Generate a transaction with a deletion per selection range. + /// Compared to using `change_by_selection` directly these ranges may overlap. + /// In that case they are merged + pub fn delete_by_selection(doc: &Rope, selection: &Selection, f: F) -> Self + where + F: FnMut(&Range) -> Deletion, + { + Self::delete(doc, selection.iter().map(f)) + } + /// Insert text at each selection head. 
pub fn insert(doc: &Rope, selection: &Selection, text: Tendril) -> Self { Self::change_by_selection(doc, selection, |range| { diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 80774ceae4cb..964d87ff9c29 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -2315,9 +2315,8 @@ fn delete_selection_impl(cx: &mut Context, op: Operation) { }; // then delete - let transaction = Transaction::change_by_selection(doc.text(), selection, |range| { - (range.from(), range.to(), None) - }); + let transaction = + Transaction::delete_by_selection(doc.text(), selection, |range| (range.from(), range.to())); doc.apply(&transaction, view.id); match op { @@ -2333,9 +2332,8 @@ fn delete_selection_impl(cx: &mut Context, op: Operation) { #[inline] fn delete_selection_insert_mode(doc: &mut Document, view: &mut View, selection: &Selection) { - let transaction = Transaction::change_by_selection(doc.text(), selection, |range| { - (range.from(), range.to(), None) - }); + let transaction = + Transaction::delete_by_selection(doc.text(), selection, |range| (range.from(), range.to())); doc.apply(&transaction, view.id); } @@ -3422,10 +3420,10 @@ pub mod insert { let auto_pairs = doc.auto_pairs(cx.editor); let transaction = - Transaction::change_by_selection(doc.text(), doc.selection(view.id), |range| { + Transaction::delete_by_selection(doc.text(), doc.selection(view.id), |range| { let pos = range.cursor(text); if pos == 0 { - return (pos, pos, None); + return (pos, pos); } let line_start_pos = text.line_to_char(range.cursor_line(text)); // consider to delete by indent level if all characters before `pos` are indent units. @@ -3433,11 +3431,7 @@ pub mod insert { if !fragment.is_empty() && fragment.chars().all(|ch| ch == ' ' || ch == '\t') { if text.get_char(pos.saturating_sub(1)) == Some('\t') { // fast path, delete one char - ( - graphemes::nth_prev_grapheme_boundary(text, pos, 1), - pos, - None, - ) + (graphemes::nth_prev_grapheme_boundary(text, pos, 1), pos) } else { let width: usize = fragment .chars() @@ -3464,7 +3458,7 @@ pub mod insert { _ => break, } } - (start, pos, None) // delete! + (start, pos) // delete! 
} } else { match ( @@ -3482,17 +3476,12 @@ pub mod insert { ( graphemes::nth_prev_grapheme_boundary(text, pos, count), graphemes::nth_next_grapheme_boundary(text, pos, count), - None, ) } _ => // delete 1 char { - ( - graphemes::nth_prev_grapheme_boundary(text, pos, count), - pos, - None, - ) + (graphemes::nth_prev_grapheme_boundary(text, pos, count), pos) } } } @@ -3508,13 +3497,9 @@ pub mod insert { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let transaction = - Transaction::change_by_selection(doc.text(), doc.selection(view.id), |range| { + Transaction::delete_by_selection(doc.text(), doc.selection(view.id), |range| { let pos = range.cursor(text); - ( - pos, - graphemes::nth_next_grapheme_boundary(text, pos, count), - None, - ) + (pos, graphemes::nth_next_grapheme_boundary(text, pos, count)) }); doc.apply(&transaction, view.id); diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index 342a849be349..1efb204e6c7f 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -385,3 +385,47 @@ async fn test_character_info() -> anyhow::Result<()> { Ok(()) } + +#[tokio::test(flavor = "multi_thread")] +async fn test_delete_char_backward() -> anyhow::Result<()> { + // don't panic when deleting overlapping ranges + test(( + platform_line("#(x|)# #[x|]#").as_str(), + "c", + platform_line("#[\n|]#").as_str(), + )) + .await?; + test(( + platform_line("#( |)##( |)#a#( |)#axx#[x|]#a").as_str(), + "li", + platform_line("#(a|)##(|a)#xx#[|a]#").as_str(), + )) + .await?; + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_delete_word_backward() -> anyhow::Result<()> { + // don't panic when deleting overlapping ranges + test(( + platform_line("fo#[o|]#ba#(r|)#").as_str(), + "a", + platform_line("#[\n|]#").as_str(), + )) + .await?; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_delete_word_forward() -> anyhow::Result<()> { + // don't panic when deleting overlapping ranges + test(( + platform_line("fo#[o|]#b#(|ar)#").as_str(), + "i", + platform_line("fo#[\n|]#").as_str(), + )) + .await?; + Ok(()) +} + From 2c3ccc3e8b2487e9bcca271341aabc67811ebb46 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Thu, 16 Feb 2023 22:59:15 +0100 Subject: [PATCH 029/571] cleanup delete_by_selection_insert_mode function --- helix-term/src/commands.rs | 84 ++++++++++++++------------------------ 1 file changed, 31 insertions(+), 53 deletions(-) diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 964d87ff9c29..05b5b9b69e94 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -27,8 +27,8 @@ use helix_core::{ textobject, tree_sitter::Node, unicode::width::UnicodeWidthChar, - visual_offset_from_block, LineEnding, Position, Range, Rope, RopeGraphemes, RopeSlice, - Selection, SmallVec, Tendril, Transaction, + visual_offset_from_block, Deletion, LineEnding, Position, Range, Rope, RopeGraphemes, + RopeSlice, Selection, SmallVec, Tendril, Transaction, }; use helix_view::{ clipboard::ClipboardType, @@ -795,10 +795,7 @@ fn extend_to_line_start(cx: &mut Context) { } fn kill_to_line_start(cx: &mut Context) { - let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - - let selection = doc.selection(view.id).clone().transform(|range| { + delete_by_selection_insert_mode(cx, move |text, range| { let line = range.cursor_line(text); let first_char = text.line_to_char(line); let anchor = range.cursor(text); @@ -817,32 +814,23 @@ fn kill_to_line_start(cx: 
&mut Context) { // select until start of line first_char }; - Range::new(head, anchor) + (head, anchor) }); - delete_selection_insert_mode(doc, view, &selection); - - lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } fn kill_to_line_end(cx: &mut Context) { - let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - - let selection = doc.selection(view.id).clone().transform(|range| { + delete_by_selection_insert_mode(cx, |text, range| { let line = range.cursor_line(text); let line_end_pos = line_end_char_index(&text, line); let pos = range.cursor(text); - let mut new_range = range.put_cursor(text, line_end_pos, true); - // don't want to remove the line separator itself if the cursor doesn't reach the end of line. - if pos != line_end_pos { - new_range.head = line_end_pos; + // if the cursor is on the newline char delete that + if pos == line_end_pos { + (pos, text.line_to_char(line + 1)) + } else { + (pos, line_end_pos) } - new_range }); - delete_selection_insert_mode(doc, view, &selection); - - lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } fn goto_first_nonwhitespace(cx: &mut Context) { @@ -2331,10 +2319,18 @@ fn delete_selection_impl(cx: &mut Context, op: Operation) { } #[inline] -fn delete_selection_insert_mode(doc: &mut Document, view: &mut View, selection: &Selection) { +fn delete_by_selection_insert_mode( + cx: &mut Context, + mut f: impl FnMut(RopeSlice, &Range) -> Deletion, +) { + let (view, doc) = current!(cx.editor); + let text = doc.text().slice(..); let transaction = - Transaction::delete_by_selection(doc.text(), selection, |range| (range.from(), range.to())); + Transaction::delete_by_selection(doc.text(), doc.selection(view.id), |range| { + f(text, range) + }); doc.apply(&transaction, view.id); + lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } fn delete_selection(cx: &mut Context) { @@ -3494,46 +3490,28 @@ pub mod insert { pub fn delete_char_forward(cx: &mut Context) { let count = cx.count(); - let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - let transaction = - Transaction::delete_by_selection(doc.text(), doc.selection(view.id), |range| { - let pos = range.cursor(text); - (pos, graphemes::nth_next_grapheme_boundary(text, pos, count)) - }); - doc.apply(&transaction, view.id); - - lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); + delete_by_selection_insert_mode(cx, |text, range| { + let pos = range.cursor(text); + (pos, graphemes::nth_next_grapheme_boundary(text, pos, count)) + }) } pub fn delete_word_backward(cx: &mut Context) { let count = cx.count(); - let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - - let selection = doc.selection(view.id).clone().transform(|range| { - let anchor = movement::move_prev_word_start(text, range, count).from(); + delete_by_selection_insert_mode(cx, |text, range| { + let anchor = movement::move_prev_word_start(text, *range, count).from(); let next = Range::new(anchor, range.cursor(text)); - exclude_cursor(text, next, range) + let range = exclude_cursor(text, next, *range); + (range.from(), range.to()) }); - delete_selection_insert_mode(doc, view, &selection); - - lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } pub fn delete_word_forward(cx: &mut Context) { let count = cx.count(); - let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - - let selection = doc.selection(view.id).clone().transform(|range| { - let head = movement::move_next_word_end(text, range, count).to(); - 
Range::new(range.cursor(text), head) + delete_by_selection_insert_mode(cx, |text, range| { + let head = movement::move_next_word_end(text, *range, count).to(); + (range.cursor(text), head) }); - - delete_selection_insert_mode(doc, view, &selection); - - lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } } From 25d4ebe30d7920bc087f004075048f62f53726af Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Mon, 20 Feb 2023 16:31:26 +0100 Subject: [PATCH 030/571] don't move cursor while forward deleting in append mode Currently, when forward deleting (`delete_char_forward` bound to `del`, `delete_word_forward`, `kill_to_line_end`) the cursor is moved to the left in append mode (or generally when the cursor is at the end of the selection). For example in a document `|abc|def` (|indicates selection) if enter append mode the cursor is moved to `c` and the selection becomes: `|abcd|ef`. When deleting forward (`del`) `d` is deleted. The expectation would be that the selection doesn't shrink so that `del` again deletes `e` and then `f`. This would look as follows: `|abcd|ef` `|abce|f` `|abcf|` `|abc |` This is inline with how other editors like kakoune work. However, helix currently moves the selection backwards leading to the following behavior: `|abcd|ef` `|abc|ef` `|ab|ef` `ef` This means that `delete_char_forward` essentially acts like `delete_char_backward` after deleting the first character in append mode. To fix the problem the cursor must be moved to the right while deleting forward (first fix in this commit). Furthermore, when the EOF char is reached a newline char must be inserted (just like when entering appendmode) to prevent the cursor from moving to the right --- helix-core/src/transaction.rs | 5 ++ helix-term/src/commands.rs | 141 ++++++++++++++++++++---------- helix-term/tests/test/commands.rs | 23 +++++ 3 files changed, 124 insertions(+), 45 deletions(-) diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs index 06efe259f708..f4f94b540e69 100644 --- a/helix-core/src/transaction.rs +++ b/helix-core/src/transaction.rs @@ -570,6 +570,11 @@ impl Transaction { Self::from(changeset) } + pub fn insert_at_eof(mut self, text: Tendril) -> Transaction { + self.changes.insert(text); + self + } + /// Generate a transaction with a change per selection range. 
pub fn change_by_selection(doc: &Rope, selection: &Selection, f: F) -> Self where diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 05b5b9b69e94..5a844e35152e 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -795,42 +795,50 @@ fn extend_to_line_start(cx: &mut Context) { } fn kill_to_line_start(cx: &mut Context) { - delete_by_selection_insert_mode(cx, move |text, range| { - let line = range.cursor_line(text); - let first_char = text.line_to_char(line); - let anchor = range.cursor(text); - let head = if anchor == first_char && line != 0 { - // select until previous line - line_end_char_index(&text, line - 1) - } else if let Some(pos) = find_first_non_whitespace_char(text.line(line)) { - if first_char + pos < anchor { - // select until first non-blank in line if cursor is after it - first_char + pos + delete_by_selection_insert_mode( + cx, + move |text, range| { + let line = range.cursor_line(text); + let first_char = text.line_to_char(line); + let anchor = range.cursor(text); + let head = if anchor == first_char && line != 0 { + // select until previous line + line_end_char_index(&text, line - 1) + } else if let Some(pos) = find_first_non_whitespace_char(text.line(line)) { + if first_char + pos < anchor { + // select until first non-blank in line if cursor is after it + first_char + pos + } else { + // select until start of line + first_char + } } else { // select until start of line first_char - } - } else { - // select until start of line - first_char - }; - (head, anchor) - }); + }; + (head, anchor) + }, + Direction::Backward, + ); } fn kill_to_line_end(cx: &mut Context) { - delete_by_selection_insert_mode(cx, |text, range| { - let line = range.cursor_line(text); - let line_end_pos = line_end_char_index(&text, line); - let pos = range.cursor(text); + delete_by_selection_insert_mode( + cx, + |text, range| { + let line = range.cursor_line(text); + let line_end_pos = line_end_char_index(&text, line); + let pos = range.cursor(text); - // if the cursor is on the newline char delete that - if pos == line_end_pos { - (pos, text.line_to_char(line + 1)) - } else { - (pos, line_end_pos) - } - }); + // if the cursor is on the newline char delete that + if pos == line_end_pos { + (pos, text.line_to_char(line + 1)) + } else { + (pos, line_end_pos) + } + }, + Direction::Forward, + ); } fn goto_first_nonwhitespace(cx: &mut Context) { @@ -2322,13 +2330,44 @@ fn delete_selection_impl(cx: &mut Context, op: Operation) { fn delete_by_selection_insert_mode( cx: &mut Context, mut f: impl FnMut(RopeSlice, &Range) -> Deletion, + direction: Direction, ) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); - let transaction = + let mut selection = SmallVec::new(); + let mut insert_newline = false; + let text_len = text.len_chars(); + let mut transaction = Transaction::delete_by_selection(doc.text(), doc.selection(view.id), |range| { - f(text, range) + let (start, end) = f(text, range); + if direction == Direction::Forward { + let mut range = *range; + if range.head > range.anchor { + insert_newline |= end == text_len; + // move the cursor to the right so that the selection + // doesn't shrink when deleting forward (so the text appears to + // move to left) + // += 1 is enough here as the range is normalized to grapheme boundaries + // later anyway + range.head += 1; + } + selection.push(range); + } + (start, end) }); + + // in case we delete the last character and the cursor would be moved to the EOF char + // insert a newline, just like when 
entering append mode + if insert_newline { + transaction = transaction.insert_at_eof(doc.line_ending.as_str().into()); + } + + if direction == Direction::Forward { + doc.set_selection( + view.id, + Selection::new(selection, doc.selection(view.id).primary_index()), + ); + } doc.apply(&transaction, view.id); lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } @@ -3490,28 +3529,40 @@ pub mod insert { pub fn delete_char_forward(cx: &mut Context) { let count = cx.count(); - delete_by_selection_insert_mode(cx, |text, range| { - let pos = range.cursor(text); - (pos, graphemes::nth_next_grapheme_boundary(text, pos, count)) - }) + delete_by_selection_insert_mode( + cx, + |text, range| { + let pos = range.cursor(text); + (pos, graphemes::nth_next_grapheme_boundary(text, pos, count)) + }, + Direction::Forward, + ) } pub fn delete_word_backward(cx: &mut Context) { let count = cx.count(); - delete_by_selection_insert_mode(cx, |text, range| { - let anchor = movement::move_prev_word_start(text, *range, count).from(); - let next = Range::new(anchor, range.cursor(text)); - let range = exclude_cursor(text, next, *range); - (range.from(), range.to()) - }); + delete_by_selection_insert_mode( + cx, + |text, range| { + let anchor = movement::move_prev_word_start(text, *range, count).from(); + let next = Range::new(anchor, range.cursor(text)); + let range = exclude_cursor(text, next, *range); + (range.from(), range.to()) + }, + Direction::Backward, + ); } pub fn delete_word_forward(cx: &mut Context) { let count = cx.count(); - delete_by_selection_insert_mode(cx, |text, range| { - let head = movement::move_next_word_end(text, *range, count).to(); - (range.cursor(text), head) - }); + delete_by_selection_insert_mode( + cx, + |text, range| { + let head = movement::move_next_word_end(text, *range, count).to(); + (range.cursor(text), head) + }, + Direction::Forward, + ); } } diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index 1efb204e6c7f..f91a6371f00d 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -429,3 +429,26 @@ async fn test_delete_word_forward() -> anyhow::Result<()> { Ok(()) } +#[tokio::test(flavor = "multi_thread")] +async fn test_delete_char_forward() -> anyhow::Result<()> { + test(( + platform_line(indoc! {"\ + #[abc|]#def + #(abc|)#ef + #(abc|)#f + #(abc|)# + "}) + .as_str(), + "a", + platform_line(indoc! {"\ + #[abc|]#ef + #(abc|)#f + #(abc|)# + #(abc|)# + "}) + .as_str(), + )) + .await?; + + Ok(()) +} From c6f169b1f87d01d72713e84bd331743e4da40f5f Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Wed, 3 May 2023 16:11:57 +0200 Subject: [PATCH 031/571] cleanup integration tests --- helix-term/tests/test/commands.rs | 94 +++++++++++-------------------- 1 file changed, 34 insertions(+), 60 deletions(-) diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index f91a6371f00d..52b123c7eb58 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -12,15 +12,13 @@ async fn test_selection_duplication() -> anyhow::Result<()> { #[lo|]#rem ipsum dolor - "}) - .as_str(), + "}), "CC", platform_line(indoc! {"\ #(lo|)#rem #(ip|)#sum #[do|]#lor - "}) - .as_str(), + "}), )) .await?; @@ -30,15 +28,13 @@ async fn test_selection_duplication() -> anyhow::Result<()> { #[|lo]#rem ipsum dolor - "}) - .as_str(), + "}), "CC", platform_line(indoc! 
{"\ #(|lo)#rem #(|ip)#sum #[|do]#lor - "}) - .as_str(), + "}), )) .await?; @@ -47,14 +43,12 @@ async fn test_selection_duplication() -> anyhow::Result<()> { platform_line(indoc! {"\ test #[testitem|]# - "}) - .as_str(), + "}), "", platform_line(indoc! {"\ test #[testitem|]# - "}) - .as_str(), + "}), )) .await?; @@ -63,14 +57,12 @@ async fn test_selection_duplication() -> anyhow::Result<()> { platform_line(indoc! {"\ test #[test|]# - "}) - .as_str(), + "}), "", platform_line(indoc! {"\ #[test|]# #(test|)# - "}) - .as_str(), + "}), )) .await?; @@ -79,14 +71,12 @@ async fn test_selection_duplication() -> anyhow::Result<()> { platform_line(indoc! {"\ #[testitem|]# test - "}) - .as_str(), + "}), "C", platform_line(indoc! {"\ #[testitem|]# test - "}) - .as_str(), + "}), )) .await?; @@ -95,14 +85,12 @@ async fn test_selection_duplication() -> anyhow::Result<()> { platform_line(indoc! {"\ #[test|]# test - "}) - .as_str(), + "}), "C", platform_line(indoc! {"\ #(test|)# #[test|]# - "}) - .as_str(), + "}), )) .await?; Ok(()) @@ -174,15 +162,13 @@ async fn test_multi_selection_paste() -> anyhow::Result<()> { #[|lorem]# #(|ipsum)# #(|dolor)# - "}) - .as_str(), + "}), "yp", platform_line(indoc! {"\ lorem#[|lorem]# ipsum#(|ipsum)# dolor#(|dolor)# - "}) - .as_str(), + "}), )) .await?; @@ -197,8 +183,7 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> { #[|lorem]# #(|ipsum)# #(|dolor)# - "}) - .as_str(), + "}), "|echo foo", platform_line(indoc! {"\ #[|foo\n]# @@ -207,8 +192,7 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> { #(|foo\n)# - "}) - .as_str(), + "}), )) .await?; @@ -218,8 +202,7 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> { #[|lorem]# #(|ipsum)# #(|dolor)# - "}) - .as_str(), + "}), "!echo foo", platform_line(indoc! {"\ #[|foo\n]# @@ -228,8 +211,7 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> { ipsum #(|foo\n)# dolor - "}) - .as_str(), + "}), )) .await?; @@ -239,8 +221,7 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> { #[|lorem]# #(|ipsum)# #(|dolor)# - "}) - .as_str(), + "}), "echo foo", platform_line(indoc! {"\ lorem#[|foo\n]# @@ -249,8 +230,7 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> { dolor#(|foo\n)# - "}) - .as_str(), + "}), )) .await?; @@ -294,16 +274,14 @@ async fn test_extend_line() -> anyhow::Result<()> { ipsum dolor - "}) - .as_str(), + "}), "x2x", platform_line(indoc! {"\ #[lorem ipsum dolor\n|]# - "}) - .as_str(), + "}), )) .await?; @@ -313,15 +291,13 @@ async fn test_extend_line() -> anyhow::Result<()> { #[l|]#orem ipsum - "}) - .as_str(), + "}), "2x", platform_line(indoc! 
{"\ #[lorem ipsum\n|]# - "}) - .as_str(), + "}), )) .await?; @@ -390,15 +366,15 @@ async fn test_character_info() -> anyhow::Result<()> { async fn test_delete_char_backward() -> anyhow::Result<()> { // don't panic when deleting overlapping ranges test(( - platform_line("#(x|)# #[x|]#").as_str(), + platform_line("#(x|)# #[x|]#"), "c", - platform_line("#[\n|]#").as_str(), + platform_line("#[\n|]#"), )) .await?; test(( - platform_line("#( |)##( |)#a#( |)#axx#[x|]#a").as_str(), + platform_line("#( |)##( |)#a#( |)#axx#[x|]#a"), "li", - platform_line("#(a|)##(|a)#xx#[|a]#").as_str(), + platform_line("#(a|)##(|a)#xx#[|a]#"), )) .await?; @@ -409,9 +385,9 @@ async fn test_delete_char_backward() -> anyhow::Result<()> { async fn test_delete_word_backward() -> anyhow::Result<()> { // don't panic when deleting overlapping ranges test(( - platform_line("fo#[o|]#ba#(r|)#").as_str(), + platform_line("fo#[o|]#ba#(r|)#"), "a", - platform_line("#[\n|]#").as_str(), + platform_line("#[\n|]#"), )) .await?; Ok(()) @@ -421,9 +397,9 @@ async fn test_delete_word_backward() -> anyhow::Result<()> { async fn test_delete_word_forward() -> anyhow::Result<()> { // don't panic when deleting overlapping ranges test(( - platform_line("fo#[o|]#b#(|ar)#").as_str(), + platform_line("fo#[o|]#b#(|ar)#"), "i", - platform_line("fo#[\n|]#").as_str(), + platform_line("fo#[\n|]#"), )) .await?; Ok(()) @@ -437,16 +413,14 @@ async fn test_delete_char_forward() -> anyhow::Result<()> { #(abc|)#ef #(abc|)#f #(abc|)# - "}) - .as_str(), + "}), "a", platform_line(indoc! {"\ #[abc|]#ef #(abc|)#f #(abc|)# #(abc|)# - "}) - .as_str(), + "}), )) .await?; From 2f2306475cac7ee9385b816424137421c13bf4c2 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Fri, 12 May 2023 16:42:00 +0200 Subject: [PATCH 032/571] async picker syntax highlighting --- helix-term/src/ui/picker.rs | 107 +++++++++++++++++++++++++----------- helix-view/src/document.rs | 24 +++++--- 2 files changed, 91 insertions(+), 40 deletions(-) diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index e7a7de9095db..6120bfd1f2f3 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -1,7 +1,9 @@ use crate::{ alt, - compositor::{Component, Compositor, Context, Event, EventResult}, - ctrl, key, shift, + compositor::{self, Component, Compositor, Context, Event, EventResult}, + ctrl, + job::Callback, + key, shift, ui::{ self, document::{render_document, LineDecoration, LinePos, TextRenderer}, @@ -9,7 +11,7 @@ use crate::{ EditorView, }, }; -use futures_util::future::BoxFuture; +use futures_util::{future::BoxFuture, FutureExt}; use tui::{ buffer::Buffer as Surface, layout::Constraint, @@ -26,7 +28,7 @@ use std::{collections::HashMap, io::Read, path::PathBuf}; use crate::ui::{Prompt, PromptEvent}; use helix_core::{ movement::Direction, text_annotations::TextAnnotations, - unicode::segmentation::UnicodeSegmentation, Position, + unicode::segmentation::UnicodeSegmentation, Position, Syntax, }; use helix_view::{ editor::Action, @@ -122,7 +124,7 @@ impl Preview<'_, '_> { } } -impl FilePicker { +impl FilePicker { pub fn new( options: Vec, editor_data: T::Data, @@ -208,29 +210,67 @@ impl FilePicker { } fn handle_idle_timeout(&mut self, cx: &mut Context) -> EventResult { + let Some((current_file, _)) = self.current_file(cx.editor) else { + return EventResult::Consumed(None) + }; + // Try to find a document in the cache - let doc = self - .current_file(cx.editor) - .and_then(|(path, _range)| match path { - PathOrId::Id(doc_id) => Some(doc_mut!(cx.editor, &doc_id)), 
- PathOrId::Path(path) => match self.preview_cache.get_mut(&path) { - Some(CachedPreview::Document(doc)) => Some(doc), - _ => None, - }, - }); + let doc = match ¤t_file { + PathOrId::Id(doc_id) => doc_mut!(cx.editor, doc_id), + PathOrId::Path(path) => match self.preview_cache.get_mut(path) { + Some(CachedPreview::Document(ref mut doc)) => doc, + _ => return EventResult::Consumed(None), + }, + }; + + let mut callback: Option = None; // Then attempt to highlight it if it has no language set - if let Some(doc) = doc { - if doc.language_config().is_none() { + if doc.language_config().is_none() { + if let Some(language_config) = doc.detect_language_config(&cx.editor.syn_loader) { + doc.language = Some(language_config.clone()); + let text = doc.text().clone(); let loader = cx.editor.syn_loader.clone(); - doc.detect_language(loader); + let job = tokio::task::spawn_blocking(move || { + let syntax = language_config + .highlight_config(&loader.scopes()) + .and_then(|highlight_config| Syntax::new(&text, highlight_config, loader)); + let callback = move |editor: &mut Editor, compositor: &mut Compositor| { + let Some(syntax) = syntax else { + log::info!("highlighting picker item failed"); + return + }; + log::info!("hmm1"); + let Some(Overlay { content: picker, .. }) = compositor.find::>() else { + log::info!("picker closed before syntax highlighting finished"); + return + }; + log::info!("hmm2"); + // Try to find a document in the cache + let doc = match current_file { + PathOrId::Id(doc_id) => doc_mut!(editor, &doc_id), + PathOrId::Path(path) => match picker.preview_cache.get_mut(&path) { + Some(CachedPreview::Document(ref mut doc)) => doc, + _ => return, + }, + }; + log::info!("yay"); + doc.syntax = Some(syntax); + }; + Callback::EditorCompositor(Box::new(callback)) + }); + let tmp: compositor::Callback = Box::new(move |_, ctx| { + ctx.jobs + .callback(job.map(|res| res.map_err(anyhow::Error::from))) + }); + callback = Some(Box::new(tmp)) } - - // QUESTION: do we want to compute inlay hints in pickers too ? Probably not for now - // but it could be interesting in the future } - EventResult::Consumed(None) + // QUESTION: do we want to compute inlay hints in pickers too ? Probably not for now + // but it could be interesting in the future + + EventResult::Consumed(callback) } } @@ -373,6 +413,10 @@ impl Component for FilePicker { self.picker.required_size((picker_width, height))?; Some((width, height)) } + + fn id(&self) -> Option<&'static str> { + Some("file-picker") + } } #[derive(PartialEq, Eq, Debug)] @@ -945,17 +989,16 @@ impl Component for DynamicPicker { cx.jobs.callback(async move { let new_options = new_options.await?; - let callback = - crate::job::Callback::EditorCompositor(Box::new(move |editor, compositor| { - // Wrapping of pickers in overlay is done outside the picker code, - // so this is fragile and will break if wrapped in some other widget. - let picker = match compositor.find_id::>>(Self::ID) { - Some(overlay) => &mut overlay.content.file_picker.picker, - None => return, - }; - picker.set_options(new_options); - editor.reset_idle_timer(); - })); + let callback = Callback::EditorCompositor(Box::new(move |editor, compositor| { + // Wrapping of pickers in overlay is done outside the picker code, + // so this is fragile and will break if wrapped in some other widget. 
+ let picker = match compositor.find_id::>>(Self::ID) { + Some(overlay) => &mut overlay.content.file_picker.picker, + None => return, + }; + picker.set_options(new_options); + editor.reset_idle_timer(); + })); anyhow::Ok(callback) }); EventResult::Consumed(None) diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index e29e52cc5c69..770341dcdb9f 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -154,9 +154,9 @@ pub struct Document { /// The document's default line ending. pub line_ending: LineEnding, - syntax: Option, + pub syntax: Option, /// Corresponding language scope name. Usually `source.`. - pub(crate) language: Option>, + pub language: Option>, /// Pending changes since last history commit. changes: ChangeSet, @@ -869,12 +869,20 @@ impl Document { /// Detect the programming language based on the file type. pub fn detect_language(&mut self, config_loader: Arc) { - if let Some(path) = &self.path { - let language_config = config_loader - .language_config_for_file_name(path) - .or_else(|| config_loader.language_config_for_shebang(self.text())); - self.set_language(language_config, Some(config_loader)); - } + self.set_language( + self.detect_language_config(&config_loader), + Some(config_loader), + ); + } + + /// Detect the programming language based on the file type. + pub fn detect_language_config( + &self, + config_loader: &syntax::Loader, + ) -> Option> { + config_loader + .language_config_for_file_name(self.path.as_ref()?) + .or_else(|| config_loader.language_config_for_shebang(self.text())) } /// Detect the indentation used in the file, or otherwise defaults to the language indentation From b0705337bec836604bdb97689d0a44940c6bddae Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Fri, 12 May 2023 16:49:39 +0200 Subject: [PATCH 033/571] automatically disable TS when parsing takes longer than 500ms --- helix-core/src/syntax.rs | 23 +++++++++++++++-------- helix-core/tests/indent.rs | 2 +- helix-term/src/ui/markdown.rs | 2 +- helix-term/src/ui/picker.rs | 3 --- helix-view/src/document.rs | 11 ++++++----- 5 files changed, 23 insertions(+), 18 deletions(-) diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index 005e985de021..f36c985e3485 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -768,7 +768,11 @@ fn byte_range_to_str(range: std::ops::Range, source: RopeSlice) -> Cow, loader: Arc) -> Self { + pub fn new( + source: &Rope, + config: Arc, + loader: Arc, + ) -> Option { let root_layer = LanguageLayer { tree: None, config, @@ -793,11 +797,13 @@ impl Syntax { loader, }; - syntax - .update(source, source, &ChangeSet::new(source)) - .unwrap(); + let res = syntax.update(source, source, &ChangeSet::new(source)); - syntax + if res.is_err() { + log::error!("TS parser failed, disabeling TS for the current buffer: {res:?}"); + return None; + } + Some(syntax) } pub fn update( @@ -925,6 +931,7 @@ impl Syntax { PARSER.with(|ts_parser| { let ts_parser = &mut ts_parser.borrow_mut(); + ts_parser.parser.set_timeout_micros(1000 * 500); // half a second is pretty generours let mut cursor = ts_parser.cursors.pop().unwrap_or_else(QueryCursor::new); // TODO: might need to set cursor range cursor.set_byte_range(0..usize::MAX); @@ -2371,7 +2378,7 @@ mod test { let mut cursor = QueryCursor::new(); let config = HighlightConfiguration::new(language, "", "", "").unwrap(); - let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)); + let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)).unwrap(); let 
root = syntax.tree().root_node(); let mut test = |capture, range| { @@ -2442,7 +2449,7 @@ mod test { fn main() {} ", ); - let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)); + let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)).unwrap(); let tree = syntax.tree(); let root = tree.root_node(); assert_eq!(root.kind(), "source_file"); @@ -2529,7 +2536,7 @@ mod test { let language = get_language(language_name).unwrap(); let config = HighlightConfiguration::new(language, "", "", "").unwrap(); - let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)); + let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)).unwrap(); let root = syntax .tree() diff --git a/helix-core/tests/indent.rs b/helix-core/tests/indent.rs index f558f86f3e65..409706bb99bf 100644 --- a/helix-core/tests/indent.rs +++ b/helix-core/tests/indent.rs @@ -72,7 +72,7 @@ fn test_treesitter_indent(file_name: &str, lang_scope: &str) { let language_config = loader.language_config_for_scope(lang_scope).unwrap(); let highlight_config = language_config.highlight_config(&[]).unwrap(); - let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader)); + let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader)).unwrap(); let indent_query = language_config.indent_query().unwrap(); let text = doc.slice(..); diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index fea3de78f838..def64434a3f1 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -51,7 +51,7 @@ pub fn highlighted_code_block<'a>( language.into(), )) .and_then(|config| config.highlight_config(theme.scopes())) - .map(|config| Syntax::new(&rope, config, Arc::clone(&config_loader))); + .and_then(|config| Syntax::new(&rope, config, Arc::clone(&config_loader))); let syntax = match syntax { Some(s) => s, diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index 6120bfd1f2f3..d161f786c4fe 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -240,12 +240,10 @@ impl FilePicker { log::info!("highlighting picker item failed"); return }; - log::info!("hmm1"); let Some(Overlay { content: picker, .. 
}) = compositor.find::>() else { log::info!("picker closed before syntax highlighting finished"); return }; - log::info!("hmm2"); // Try to find a document in the cache let doc = match current_file { PathOrId::Id(doc_id) => doc_mut!(editor, &doc_id), @@ -254,7 +252,6 @@ impl FilePicker { _ => return, }, }; - log::info!("yay"); doc.syntax = Some(syntax); }; Callback::EditorCompositor(Box::new(callback)) diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 770341dcdb9f..eb376567953d 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -972,8 +972,7 @@ impl Document { ) { if let (Some(language_config), Some(loader)) = (language_config, loader) { if let Some(highlight_config) = language_config.highlight_config(&loader.scopes()) { - let syntax = Syntax::new(&self.text, highlight_config, loader); - self.syntax = Some(syntax); + self.syntax = Syntax::new(&self.text, highlight_config, loader); } self.language = Some(language_config); @@ -1113,9 +1112,11 @@ impl Document { // update tree-sitter syntax tree if let Some(syntax) = &mut self.syntax { // TODO: no unwrap - syntax - .update(&old_doc, &self.text, transaction.changes()) - .unwrap(); + let res = syntax.update(&old_doc, &self.text, transaction.changes()); + if res.is_err() { + log::error!("TS parser failed, disabeling TS for the current buffer: {res:?}"); + self.syntax = None; + } } let changes = transaction.changes(); From 2cccb3f09c52824bf070e127c0b196ed8d8e7555 Mon Sep 17 00:00:00 2001 From: Ivan Gulakov <44394533+ogimgd@users.noreply.github.com> Date: Thu, 18 May 2023 08:27:29 +0200 Subject: [PATCH 034/571] Fix completion on paths containing spaces (#6779) There was an issue with autocompletion of a path with a space in it. Before: :o test\ dir -> -> test\ dirfile1 After: :o test\ dir -> -> test\ dir\file1 --- helix-term/src/commands/typed.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index 16ee83d76568..81a24059a900 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -2847,13 +2847,10 @@ pub(super) fn command_mode(cx: &mut Context) { } else { // Otherwise, use the command's completer and the last shellword // as completion input. 
- let (part, part_len) = if words.len() == 1 || shellwords.ends_with_whitespace() { + let (word, word_len) = if words.len() == 1 || shellwords.ends_with_whitespace() { (&Cow::Borrowed(""), 0) } else { - ( - words.last().unwrap(), - shellwords.parts().last().unwrap().len(), - ) + (words.last().unwrap(), words.last().unwrap().len()) }; let argument_number = argument_number_of(&shellwords); @@ -2862,13 +2859,13 @@ pub(super) fn command_mode(cx: &mut Context) { .get(&words[0] as &str) .map(|tc| tc.completer_for_argument_number(argument_number)) { - completer(editor, part) + completer(editor, word) .into_iter() .map(|(range, file)| { let file = shellwords::escape(file); // offset ranges to input - let offset = input.len() - part_len; + let offset = input.len() - word_len; let range = (range.start + offset)..; (range, file) }) From 04fbf30488348e417e3922ef23973d4542923731 Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Wed, 5 Apr 2023 15:37:20 -0500 Subject: [PATCH 035/571] Bump the version to 23.05 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 35371314c17b..527d78c51453 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -23.03 \ No newline at end of file +23.05 \ No newline at end of file From 59f8f5e6d4c3eeabb24bee71740d48686d3257b6 Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Wed, 5 Apr 2023 15:37:08 -0500 Subject: [PATCH 036/571] Add changelog notes for the 23.05 release --- CHANGELOG.md | 117 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 117 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01184571eccf..8f2775b2968d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,120 @@ +# 23.05 (2023-05-18) + +23.05 is a smaller release focusing on fixes. There were 88 contributors in this release. Thank you all! 
+ +Features: + +- Add a config option to exclude declaration from LSP references request ([#6886](https://github.com/helix-editor/helix/pull/6886)) +- Enable injecting languages based on their file extension and shebang ([#3970](https://github.com/helix-editor/helix/pull/3970)) +- Sort the buffer picker by most recent access ([#2980](https://github.com/helix-editor/helix/pull/2980)) +- Perform syntax highlighting in the picker asynchronously ([#7028](https://github.com/helix-editor/helix/pull/7028)) + +Commands: + +- `:update` is now aliased as `:u` ([#6835](https://github.com/helix-editor/helix/pull/6835)) +- Add `extend_to_first_nonwhitespace` which acts the same as `goto_first_nonwhitespace` but always extends ([#6837](https://github.com/helix-editor/helix/pull/6837)) +- Add `:clear-register` for clearing the given register or all registers ([#5695](https://github.com/helix-editor/helix/pull/5695)) +- Add `:write-buffer-close` and `:write-buffer-close!` ([#6947](https://github.com/helix-editor/helix/pull/6947)) + +Fixes: + +- Normalize LSP workspace paths ([#6517](https://github.com/helix-editor/helix/pull/6517)) +- Robustly handle invalid LSP ranges ([#6512](https://github.com/helix-editor/helix/pull/6512)) +- Fix line number display for LSP goto pickers ([#6559](https://github.com/helix-editor/helix/pull/6559)) +- Fix toggling of `soft-wrap.enable` option ([#6656](https://github.com/helix-editor/helix/pull/6656), [58e457a](https://github.com/helix-editor/helix/commit/58e457a), [#6742](https://github.com/helix-editor/helix/pull/6742)) +- Handle `workspace/configuration` requests from stopped language servers ([#6693](https://github.com/helix-editor/helix/pull/6693)) +- Fix possible crash from opening the jumplist picker ([#6672](https://github.com/helix-editor/helix/pull/6672)) +- Fix theme preview returning to current theme on line and word deletions ([#6694](https://github.com/helix-editor/helix/pull/6694)) +- Re-run crate build scripts on changes to revision and grammar repositories ([#6743](https://github.com/helix-editor/helix/pull/6743)) +- Fix crash on opening from suspended state ([#6764](https://github.com/helix-editor/helix/pull/6764)) +- Fix unwrap bug in DAP ([#6786](https://github.com/helix-editor/helix/pull/6786)) +- Always build tree-sitter parsers with C++14 and C11 ([#6792](https://github.com/helix-editor/helix/pull/6792), [#6834](https://github.com/helix-editor/helix/pull/6834), [#6845](https://github.com/helix-editor/helix/pull/6845)) +- Exit with a non-zero statuscode when tree-sitter parser builds fail ([#6795](https://github.com/helix-editor/helix/pull/6795)) +- Flip symbol range in LSP goto commands ([#6794](https://github.com/helix-editor/helix/pull/6794)) +- Fix runtime toggling of the `mouse` option ([#6675](https://github.com/helix-editor/helix/pull/6675)) +- Fix panic in inlay hint computation when view anchor is out of bounds ([#6883](https://github.com/helix-editor/helix/pull/6883)) +- Significantly improve performance of git discovery on slow file systems ([#6890](https://github.com/helix-editor/helix/pull/6890)) +- Downgrade gix log level to info ([#6915](https://github.com/helix-editor/helix/pull/6915)) +- Conserve BOM and properly support saving UTF16 files ([#6497](https://github.com/helix-editor/helix/pull/6497)) +- Correctly handle completion re-request ([#6594](https://github.com/helix-editor/helix/pull/6594)) +- Fix offset encoding in LSP `didChange` notifications ([#6921](https://github.com/helix-editor/helix/pull/6921)) +- Change `gix` logging 
level to info ([#6915](https://github.com/helix-editor/helix/pull/6915)) +- Improve error message when writes fail because parent directories do not exist ([#7014](https://github.com/helix-editor/helix/pull/7014)) +- Replace DAP variables popup instead of pushing more popups ([#7034](https://github.com/helix-editor/helix/pull/7034)) +- Disable tree-sitter for files after parsing for 500ms ([#7028](https://github.com/helix-editor/helix/pull/7028)) +- Fix crash when deleting with multiple cursors ([#6024](https://github.com/helix-editor/helix/pull/6024)) +- Fix selection sliding when deleting forwards in append mode ([#6024](https://github.com/helix-editor/helix/pull/6024)) +- Fix completion on paths containing spaces ([#6779](https://github.com/helix-editor/helix/pull/6779)) + +Themes: + +- Style inlay hints in `dracula` theme ([#6515](https://github.com/helix-editor/helix/pull/6515)) +- Style inlay hints in `onedark` theme ([#6503](https://github.com/helix-editor/helix/pull/6503)) +- Style inlay hints and the soft-wrap indicator in `varua` ([#6568](https://github.com/helix-editor/helix/pull/6568), [#6589](https://github.com/helix-editor/helix/pull/6589)) +- Style inlay hints in `emacs` theme ([#6569](https://github.com/helix-editor/helix/pull/6569)) +- Update `base16_transparent` and `dark_high_contrast` themes ([#6577](https://github.com/helix-editor/helix/pull/6577)) +- Style inlay hints for `mellow` and `rasmus` themes ([#6583](https://github.com/helix-editor/helix/pull/6583)) +- Dim pane divider for `base16_transparent` theme ([#6534](https://github.com/helix-editor/helix/pull/6534)) +- Style inlay hints in `zenburn` theme ([#6593](https://github.com/helix-editor/helix/pull/6593)) +- Style inlay hints in `boo_berry` theme ([#6625](https://github.com/helix-editor/helix/pull/6625)) +- Add `ferra` theme ([#6619](https://github.com/helix-editor/helix/pull/6619), [#6776](https://github.com/helix-editor/helix/pull/6776)) +- Style inlay hints in `nightfox` theme ([#6655](https://github.com/helix-editor/helix/pull/6655)) +- Fix `ayu` theme family markup code block background ([#6538](https://github.com/helix-editor/helix/pull/6538)) +- Improve whitespace and search match colors in `rose_pine` theme ([#6679](https://github.com/helix-editor/helix/pull/6679)) +- Highlight selected items in `base16_transparent` theme ([#6716](https://github.com/helix-editor/helix/pull/6716)) +- Adjust everforest to resemble original more closely ([#5866](https://github.com/helix-editor/helix/pull/5866)) +- Refactor `dracula` theme ([#6552](https://github.com/helix-editor/helix/pull/6552), [#6767](https://github.com/helix-editor/helix/pull/6767), [#6855](https://github.com/helix-editor/helix/pull/6855), [#6987](https://github.com/helix-editor/helix/pull/6987)) +- Style inlay hints in `darcula` theme ([#6732](https://github.com/helix-editor/helix/pull/6732)) +- Style inlay hints in `kanagawa` theme ([#6773](https://github.com/helix-editor/helix/pull/6773)) +- Improve `ayu_dark` theme ([#6622](https://github.com/helix-editor/helix/pull/6622)) +- Refactor `noctis` theme multiple cursor highlighting ([96720e7](https://github.com/helix-editor/helix/commit/96720e7)) +- Refactor `noctis` theme whitespace rendering and indent guides ([f2ccc03](https://github.com/helix-editor/helix/commit/f2ccc03)) +- Add `amberwood` theme ([#6924](https://github.com/helix-editor/helix/pull/6924)) +- Update `nightfox` theme ([#7061](https://github.com/helix-editor/helix/pull/7061)) + +Language support: + +- R language server: use the 
`--no-echo` flag to silence output ([#6570](https://github.com/helix-editor/helix/pull/6570)) +- Recognize CUDA files as C++ ([#6521](https://github.com/helix-editor/helix/pull/6521)) +- Add support for Hurl ([#6450](https://github.com/helix-editor/helix/pull/6450)) +- Add textobject queries for Julia ([#6588](https://github.com/helix-editor/helix/pull/6588)) +- Update Ruby highlight queries ([#6587](https://github.com/helix-editor/helix/pull/6587)) +- Add xsd to XML file-types ([#6631](https://github.com/helix-editor/helix/pull/6631)) +- Support Robot Framework ([#6611](https://github.com/helix-editor/helix/pull/6611)) +- Update Gleam tree-sitter parser ([#6641](https://github.com/helix-editor/helix/pull/6641)) +- Update git-commit tree-sitter parser ([#6692](https://github.com/helix-editor/helix/pull/6692)) +- Update Haskell tree-sitter parser ([#6317](https://github.com/helix-editor/helix/pull/6317)) +- Add injection queries for Haskell quasiquotes ([#6474](https://github.com/helix-editor/helix/pull/6474)) +- Highlight C/C++ escape sequences ([#6724](https://github.com/helix-editor/helix/pull/6724)) +- Support Markdoc ([#6432](https://github.com/helix-editor/helix/pull/6432)) +- Support OpenCL ([#6473](https://github.com/helix-editor/helix/pull/6473)) +- Support DTD ([#6644](https://github.com/helix-editor/helix/pull/6644)) +- Fix constant highlighting in Python queries ([#6751](https://github.com/helix-editor/helix/pull/6751)) +- Support Just ([#6453](https://github.com/helix-editor/helix/pull/6453)) +- Fix Go locals query for `var_spec` identifiers ([#6763](https://github.com/helix-editor/helix/pull/6763)) +- Update Markdown tree-sitter parser ([#6785](https://github.com/helix-editor/helix/pull/6785)) +- Fix Haskell workspace root for cabal projects ([#6828](https://github.com/helix-editor/helix/pull/6828)) +- Avoid extra indentation in Go switches ([#6817](https://github.com/helix-editor/helix/pull/6817)) +- Fix Go workspace roots ([#6884](https://github.com/helix-editor/helix/pull/6884)) +- Set PerlNavigator as the default Perl language server ([#6860](https://github.com/helix-editor/helix/pull/6860)) +- Highlight more sqlx macros in Rust ([#6793](https://github.com/helix-editor/helix/pull/6793)) +- Switch Odin tree-sitter grammar ([#6766](https://github.com/helix-editor/helix/pull/6766)) +- Recognize `poetry.lock` as TOML ([#6928](https://github.com/helix-editor/helix/pull/6928)) +- Recognize Jupyter notebooks as JSON ([#6927](https://github.com/helix-editor/helix/pull/6927)) +- Add language server configuration for Crystal ([#6948](https://github.com/helix-editor/helix/pull/6948)) +- Add `build.gradle.kts` to Java and Scala roots ([#6970](https://github.com/helix-editor/helix/pull/6970)) +- Recognize `sty` and `cls` files as latex ([#6986](https://github.com/helix-editor/helix/pull/6986)) +- Update Dockerfile tree-sitter grammar ([#6895](https://github.com/helix-editor/helix/pull/6895)) +- Add comment injections for Odin ([#7027](https://github.com/helix-editor/helix/pull/7027)) +- Recognize `gml` as XML ([#7055](https://github.com/helix-editor/helix/pull/7055)) +- Recognize `geojson` as JSON ([#7054](https://github.com/helix-editor/helix/pull/7054)) + +Packaging: + +- Update the Nix flake dependencies, remove a deprecated option ([#6546](https://github.com/helix-editor/helix/pull/6546)) +- Fix and re-enable aarch64-macos release binary builds ([#6504](https://github.com/helix-editor/helix/pull/6504)) +- The git dependency on `tree-sitter` has been replaced with a regular 
crates.io dependency ([#6608](https://github.com/helix-editor/helix/pull/6608))
+
 # 23.03 (2023-03-31)
 
 23.03 brings some long-awaited and exciting features. Thank you to everyone involved! This release saw changes from 102 contributors.

From 7f5940be80eaa3aec7903903072b7108f41dd97b Mon Sep 17 00:00:00 2001
From: Michael Davis
Date: Thu, 18 May 2023 15:56:49 +0900
Subject: [PATCH 037/571] Add 23.05 to the AppData

---
 contrib/Helix.appdata.xml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/contrib/Helix.appdata.xml b/contrib/Helix.appdata.xml
index b99738a18ea2..f1b310db441e 100644
--- a/contrib/Helix.appdata.xml
+++ b/contrib/Helix.appdata.xml
@@ -36,6 +36,9 @@
   </provides>
 
   <releases>
+    <release version="23.05" date="2023-05-18">
+      <url>https://github.com/helix-editor/helix/releases/tag/23.05</url>
+    </release>
     <release version="23.03" date="2023-03-31">
       <url>https://helix-editor.com/news/release-23-03-highlights/</url>

From 71551d395b4e47804df2d8ecea99e34dbbf16157 Mon Sep 17 00:00:00 2001
From: Philipp Mildenberger
Date: Mon, 23 May 2022 18:10:48 +0200
Subject: [PATCH 038/571] Adds support for multiple language servers per
 language.

Language servers are now configured in a separate table in `languages.toml`:

```toml
[language-server.mylang-lsp]
command = "mylang-lsp"
args = ["--stdio"]
config = { provideFormatter = true }

[language-server.efm-lsp-prettier]
command = "efm-langserver"

[language-server.efm-lsp-prettier.config]
documentFormatting = true
languages = { typescript = [ { formatCommand = "prettier --stdin-filepath ${INPUT}", formatStdin = true } ] }
```

The language servers for a language are configured like this (`typescript-language-server` is configured by default):

```toml
[[language]]
name = "typescript"
language-servers = [ { name = "efm-lsp-prettier", only-features = [ "format" ] }, "typescript-language-server" ]
```

or equivalently:

```toml
[[language]]
name = "typescript"
language-servers = [ { name = "typescript-language-server", except-features = [ "format" ] }, "efm-lsp-prettier" ]
```

Each requested LSP feature is prioritized in the order of the `language-servers` array.
For example, the first language server that supports `goto-definition` (in this case `typescript-language-server`) is used for the relevant LSP request (command `goto_definition`).
If neither `except-features` nor `only-features` is given, all features of the language server are enabled, as long as the language server supports them. If it doesn't, the next language server that supports the feature is tried.

The list of supported features is:

- `format`
- `goto-definition`
- `goto-declaration`
- `goto-type-definition`
- `goto-reference`
- `goto-implementation`
- `signature-help`
- `hover`
- `document-highlight`
- `completion`
- `code-action`
- `workspace-command`
- `document-symbols`
- `workspace-symbols`
- `diagnostics`
- `rename-symbol`
- `inlay-hints`

Another side effect of this PR is that only one language server instance is started if different languages use the same language server.
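To illustrate that last point, here is a minimal sketch (the entries simply mirror the default `typescript-language-server` setup mentioned above) of two languages referencing the same `[language-server]` entry, so only a single server process is started and shared between them:

```toml
# Both languages point at the same [language-server] entry, so one
# typescript-language-server instance serves documents of either language.
[language-server.typescript-language-server]
command = "typescript-language-server"
args = ["--stdio"]

[[language]]
name = "typescript"
language-servers = [ "typescript-language-server" ]

[[language]]
name = "javascript"
language-servers = [ "typescript-language-server" ]
```

Requests from either language are routed to that one shared client.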
--- book/src/generated/typable-cmd.md | 2 +- book/src/guides/adding_languages.md | 1 + book/src/languages.md | 104 +++- helix-core/src/diagnostic.rs | 1 + helix-core/src/syntax.rs | 113 +++- helix-lsp/src/client.rs | 12 +- helix-lsp/src/lib.rs | 204 +++---- helix-lsp/src/transport.rs | 63 ++- helix-term/src/application.rs | 126 +++-- helix-term/src/commands.rs | 273 +++++----- helix-term/src/commands/lsp.rs | 799 +++++++++++++++------------- helix-term/src/commands/typed.rs | 106 ++-- helix-term/src/health.rs | 27 +- helix-term/src/ui/completion.rs | 85 ++- helix-term/src/ui/editor.rs | 17 +- helix-term/src/ui/mod.rs | 21 +- helix-term/src/ui/statusline.rs | 12 +- helix-view/src/document.rs | 145 +++-- helix-view/src/editor.rs | 64 ++- helix-view/src/gutter.rs | 2 +- languages.toml | 425 ++++++++------- xtask/src/docgen.rs | 11 +- 22 files changed, 1555 insertions(+), 1058 deletions(-) diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index a775c655589c..0c377b3bbd4b 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -50,7 +50,7 @@ | `:reload-all` | Discard changes and reload all documents from the source files. | | `:update`, `:u` | Write changes only if the file has been modified. | | `:lsp-workspace-command` | Open workspace command picker | -| `:lsp-restart` | Restarts the Language Server that is in use by the current doc | +| `:lsp-restart` | Restarts the language servers used by the currently opened file | | `:lsp-stop` | Stops the Language Server that is in use by the current doc | | `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. | | `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. | diff --git a/book/src/guides/adding_languages.md b/book/src/guides/adding_languages.md index b92af4028e9a..93ec013f5c3e 100644 --- a/book/src/guides/adding_languages.md +++ b/book/src/guides/adding_languages.md @@ -9,6 +9,7 @@ below. necessary configuration for the new language. For more information on language configuration, refer to the [language configuration section](../languages.md) of the documentation. + A new language server can be added by extending the `[language-server]` table in the same file. 2. If you are adding a new language or updating an existing language server configuration, run the command `cargo xtask docgen` to update the [Language Support](../lang-support.md) documentation. 
diff --git a/book/src/languages.md b/book/src/languages.md index fe4db141309d..3328c6103a09 100644 --- a/book/src/languages.md +++ b/book/src/languages.md @@ -18,6 +18,9 @@ There are three possible locations for a `languages.toml` file: ```toml # in /helix/languages.toml +[language-server.mylang-lsp] +command = "mylang-lsp" + [[language]] name = "rust" auto-format = false @@ -41,8 +44,8 @@ injection-regex = "mylang" file-types = ["mylang", "myl"] comment-token = "#" indent = { tab-width = 2, unit = " " } -language-server = { command = "mylang-lsp", args = ["--stdio"], environment = { "ENV1" = "value1", "ENV2" = "value2" } } formatter = { command = "mylang-formatter" , args = ["--stdin"] } +language-servers = [ "mylang-lsp" ] ``` These configuration keys are available: @@ -50,6 +53,7 @@ These configuration keys are available: | Key | Description | | ---- | ----------- | | `name` | The name of the language | +| `language-id` | The language-id for language servers, checkout the table at [TextDocumentItem](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem) for the right id | | `scope` | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.` or `text.` in case of markup languages | | `injection-regex` | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. | | `file-types` | The filetypes of the language, for example `["yml", "yaml"]`. See the file-type detection section below. | @@ -59,7 +63,7 @@ These configuration keys are available: | `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) | | `comment-token` | The token to use as a comment-token | | `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) | -| `language-server` | The Language Server to run. See the Language Server configuration section below. | +| `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) | | `config` | Language Server configuration | | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) | | `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout | @@ -92,31 +96,97 @@ with the following priorities: replaced at runtime with the appropriate path separator for the operating system, so this rule would match against `.git\config` files on Windows. 
-### Language Server configuration +## Language Server configuration + +Language servers are configured separately in the table `language-server` in the same file as the languages `languages.toml` + +For example: + +```toml +[language-server.mylang-lsp] +command = "mylang-lsp" +args = ["--stdio"] +config = { provideFormatter = true } +environment = { "ENV1" = "value1", "ENV2" = "value2" } + +[language-server.efm-lsp-prettier] +command = "efm-langserver" + +[language-server.efm-lsp-prettier.config] +documentFormatting = true +languages = { typescript = [ { formatCommand ="prettier --stdin-filepath ${INPUT}", formatStdin = true } ] } +``` -The `language-server` field takes the following keys: +These are the available options for a language server. -| Key | Description | -| --- | ----------- | -| `command` | The name of the language server binary to execute. Binaries must be in `$PATH` | -| `args` | A list of arguments to pass to the language server binary | -| `timeout` | The maximum time a request to the language server may take, in seconds. Defaults to `20` | -| `language-id` | The language name to pass to the language server. Some language servers support multiple languages and use this field to determine which one is being served in a buffer | -| `environment` | Any environment variables that will be used when starting the language server `{ "KEY1" = "Value1", "KEY2" = "Value2" }` | +| Key | Description | +| ---- | ----------- | +| `command` | The name or path of the language server binary to execute. Binaries must be in `$PATH` | +| `args` | A list of arguments to pass to the language server binary | +| `config` | LSP initialization options | +| `timeout` | The maximum time a request to the language server may take, in seconds. Defaults to `20` | +| `environment` | Any environment variables that will be used when starting the language server `{ "KEY1" = "Value1", "KEY2" = "Value2" }` | -The top-level `config` field is used to configure the LSP initialization options. A `format` -sub-table within `config` can be used to pass extra formatting options to -[Document Formatting Requests](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md#document-formatting-request--leftwards_arrow_with_hook). +A `format` sub-table within `config` can be used to pass extra formatting options to +[Document Formatting Requests](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-17.md#document-formatting-request--leftwards_arrow_with_hook). For example with typescript: ```toml -[[language]] -name = "typescript" -auto-format = true +[language-server.typescript-language-server] # pass format options according to https://github.com/typescript-language-server/typescript-language-server#workspacedidchangeconfiguration omitting the "[language].format." prefix. config = { format = { "semicolons" = "insert", "insertSpaceBeforeFunctionParenthesis" = true } } ``` +### Configuring Language Servers for a language + +The `language-servers` attribute in a language tells helix which language servers are used for this language. +They have to be defined in the `[language-server]` table as described in the previous section. +Different languages can use the same language server instance, e.g. `typescript-language-server` is used for javascript, jsx, tsx and typescript by default. 
+In case multiple language servers are specified in the `language-servers` attribute of a `language`, +it's often useful to only enable/disable certain language-server features for these language servers. +For example `efm-lsp-prettier` of the previous example is used only with a formatting command `prettier`, +so everything else should be handled by the `typescript-language-server` (which is configured by default) +The language configuration for typescript could look like this: + +```toml +[[language]] +name = "typescript" +language-servers = [ { name = "efm-lsp-prettier", only-features = [ "format" ] }, "typescript-language-server" ] +``` + +or equivalent: + +```toml +[[language]] +name = "typescript" +language-servers = [ { name = "typescript-language-server", except-features = [ "format" ] }, "efm-lsp-prettier" ] +``` + +Each requested LSP feature is priorized in the order of the `language-servers` array. +For example the first `goto-definition` supported language server (in this case `typescript-language-server`) will be taken for the relevant LSP request (command `goto_definition`). +If no `except-features` or `only-features` is given all features for the language server are enabled. +If a language server itself doesn't support a feature the next language server array entry will be tried (and so on). + +The list of supported features are: + +- `format` +- `goto-definition` +- `goto-declaration` +- `goto-type-definition` +- `goto-reference` +- `goto-implementation` +- `signature-help` +- `hover` +- `document-highlight` +- `completion` +- `code-action` +- `workspace-command` +- `document-symbols` +- `workspace-symbols` +- `diagnostics` +- `rename-symbol` +- `inlay-hints` + ## Tree-sitter grammar configuration The source for a language's tree-sitter grammar is specified in a `[[grammar]]` diff --git a/helix-core/src/diagnostic.rs b/helix-core/src/diagnostic.rs index 58ddb0383a0a..0b75d2a586f1 100644 --- a/helix-core/src/diagnostic.rs +++ b/helix-core/src/diagnostic.rs @@ -43,6 +43,7 @@ pub struct Diagnostic { pub message: String, pub severity: Option, pub code: Option, + pub language_server_id: usize, pub tags: Vec, pub source: Option, pub data: Option, diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index f36c985e3485..ff4bb6c27b50 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -17,7 +17,7 @@ use std::{ borrow::Cow, cell::RefCell, collections::{HashMap, VecDeque}, - fmt, + fmt::{self, Display}, hash::{Hash, Hasher}, mem::{replace, transmute}, path::{Path, PathBuf}, @@ -60,8 +60,11 @@ fn default_timeout() -> u64 { } #[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct Configuration { pub language: Vec, + #[serde(default)] + pub language_server: HashMap, } impl Default for Configuration { @@ -75,7 +78,10 @@ impl Default for Configuration { #[serde(rename_all = "kebab-case", deny_unknown_fields)] pub struct LanguageConfiguration { #[serde(rename = "name")] - pub language_id: String, // c-sharp, rust + pub language_id: String, // c-sharp, rust, tsx + #[serde(rename = "language-id")] + // see the table under https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem + pub language_server_language_id: Option, // csharp, rust, typescriptreact, for the language-server pub scope: String, // source.rust pub file_types: Vec, // filename extension or ends_with? 
#[serde(default)] @@ -85,9 +91,6 @@ pub struct LanguageConfiguration { pub text_width: Option, pub soft_wrap: Option, - #[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")] - pub config: Option, - #[serde(default)] pub auto_format: bool, @@ -107,8 +110,8 @@ pub struct LanguageConfiguration { #[serde(skip)] pub(crate) highlight_config: OnceCell>>, // tags_config OnceCell<> https://github.com/tree-sitter/tree-sitter/pull/583 - #[serde(skip_serializing_if = "Option::is_none")] - pub language_server: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub language_servers: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub indent: Option, @@ -208,6 +211,68 @@ impl<'de> Deserialize<'de> for FileType { } } +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "kebab-case")] +pub enum LanguageServerFeature { + Format, + GotoDeclaration, + GotoDefinition, + GotoTypeDefinition, + GotoReference, + GotoImplementation, + // Goto, use bitflags, combining previous Goto members? + SignatureHelp, + Hover, + DocumentHighlight, + Completion, + CodeAction, + WorkspaceCommand, + DocumentSymbols, + WorkspaceSymbols, + // Symbols, use bitflags, see above? + Diagnostics, + RenameSymbol, + InlayHints, +} + +impl Display for LanguageServerFeature { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + LanguageServerFeature::Format => write!(f, "format"), + LanguageServerFeature::GotoDeclaration => write!(f, "goto-declaration"), + LanguageServerFeature::GotoDefinition => write!(f, "goto-definition"), + LanguageServerFeature::GotoTypeDefinition => write!(f, "goto-type-definition"), + LanguageServerFeature::GotoReference => write!(f, "goto-type-definition"), + LanguageServerFeature::GotoImplementation => write!(f, "goto-implementation"), + LanguageServerFeature::SignatureHelp => write!(f, "signature-help"), + LanguageServerFeature::Hover => write!(f, "hover"), + LanguageServerFeature::DocumentHighlight => write!(f, "document-highlight"), + LanguageServerFeature::Completion => write!(f, "completion"), + LanguageServerFeature::CodeAction => write!(f, "code-action"), + LanguageServerFeature::WorkspaceCommand => write!(f, "workspace-command"), + LanguageServerFeature::DocumentSymbols => write!(f, "document-symbols"), + LanguageServerFeature::WorkspaceSymbols => write!(f, "workspace-symbols"), + LanguageServerFeature::Diagnostics => write!(f, "diagnostics"), + LanguageServerFeature::RenameSymbol => write!(f, "rename-symbol"), + LanguageServerFeature::InlayHints => write!(f, "inlay-hints"), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged, rename_all = "kebab-case", deny_unknown_fields)] +pub enum LanguageServerFeatureConfiguration { + #[serde(rename_all = "kebab-case")] + Features { + #[serde(default, skip_serializing_if = "Vec::is_empty")] + only_features: Vec, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + except_features: Vec, + name: String, + }, + Simple(String), +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct LanguageServerConfiguration { @@ -217,9 +282,10 @@ pub struct LanguageServerConfiguration { pub args: Vec, #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub environment: HashMap, + #[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")] + pub config: Option, #[serde(default = "default_timeout")] pub timeout: u64, - pub language_id: Option, } #[derive(Debug, Clone, Serialize, 
Deserialize)] @@ -584,6 +650,15 @@ pub struct SoftWrap { pub wrap_at_text_width: Option, } +impl LanguageServerFeatureConfiguration { + pub fn name(&self) -> &String { + match self { + LanguageServerFeatureConfiguration::Simple(name) => name, + LanguageServerFeatureConfiguration::Features { name, .. } => name, + } + } +} + // Expose loader as Lazy<> global since it's always static? #[derive(Debug)] @@ -594,6 +669,8 @@ pub struct Loader { language_config_ids_by_suffix: HashMap, language_config_ids_by_shebang: HashMap, + language_server_configs: HashMap, + scopes: ArcSwap>, } @@ -601,6 +678,7 @@ impl Loader { pub fn new(config: Configuration) -> Self { let mut loader = Self { language_configs: Vec::new(), + language_server_configs: config.language_server, language_config_ids_by_extension: HashMap::new(), language_config_ids_by_suffix: HashMap::new(), language_config_ids_by_shebang: HashMap::new(), @@ -725,6 +803,10 @@ impl Loader { self.language_configs.iter() } + pub fn language_server_configs(&self) -> &HashMap { + &self.language_server_configs + } + pub fn set_scopes(&self, scopes: Vec) { self.scopes.store(Arc::new(scopes)); @@ -2370,7 +2452,10 @@ mod test { "#, ); - let loader = Loader::new(Configuration { language: vec![] }); + let loader = Loader::new(Configuration { + language: vec![], + language_server: HashMap::new(), + }); let language = get_language("rust").unwrap(); let query = Query::new(language, query_str).unwrap(); @@ -2429,7 +2514,10 @@ mod test { .map(String::from) .collect(); - let loader = Loader::new(Configuration { language: vec![] }); + let loader = Loader::new(Configuration { + language: vec![], + language_server: HashMap::new(), + }); let language = get_language("rust").unwrap(); let config = HighlightConfiguration::new( @@ -2532,7 +2620,10 @@ mod test { ) { let source = Rope::from_str(source); - let loader = Loader::new(Configuration { language: vec![] }); + let loader = Loader::new(Configuration { + language: vec![], + language_server: HashMap::new(), + }); let language = get_language(language_name).unwrap(); let config = HighlightConfiguration::new(language, "", "", "").unwrap(); diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index 840e73828dc7..c0f3adb8211f 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -44,6 +44,7 @@ fn workspace_for_uri(uri: lsp::Url) -> WorkspaceFolder { #[derive(Debug)] pub struct Client { id: usize, + name: String, _process: Child, server_tx: UnboundedSender, request_counter: AtomicU64, @@ -166,8 +167,7 @@ impl Client { tokio::spawn(self.did_change_workspace(vec![workspace_for_uri(root_uri)], Vec::new())); } - #[allow(clippy::type_complexity)] - #[allow(clippy::too_many_arguments)] + #[allow(clippy::type_complexity, clippy::too_many_arguments)] pub fn start( cmd: &str, args: &[String], @@ -176,6 +176,7 @@ impl Client { root_markers: &[String], manual_roots: &[PathBuf], id: usize, + name: String, req_timeout: u64, doc_path: Option<&std::path::PathBuf>, ) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc)> { @@ -200,7 +201,7 @@ impl Client { let stderr = BufReader::new(process.stderr.take().expect("Failed to open stderr")); let (server_rx, server_tx, initialize_notify) = - Transport::start(reader, writer, stderr, id); + Transport::start(reader, writer, stderr, id, name.clone()); let (workspace, workspace_is_cwd) = find_workspace(); let workspace = path::get_normalized_path(&workspace); let root = find_lsp_workspace( @@ -225,6 +226,7 @@ impl Client { let client = Self { id, + name, _process: process, 
server_tx, request_counter: AtomicU64::new(0), @@ -240,6 +242,10 @@ impl Client { Ok((client, server_rx, initialize_notify)) } + pub fn name(&self) -> &String { + &self.name + } + pub fn id(&self) -> usize { self.id } diff --git a/helix-lsp/src/lib.rs b/helix-lsp/src/lib.rs index 31ee1d75cb7d..12e63255adb2 100644 --- a/helix-lsp/src/lib.rs +++ b/helix-lsp/src/lib.rs @@ -17,19 +17,16 @@ use helix_core::{ use tokio::sync::mpsc::UnboundedReceiver; use std::{ - collections::{hash_map::Entry, HashMap}, + collections::HashMap, path::{Path, PathBuf}, - sync::{ - atomic::{AtomicUsize, Ordering}, - Arc, - }, + sync::Arc, }; use thiserror::Error; use tokio_stream::wrappers::UnboundedReceiverStream; pub type Result = core::result::Result; -type LanguageId = String; +type LanguageServerName = String; #[derive(Error, Debug)] pub enum Error { @@ -49,7 +46,7 @@ pub enum Error { Other(#[from] anyhow::Error), } -#[derive(Clone, Copy, Debug, Default)] +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub enum OffsetEncoding { /// UTF-8 code units aka bytes Utf8, @@ -624,23 +621,18 @@ impl Notification { #[derive(Debug)] pub struct Registry { - inner: HashMap)>>, - - counter: AtomicUsize, + inner: HashMap>>, + syn_loader: Arc, + counter: usize, pub incoming: SelectAll>, } -impl Default for Registry { - fn default() -> Self { - Self::new() - } -} - impl Registry { - pub fn new() -> Self { + pub fn new(syn_loader: Arc) -> Self { Self { inner: HashMap::new(), - counter: AtomicUsize::new(0), + syn_loader, + counter: 0, incoming: SelectAll::new(), } } @@ -649,15 +641,43 @@ impl Registry { self.inner .values() .flatten() - .find(|(client_id, _)| client_id == &id) - .map(|(_, client)| client.as_ref()) + .find(|client| client.id() == id) + .map(|client| &**client) } pub fn remove_by_id(&mut self, id: usize) { - self.inner.retain(|_, clients| { - clients.retain(|&(client_id, _)| client_id != id); - !clients.is_empty() - }) + self.inner.retain(|_, language_servers| { + language_servers.retain(|ls| id != ls.id()); + !language_servers.is_empty() + }); + } + + fn start_client( + &mut self, + name: String, + ls_config: &LanguageConfiguration, + doc_path: Option<&std::path::PathBuf>, + root_dirs: &[PathBuf], + enable_snippets: bool, + ) -> Result> { + let config = self + .syn_loader + .language_server_configs() + .get(&name) + .ok_or_else(|| anyhow::anyhow!("Language server '{name}' not defined"))?; + self.counter += 1; + let id = self.counter; + let NewClient(client, incoming) = start_client( + id, + name, + ls_config, + config, + doc_path, + root_dirs, + enable_snippets, + )?; + self.incoming.push(UnboundedReceiverStream::new(incoming)); + Ok(client) } pub fn restart( @@ -666,48 +686,46 @@ impl Registry { doc_path: Option<&std::path::PathBuf>, root_dirs: &[PathBuf], enable_snippets: bool, - ) -> Result>> { - let config = match &language_config.language_server { - Some(config) => config, - None => return Ok(None), - }; - - let scope = language_config.scope.clone(); - - match self.inner.entry(scope) { - Entry::Vacant(_) => Ok(None), - Entry::Occupied(mut entry) => { - // initialize a new client - let id = self.counter.fetch_add(1, Ordering::Relaxed); - - let NewClientResult(client, incoming) = start_client( - id, - language_config, - config, - doc_path, - root_dirs, - enable_snippets, - )?; - self.incoming.push(UnboundedReceiverStream::new(incoming)); + ) -> Result>> { + language_config + .language_servers + .iter() + .filter_map(|config| { + let name = config.name().clone(); + + #[allow(clippy::map_entry)] + if 
self.inner.contains_key(&name) { + let client = match self.start_client( + name.clone(), + language_config, + doc_path, + root_dirs, + enable_snippets, + ) { + Ok(client) => client, + error => return Some(error), + }; + let old_clients = self.inner.insert(name, vec![client.clone()]).unwrap(); - let old_clients = entry.insert(vec![(id, client.clone())]); + // TODO what if there are different language servers for different workspaces, + // I think the language servers will be stopped without being restarted, which is not intended + for old_client in old_clients { + tokio::spawn(async move { + let _ = old_client.force_shutdown().await; + }); + } - for (_, old_client) in old_clients { - tokio::spawn(async move { - let _ = old_client.force_shutdown().await; - }); + Some(Ok(client)) + } else { + None } - - Ok(Some(client)) - } - } + }) + .collect() } - pub fn stop(&mut self, language_config: &LanguageConfiguration) { - let scope = language_config.scope.clone(); - - if let Some(clients) = self.inner.remove(&scope) { - for (_, client) in clients { + pub fn stop(&mut self, name: &str) { + if let Some(clients) = self.inner.remove(name) { + for client in clients { tokio::spawn(async move { let _ = client.force_shutdown().await; }); @@ -721,37 +739,35 @@ impl Registry { doc_path: Option<&std::path::PathBuf>, root_dirs: &[PathBuf], enable_snippets: bool, - ) -> Result>> { - let config = match &language_config.language_server { - Some(config) => config, - None => return Ok(None), - }; - - let clients = self.inner.entry(language_config.scope.clone()).or_default(); - // check if we already have a client for this documents root that we can reuse - if let Some((_, client)) = clients.iter_mut().enumerate().find(|(i, (_, client))| { - client.try_add_doc(&language_config.roots, root_dirs, doc_path, *i == 0) - }) { - return Ok(Some(client.1.clone())); - } - // initialize a new client - let id = self.counter.fetch_add(1, Ordering::Relaxed); - - let NewClientResult(client, incoming) = start_client( - id, - language_config, - config, - doc_path, - root_dirs, - enable_snippets, - )?; - clients.push((id, client.clone())); - self.incoming.push(UnboundedReceiverStream::new(incoming)); - Ok(Some(client)) + ) -> Result>> { + language_config + .language_servers + .iter() + .map(|features| { + let name = features.name(); + if let Some(clients) = self.inner.get_mut(name) { + if let Some((_, client)) = clients.iter_mut().enumerate().find(|(i, client)| { + client.try_add_doc(&language_config.roots, root_dirs, doc_path, *i == 0) + }) { + return Ok(client.clone()); + } + } + let client = self.start_client( + name.clone(), + language_config, + doc_path, + root_dirs, + enable_snippets, + )?; + let clients = self.inner.entry(features.name().clone()).or_default(); + clients.push(client.clone()); + Ok(client) + }) + .collect() } pub fn iter_clients(&self) -> impl Iterator> { - self.inner.values().flatten().map(|(_, client)| client) + self.inner.values().flatten() } } @@ -833,26 +849,28 @@ impl LspProgressMap { } } -struct NewClientResult(Arc, UnboundedReceiver<(usize, Call)>); +struct NewClient(Arc, UnboundedReceiver<(usize, Call)>); /// start_client takes both a LanguageConfiguration and a LanguageServerConfiguration to ensure that /// it is only called when it makes sense. 
fn start_client( id: usize, + name: String, config: &LanguageConfiguration, ls_config: &LanguageServerConfiguration, doc_path: Option<&std::path::PathBuf>, root_dirs: &[PathBuf], enable_snippets: bool, -) -> Result { +) -> Result { let (client, incoming, initialize_notify) = Client::start( &ls_config.command, &ls_config.args, - config.config.clone(), + ls_config.config.clone(), ls_config.environment.clone(), &config.roots, config.workspace_lsp_roots.as_deref().unwrap_or(root_dirs), id, + name, ls_config.timeout, doc_path, )?; @@ -886,7 +904,7 @@ fn start_client( initialize_notify.notify_one(); }); - Ok(NewClientResult(client, incoming)) + Ok(NewClient(client, incoming)) } /// Find an LSP workspace of a file using the following mechanism: diff --git a/helix-lsp/src/transport.rs b/helix-lsp/src/transport.rs index 3e3e06eec4c5..8c38c1773c19 100644 --- a/helix-lsp/src/transport.rs +++ b/helix-lsp/src/transport.rs @@ -38,6 +38,7 @@ enum ServerMessage { #[derive(Debug)] pub struct Transport { id: usize, + name: String, pending_requests: Mutex>>>, } @@ -47,6 +48,7 @@ impl Transport { server_stdin: BufWriter, server_stderr: BufReader, id: usize, + name: String, ) -> ( UnboundedReceiver<(usize, jsonrpc::Call)>, UnboundedSender, @@ -58,6 +60,7 @@ impl Transport { let transport = Self { id, + name, pending_requests: Mutex::new(HashMap::default()), }; @@ -83,6 +86,7 @@ impl Transport { async fn recv_server_message( reader: &mut (impl AsyncBufRead + Unpin + Send), buffer: &mut String, + language_server_name: &str, ) -> Result { let mut content_length = None; loop { @@ -124,7 +128,7 @@ impl Transport { reader.read_exact(&mut content).await?; let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?; - info!("<- {}", msg); + info!("{language_server_name} <- {msg}"); // try parsing as output (server response) or call (server request) let output: serde_json::Result = serde_json::from_str(msg); @@ -135,12 +139,13 @@ impl Transport { async fn recv_server_error( err: &mut (impl AsyncBufRead + Unpin + Send), buffer: &mut String, + language_server_name: &str, ) -> Result<()> { buffer.truncate(0); if err.read_line(buffer).await? == 0 { return Err(Error::StreamClosed); }; - error!("err <- {:?}", buffer); + error!("{language_server_name} err <- {buffer:?}"); Ok(()) } @@ -162,15 +167,17 @@ impl Transport { Payload::Notification(value) => serde_json::to_string(&value)?, Payload::Response(error) => serde_json::to_string(&error)?, }; - self.send_string_to_server(server_stdin, json).await + self.send_string_to_server(server_stdin, json, &self.name) + .await } async fn send_string_to_server( &self, server_stdin: &mut BufWriter, request: String, + language_server_name: &str, ) -> Result<()> { - info!("-> {}", request); + info!("{language_server_name} -> {request}"); // send the headers server_stdin @@ -189,9 +196,13 @@ impl Transport { &self, client_tx: &UnboundedSender<(usize, jsonrpc::Call)>, msg: ServerMessage, + language_server_name: &str, ) -> Result<()> { match msg { - ServerMessage::Output(output) => self.process_request_response(output).await?, + ServerMessage::Output(output) => { + self.process_request_response(output, language_server_name) + .await? 
+ } ServerMessage::Call(call) => { client_tx .send((self.id, call)) @@ -202,14 +213,18 @@ impl Transport { Ok(()) } - async fn process_request_response(&self, output: jsonrpc::Output) -> Result<()> { + async fn process_request_response( + &self, + output: jsonrpc::Output, + language_server_name: &str, + ) -> Result<()> { let (id, result) = match output { jsonrpc::Output::Success(jsonrpc::Success { id, result, .. }) => { - info!("<- {}", result); + info!("{language_server_name} <- {}", result); (id, Ok(result)) } jsonrpc::Output::Failure(jsonrpc::Failure { id, error, .. }) => { - error!("<- {}", error); + error!("{language_server_name} <- {error}"); (id, Err(error.into())) } }; @@ -240,12 +255,17 @@ impl Transport { ) { let mut recv_buffer = String::new(); loop { - match Self::recv_server_message(&mut server_stdout, &mut recv_buffer).await { + match Self::recv_server_message(&mut server_stdout, &mut recv_buffer, &transport.name) + .await + { Ok(msg) => { - match transport.process_server_message(&client_tx, msg).await { + match transport + .process_server_message(&client_tx, msg, &transport.name) + .await + { Ok(_) => {} Err(err) => { - error!("err: <- {:?}", err); + error!("{} err: <- {err:?}", transport.name); break; } }; @@ -270,7 +290,7 @@ impl Transport { params: jsonrpc::Params::None, })); match transport - .process_server_message(&client_tx, notification) + .process_server_message(&client_tx, notification, &transport.name) .await { Ok(_) => {} @@ -281,20 +301,22 @@ impl Transport { break; } Err(err) => { - error!("err: <- {:?}", err); + error!("{} err: <- {err:?}", transport.name); break; } } } } - async fn err(_transport: Arc, mut server_stderr: BufReader) { + async fn err(transport: Arc, mut server_stderr: BufReader) { let mut recv_buffer = String::new(); loop { - match Self::recv_server_error(&mut server_stderr, &mut recv_buffer).await { + match Self::recv_server_error(&mut server_stderr, &mut recv_buffer, &transport.name) + .await + { Ok(_) => {} Err(err) => { - error!("err: <- {:?}", err); + error!("{} err: <- {err:?}", transport.name); break; } } @@ -348,10 +370,11 @@ impl Transport { method: lsp_types::notification::Initialized::METHOD.to_string(), params: jsonrpc::Params::None, })); - match transport.process_server_message(&client_tx, notification).await { + let language_server_name = &transport.name; + match transport.process_server_message(&client_tx, notification, language_server_name).await { Ok(_) => {} Err(err) => { - error!("err: <- {:?}", err); + error!("{language_server_name} err: <- {err:?}"); } } @@ -361,7 +384,7 @@ impl Transport { match transport.send_payload_to_server(&mut server_stdin, msg).await { Ok(_) => {} Err(err) => { - error!("err: <- {:?}", err); + error!("{language_server_name} err: <- {err:?}"); } } } @@ -380,7 +403,7 @@ impl Transport { match transport.send_payload_to_server(&mut server_stdin, msg).await { Ok(_) => {} Err(err) => { - error!("err: <- {:?}", err); + error!("{} err: <- {err:?}", transport.name); } } } diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index b54d6835a5af..45f99e48d275 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -30,6 +30,7 @@ use crate::{ use log::{debug, error, warn}; use std::{ + collections::btree_map::Entry, io::{stdin, stdout}, path::Path, sync::Arc, @@ -564,7 +565,7 @@ impl Application { let doc = doc_mut!(self.editor, &doc_save_event.doc_id); let id = doc.id(); doc.detect_language(loader); - let _ = self.editor.refresh_language_server(id); + 
self.editor.refresh_language_servers(id); } // TODO: fix being overwritten by lsp @@ -662,6 +663,18 @@ impl Application { ) { use helix_lsp::{Call, MethodCall, Notification}; + macro_rules! language_server { + () => { + match self.editor.language_servers.get_by_id(server_id) { + Some(language_server) => language_server, + None => { + warn!("can't find language server with id `{}`", server_id); + return; + } + } + }; + } + match call { Call::Notification(helix_lsp::jsonrpc::Notification { method, params, .. }) => { let notification = match Notification::parse(&method, params) { @@ -677,14 +690,7 @@ impl Application { match notification { Notification::Initialized => { - let language_server = - match self.editor.language_servers.get_by_id(server_id) { - Some(language_server) => language_server, - None => { - warn!("can't find language server with id `{}`", server_id); - return; - } - }; + let language_server = language_server!(); // Trigger a workspace/didChangeConfiguration notification after initialization. // This might not be required by the spec but Neovim does this as well, so it's @@ -694,7 +700,7 @@ impl Application { } let docs = self.editor.documents().filter(|doc| { - doc.language_server().map(|server| server.id()) == Some(server_id) + doc.language_servers().iter().any(|l| l.id() == server_id) }); // trigger textDocument/didOpen for docs that are already open @@ -723,6 +729,7 @@ impl Application { return; } }; + let offset_encoding = language_server!().offset_encoding(); let doc = self.editor.document_by_path_mut(&path).filter(|doc| { if let Some(version) = params.version { if version != doc.version() { @@ -745,18 +752,11 @@ impl Application { use helix_core::diagnostic::{Diagnostic, Range, Severity::*}; use lsp::DiagnosticSeverity; - let language_server = if let Some(language_server) = doc.language_server() { - language_server - } else { - log::warn!("Discarding diagnostic because language server is not initialized: {:?}", diagnostic); - return None; - }; - // TODO: convert inside server let start = if let Some(start) = lsp_pos_to_pos( text, diagnostic.range.start, - language_server.offset_encoding(), + offset_encoding, ) { start } else { @@ -764,11 +764,9 @@ impl Application { return None; }; - let end = if let Some(end) = lsp_pos_to_pos( - text, - diagnostic.range.end, - language_server.offset_encoding(), - ) { + let end = if let Some(end) = + lsp_pos_to_pos(text, diagnostic.range.end, offset_encoding) + { end } else { log::warn!("lsp position out of bounds - {:?}", diagnostic); @@ -807,14 +805,19 @@ impl Application { None => None, }; - let tags = if let Some(ref tags) = diagnostic.tags { - let new_tags = tags.iter().filter_map(|tag| { - match *tag { - lsp::DiagnosticTag::DEPRECATED => Some(DiagnosticTag::Deprecated), - lsp::DiagnosticTag::UNNECESSARY => Some(DiagnosticTag::Unnecessary), - _ => None - } - }).collect(); + let tags = if let Some(tags) = &diagnostic.tags { + let new_tags = tags + .iter() + .filter_map(|tag| match *tag { + lsp::DiagnosticTag::DEPRECATED => { + Some(DiagnosticTag::Deprecated) + } + lsp::DiagnosticTag::UNNECESSARY => { + Some(DiagnosticTag::Unnecessary) + } + _ => None, + }) + .collect(); new_tags } else { @@ -830,11 +833,12 @@ impl Application { tags, source: diagnostic.source.clone(), data: diagnostic.data.clone(), + language_server_id: server_id, }) }) .collect(); - doc.set_diagnostics(diagnostics); + doc.replace_diagnostics(diagnostics, server_id); } // Sort diagnostics first by severity and then by line numbers. 
@@ -842,13 +846,26 @@ impl Application { params .diagnostics .sort_unstable_by_key(|d| (d.severity, d.range.start)); + let diagnostics = params + .diagnostics + .into_iter() + .map(|d| (d, server_id, offset_encoding)) + .collect(); // Insert the original lsp::Diagnostics here because we may have no open document // for diagnosic message and so we can't calculate the exact position. // When using them later in the diagnostics picker, we calculate them on-demand. - self.editor - .diagnostics - .insert(params.uri, params.diagnostics); + match self.editor.diagnostics.entry(params.uri) { + Entry::Occupied(o) => { + let current_diagnostics = o.into_mut(); + // there may entries of other language servers, which is why we can't overwrite the whole entry + current_diagnostics.retain(|(_, lsp_id, _)| *lsp_id != server_id); + current_diagnostics.extend(diagnostics); + } + Entry::Vacant(v) => { + v.insert(diagnostics); + } + }; } Notification::ShowMessage(params) => { log::warn!("unhandled window/showMessage: {:?}", params); @@ -950,10 +967,12 @@ impl Application { .editor .documents_mut() .filter_map(|doc| { - if doc.language_server().map(|server| server.id()) - == Some(server_id) + if doc + .language_servers() + .iter() + .any(|server| server.id() == server_id) { - doc.set_diagnostics(Vec::new()); + doc.clear_diagnostics(server_id); doc.url() } else { None @@ -1029,28 +1048,15 @@ impl Application { })) } Ok(MethodCall::WorkspaceFolders) => { - let language_server = - self.editor.language_servers.get_by_id(server_id).unwrap(); - - Ok(json!(&*language_server.workspace_folders().await)) + Ok(json!(&*language_server!().workspace_folders().await)) } Ok(MethodCall::WorkspaceConfiguration(params)) => { + let language_server = language_server!(); let result: Vec<_> = params .items .iter() - .map(|item| { - let mut config = match &item.scope_uri { - Some(scope) => { - let path = scope.to_file_path().ok()?; - let doc = self.editor.document_by_path(path)?; - doc.language_config()?.config.as_ref()? - } - None => self - .editor - .language_servers - .get_by_id(server_id)? 
- .config()?, - }; + .filter_map(|item| { + let mut config = language_server.config()?; if let Some(section) = item.section.as_ref() { for part in section.split('.') { config = config.get(part)?; @@ -1074,15 +1080,7 @@ impl Application { } }; - let language_server = match self.editor.language_servers.get_by_id(server_id) { - Some(language_server) => language_server, - None => { - warn!("can't find language server with id `{}`", server_id); - return; - } - }; - - tokio::spawn(language_server.reply(id, reply)); + tokio::spawn(language_server!().reply(id, reply)); } Call::Invalid { id } => log::error!("LSP invalid method call id={:?}", id), } diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 5a844e35152e..c7d28e1987c5 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -23,6 +23,7 @@ use helix_core::{ regex::{self, Regex, RegexBuilder}, search::{self, CharMatcher}, selection, shellwords, surround, + syntax::LanguageServerFeature, text_annotations::TextAnnotations, textobject, tree_sitter::Node, @@ -54,13 +55,13 @@ use crate::{ job::Callback, keymap::ReverseKeymap, ui::{ - self, editor::InsertEvent, lsp::SignatureHelp, overlay::overlaid, FilePicker, Picker, - Popup, Prompt, PromptEvent, + self, editor::InsertEvent, lsp::SignatureHelp, overlay::overlaid, CompletionItem, + FilePicker, Picker, Popup, Prompt, PromptEvent, }, }; use crate::job::{self, Jobs}; -use futures_util::StreamExt; +use futures_util::{stream::FuturesUnordered, StreamExt, TryStreamExt}; use std::{collections::HashMap, fmt, future::Future}; use std::{collections::HashSet, num::NonZeroUsize}; @@ -3029,7 +3030,7 @@ fn exit_select_mode(cx: &mut Context) { fn goto_first_diag(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let selection = match doc.diagnostics().first() { + let selection = match doc.shown_diagnostics().next() { Some(diag) => Selection::single(diag.range.start, diag.range.end), None => return, }; @@ -3038,7 +3039,7 @@ fn goto_first_diag(cx: &mut Context) { fn goto_last_diag(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let selection = match doc.diagnostics().last() { + let selection = match doc.shown_diagnostics().last() { Some(diag) => Selection::single(diag.range.start, diag.range.end), None => return, }; @@ -3054,10 +3055,9 @@ fn goto_next_diag(cx: &mut Context) { .cursor(doc.text().slice(..)); let diag = doc - .diagnostics() - .iter() + .shown_diagnostics() .find(|diag| diag.range.start > cursor_pos) - .or_else(|| doc.diagnostics().first()); + .or_else(|| doc.shown_diagnostics().next()); let selection = match diag { Some(diag) => Selection::single(diag.range.start, diag.range.end), @@ -3075,11 +3075,12 @@ fn goto_prev_diag(cx: &mut Context) { .cursor(doc.text().slice(..)); let diag = doc - .diagnostics() - .iter() + .shown_diagnostics() + .collect::>() + .into_iter() .rev() .find(|diag| diag.range.start < cursor_pos) - .or_else(|| doc.diagnostics().last()); + .or_else(|| doc.shown_diagnostics().last()); let selection = match diag { // NOTE: the selection is reversed because we're jumping to the @@ -3234,60 +3235,72 @@ pub mod insert { use helix_lsp::lsp; // if ch matches completion char, trigger completion let doc = doc_mut!(cx.editor); - let language_server = match doc.language_server() { - Some(language_server) => language_server, - None => return, - }; + let trigger_completion = doc + .language_servers_with_feature(LanguageServerFeature::Completion) + .iter() + .any(|ls| { + let capabilities = ls.capabilities(); - let capabilities = 
language_server.capabilities(); + // TODO: what if trigger is multiple chars long + matches!(&capabilities.completion_provider, Some(lsp::CompletionOptions { + trigger_characters: Some(triggers), + .. + }) if triggers.iter().any(|trigger| trigger.contains(ch))) + }); - if let Some(lsp::CompletionOptions { - trigger_characters: Some(triggers), - .. - }) = &capabilities.completion_provider - { - // TODO: what if trigger is multiple chars long - if triggers.iter().any(|trigger| trigger.contains(ch)) { - cx.editor.clear_idle_timer(); - super::completion(cx); - } + if trigger_completion { + cx.editor.clear_idle_timer(); + super::completion(cx); } } fn signature_help(cx: &mut Context, ch: char) { + use futures_util::FutureExt; use helix_lsp::lsp; // if ch matches signature_help char, trigger - let doc = doc_mut!(cx.editor); - // The language_server!() macro is not used here since it will - // print an "LSP not active for current buffer" message on - // every keypress. - let language_server = match doc.language_server() { - Some(language_server) => language_server, - None => return, - }; - - let capabilities = language_server.capabilities(); + let (view, doc) = current!(cx.editor); + // lsp doesn't tell us when to close the signature help, so we request + // the help information again after common close triggers which should + // return None, which in turn closes the popup. + let close_triggers = &[')', ';', '.']; + // TODO support multiple language servers (not just the first that is found) + let future = doc + .language_servers_with_feature(LanguageServerFeature::SignatureHelp) + .iter() + .find_map(|ls| { + let capabilities = ls.capabilities(); + + match capabilities { + lsp::ServerCapabilities { + signature_help_provider: + Some(lsp::SignatureHelpOptions { + trigger_characters: Some(triggers), + // TODO: retrigger_characters + .. + }), + .. + } if triggers.iter().any(|trigger| trigger.contains(ch)) + || close_triggers.contains(&ch) => + { + let pos = doc.position(view.id, ls.offset_encoding()); + ls.text_document_signature_help(doc.identifier(), pos, None) + } + _ if close_triggers.contains(&ch) => ls.text_document_signature_help( + doc.identifier(), + doc.position(view.id, ls.offset_encoding()), + None, + ), + // TODO: what if trigger is multiple chars long + _ => None, + } + }); - if let lsp::ServerCapabilities { - signature_help_provider: - Some(lsp::SignatureHelpOptions { - trigger_characters: Some(triggers), - // TODO: retrigger_characters - .. - }), - .. - } = capabilities - { - // TODO: what if trigger is multiple chars long - let is_trigger = triggers.iter().any(|trigger| trigger.contains(ch)); - // lsp doesn't tell us when to close the signature help, so we request - // the help information again after common close triggers which should - // return None, which in turn closes the popup. 
- let close_triggers = &[')', ';', '.']; - - if is_trigger || close_triggers.contains(&ch) { - super::signature_help_impl(cx, SignatureHelpInvoked::Automatic); - } + if let Some(future) = future { + super::signature_help_impl_with_future( + cx, + future.boxed(), + SignatureHelpInvoked::Automatic, + ) } } @@ -3301,7 +3314,7 @@ pub mod insert { Some(transaction) } - use helix_core::auto_pairs; + use helix_core::{auto_pairs, syntax::LanguageServerFeature}; pub fn insert_char(cx: &mut Context, c: char) { let (view, doc) = current_ref!(cx.editor); @@ -4046,55 +4059,55 @@ fn format_selections(cx: &mut Context) { use helix_lsp::{lsp, util::range_to_lsp_range}; let (view, doc) = current!(cx.editor); + let view_id = view.id; // via lsp if available // TODO: else via tree-sitter indentation calculations - let language_server = match doc.language_server() { - Some(language_server) => language_server, - None => return, - }; - - let ranges: Vec = doc - .selection(view.id) - .iter() - .map(|range| range_to_lsp_range(doc.text(), *range, language_server.offset_encoding())) - .collect(); - - if ranges.len() != 1 { + if doc.selection(view_id).len() != 1 { cx.editor .set_error("format_selections only supports a single selection for now"); return; } - // TODO: handle fails - // TODO: concurrent map over all ranges - - let range = ranges[0]; - - let request = match language_server.text_document_range_formatting( - doc.identifier(), - range, - lsp::FormattingOptions::default(), - None, - ) { - Some(future) => future, + let (future, offset_encoding) = match doc + .language_servers_with_feature(LanguageServerFeature::Format) + .iter() + .find_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let ranges: Vec = doc + .selection(view_id) + .iter() + .map(|range| range_to_lsp_range(doc.text(), *range, offset_encoding)) + .collect(); + + // TODO: handle fails + // TODO: concurrent map over all ranges + + let range = ranges[0]; + + let future = language_server.text_document_range_formatting( + doc.identifier(), + range, + lsp::FormattingOptions::default(), + None, + )?; + Some((future, offset_encoding)) + }) { + Some(future_offset_encoding) => future_offset_encoding, None => { cx.editor - .set_error("Language server does not support range formatting"); + .set_error("No language server supports range formatting"); return; } }; - let edits = tokio::task::block_in_place(|| helix_lsp::block_on(request)).unwrap_or_default(); + let edits = tokio::task::block_in_place(|| helix_lsp::block_on(future)).unwrap_or_default(); - let transaction = helix_lsp::util::generate_transaction_from_edits( - doc.text(), - edits, - language_server.offset_encoding(), - ); + let transaction = + helix_lsp::util::generate_transaction_from_edits(doc.text(), edits, offset_encoding); - doc.apply(&transaction, view.id); + doc.apply(&transaction, view_id); } fn join_selections_impl(cx: &mut Context, select_space: bool) { @@ -4231,21 +4244,45 @@ pub fn completion(cx: &mut Context) { doc.savepoint(view) }; - let language_server = match doc.language_server() { - Some(language_server) => language_server, - None => return, - }; - - let offset_encoding = language_server.offset_encoding(); let text = savepoint.text.clone(); let cursor = savepoint.cursor(); - let pos = pos_to_lsp_pos(&text, cursor, offset_encoding); + let mut futures: FuturesUnordered<_> = doc + .language_servers_with_feature(LanguageServerFeature::Completion) + .iter() + // TODO this should probably already been filtered in something like 
"language_servers_with_feature" + .filter_map(|language_server| { + let language_server_id = language_server.id(); + let offset_encoding = language_server.offset_encoding(); + let pos = pos_to_lsp_pos(doc.text(), cursor, helix_lsp::OffsetEncoding::Utf8); + let completion_request = language_server.completion(doc.identifier(), pos, None)?; + + Some(async move { + let json = completion_request.await?; + let response: Option = serde_json::from_value(json)?; + + let items = match response { + Some(lsp::CompletionResponse::Array(items)) => items, + // TODO: do something with is_incomplete + Some(lsp::CompletionResponse::List(lsp::CompletionList { + is_incomplete: _is_incomplete, + items, + })) => items, + None => Vec::new(), + } + .into_iter() + .map(|item| CompletionItem { + item, + language_server_id, + offset_encoding, + resolved: false, + }) + .collect(); - let future = match language_server.completion(doc.identifier(), pos, None) { - Some(future) => future, - None => return, - }; + anyhow::Ok(items) + }) + }) + .collect(); // setup a channel that allows the request to be canceled let (tx, rx) = oneshot::channel(); @@ -4254,12 +4291,20 @@ pub fn completion(cx: &mut Context) { // and the associated request is automatically dropped cx.editor.completion_request_handle = Some(tx); let future = async move { + let items_future = async move { + let mut items = Vec::new(); + // TODO if one completion request errors, all other completion requests are discarded (even if they're valid) + while let Some(mut lsp_items) = futures.try_next().await? { + items.append(&mut lsp_items); + } + anyhow::Ok(items) + }; tokio::select! { biased; _ = rx => { - Ok(serde_json::Value::Null) + Ok(Vec::new()) } - res = future => { + res = items_future => { res } } @@ -4293,9 +4338,9 @@ pub fn completion(cx: &mut Context) { }, )); - cx.callback( - future, - move |editor, compositor, response: Option| { + cx.jobs.callback(async move { + let items = future.await?; + let call = move |editor: &mut Editor, compositor: &mut Compositor| { let (view, doc) = current_ref!(editor); // check if the completion request is stale. // @@ -4306,16 +4351,6 @@ pub fn completion(cx: &mut Context) { return; } - let items = match response { - Some(lsp::CompletionResponse::Array(items)) => items, - // TODO: do something with is_incomplete - Some(lsp::CompletionResponse::List(lsp::CompletionList { - is_incomplete: _is_incomplete, - items, - })) => items, - None => Vec::new(), - }; - if items.is_empty() { // editor.set_error("No completion available"); return; @@ -4326,7 +4361,6 @@ pub fn completion(cx: &mut Context) { editor, savepoint, items, - offset_encoding, start_offset, trigger_offset, size, @@ -4340,8 +4374,9 @@ pub fn completion(cx: &mut Context) { { compositor.remove(SignatureHelp::ID); } - }, - ); + }; + Ok(Callback::EditorCompositor(Box::new(call))) + }); } // comments @@ -5141,7 +5176,7 @@ async fn shell_impl_async( helix_view::document::to_writer(&mut stdin, (encoding::UTF_8, false), &input) .await?; } - Ok::<_, anyhow::Error>(()) + anyhow::Ok(()) }); let (output, _) = tokio::join! 
{ process.wait_with_output(), diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index 0ad6fb7eb4b4..efef1211c4f6 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -1,4 +1,4 @@ -use futures_util::FutureExt; +use futures_util::{future::BoxFuture, stream::FuturesUnordered, FutureExt}; use helix_lsp::{ block_on, lsp::{ @@ -8,6 +8,8 @@ use helix_lsp::{ util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range}, OffsetEncoding, }; +use serde_json::Value; +use tokio_stream::StreamExt; use tui::{ text::{Span, Spans}, widgets::Row, @@ -15,7 +17,9 @@ use tui::{ use super::{align_view, push_jump, Align, Context, Editor, Open}; -use helix_core::{path, text_annotations::InlineAnnotation, Selection}; +use helix_core::{ + path, syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection, +}; use helix_view::{ document::{DocumentInlayHints, DocumentInlayHintsId, Mode}, editor::Action, @@ -25,6 +29,7 @@ use helix_view::{ use crate::{ compositor::{self, Compositor}, + job::Callback, ui::{ self, lsp::SignatureHelp, overlay::overlaid, DynamicPicker, FileLocation, FilePicker, Popup, PromptEvent, @@ -35,24 +40,6 @@ use std::{ cmp::Ordering, collections::BTreeMap, fmt::Write, future::Future, path::PathBuf, sync::Arc, }; -/// Gets the language server that is attached to a document, and -/// if it's not active displays a status message. Using this macro -/// in a context where the editor automatically queries the LSP -/// (instead of when the user explicitly does so via a keybind like -/// `gd`) will spam the "LSP inactive" status message confusingly. -#[macro_export] -macro_rules! language_server { - ($editor:expr, $doc:expr) => { - match $doc.language_server() { - Some(language_server) => language_server, - None => { - $editor.set_status("Language server not active for current buffer"); - return; - } - } - }; -} - impl ui::menu::Item for lsp::Location { /// Current working directory. 
type Data = PathBuf; @@ -87,20 +74,30 @@ impl ui::menu::Item for lsp::Location { } } -impl ui::menu::Item for lsp::SymbolInformation { +struct SymbolInformationItem { + symbol: lsp::SymbolInformation, + offset_encoding: OffsetEncoding, +} + +impl ui::menu::Item for SymbolInformationItem { /// Path to currently focussed document type Data = Option; fn format(&self, current_doc_path: &Self::Data) -> Row { - if current_doc_path.as_ref() == Some(&self.location.uri) { - self.name.as_str().into() + if current_doc_path.as_ref() == Some(&self.symbol.location.uri) { + self.symbol.name.as_str().into() } else { - match self.location.uri.to_file_path() { + match self.symbol.location.uri.to_file_path() { Ok(path) => { let get_relative_path = path::get_relative_path(path.as_path()); - format!("{} ({})", &self.name, get_relative_path.to_string_lossy()).into() + format!( + "{} ({})", + &self.symbol.name, + get_relative_path.to_string_lossy() + ) + .into() } - Err(_) => format!("{} ({})", &self.name, &self.location.uri).into(), + Err(_) => format!("{} ({})", &self.symbol.name, &self.symbol.location.uri).into(), } } } @@ -116,6 +113,7 @@ struct DiagnosticStyles { struct PickerDiagnostic { url: lsp::Url, diag: lsp::Diagnostic, + offset_encoding: OffsetEncoding, } impl ui::menu::Item for PickerDiagnostic { @@ -211,21 +209,19 @@ fn jump_to_location( align_view(doc, view, Align::Center); } -fn sym_picker( - symbols: Vec, - current_path: Option, - offset_encoding: OffsetEncoding, -) -> FilePicker { +type SymbolPicker = FilePicker; + +fn sym_picker(symbols: Vec, current_path: Option) -> SymbolPicker { // TODO: drop current_path comparison and instead use workspace: bool flag? FilePicker::new( symbols, current_path.clone(), - move |cx, symbol, action| { + move |cx, item, action| { let (view, doc) = current!(cx.editor); push_jump(view, doc); - if current_path.as_ref() != Some(&symbol.location.uri) { - let uri = &symbol.location.uri; + if current_path.as_ref() != Some(&item.symbol.location.uri) { + let uri = &item.symbol.location.uri; let path = match uri.to_file_path() { Ok(path) => path, Err(_) => { @@ -245,7 +241,7 @@ fn sym_picker( let (view, doc) = current!(cx.editor); if let Some(range) = - lsp_range_to_range(doc.text(), symbol.location.range, offset_encoding) + lsp_range_to_range(doc.text(), item.symbol.location.range, item.offset_encoding) { // we flip the range so that the cursor sits on the start of the symbol // (for example start of the function). @@ -253,7 +249,7 @@ fn sym_picker( align_view(doc, view, Align::Center); } }, - move |_editor, symbol| Some(location_to_file_location(&symbol.location)), + move |_editor, item| Some(location_to_file_location(&item.symbol.location)), ) .truncate_start(false) } @@ -266,10 +262,9 @@ enum DiagnosticsFormat { fn diag_picker( cx: &Context, - diagnostics: BTreeMap>, + diagnostics: BTreeMap>, current_path: Option, format: DiagnosticsFormat, - offset_encoding: OffsetEncoding, ) -> FilePicker { // TODO: drop current_path comparison and instead use workspace: bool flag? 
@@ -277,10 +272,11 @@ fn diag_picker( let mut flat_diag = Vec::new(); for (url, diags) in diagnostics { flat_diag.reserve(diags.len()); - for diag in diags { + for (diag, _, offset_encoding) in diags { flat_diag.push(PickerDiagnostic { url: url.clone(), diag, + offset_encoding, }); } } @@ -295,7 +291,13 @@ fn diag_picker( FilePicker::new( flat_diag, (styles, format), - move |cx, PickerDiagnostic { url, diag }, action| { + move |cx, + PickerDiagnostic { + url, + diag, + offset_encoding, + }, + action| { if current_path.as_ref() == Some(url) { let (view, doc) = current!(cx.editor); push_jump(view, doc); @@ -306,14 +308,14 @@ fn diag_picker( let (view, doc) = current!(cx.editor); - if let Some(range) = lsp_range_to_range(doc.text(), diag.range, offset_encoding) { + if let Some(range) = lsp_range_to_range(doc.text(), diag.range, *offset_encoding) { // we flip the range so that the cursor sits on the start of the symbol // (for example start of the function). doc.set_selection(view.id, Selection::single(range.head, range.anchor)); align_view(doc, view, Align::Center); } }, - move |_editor, PickerDiagnostic { url, diag }| { + move |_editor, PickerDiagnostic { url, diag, .. }| { let location = lsp::Location::new(url.clone(), diag.range); Some(location_to_file_location(&location)) }, @@ -323,126 +325,149 @@ fn diag_picker( pub fn symbol_picker(cx: &mut Context) { fn nested_to_flat( - list: &mut Vec, + list: &mut Vec, file: &lsp::TextDocumentIdentifier, symbol: lsp::DocumentSymbol, + offset_encoding: OffsetEncoding, ) { #[allow(deprecated)] - list.push(lsp::SymbolInformation { - name: symbol.name, - kind: symbol.kind, - tags: symbol.tags, - deprecated: symbol.deprecated, - location: lsp::Location::new(file.uri.clone(), symbol.selection_range), - container_name: None, + list.push(SymbolInformationItem { + symbol: lsp::SymbolInformation { + name: symbol.name, + kind: symbol.kind, + tags: symbol.tags, + deprecated: symbol.deprecated, + location: lsp::Location::new(file.uri.clone(), symbol.selection_range), + container_name: None, + }, + offset_encoding, }); for child in symbol.children.into_iter().flatten() { - nested_to_flat(list, file, child); + nested_to_flat(list, file, child, offset_encoding); } } let doc = doc!(cx.editor); - let language_server = language_server!(cx.editor, doc); + let mut futures: FuturesUnordered<_> = doc + .language_servers_with_feature(LanguageServerFeature::DocumentSymbols) + .iter() + .filter_map(|ls| { + let request = ls.document_symbols(doc.identifier())?; + Some((request, ls.offset_encoding(), doc.identifier())) + }) + .map(|(request, offset_encoding, doc_id)| async move { + let json = request.await?; + let response: Option = serde_json::from_value(json)?; + let symbols = match response { + Some(symbols) => symbols, + None => return anyhow::Ok(vec![]), + }; + // lsp has two ways to represent symbols (flat/nested) + // convert the nested variant to flat, so that we have a homogeneous list + let symbols = match symbols { + lsp::DocumentSymbolResponse::Flat(symbols) => symbols + .into_iter() + .map(|symbol| SymbolInformationItem { + symbol, + offset_encoding, + }) + .collect(), + lsp::DocumentSymbolResponse::Nested(symbols) => { + let mut flat_symbols = Vec::new(); + for symbol in symbols { + nested_to_flat(&mut flat_symbols, &doc_id, symbol, offset_encoding) + } + flat_symbols + } + }; + Ok(symbols) + }) + .collect(); let current_url = doc.url(); - let offset_encoding = language_server.offset_encoding(); - let future = match 
language_server.document_symbols(doc.identifier()) { - Some(future) => future, - None => { - cx.editor - .set_error("Language server does not support document symbols"); - return; - } - }; - - cx.callback( - future, - move |editor, compositor, response: Option| { - if let Some(symbols) = response { - // lsp has two ways to represent symbols (flat/nested) - // convert the nested variant to flat, so that we have a homogeneous list - let symbols = match symbols { - lsp::DocumentSymbolResponse::Flat(symbols) => symbols, - lsp::DocumentSymbolResponse::Nested(symbols) => { - let doc = doc!(editor); - let mut flat_symbols = Vec::new(); - for symbol in symbols { - nested_to_flat(&mut flat_symbols, &doc.identifier(), symbol) - } - flat_symbols - } - }; + if futures.is_empty() { + cx.editor + .set_error("No Language server does support document symbols"); + return; + } - let picker = sym_picker(symbols, current_url, offset_encoding); - compositor.push(Box::new(overlaid(picker))) + cx.jobs.callback(async move { + let mut symbols = Vec::new(); + // TODO if one symbol request errors, all other requests are discarded (even if they're valid) + while let Some(mut lsp_items) = futures.try_next().await? { + symbols.append(&mut lsp_items); + } + let call = move |editor: &mut Editor, compositor: &mut Compositor| { + if symbols.is_empty() { + editor.set_error("No symbols available"); + return; } - }, - ) + let picker = sym_picker(symbols, current_url); + compositor.push(Box::new(overlaid(picker))) + }; + + Ok(Callback::EditorCompositor(Box::new(call))) + }); } pub fn workspace_symbol_picker(cx: &mut Context) { let doc = doc!(cx.editor); - let current_url = doc.url(); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); - let future = match language_server.workspace_symbols("".to_string()) { - Some(future) => future, - None => { - cx.editor - .set_error("Language server does not support workspace symbols"); - return; + + let get_symbols = move |pattern: String, editor: &mut Editor| { + let doc = doc!(editor); + let mut futures: FuturesUnordered<_> = doc + .language_servers_with_feature(LanguageServerFeature::WorkspaceSymbols) + .iter() + .filter_map(|ls| Some((ls.workspace_symbols(pattern.clone())?, ls.offset_encoding()))) + .map(|(request, offset_encoding)| async move { + let json = request.await?; + + let response = serde_json::from_value::>>(json)? + .unwrap_or_default() + .into_iter() + .map(|symbol| SymbolInformationItem { + symbol, + offset_encoding, + }) + .collect(); + + anyhow::Ok(response) + }) + .collect(); + + if futures.is_empty() { + editor.set_error("No Language server does support workspace symbols"); } - }; - cx.callback( - future, - move |_editor, compositor, response: Option>| { - let symbols = response.unwrap_or_default(); - let picker = sym_picker(symbols, current_url, offset_encoding); - let get_symbols = |query: String, editor: &mut Editor| { - let doc = doc!(editor); - let language_server = match doc.language_server() { - Some(s) => s, - None => { - // This should not generally happen since the picker will not - // even open in the first place if there is no server. - return async move { Err(anyhow::anyhow!("LSP not active")) }.boxed(); - } - }; - let symbol_request = match language_server.workspace_symbols(query) { - Some(future) => future, - None => { - // This should also not happen since the language server must have - // supported workspace symbols before to reach this block. 
- return async move { - Err(anyhow::anyhow!( - "Language server does not support workspace symbols" - )) - } - .boxed(); - } - }; + async move { + let mut symbols = Vec::new(); + // TODO if one symbol request errors, all other requests are discarded (even if they're valid) + while let Some(mut lsp_items) = futures.try_next().await? { + symbols.append(&mut lsp_items); + } + anyhow::Ok(symbols) + } + .boxed() + }; - let future = async move { - let json = symbol_request.await?; - let response: Option> = - serde_json::from_value(json)?; + let current_url = doc.url(); + let initial_symbols = get_symbols("".to_owned(), cx.editor); - Ok(response.unwrap_or_default()) - }; - future.boxed() - }; + cx.jobs.callback(async move { + let symbols = initial_symbols.await?; + let call = move |_editor: &mut Editor, compositor: &mut Compositor| { + let picker = sym_picker(symbols, current_url); let dyn_picker = DynamicPicker::new(picker, Box::new(get_symbols)); compositor.push(Box::new(overlaid(dyn_picker))) - }, - ) + }; + + Ok(Callback::EditorCompositor(Box::new(call))) + }); } pub fn diagnostics_picker(cx: &mut Context) { let doc = doc!(cx.editor); - let language_server = language_server!(cx.editor, doc); if let Some(current_url) = doc.url() { - let offset_encoding = language_server.offset_encoding(); let diagnostics = cx .editor .diagnostics @@ -454,7 +479,6 @@ pub fn diagnostics_picker(cx: &mut Context) { [(current_url.clone(), diagnostics)].into(), Some(current_url), DiagnosticsFormat::HideSourcePath, - offset_encoding, ); cx.push_layer(Box::new(overlaid(picker))); } @@ -462,24 +486,28 @@ pub fn diagnostics_picker(cx: &mut Context) { pub fn workspace_diagnostics_picker(cx: &mut Context) { let doc = doc!(cx.editor); - let language_server = language_server!(cx.editor, doc); let current_url = doc.url(); - let offset_encoding = language_server.offset_encoding(); + // TODO not yet filtered by LanguageServerFeature, need to do something similar as Document::shown_diagnostics here for all open documents let diagnostics = cx.editor.diagnostics.clone(); let picker = diag_picker( cx, diagnostics, current_url, DiagnosticsFormat::ShowSourcePath, - offset_encoding, ); cx.push_layer(Box::new(overlaid(picker))); } -impl ui::menu::Item for lsp::CodeActionOrCommand { +struct CodeActionOrCommandItem { + lsp_item: lsp::CodeActionOrCommand, + offset_encoding: OffsetEncoding, + language_server_id: usize, +} + +impl ui::menu::Item for CodeActionOrCommandItem { type Data = (); fn format(&self, _data: &Self::Data) -> Row { - match self { + match &self.lsp_item { lsp::CodeActionOrCommand::CodeAction(action) => action.title.as_str().into(), lsp::CodeActionOrCommand::Command(command) => command.title.as_str().into(), } @@ -546,45 +574,40 @@ fn action_fixes_diagnostics(action: &CodeActionOrCommand) -> bool { pub fn code_action(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let selection_range = doc.selection(view.id).primary(); - let offset_encoding = language_server.offset_encoding(); - - let range = range_to_lsp_range(doc.text(), selection_range, offset_encoding); - let future = match language_server.code_actions( - doc.identifier(), - range, - // Filter and convert overlapping diagnostics - lsp::CodeActionContext { - diagnostics: doc - .diagnostics() - .iter() - .filter(|&diag| { - selection_range - .overlaps(&helix_core::Range::new(diag.range.start, diag.range.end)) - }) - .map(|diag| diagnostic_to_lsp_diagnostic(doc.text(), diag, offset_encoding)) - 
.collect(), - only: None, - trigger_kind: Some(CodeActionTriggerKind::INVOKED), - }, - ) { - Some(future) => future, - None => { - cx.editor - .set_error("Language server does not support code actions"); - return; - } - }; - - cx.callback( - future, - move |editor, compositor, response: Option| { + let mut futures: FuturesUnordered<_> = doc + .language_servers_with_feature(LanguageServerFeature::CodeAction) + .iter() + // TODO this should probably already been filtered in something like "language_servers_with_feature" + .filter_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let language_server_id = language_server.id(); + let range = range_to_lsp_range(doc.text(), selection_range, offset_encoding); + // Filter and convert overlapping diagnostics + let code_action_context = lsp::CodeActionContext { + diagnostics: doc + .diagnostics() + .iter() + .filter(|&diag| { + selection_range + .overlaps(&helix_core::Range::new(diag.range.start, diag.range.end)) + }) + .map(|diag| diagnostic_to_lsp_diagnostic(doc.text(), diag, offset_encoding)) + .collect(), + only: None, + trigger_kind: Some(CodeActionTriggerKind::INVOKED), + }; + let code_action_request = + language_server.code_actions(doc.identifier(), range, code_action_context)?; + Some((code_action_request, offset_encoding, language_server_id)) + }) + .map(|(request, offset_encoding, ls_id)| async move { + let json = request.await?; + let response: Option = serde_json::from_value(json)?; let mut actions = match response { Some(a) => a, - None => return, + None => return anyhow::Ok(Vec::new()), }; // remove disabled code actions @@ -596,11 +619,6 @@ pub fn code_action(cx: &mut Context) { ) }); - if actions.is_empty() { - editor.set_status("No code actions available"); - return; - } - // Sort codeactions into a useful order. This behaviour is only partially described in the LSP spec. // Many details are modeled after vscode because language servers are usually tested against it. // VScode sorts the codeaction two times: @@ -636,18 +654,48 @@ pub fn code_action(cx: &mut Context) { .reverse() }); - let mut picker = ui::Menu::new(actions, (), move |editor, code_action, event| { + Ok(actions + .into_iter() + .map(|lsp_item| CodeActionOrCommandItem { + lsp_item, + offset_encoding, + language_server_id: ls_id, + }) + .collect()) + }) + .collect(); + + if futures.is_empty() { + cx.editor + .set_error("No Language server does support code actions"); + return; + } + + cx.jobs.callback(async move { + let mut actions = Vec::new(); + // TODO if one code action request errors, all other requests are ignored (even if they're valid) + while let Some(mut lsp_items) = futures.try_next().await? 
{ + actions.append(&mut lsp_items); + } + + let call = move |editor: &mut Editor, compositor: &mut Compositor| { + if actions.is_empty() { + editor.set_error("No code actions available"); + return; + } + let mut picker = ui::Menu::new(actions, (), move |editor, action, event| { if event != PromptEvent::Validate { return; } // always present here - let code_action = code_action.unwrap(); + let action = action.unwrap(); + let offset_encoding = action.offset_encoding; - match code_action { + match &action.lsp_item { lsp::CodeActionOrCommand::Command(command) => { log::debug!("code action command: {:?}", command); - execute_lsp_command(editor, command.clone()); + execute_lsp_command(editor, action.language_server_id, command.clone()); } lsp::CodeActionOrCommand::CodeAction(code_action) => { log::debug!("code action: {:?}", code_action); @@ -659,7 +707,7 @@ pub fn code_action(cx: &mut Context) { // if code action provides both edit and command first the edit // should be applied and then the command if let Some(command) = &code_action.command { - execute_lsp_command(editor, command.clone()); + execute_lsp_command(editor, action.language_server_id, command.clone()); } } } @@ -668,8 +716,10 @@ pub fn code_action(cx: &mut Context) { let popup = Popup::new("code-action", picker).with_scrollbar(false); compositor.replace_or_push("code-action", popup); - }, - ) + }; + + Ok(Callback::EditorCompositor(Box::new(call))) + }); } impl ui::menu::Item for lsp::Command { @@ -679,13 +729,14 @@ impl ui::menu::Item for lsp::Command { } } -pub fn execute_lsp_command(editor: &mut Editor, cmd: lsp::Command) { - let doc = doc!(editor); - let language_server = language_server!(editor, doc); - +pub fn execute_lsp_command(editor: &mut Editor, language_server_id: usize, cmd: lsp::Command) { // the command is executed on the server and communicated back // to the client asynchronously using workspace edits - let future = match language_server.command(cmd) { + let future = match editor + .language_servers + .get_by_id(language_server_id) + .and_then(|language_server| language_server.command(cmd)) + { Some(future) => future, None => { editor.set_error("Language server does not support executing commands"); @@ -977,18 +1028,22 @@ fn to_locations(definitions: Option) -> Vec future, + let (future, offset_encoding) = match doc + .language_servers_with_feature(LanguageServerFeature::GotoDeclaration) + .iter() + .find_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let pos = doc.position(view.id, offset_encoding); + let future = language_server.goto_declaration(doc.identifier(), pos, None)?; + Some((future, offset_encoding)) + }) { + Some(future_offset_encoding) => future_offset_encoding, None => { cx.editor - .set_error("Language server does not support goto-declaration"); + .set_error("No language server supports goto-declaration"); return; } }; @@ -1004,16 +1059,19 @@ pub fn goto_declaration(cx: &mut Context) { pub fn goto_definition(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); - - let pos = doc.position(view.id, offset_encoding); - - let future = match language_server.goto_definition(doc.identifier(), pos, None) { - Some(future) => future, + let (future, offset_encoding) = match doc + .language_servers_with_feature(LanguageServerFeature::GotoDefinition) + .iter() + .find_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let 
pos = doc.position(view.id, offset_encoding); + let future = language_server.goto_definition(doc.identifier(), pos, None)?; + Some((future, offset_encoding)) + }) { + Some(future_offset_encoding) => future_offset_encoding, None => { cx.editor - .set_error("Language server does not support goto-definition"); + .set_error("No language server supports goto-definition"); return; } }; @@ -1029,16 +1087,19 @@ pub fn goto_definition(cx: &mut Context) { pub fn goto_type_definition(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); - - let pos = doc.position(view.id, offset_encoding); - - let future = match language_server.goto_type_definition(doc.identifier(), pos, None) { - Some(future) => future, + let (future, offset_encoding) = match doc + .language_servers_with_feature(LanguageServerFeature::GotoTypeDefinition) + .iter() + .find_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let pos = doc.position(view.id, offset_encoding); + let future = language_server.goto_type_definition(doc.identifier(), pos, None)?; + Some((future, offset_encoding)) + }) { + Some(future_offset_encoding) => future_offset_encoding, None => { cx.editor - .set_error("Language server does not support goto-type-definition"); + .set_error("No language server supports goto-type-definition"); return; } }; @@ -1054,16 +1115,19 @@ pub fn goto_type_definition(cx: &mut Context) { pub fn goto_implementation(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); - - let pos = doc.position(view.id, offset_encoding); - - let future = match language_server.goto_implementation(doc.identifier(), pos, None) { - Some(future) => future, + let (future, offset_encoding) = match doc + .language_servers_with_feature(LanguageServerFeature::GotoImplementation) + .iter() + .find_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let pos = doc.position(view.id, offset_encoding); + let future = language_server.goto_implementation(doc.identifier(), pos, None)?; + Some((future, offset_encoding)) + }) { + Some(future_offset_encoding) => future_offset_encoding, None => { cx.editor - .set_error("Language server does not support goto-implementation"); + .set_error("no language server supports goto-implementation"); return; } }; @@ -1080,21 +1144,24 @@ pub fn goto_implementation(cx: &mut Context) { pub fn goto_reference(cx: &mut Context) { let config = cx.editor.config(); let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); - - let pos = doc.position(view.id, offset_encoding); - - let future = match language_server.goto_reference( - doc.identifier(), - pos, - config.lsp.goto_reference_include_declaration, - None, - ) { - Some(future) => future, + let (future, offset_encoding) = match doc + .language_servers_with_feature(LanguageServerFeature::GotoReference) + .iter() + .find_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let pos = doc.position(view.id, offset_encoding); + let future = language_server.goto_reference( + doc.identifier(), + pos, + config.lsp.goto_reference_include_declaration, + None, + )?; + Some((future, offset_encoding)) + }) { + Some(future_offset_encoding) => future_offset_encoding, None 
=> { cx.editor - .set_error("Language server does not support goto-reference"); + .set_error("language server supports goto-reference"); return; } }; @@ -1108,7 +1175,7 @@ pub fn goto_reference(cx: &mut Context) { ); } -#[derive(PartialEq, Eq)] +#[derive(PartialEq, Eq, Clone, Copy)] pub enum SignatureHelpInvoked { Manual, Automatic, @@ -1120,35 +1187,34 @@ pub fn signature_help(cx: &mut Context) { pub fn signature_help_impl(cx: &mut Context, invoked: SignatureHelpInvoked) { let (view, doc) = current!(cx.editor); - let was_manually_invoked = invoked == SignatureHelpInvoked::Manual; - let language_server = match doc.language_server() { - Some(language_server) => language_server, + // TODO merge multiple language server signature help into one instead of just taking the first language server that supports it + let future = match doc + .language_servers_with_feature(LanguageServerFeature::SignatureHelp) + .iter() + .find_map(|language_server| { + let pos = doc.position(view.id, language_server.offset_encoding()); + language_server.text_document_signature_help(doc.identifier(), pos, None) + }) { + Some(future) => future.boxed(), None => { // Do not show the message if signature help was invoked // automatically on backspace, trigger characters, etc. - if was_manually_invoked { + if invoked == SignatureHelpInvoked::Manual { cx.editor - .set_status("Language server not active for current buffer"); - } - return; - } - }; - let offset_encoding = language_server.offset_encoding(); - - let pos = doc.position(view.id, offset_encoding); - - let future = match language_server.text_document_signature_help(doc.identifier(), pos, None) { - Some(f) => f, - None => { - if was_manually_invoked { - cx.editor - .set_error("Language server does not support signature-help"); + .set_error("No language server supports signature-help"); } return; } }; + signature_help_impl_with_future(cx, future, invoked); +} +pub fn signature_help_impl_with_future( + cx: &mut Context, + future: BoxFuture<'static, helix_lsp::Result>, + invoked: SignatureHelpInvoked, +) { cx.callback( future, move |editor, compositor, response: Option| { @@ -1156,7 +1222,7 @@ pub fn signature_help_impl(cx: &mut Context, invoked: SignatureHelpInvoked) { if !(config.lsp.auto_signature_help || SignatureHelp::visible_popup(compositor).is_some() - || was_manually_invoked) + || invoked == SignatureHelpInvoked::Manual) { return; } @@ -1165,7 +1231,7 @@ pub fn signature_help_impl(cx: &mut Context, invoked: SignatureHelpInvoked) { // it very probably means the server was a little slow to respond and the user has // already moved on to something else, making a signature help popup will just be an // annoyance, see https://github.com/helix-editor/helix/issues/3112 - if !was_manually_invoked && editor.mode != Mode::Insert { + if invoked == SignatureHelpInvoked::Automatic && editor.mode != Mode::Insert { return; } @@ -1255,18 +1321,20 @@ pub fn signature_help_impl(cx: &mut Context, invoked: SignatureHelpInvoked) { pub fn hover(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); // TODO: factor out a doc.position_identifier() that returns lsp::TextDocumentPositionIdentifier + let request = doc + .language_servers_with_feature(LanguageServerFeature::Hover) + .iter() + .find_map(|language_server| { + let pos = doc.position(view.id, language_server.offset_encoding()); + language_server.text_document_hover(doc.identifier(), pos, None) + }); - let 
pos = doc.position(view.id, offset_encoding); - - let future = match language_server.text_document_hover(doc.identifier(), pos, None) { + let future = match request { Some(future) => future, None => { - cx.editor - .set_error("Language server does not support hover"); + cx.editor.set_error("No language server supports hover"); return; } }; @@ -1349,7 +1417,11 @@ pub fn rename_symbol(cx: &mut Context) { } } - fn create_rename_prompt(editor: &Editor, prefill: String) -> Box { + fn create_rename_prompt( + editor: &Editor, + prefill: String, + language_server_id: Option, + ) -> Box { let prompt = ui::Prompt::new( "rename-to:".into(), None, @@ -1358,27 +1430,36 @@ pub fn rename_symbol(cx: &mut Context) { if event != PromptEvent::Validate { return; } - let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); - - let pos = doc.position(view.id, offset_encoding); - - let future = - match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) { - Some(future) => future, - None => { - cx.editor - .set_error("Language server does not support symbol renaming"); - return; + let request = doc + .language_servers_with_feature(LanguageServerFeature::RenameSymbol) + .iter() + .find_map(|language_server| { + if let Some(language_server_id) = language_server_id { + if language_server.id() != language_server_id { + return None; + } } - }; - match block_on(future) { - Ok(edits) => { - let _ = apply_workspace_edit(cx.editor, offset_encoding, &edits); + let offset_encoding = language_server.offset_encoding(); + let pos = doc.position(view.id, offset_encoding); + let future = language_server.rename_symbol( + doc.identifier(), + pos, + input.to_string(), + )?; + Some((future, offset_encoding)) + }); + + if let Some((future, offset_encoding)) = request { + match block_on(future) { + Ok(edits) => { + let _ = apply_workspace_edit(cx.editor, offset_encoding, &edits); + } + Err(err) => cx.editor.set_error(err.to_string()), } - Err(err) => cx.editor.set_error(err.to_string()), + } else { + cx.editor + .set_error("No language server supports symbol renaming"); } }, ) @@ -1388,20 +1469,20 @@ pub fn rename_symbol(cx: &mut Context) { } let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); - - if !language_server.supports_rename() { - cx.editor - .set_error("Language server does not support symbol renaming"); - return; - } - let pos = doc.position(view.id, offset_encoding); + let prepare_rename_request = doc + .language_servers_with_feature(LanguageServerFeature::RenameSymbol) + .iter() + .find_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let pos = doc.position(view.id, offset_encoding); + let future = language_server.prepare_rename(doc.identifier(), pos)?; + Some((future, offset_encoding, language_server.id())) + }); - match language_server.prepare_rename(doc.identifier(), pos) { + match prepare_rename_request { // Language server supports textDocument/prepareRename, use it. 
- Some(future) => cx.callback( + Some((future, offset_encoding, ls_id)) => cx.callback( future, move |editor, compositor, response: Option| { let prefill = match get_prefill_from_lsp_response(editor, offset_encoding, response) @@ -1413,7 +1494,7 @@ pub fn rename_symbol(cx: &mut Context) { } }; - let prompt = create_rename_prompt(editor, prefill); + let prompt = create_rename_prompt(editor, prefill, Some(ls_id)); compositor.push(prompt); }, @@ -1423,7 +1504,7 @@ pub fn rename_symbol(cx: &mut Context) { None => { let prefill = get_prefill_from_word_boundary(cx.editor); - let prompt = create_rename_prompt(cx.editor, prefill); + let prompt = create_rename_prompt(cx.editor, prefill, None); cx.push_layer(prompt); } @@ -1432,17 +1513,20 @@ pub fn rename_symbol(cx: &mut Context) { pub fn select_references_to_symbol_under_cursor(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); - - let pos = doc.position(view.id, offset_encoding); - - let future = match language_server.text_document_document_highlight(doc.identifier(), pos, None) - { + let (future, offset_encoding) = match doc + .language_servers_with_feature(LanguageServerFeature::DocumentHighlight) + .iter() + .find_map(|language_server| { + let offset_encoding = language_server.offset_encoding(); + let pos = doc.position(view.id, offset_encoding); + let future = + language_server.text_document_document_highlight(doc.identifier(), pos, None)?; + Some((future, offset_encoding)) + }) { Some(future) => future, None => { cx.editor - .set_error("Language server does not support document highlight"); + .set_error("No language server supports document-highlight"); return; } }; @@ -1455,8 +1539,6 @@ pub fn select_references_to_symbol_under_cursor(cx: &mut Context) { _ => return, }; let (view, doc) = current!(editor); - let language_server = language_server!(editor, doc); - let offset_encoding = language_server.offset_encoding(); let text = doc.text(); let pos = doc.selection(view.id).primary().head; @@ -1502,63 +1584,58 @@ fn compute_inlay_hints_for_view( let view_id = view.id; let doc_id = view.doc; - let language_server = doc.language_server()?; - - let capabilities = language_server.capabilities(); - - let (future, new_doc_inlay_hints_id) = match capabilities.inlay_hint_provider { - Some( - lsp::OneOf::Left(true) - | lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)), - ) => { - let doc_text = doc.text(); - let len_lines = doc_text.len_lines(); - - // Compute ~3 times the current view height of inlay hints, that way some scrolling - // will not show half the view with hints and half without while still being faster - // than computing all the hints for the full file (which could be dozens of time - // longer than the view is). 
- let view_height = view.inner_height(); - let first_visible_line = - doc_text.char_to_line(view.offset.anchor.min(doc_text.len_chars())); - let first_line = first_visible_line.saturating_sub(view_height); - let last_line = first_visible_line - .saturating_add(view_height.saturating_mul(2)) - .min(len_lines); - - let new_doc_inlay_hint_id = DocumentInlayHintsId { - first_line, - last_line, - }; - // Don't recompute the annotations in case nothing has changed about the view - if !doc.inlay_hints_oudated - && doc - .inlay_hints(view_id) - .map_or(false, |dih| dih.id == new_doc_inlay_hint_id) - { - return None; - } + let language_servers = doc.language_servers_with_feature(LanguageServerFeature::InlayHints); + let language_server = language_servers.iter().find(|language_server| { + matches!( + language_server.capabilities().inlay_hint_provider, + Some( + lsp::OneOf::Left(true) + | lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)) + ) + ) + })?; + + let doc_text = doc.text(); + let len_lines = doc_text.len_lines(); + + // Compute ~3 times the current view height of inlay hints, that way some scrolling + // will not show half the view with hints and half without while still being faster + // than computing all the hints for the full file (which could be dozens of time + // longer than the view is). + let view_height = view.inner_height(); + let first_visible_line = doc_text.char_to_line(view.offset.anchor.min(doc_text.len_chars())); + let first_line = first_visible_line.saturating_sub(view_height); + let last_line = first_visible_line + .saturating_add(view_height.saturating_mul(2)) + .min(len_lines); + + let new_doc_inlay_hints_id = DocumentInlayHintsId { + first_line, + last_line, + }; + // Don't recompute the annotations in case nothing has changed about the view + if !doc.inlay_hints_oudated + && doc + .inlay_hints(view_id) + .map_or(false, |dih| dih.id == new_doc_inlay_hints_id) + { + return None; + } - let doc_slice = doc_text.slice(..); - let first_char_in_range = doc_slice.line_to_char(first_line); - let last_char_in_range = doc_slice.line_to_char(last_line); + let doc_slice = doc_text.slice(..); + let first_char_in_range = doc_slice.line_to_char(first_line); + let last_char_in_range = doc_slice.line_to_char(last_line); - let range = helix_lsp::util::range_to_lsp_range( - doc_text, - helix_core::Range::new(first_char_in_range, last_char_in_range), - language_server.offset_encoding(), - ); + let range = helix_lsp::util::range_to_lsp_range( + doc_text, + helix_core::Range::new(first_char_in_range, last_char_in_range), + language_server.offset_encoding(), + ); - ( - language_server.text_document_range_inlay_hints(doc.identifier(), range, None), - new_doc_inlay_hint_id, - ) - } - _ => return None, - }; + let offset_encoding = language_server.offset_encoding(); let callback = super::make_job_callback( - future?, + language_server.text_document_range_inlay_hints(doc.identifier(), range, None)?, move |editor, _compositor, response: Option>| { // The config was modified or the window was closed while the request was in flight if !editor.config().lsp.display_inlay_hints || editor.tree.try_get(view_id).is_none() { @@ -1572,8 +1649,8 @@ fn compute_inlay_hints_for_view( }; // If we have neither hints nor an LSP, empty the inlay hints since they're now oudated - let (mut hints, offset_encoding) = match (response, doc.language_server()) { - (Some(h), Some(ls)) if !h.is_empty() => (h, ls.offset_encoding()), + let mut hints = match response { + Some(hints) if !hints.is_empty() => hints, _ 
=> { doc.set_inlay_hints( view_id, diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index 81a24059a900..b78de7729a22 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1329,23 +1329,20 @@ fn lsp_workspace_command( if event != PromptEvent::Validate { return Ok(()); } - - let (_, doc) = current!(cx.editor); - - let language_server = match doc.language_server() { - Some(language_server) => language_server, - None => { - cx.editor - .set_status("Language server not active for current buffer"); - return Ok(()); - } - }; - - let options = match &language_server.capabilities().execute_command_provider { - Some(options) => options, + let doc = doc!(cx.editor); + let language_servers = + doc.language_servers_with_feature(LanguageServerFeature::WorkspaceCommand); + let (language_server_id, options) = match language_servers.iter().find_map(|ls| { + ls.capabilities() + .execute_command_provider + .as_ref() + .map(|options| (ls.id(), options)) + }) { + Some(id_options) => id_options, None => { - cx.editor - .set_status("Workspace commands are not supported for this language server"); + cx.editor.set_status( + "No active language servers for this document support workspace commands", + ); return Ok(()); } }; @@ -1362,8 +1359,8 @@ fn lsp_workspace_command( let callback = async move { let call: job::Callback = Callback::EditorCompositor(Box::new( move |_editor: &mut Editor, compositor: &mut Compositor| { - let picker = ui::Picker::new(commands, (), |cx, command, _action| { - execute_lsp_command(cx.editor, command.clone()); + let picker = ui::Picker::new(commands, (), move |cx, command, _action| { + execute_lsp_command(cx.editor, language_server_id, command.clone()); }); compositor.push(Box::new(overlaid(picker))) }, @@ -1376,6 +1373,7 @@ fn lsp_workspace_command( if options.commands.iter().any(|c| c == &command) { execute_lsp_command( cx.editor, + language_server_id, helix_lsp::lsp::Command { title: command.clone(), arguments: None, @@ -1426,7 +1424,7 @@ fn lsp_restart( .collect(); for document_id in document_ids_to_refresh { - cx.editor.refresh_language_server(document_id); + cx.editor.refresh_language_servers(document_id); } Ok(()) @@ -1443,21 +1441,63 @@ fn lsp_stop( let doc = doc!(cx.editor); - let ls_id = doc - .language_server() - .map(|ls| ls.id()) - .context("LSP not running for the current document")?; + // TODO this stops language servers which may be used in another doc/language type that uses the same language servers + // I'm not sure if this is really what we want + let ls_shutdown_names = doc + .language_servers() + .iter() + .map(|ls| ls.name()) + .collect::>(); - let config = doc - .language_config() - .context("LSP not defined for the current document")?; - cx.editor.language_servers.stop(config); + for ls_name in &ls_shutdown_names { + cx.editor.language_servers.stop(ls_name); + } + + let doc_ids_active_clients: Vec<_> = cx + .editor + .documents() + .filter_map(|doc| { + let doc_active_ls_ids: Vec<_> = doc + .language_servers() + .iter() + .filter(|ls| !ls_shutdown_names.contains(&ls.name())) + .map(|ls| ls.id()) + .collect(); + + let active_clients: Vec<_> = cx + .editor + .language_servers + .iter_clients() + .filter(|client| doc_active_ls_ids.contains(&client.id())) + .map(Clone::clone) + .collect(); + + if active_clients.len() != doc.language_servers().len() { + Some((doc.id(), active_clients)) + } else { + None + } + }) + .collect(); + + for (doc_id, active_clients) in doc_ids_active_clients { + let doc = 
cx.editor.documents.get_mut(&doc_id).unwrap(); + + let stopped_clients: Vec<_> = doc + .language_servers() + .iter() + .filter(|ls| { + !active_clients + .iter() + .any(|active_ls| active_ls.id() == ls.id()) + }) + .map(|ls| ls.id()) + .collect(); // is necessary because of borrow-checking - for doc in cx.editor.documents_mut() { - if doc.language_server().map_or(false, |ls| ls.id() == ls_id) { - doc.set_language_server(None); - doc.set_diagnostics(Default::default()); + for client_id in stopped_clients { + doc.clear_diagnostics(client_id) } + doc.set_language_servers(active_clients); } Ok(()) @@ -1850,7 +1890,7 @@ fn language( doc.detect_indent_and_line_ending(); let id = doc.id(); - cx.editor.refresh_language_server(id); + cx.editor.refresh_language_servers(id); Ok(()) } @@ -2588,7 +2628,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "lsp-restart", aliases: &[], - doc: "Restarts the Language Server that is in use by the current doc", + doc: "Restarts the language servers used by the current doc", fun: lsp_restart, signature: CommandSignature::none(), }, diff --git a/helix-term/src/health.rs b/helix-term/src/health.rs index 480c2c67579b..031f982ccea2 100644 --- a/helix-term/src/health.rs +++ b/helix-term/src/health.rs @@ -2,7 +2,10 @@ use crossterm::{ style::{Color, Print, Stylize}, tty::IsTty, }; -use helix_core::config::{default_syntax_loader, user_syntax_loader}; +use helix_core::{ + config::{default_syntax_loader, user_syntax_loader}, + syntax::LanguageServerFeatureConfiguration, +}; use helix_loader::grammar::load_runtime_file; use helix_view::clipboard::get_clipboard_provider; use std::io::Write; @@ -192,10 +195,14 @@ pub fn languages_all() -> std::io::Result<()> { for lang in &syn_loader_conf.language { column(&lang.language_id, Color::Reset); - let lsp = lang - .language_server - .as_ref() - .map(|lsp| lsp.command.to_string()); + // TODO multiple language servers (check binary for each supported language server, not just the first) + + let lsp = lang.language_servers.first().and_then(|lsp| { + syn_loader_conf + .language_server + .get(lsp.name()) + .map(|config| config.command.clone()) + }); check_binary(lsp); let dap = lang.debugger.as_ref().map(|dap| dap.command.to_string()); @@ -264,11 +271,15 @@ pub fn language(lang_str: String) -> std::io::Result<()> { } }; + // TODO multiple language servers probe_protocol( "language server", - lang.language_server - .as_ref() - .map(|lsp| lsp.command.to_string()), + lang.language_servers.first().and_then(|lsp| { + syn_loader_conf + .language_server + .get(lsp.name()) + .map(|config| config.command.clone()) + }), )?; probe_protocol( diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index c5c405801f28..859403a7f64e 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -15,7 +15,7 @@ use helix_view::{graphics::Rect, Document, Editor}; use crate::commands; use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent}; -use helix_lsp::{lsp, util}; +use helix_lsp::{lsp, util, OffsetEncoding}; impl menu::Item for CompletionItem { type Data = (); @@ -38,6 +38,7 @@ impl menu::Item for CompletionItem { || self.item.tags.as_ref().map_or(false, |tags| { tags.contains(&lsp::CompletionItemTag::DEPRECATED) }); + menu::Row::new(vec![ menu::Cell::from(Span::styled( self.item.label.as_str(), @@ -79,19 +80,16 @@ impl menu::Item for CompletionItem { } None => "", }), - // self.detail.as_deref().unwrap_or("") - // self.label_details - // .as_ref() - // 
.or(self.detail()) - // .as_str(), ]) } } #[derive(Debug, PartialEq, Default, Clone)] -struct CompletionItem { - item: lsp::CompletionItem, - resolved: bool, +pub struct CompletionItem { + pub item: lsp::CompletionItem, + pub language_server_id: usize, + pub offset_encoding: OffsetEncoding, + pub resolved: bool, } /// Wraps a Menu. @@ -109,21 +107,13 @@ impl Completion { pub fn new( editor: &Editor, savepoint: Arc, - mut items: Vec, - offset_encoding: helix_lsp::OffsetEncoding, + mut items: Vec, start_offset: usize, trigger_offset: usize, ) -> Self { let replace_mode = editor.config().completion_replace; // Sort completion items according to their preselect status (given by the LSP server) - items.sort_by_key(|item| !item.preselect.unwrap_or(false)); - let items = items - .into_iter() - .map(|item| CompletionItem { - item, - resolved: false, - }) - .collect(); + items.sort_by_key(|item| !item.item.preselect.unwrap_or(false)); // Then create the menu let menu = Menu::new(items, (), move |editor: &mut Editor, item, event| { @@ -131,7 +121,6 @@ impl Completion { doc: &Document, view_id: ViewId, item: &CompletionItem, - offset_encoding: helix_lsp::OffsetEncoding, trigger_offset: usize, include_placeholder: bool, replace_mode: bool, @@ -154,6 +143,8 @@ impl Completion { } }; + let offset_encoding = item.offset_encoding; + let Some(range) = util::lsp_range_to_range(doc.text(), edit.range, offset_encoding) else{ return Transaction::new(doc.text()); }; @@ -247,15 +238,8 @@ impl Completion { // always present here let item = item.unwrap(); - let transaction = item_to_transaction( - doc, - view.id, - item, - offset_encoding, - trigger_offset, - true, - replace_mode, - ); + let transaction = + item_to_transaction(doc, view.id, item, trigger_offset, true, replace_mode); doc.apply_temporary(&transaction, view.id); } PromptEvent::Validate => { @@ -267,10 +251,15 @@ impl Completion { // always present here let mut item = item.unwrap().clone(); + let language_server = editor + .language_servers + .get_by_id(item.language_server_id) + .unwrap(); + // resolve item if not yet resolved if !item.resolved { if let Some(resolved) = - Self::resolve_completion_item(doc, item.item.clone()) + Self::resolve_completion_item(language_server, item.item.clone()) { item.item = resolved; } @@ -281,7 +270,6 @@ impl Completion { doc, view.id, &item, - offset_encoding, trigger_offset, false, replace_mode, @@ -299,7 +287,7 @@ impl Completion { let transaction = util::generate_transaction_from_edits( doc.text(), additional_edits, - offset_encoding, // TODO: should probably transcode in Client + item.offset_encoding, // TODO: should probably transcode in Client ); doc.apply(&transaction, view.id); } @@ -323,10 +311,17 @@ impl Completion { } fn resolve_completion_item( - doc: &Document, + language_server: &helix_lsp::Client, completion_item: lsp::CompletionItem, ) -> Option { - let language_server = doc.language_server()?; + let completion_resolve_provider = language_server + .capabilities() + .completion_provider + .as_ref()? 
+ .resolve_provider; + if completion_resolve_provider != Some(true) { + return None; + } let future = language_server.resolve_completion_item(completion_item)?; let response = helix_lsp::block_on(future); @@ -397,8 +392,11 @@ impl Completion { Some(item) if !item.resolved => item.clone(), _ => return false, }; - - let language_server = match doc!(cx.editor).language_server() { + let language_server = match cx + .editor + .language_servers + .get_by_id(current_item.language_server_id) + { Some(language_server) => language_server, None => return false, }; @@ -422,13 +420,14 @@ impl Completion { .unwrap() .completion { - completion.replace_item( - current_item, - CompletionItem { - item: resolved_item, - resolved: true, - }, - ); + let resolved_item = CompletionItem { + item: resolved_item, + language_server_id: current_item.language_server_id, + offset_encoding: current_item.offset_encoding, + resolved: true, + }; + + completion.replace_item(current_item, resolved_item); } }, ); diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index f0989fa887ab..43b5d1af6ec3 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -33,7 +33,7 @@ use std::{mem::take, num::NonZeroUsize, path::PathBuf, rc::Rc, sync::Arc}; use tui::{buffer::Buffer as Surface, text::Span}; -use super::statusline; +use super::{completion::CompletionItem, statusline}; use super::{document::LineDecoration, lsp::SignatureHelp}; pub struct EditorView { @@ -650,7 +650,7 @@ impl EditorView { .primary() .cursor(doc.text().slice(..)); - let diagnostics = doc.diagnostics().iter().filter(|diagnostic| { + let diagnostics = doc.shown_diagnostics().filter(|diagnostic| { diagnostic.range.start <= cursor && diagnostic.range.end >= cursor }); @@ -953,20 +953,13 @@ impl EditorView { &mut self, editor: &mut Editor, savepoint: Arc, - items: Vec, - offset_encoding: helix_lsp::OffsetEncoding, + items: Vec, start_offset: usize, trigger_offset: usize, size: Rect, ) -> Option { - let mut completion = Completion::new( - editor, - savepoint, - items, - offset_encoding, - start_offset, - trigger_offset, - ); + let mut completion = + Completion::new(editor, savepoint, items, start_offset, trigger_offset); if completion.is_empty() { // skip if we got no completion results diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 3e9a14b06307..118836c0f4d7 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -17,7 +17,7 @@ mod text; use crate::compositor::{Component, Compositor}; use crate::filter_picker_entry; use crate::job::{self, Callback}; -pub use completion::Completion; +pub use completion::{Completion, CompletionItem}; pub use editor::EditorView; pub use markdown::Markdown; pub use menu::Menu; @@ -238,6 +238,7 @@ pub mod completers { use crate::ui::prompt::Completion; use fuzzy_matcher::skim::SkimMatcherV2 as Matcher; use fuzzy_matcher::FuzzyMatcher; + use helix_core::syntax::LanguageServerFeature; use helix_view::document::SCRATCH_BUFFER_NAME; use helix_view::theme; use helix_view::{editor::Config, Editor}; @@ -393,17 +394,13 @@ pub mod completers { pub fn lsp_workspace_command(editor: &Editor, input: &str) -> Vec { let matcher = Matcher::default(); - let (_, doc) = current_ref!(editor); - - let language_server = match doc.language_server() { - Some(language_server) => language_server, - None => { - return vec![]; - } - }; - - let options = match &language_server.capabilities().execute_command_provider { - Some(options) => options, + let language_servers = + 
doc!(editor).language_servers_with_feature(LanguageServerFeature::WorkspaceCommand);
+        let options = match language_servers
+            .into_iter()
+            .find_map(|ls| ls.capabilities().execute_command_provider.as_ref())
+        {
+            Some(id_options) => id_options,
             None => {
                 return vec![];
             }
diff --git a/helix-term/src/ui/statusline.rs b/helix-term/src/ui/statusline.rs
index 887863519319..b10e8076d404 100644
--- a/helix-term/src/ui/statusline.rs
+++ b/helix-term/src/ui/statusline.rs
@@ -197,15 +197,16 @@ where
     );
 }
 
+// TODO think about handling multiple language servers
 fn render_lsp_spinner<F>(context: &mut RenderContext, write: F)
 where
     F: Fn(&mut RenderContext, String, Option<Style>),
]{0,16})"/})]},i={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},s={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include"},contains:[{begin:/\\\n/,relevance:0},e.inherit(a,{className:"meta-string"}),{className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},o={className:"title",begin:t("[a-zA-Z_]\\w*::")+e.IDENT_RE,relevance:0},c=t("[a-zA-Z_]\\w*::")+e.IDENT_RE+"\\s*\\(",l={keyword:"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_t short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq",built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary",literal:"true false nullptr NULL"},d=[r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,i,a],_={variants:[{begin:/=/,end:/;/},{begin:/\(/,end:/\)/},{beginKeywords:"new throw return else",end:/;/}],keywords:l,contains:d.concat([{begin:/\(/,end:/\)/,keywords:l,contains:d.concat(["self"]),relevance:0}]),relevance:0},u={className:"function",begin:"("+n+"[\\*&\\s]+)+"+c,returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:l,illegal:/[^\w\s\*&:<>]/,contains:[{begin:"decltype\\(auto\\)",keywords:l,relevance:0},{begin:c,returnBegin:!0,contains:[o],relevance:0},{className:"params",begin:/\(/,end:/\)/,keywords:l,relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r,{begin:/\(/,end:/\)/,keywords:l,relevance:0,contains:["self",e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r]}]},r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s]};return{aliases:["c","cc","h","c++","h++","hpp","hh","hxx","cxx"],keywords:l,disableAutodetect:!0,illegal:"",keywords:l,contains:["self",r]},{begin:e.IDENT_RE+"::",keywords:l},{className:"class",beginKeywords:"class 
struct",end:/[{;:]/,contains:[{begin://,contains:["self"]},e.TITLE_MODE]}]),exports:{preprocessor:s,strings:a,keywords:l}}}}());hljs.registerLanguage("coffeescript",function(){"use strict";const e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);return function(r){var t={keyword:e.concat(["then","unless","until","loop","by","when","and","or","is","isnt","not"]).filter((e=>n=>!e.includes(n))(["var","const","let","function","static"])).join(" "),literal:n.concat(["yes","no","on","off"]).join(" "),built_in:a.concat(["npm","print"]).join(" ")},i="[A-Za-z$_][0-9A-Za-z$_]*",s={className:"subst",begin:/#\{/,end:/}/,keywords:t},o=[r.BINARY_NUMBER_MODE,r.inherit(r.C_NUMBER_MODE,{starts:{end:"(\\s*/)?",relevance:0}}),{className:"string",variants:[{begin:/'''/,end:/'''/,contains:[r.BACKSLASH_ESCAPE]},{begin:/'/,end:/'/,contains:[r.BACKSLASH_ESCAPE]},{begin:/"""/,end:/"""/,contains:[r.BACKSLASH_ESCAPE,s]},{begin:/"/,end:/"/,contains:[r.BACKSLASH_ESCAPE,s]}]},{className:"regexp",variants:[{begin:"///",end:"///",contains:[s,r.HASH_COMMENT_MODE]},{begin:"//[gim]{0,3}(?=\\W)",relevance:0},{begin:/\/(?![ *]).*?(?![\\]).\/[gim]{0,3}(?=\W)/}]},{begin:"@"+i},{subLanguage:"javascript",excludeBegin:!0,excludeEnd:!0,variants:[{begin:"```",end:"```"},{begin:"`",end:"`"}]}];s.contains=o;var c=r.inherit(r.TITLE_MODE,{begin:i}),l={className:"params",begin:"\\([^\\(]",returnBegin:!0,contains:[{begin:/\(/,end:/\)/,keywords:t,contains:["self"].concat(o)}]};return{name:"CoffeeScript",aliases:["coffee","cson","iced"],keywords:t,illegal:/\/\*/,contains:o.concat([r.COMMENT("###","###"),r.HASH_COMMENT_MODE,{className:"function",begin:"^\\s*"+i+"\\s*=\\s*(\\(.*\\))?\\s*\\B[-=]>",end:"[-=]>",returnBegin:!0,contains:[c,l]},{begin:/[:\(,=]\s*/,relevance:0,contains:[{className:"function",begin:"(\\(.*\\))?\\s*\\B[-=]>",end:"[-=]>",returnBegin:!0,contains:[l]}]},{className:"class",beginKeywords:"class",end:"$",illegal:/[:="\[\]]/,contains:[{beginKeywords:"extends",endsWithParent:!0,illegal:/[:="\[\]]/,contains:[c]},c]},{begin:i+":",end:":",returnBegin:!0,returnEnd:!0,relevance:0}])}}}());hljs.registerLanguage("ruby",function(){"use strict";return function(e){var n="[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?",a={keyword:"and then defined module in return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer 
attr_accessor",literal:"true false nil"},s={className:"doctag",begin:"@[A-Za-z]+"},i={begin:"#<",end:">"},r=[e.COMMENT("#","$",{contains:[s]}),e.COMMENT("^\\=begin","^\\=end",{contains:[s],relevance:10}),e.COMMENT("^__END__","\\n$")],c={className:"subst",begin:"#\\{",end:"}",keywords:a},t={className:"string",contains:[e.BACKSLASH_ESCAPE,c],variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/`/,end:/`/},{begin:"%[qQwWx]?\\(",end:"\\)"},{begin:"%[qQwWx]?\\[",end:"\\]"},{begin:"%[qQwWx]?{",end:"}"},{begin:"%[qQwWx]?<",end:">"},{begin:"%[qQwWx]?/",end:"/"},{begin:"%[qQwWx]?%",end:"%"},{begin:"%[qQwWx]?-",end:"-"},{begin:"%[qQwWx]?\\|",end:"\\|"},{begin:/\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/},{begin:/<<[-~]?'?(\w+)(?:.|\n)*?\n\s*\1\b/,returnBegin:!0,contains:[{begin:/<<[-~]?'?/},e.END_SAME_AS_BEGIN({begin:/(\w+)/,end:/(\w+)/,contains:[e.BACKSLASH_ESCAPE,c]})]}]},b={className:"params",begin:"\\(",end:"\\)",endsParent:!0,keywords:a},d=[t,i,{className:"class",beginKeywords:"class module",end:"$|;",illegal:/=/,contains:[e.inherit(e.TITLE_MODE,{begin:"[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?"}),{begin:"<\\s*",contains:[{begin:"("+e.IDENT_RE+"::)?"+e.IDENT_RE}]}].concat(r)},{className:"function",beginKeywords:"def",end:"$|;",contains:[e.inherit(e.TITLE_MODE,{begin:n}),b].concat(r)},{begin:e.IDENT_RE+"::"},{className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"(\\!|\\?)?:",relevance:0},{className:"symbol",begin:":(?!\\s)",contains:[t,{begin:n}],relevance:0},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{className:"params",begin:/\|/,end:/\|/,keywords:a},{begin:"("+e.RE_STARTERS_RE+"|unless)\\s*",keywords:"unless",contains:[i,{className:"regexp",contains:[e.BACKSLASH_ESCAPE,c],illegal:/\n/,variants:[{begin:"/",end:"/[a-z]*"},{begin:"%r{",end:"}[a-z]*"},{begin:"%r\\(",end:"\\)[a-z]*"},{begin:"%r!",end:"![a-z]*"},{begin:"%r\\[",end:"\\][a-z]*"}]}].concat(r),relevance:0}].concat(r);c.contains=d,b.contains=d;var g=[{begin:/^\s*=>/,starts:{end:"$",contains:d}},{className:"meta",begin:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+>|(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>)",starts:{end:"$",contains:d}}];return{name:"Ruby",aliases:["rb","gemspec","podspec","thor","irb"],keywords:a,illegal:/\/\*/,contains:r.concat(g).concat(d)}}}());hljs.registerLanguage("yaml",function(){"use strict";return function(e){var n="true false yes no null",a="[\\w#;/?:@&=+$,.~*\\'()[\\]]+",s={className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable",variants:[{begin:"{{",end:"}}"},{begin:"%{",end:"}"}]}]},i=e.inherit(s,{variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/[^\s,{}[\]]+/}]}),l={end:",",endsWithParent:!0,excludeEnd:!0,contains:[],keywords:n,relevance:0},t={begin:"{",end:"}",contains:[l],illegal:"\\n",relevance:0},g={begin:"\\[",end:"\\]",contains:[l],illegal:"\\n",relevance:0},b=[{className:"attr",variants:[{begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{begin:"'\\w[\\w :\\/.-]*':(?=[ \t]|$)"}]},{className:"meta",begin:"^---s*$",relevance:10},{className:"string",begin:"[\\|>]([0-9]?[+-])?[ ]*\\n( *)[\\S ]+\\n(\\2[\\S 
]+\\n?)*"},{begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:"!\\w+!"+a},{className:"type",begin:"!<"+a+">"},{className:"type",begin:"!"+a},{className:"type",begin:"!!"+a},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta",begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"\\-(?=[ ]|$)",relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{className:"number",begin:"\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\.[0-9]*)?([ \\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\b"},{className:"number",begin:e.C_NUMBER_RE+"\\b"},t,g,s],c=[...b];return c.pop(),c.push(i),l.contains=c,{name:"YAML",case_insensitive:!0,aliases:["yml","YAML"],contains:b}}}());hljs.registerLanguage("d",function(){"use strict";return function(e){var a={$pattern:e.UNDERSCORE_IDENT_RE,keyword:"abstract alias align asm assert auto body break byte case cast catch class const continue debug default delete deprecated do else enum export extern final finally for foreach foreach_reverse|10 goto if immutable import in inout int interface invariant is lazy macro mixin module new nothrow out override package pragma private protected public pure ref return scope shared static struct super switch synchronized template this throw try typedef typeid typeof union unittest version void volatile while with __FILE__ __LINE__ __gshared|10 __thread __traits __DATE__ __EOF__ __TIME__ __TIMESTAMP__ __VENDOR__ __VERSION__",built_in:"bool cdouble cent cfloat char creal dchar delegate double dstring float function idouble ifloat ireal long real short string ubyte ucent uint ulong ushort wchar wstring",literal:"false null true"},d="((0|[1-9][\\d_]*)|0[bB][01_]+|0[xX]([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*))",n="\\\\(['\"\\?\\\\abfnrtv]|u[\\dA-Fa-f]{4}|[0-7]{1,3}|x[\\dA-Fa-f]{2}|U[\\dA-Fa-f]{8})|&[a-zA-Z\\d]{2,};",t={className:"number",begin:"\\b"+d+"(L|u|U|Lu|LU|uL|UL)?",relevance:0},_={className:"number",begin:"\\b(((0[xX](([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*)\\.([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*)|\\.?([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*))[pP][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d))|((0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)(\\.\\d*|([eE][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)))|\\d+\\.(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)|\\.(0|[1-9][\\d_]*)([eE][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d))?))([fF]|L|i|[fF]i|Li)?|"+d+"(i|[fF]i|Li))",relevance:0},r={className:"string",begin:"'("+n+"|.)",end:"'",illegal:"."},i={className:"string",begin:'"',contains:[{begin:n,relevance:0}],end:'"[cwd]?'},s=e.COMMENT("\\/\\+","\\+\\/",{contains:["self"],relevance:10});return{name:"D",keywords:a,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,{className:"string",begin:'x"[\\da-fA-F\\s\\n\\r]*"[cwd]?',relevance:10},i,{className:"string",begin:'[rq]"',end:'"[cwd]?',relevance:5},{className:"string",begin:"`",end:"`[cwd]?"},{className:"string",begin:'q"\\{',end:'\\}"'},_,t,r,{className:"meta",begin:"^#!",end:"$",relevance:5},{className:"meta",begin:"#(line)",end:"$",relevance:5},{className:"keyword",begin:"@[a-zA-Z_][a-zA-Z_\\d]*"}]}}}());hljs.registerLanguage("properties",function(){"use strict";return function(e){var n="[ \\t\\f]*",t="("+n+"[:=]"+n+"|[ \\t\\f]+)",a="([^\\\\:= 
\\t\\f\\n]|\\\\.)+",s={end:t,relevance:0,starts:{className:"string",end:/$/,relevance:0,contains:[{begin:"\\\\\\n"}]}};return{name:".properties",case_insensitive:!0,illegal:/\S/,contains:[e.COMMENT("^\\s*[!#]","$"),{begin:"([^\\\\\\W:= \\t\\f\\n]|\\\\.)+"+t,returnBegin:!0,contains:[{className:"attr",begin:"([^\\\\\\W:= \\t\\f\\n]|\\\\.)+",endsParent:!0,relevance:0}],starts:s},{begin:a+t,returnBegin:!0,relevance:0,contains:[{className:"meta",begin:a,endsParent:!0,relevance:0}],starts:s},{className:"attr",relevance:0,begin:a+n+"$"}]}}}());hljs.registerLanguage("http",function(){"use strict";return function(e){var n="HTTP/[0-9\\.]+";return{name:"HTTP",aliases:["https"],illegal:"\\S",contains:[{begin:"^"+n,end:"$",contains:[{className:"number",begin:"\\b\\d{3}\\b"}]},{begin:"^[A-Z]+ (.*?) "+n+"$",returnBegin:!0,end:"$",contains:[{className:"string",begin:" ",end:" ",excludeBegin:!0,excludeEnd:!0},{begin:n},{className:"keyword",begin:"[A-Z]+"}]},{className:"attribute",begin:"^\\w",end:": ",excludeEnd:!0,illegal:"\\n|\\s|=",starts:{end:"$",relevance:0}},{begin:"\\n\\n",starts:{subLanguage:[],endsWithParent:!0}}]}}}());hljs.registerLanguage("haskell",function(){"use strict";return function(e){var n={variants:[e.COMMENT("--","$"),e.COMMENT("{-","-}",{contains:["self"]})]},i={className:"meta",begin:"{-#",end:"#-}"},a={className:"meta",begin:"^#",end:"$"},s={className:"type",begin:"\\b[A-Z][\\w']*",relevance:0},l={begin:"\\(",end:"\\)",illegal:'"',contains:[i,a,{className:"type",begin:"\\b[A-Z][\\w]*(\\((\\.\\.|,|\\w+)\\))?"},e.inherit(e.TITLE_MODE,{begin:"[_a-z][\\w']*"}),n]};return{name:"Haskell",aliases:["hs"],keywords:"let in if then else case of where do module import hiding qualified type data newtype deriving class instance as default infix infixl infixr foreign export ccall stdcall cplusplus jvm dotnet safe unsafe family forall mdo proc rec",contains:[{beginKeywords:"module",end:"where",keywords:"module where",contains:[l,n],illegal:"\\W\\.|;"},{begin:"\\bimport\\b",end:"$",keywords:"import qualified as hiding",contains:[l,n],illegal:"\\W\\.|;"},{className:"class",begin:"^(\\s*)?(class|instance)\\b",end:"where",keywords:"class family instance where",contains:[s,l,n]},{className:"class",begin:"\\b(data|(new)?type)\\b",end:"$",keywords:"data family type newtype deriving",contains:[i,s,l,{begin:"{",end:"}",contains:l.contains},n]},{beginKeywords:"default",end:"$",contains:[s,l,n]},{beginKeywords:"infix infixl infixr",end:"$",contains:[e.C_NUMBER_MODE,n]},{begin:"\\bforeign\\b",end:"$",keywords:"foreign import export ccall stdcall cplusplus jvm dotnet safe unsafe",contains:[s,e.QUOTE_STRING_MODE,n]},{className:"meta",begin:"#!\\/usr\\/bin\\/env runhaskell",end:"$"},i,a,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,s,e.inherit(e.TITLE_MODE,{begin:"^[_a-z][\\w']*"}),n,{begin:"->|<-"}]}}}());hljs.registerLanguage("handlebars",function(){"use strict";function e(...e){return e.map(e=>(function(e){return e?"string"==typeof e?e:e.source:null})(e)).join("")}return function(n){const a={"builtin-name":"action bindattr collection component concat debugger each each-in get hash if in input link-to loc log lookup mut outlet partial query-params render template textarea unbound unless view with yield"},t=/\[.*?\]/,s=/[^\s!"#%&'()*+,.\/;<=>@\[\\\]^`{|}~]+/,i=e("(",/'.*?'/,"|",/".*?"/,"|",t,"|",s,"|",/\.|\//,")+"),r=e("(",t,"|",s,")(?==)"),l={begin:i,lexemes:/[\w.\/]+/},c=n.inherit(l,{keywords:{literal:"true false undefined 
null"}}),o={begin:/\(/,end:/\)/},m={className:"attr",begin:r,relevance:0,starts:{begin:/=/,end:/=/,starts:{contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,c,o]}}},d={contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,{begin:/as\s+\|/,keywords:{keyword:"as"},end:/\|/,contains:[{begin:/\w+/}]},m,c,o],returnEnd:!0},g=n.inherit(l,{className:"name",keywords:a,starts:n.inherit(d,{end:/\)/})});o.contains=[g];const u=n.inherit(l,{keywords:a,className:"name",starts:n.inherit(d,{end:/}}/})}),b=n.inherit(l,{keywords:a,className:"name"}),h=n.inherit(l,{className:"name",keywords:a,starts:n.inherit(d,{end:/}}/})});return{name:"Handlebars",aliases:["hbs","html.hbs","html.handlebars","htmlbars"],case_insensitive:!0,subLanguage:"xml",contains:[{begin:/\\\{\{/,skip:!0},{begin:/\\\\(?=\{\{)/,skip:!0},n.COMMENT(/\{\{!--/,/--\}\}/),n.COMMENT(/\{\{!/,/\}\}/),{className:"template-tag",begin:/\{\{\{\{(?!\/)/,end:/\}\}\}\}/,contains:[u],starts:{end:/\{\{\{\{\//,returnEnd:!0,subLanguage:"xml"}},{className:"template-tag",begin:/\{\{\{\{\//,end:/\}\}\}\}/,contains:[b]},{className:"template-tag",begin:/\{\{#/,end:/\}\}/,contains:[u]},{className:"template-tag",begin:/\{\{(?=else\}\})/,end:/\}\}/,keywords:"else"},{className:"template-tag",begin:/\{\{\//,end:/\}\}/,contains:[b]},{className:"template-variable",begin:/\{\{\{/,end:/\}\}\}/,contains:[h]},{className:"template-variable",begin:/\{\{/,end:/\}\}/,contains:[h]}]}}}());hljs.registerLanguage("rust",function(){"use strict";return function(e){var n="([ui](8|16|32|64|128|size)|f(32|64))?",t="drop i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize f32 f64 str char bool Box Option Result String Vec Copy Send Sized Sync Drop Fn FnMut FnOnce ToOwned Clone Debug PartialEq PartialOrd Eq Ord AsRef AsMut Into From Default Iterator Extend IntoIterator DoubleEndedIterator ExactSizeIterator SliceConcatExt ToString assert! assert_eq! bitflags! bytes! cfg! col! concat! concat_idents! debug_assert! debug_assert_eq! env! panic! file! format! format_args! include_bin! include_str! line! local_data_key! module_path! option_env! print! println! select! stringify! try! unimplemented! unreachable! vec! write! writeln! macro_rules! assert_ne! 
debug_assert_ne!";return{name:"Rust",aliases:["rs"],keywords:{$pattern:e.IDENT_RE+"!?",keyword:"abstract as async await become box break const continue crate do dyn else enum extern false final fn for if impl in let loop macro match mod move mut override priv pub ref return self Self static struct super trait true try type typeof unsafe unsized use virtual where while yield",literal:"true false Some None Ok Err",built_in:t},illegal:""}]}}}());hljs.registerLanguage("cpp",function(){"use strict";return function(e){var t=e.getLanguage("c-like").rawDefinition();return t.disableAutodetect=!1,t.name="C++",t.aliases=["cc","c++","h++","hpp","hh","hxx","cxx"],t}}());hljs.registerLanguage("ini",function(){"use strict";function e(e){return e?"string"==typeof e?e:e.source:null}function n(...n){return n.map(n=>e(n)).join("")}return function(a){var s={className:"number",relevance:0,variants:[{begin:/([\+\-]+)?[\d]+_[\d_]+/},{begin:a.NUMBER_RE}]},i=a.COMMENT();i.variants=[{begin:/;/,end:/$/},{begin:/#/,end:/$/}];var t={className:"variable",variants:[{begin:/\$[\w\d"][\w\d_]*/},{begin:/\$\{(.*?)}/}]},r={className:"literal",begin:/\bon|off|true|false|yes|no\b/},l={className:"string",contains:[a.BACKSLASH_ESCAPE],variants:[{begin:"'''",end:"'''",relevance:10},{begin:'"""',end:'"""',relevance:10},{begin:'"',end:'"'},{begin:"'",end:"'"}]},c={begin:/\[/,end:/\]/,contains:[i,r,t,l,s,"self"],relevance:0},g="("+[/[A-Za-z0-9_-]+/,/"(\\"|[^"])*"/,/'[^']*'/].map(n=>e(n)).join("|")+")";return{name:"TOML, also INI",aliases:["toml"],case_insensitive:!0,illegal:/\S/,contains:[i,{className:"section",begin:/\[+/,end:/\]+/},{begin:n(g,"(\\s*\\.\\s*",g,")*",n("(?=",/\s*=\s*[^#\s]/,")")),className:"attr",starts:{end:/$/,contains:[i,c,r,t,l,s]}}]}}}());hljs.registerLanguage("objectivec",function(){"use strict";return function(e){var n=/[a-zA-Z@][a-zA-Z0-9_]*/,_={$pattern:n,keyword:"@interface @class @protocol @implementation"};return{name:"Objective-C",aliases:["mm","objc","obj-c"],keywords:{$pattern:n,keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained __bridge_retain __covariant __contravariant __kindof _Nonnull _Nullable _Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN",literal:"false true FALSE TRUE nil YES NO NULL",built_in:"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once"},illegal:"/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"class",begin:"("+_.keyword.split(" 
").join("|")+")\\b",end:"({|$)",excludeEnd:!0,keywords:_,contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"\\."+e.UNDERSCORE_IDENT_RE,relevance:0}]}}}());hljs.registerLanguage("apache",function(){"use strict";return function(e){var n={className:"number",begin:"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?"};return{name:"Apache config",aliases:["apacheconf"],case_insensitive:!0,contains:[e.HASH_COMMENT_MODE,{className:"section",begin:"",contains:[n,{className:"number",begin:":\\d{1,5}"},e.inherit(e.QUOTE_STRING_MODE,{relevance:0})]},{className:"attribute",begin:/\w+/,relevance:0,keywords:{nomarkup:"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername"},starts:{end:/$/,relevance:0,keywords:{literal:"on off all deny allow"},contains:[{className:"meta",begin:"\\s\\[",end:"\\]$"},{className:"variable",begin:"[\\$%]\\{",end:"\\}",contains:["self",{className:"number",begin:"[\\$%]\\d+"}]},n,{className:"number",begin:"\\d+"},e.QUOTE_STRING_MODE]}}],illegal:/\S/}}}());hljs.registerLanguage("java",function(){"use strict";function e(e){return e?"string"==typeof e?e:e.source:null}function n(e){return a("(",e,")?")}function a(...n){return n.map(n=>e(n)).join("")}function s(...n){return"("+n.map(n=>e(n)).join("|")+")"}return function(e){var t="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports do",i={className:"meta",begin:"@[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*",contains:[{begin:/\(/,end:/\)/,contains:["self"]}]},r=e=>a("[",e,"]+([",e,"_]*[",e,"]+)?"),c={className:"number",variants:[{begin:`\\b(0[bB]${r("01")})[lL]?`},{begin:`\\b(0${r("0-7")})[dDfFlL]?`},{begin:a(/\b0[xX]/,s(a(r("a-fA-F0-9"),/\./,r("a-fA-F0-9")),a(r("a-fA-F0-9"),/\.?/),a(/\./,r("a-fA-F0-9"))),/([pP][+-]?(\d+))?/,/[fFdDlL]?/)},{begin:a(/\b/,s(a(/\d*\./,r("\\d")),r("\\d")),/[eE][+-]?[\d]+[dDfF]?/)},{begin:a(/\b/,r(/\d/),n(/\.?/),n(r(/\d/)),/[dDfFlL]?/)}],relevance:0};return{name:"Java",aliases:["jsp"],keywords:t,illegal:/<\/|#/,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/,relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"class",beginKeywords:"class interface",end:/[{;=]/,excludeEnd:!0,keywords:"class interface",illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"new throw return else",relevance:0},{className:"function",begin:"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\s*,\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\s+)+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:t,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/,keywords:t,relevance:0,contains:[i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},c,i]}}}());hljs.registerLanguage("x86asm",function(){"use strict";return function(s){return{name:"Intel x86 Assembly",case_insensitive:!0,keywords:{$pattern:"[.%]?"+s.IDENT_RE,keyword:"lock rep repe repz repne repnz xaquire xrelease bnd nobnd 
aaa aad aam aas adc add and arpl bb0_reset bb1_reset bound bsf bsr bswap bt btc btr bts call cbw cdq cdqe clc cld cli clts cmc cmp cmpsb cmpsd cmpsq cmpsw cmpxchg cmpxchg486 cmpxchg8b cmpxchg16b cpuid cpu_read cpu_write cqo cwd cwde daa das dec div dmint emms enter equ f2xm1 fabs fadd faddp fbld fbstp fchs fclex fcmovb fcmovbe fcmove fcmovnb fcmovnbe fcmovne fcmovnu fcmovu fcom fcomi fcomip fcomp fcompp fcos fdecstp fdisi fdiv fdivp fdivr fdivrp femms feni ffree ffreep fiadd ficom ficomp fidiv fidivr fild fimul fincstp finit fist fistp fisttp fisub fisubr fld fld1 fldcw fldenv fldl2e fldl2t fldlg2 fldln2 fldpi fldz fmul fmulp fnclex fndisi fneni fninit fnop fnsave fnstcw fnstenv fnstsw fpatan fprem fprem1 fptan frndint frstor fsave fscale fsetpm fsin fsincos fsqrt fst fstcw fstenv fstp fstsw fsub fsubp fsubr fsubrp ftst fucom fucomi fucomip fucomp fucompp fxam fxch fxtract fyl2x fyl2xp1 hlt ibts icebp idiv imul in inc incbin insb insd insw int int01 int1 int03 int3 into invd invpcid invlpg invlpga iret iretd iretq iretw jcxz jecxz jrcxz jmp jmpe lahf lar lds lea leave les lfence lfs lgdt lgs lidt lldt lmsw loadall loadall286 lodsb lodsd lodsq lodsw loop loope loopne loopnz loopz lsl lss ltr mfence monitor mov movd movq movsb movsd movsq movsw movsx movsxd movzx mul mwait neg nop not or out outsb outsd outsw packssdw packsswb packuswb paddb paddd paddsb paddsiw paddsw paddusb paddusw paddw pand pandn pause paveb pavgusb pcmpeqb pcmpeqd pcmpeqw pcmpgtb pcmpgtd pcmpgtw pdistib pf2id pfacc pfadd pfcmpeq pfcmpge pfcmpgt pfmax pfmin pfmul pfrcp pfrcpit1 pfrcpit2 pfrsqit1 pfrsqrt pfsub pfsubr pi2fd pmachriw pmaddwd pmagw pmulhriw pmulhrwa pmulhrwc pmulhw pmullw pmvgezb pmvlzb pmvnzb pmvzb pop popa popad popaw popf popfd popfq popfw por prefetch prefetchw pslld psllq psllw psrad psraw psrld psrlq psrlw psubb psubd psubsb psubsiw psubsw psubusb psubusw psubw punpckhbw punpckhdq punpckhwd punpcklbw punpckldq punpcklwd push pusha pushad pushaw pushf pushfd pushfq pushfw pxor rcl rcr rdshr rdmsr rdpmc rdtsc rdtscp ret retf retn rol ror rdm rsdc rsldt rsm rsts sahf sal salc sar sbb scasb scasd scasq scasw sfence sgdt shl shld shr shrd sidt sldt skinit smi smint smintold smsw stc std sti stosb stosd stosq stosw str sub svdc svldt svts swapgs syscall sysenter sysexit sysret test ud0 ud1 ud2b ud2 ud2a umov verr verw fwait wbinvd wrshr wrmsr xadd xbts xchg xlatb xlat xor cmove cmovz cmovne cmovnz cmova cmovnbe cmovae cmovnb cmovb cmovnae cmovbe cmovna cmovg cmovnle cmovge cmovnl cmovl cmovnge cmovle cmovng cmovc cmovnc cmovo cmovno cmovs cmovns cmovp cmovpe cmovnp cmovpo je jz jne jnz ja jnbe jae jnb jb jnae jbe jna jg jnle jge jnl jl jnge jle jng jc jnc jo jno js jns jpo jnp jpe jp sete setz setne setnz seta setnbe setae setnb setnc setb setnae setcset setbe setna setg setnle setge setnl setl setnge setle setng sets setns seto setno setpe setp setpo setnp addps addss andnps andps cmpeqps cmpeqss cmpleps cmpless cmpltps cmpltss cmpneqps cmpneqss cmpnleps cmpnless cmpnltps cmpnltss cmpordps cmpordss cmpunordps cmpunordss cmpps cmpss comiss cvtpi2ps cvtps2pi cvtsi2ss cvtss2si cvttps2pi cvttss2si divps divss ldmxcsr maxps maxss minps minss movaps movhps movlhps movlps movhlps movmskps movntps movss movups mulps mulss orps rcpps rcpss rsqrtps rsqrtss shufps sqrtps sqrtss stmxcsr subps subss ucomiss unpckhps unpcklps xorps fxrstor fxrstor64 fxsave fxsave64 xgetbv xsetbv xsave xsave64 xsaveopt xsaveopt64 xrstor xrstor64 prefetchnta prefetcht0 prefetcht1 prefetcht2 maskmovq movntq pavgb pavgw pextrw pinsrw pmaxsw 
pmaxub pminsw pminub pmovmskb pmulhuw psadbw pshufw pf2iw pfnacc pfpnacc pi2fw pswapd maskmovdqu clflush movntdq movnti movntpd movdqa movdqu movdq2q movq2dq paddq pmuludq pshufd pshufhw pshuflw pslldq psrldq psubq punpckhqdq punpcklqdq addpd addsd andnpd andpd cmpeqpd cmpeqsd cmplepd cmplesd cmpltpd cmpltsd cmpneqpd cmpneqsd cmpnlepd cmpnlesd cmpnltpd cmpnltsd cmpordpd cmpordsd cmpunordpd cmpunordsd cmppd comisd cvtdq2pd cvtdq2ps cvtpd2dq cvtpd2pi cvtpd2ps cvtpi2pd cvtps2dq cvtps2pd cvtsd2si cvtsd2ss cvtsi2sd cvtss2sd cvttpd2pi cvttpd2dq cvttps2dq cvttsd2si divpd divsd maxpd maxsd minpd minsd movapd movhpd movlpd movmskpd movupd mulpd mulsd orpd shufpd sqrtpd sqrtsd subpd subsd ucomisd unpckhpd unpcklpd xorpd addsubpd addsubps haddpd haddps hsubpd hsubps lddqu movddup movshdup movsldup clgi stgi vmcall vmclear vmfunc vmlaunch vmload vmmcall vmptrld vmptrst vmread vmresume vmrun vmsave vmwrite vmxoff vmxon invept invvpid pabsb pabsw pabsd palignr phaddw phaddd phaddsw phsubw phsubd phsubsw pmaddubsw pmulhrsw pshufb psignb psignw psignd extrq insertq movntsd movntss lzcnt blendpd blendps blendvpd blendvps dppd dpps extractps insertps movntdqa mpsadbw packusdw pblendvb pblendw pcmpeqq pextrb pextrd pextrq phminposuw pinsrb pinsrd pinsrq pmaxsb pmaxsd pmaxud pmaxuw pminsb pminsd pminud pminuw pmovsxbw pmovsxbd pmovsxbq pmovsxwd pmovsxwq pmovsxdq pmovzxbw pmovzxbd pmovzxbq pmovzxwd pmovzxwq pmovzxdq pmuldq pmulld ptest roundpd roundps roundsd roundss crc32 pcmpestri pcmpestrm pcmpistri pcmpistrm pcmpgtq popcnt getsec pfrcpv pfrsqrtv movbe aesenc aesenclast aesdec aesdeclast aesimc aeskeygenassist vaesenc vaesenclast vaesdec vaesdeclast vaesimc vaeskeygenassist vaddpd vaddps vaddsd vaddss vaddsubpd vaddsubps vandpd vandps vandnpd vandnps vblendpd vblendps vblendvpd vblendvps vbroadcastss vbroadcastsd vbroadcastf128 vcmpeq_ospd vcmpeqpd vcmplt_ospd vcmpltpd vcmple_ospd vcmplepd vcmpunord_qpd vcmpunordpd vcmpneq_uqpd vcmpneqpd vcmpnlt_uspd vcmpnltpd vcmpnle_uspd vcmpnlepd vcmpord_qpd vcmpordpd vcmpeq_uqpd vcmpnge_uspd vcmpngepd vcmpngt_uspd vcmpngtpd vcmpfalse_oqpd vcmpfalsepd vcmpneq_oqpd vcmpge_ospd vcmpgepd vcmpgt_ospd vcmpgtpd vcmptrue_uqpd vcmptruepd vcmplt_oqpd vcmple_oqpd vcmpunord_spd vcmpneq_uspd vcmpnlt_uqpd vcmpnle_uqpd vcmpord_spd vcmpeq_uspd vcmpnge_uqpd vcmpngt_uqpd vcmpfalse_ospd vcmpneq_ospd vcmpge_oqpd vcmpgt_oqpd vcmptrue_uspd vcmppd vcmpeq_osps vcmpeqps vcmplt_osps vcmpltps vcmple_osps vcmpleps vcmpunord_qps vcmpunordps vcmpneq_uqps vcmpneqps vcmpnlt_usps vcmpnltps vcmpnle_usps vcmpnleps vcmpord_qps vcmpordps vcmpeq_uqps vcmpnge_usps vcmpngeps vcmpngt_usps vcmpngtps vcmpfalse_oqps vcmpfalseps vcmpneq_oqps vcmpge_osps vcmpgeps vcmpgt_osps vcmpgtps vcmptrue_uqps vcmptrueps vcmplt_oqps vcmple_oqps vcmpunord_sps vcmpneq_usps vcmpnlt_uqps vcmpnle_uqps vcmpord_sps vcmpeq_usps vcmpnge_uqps vcmpngt_uqps vcmpfalse_osps vcmpneq_osps vcmpge_oqps vcmpgt_oqps vcmptrue_usps vcmpps vcmpeq_ossd vcmpeqsd vcmplt_ossd vcmpltsd vcmple_ossd vcmplesd vcmpunord_qsd vcmpunordsd vcmpneq_uqsd vcmpneqsd vcmpnlt_ussd vcmpnltsd vcmpnle_ussd vcmpnlesd vcmpord_qsd vcmpordsd vcmpeq_uqsd vcmpnge_ussd vcmpngesd vcmpngt_ussd vcmpngtsd vcmpfalse_oqsd vcmpfalsesd vcmpneq_oqsd vcmpge_ossd vcmpgesd vcmpgt_ossd vcmpgtsd vcmptrue_uqsd vcmptruesd vcmplt_oqsd vcmple_oqsd vcmpunord_ssd vcmpneq_ussd vcmpnlt_uqsd vcmpnle_uqsd vcmpord_ssd vcmpeq_ussd vcmpnge_uqsd vcmpngt_uqsd vcmpfalse_ossd vcmpneq_ossd vcmpge_oqsd vcmpgt_oqsd vcmptrue_ussd vcmpsd vcmpeq_osss vcmpeqss vcmplt_osss vcmpltss vcmple_osss vcmpless vcmpunord_qss 
vcmpunordss vcmpneq_uqss vcmpneqss vcmpnlt_usss vcmpnltss vcmpnle_usss vcmpnless vcmpord_qss vcmpordss vcmpeq_uqss vcmpnge_usss vcmpngess vcmpngt_usss vcmpngtss vcmpfalse_oqss vcmpfalsess vcmpneq_oqss vcmpge_osss vcmpgess vcmpgt_osss vcmpgtss vcmptrue_uqss vcmptruess vcmplt_oqss vcmple_oqss vcmpunord_sss vcmpneq_usss vcmpnlt_uqss vcmpnle_uqss vcmpord_sss vcmpeq_usss vcmpnge_uqss vcmpngt_uqss vcmpfalse_osss vcmpneq_osss vcmpge_oqss vcmpgt_oqss vcmptrue_usss vcmpss vcomisd vcomiss vcvtdq2pd vcvtdq2ps vcvtpd2dq vcvtpd2ps vcvtps2dq vcvtps2pd vcvtsd2si vcvtsd2ss vcvtsi2sd vcvtsi2ss vcvtss2sd vcvtss2si vcvttpd2dq vcvttps2dq vcvttsd2si vcvttss2si vdivpd vdivps vdivsd vdivss vdppd vdpps vextractf128 vextractps vhaddpd vhaddps vhsubpd vhsubps vinsertf128 vinsertps vlddqu vldqqu vldmxcsr vmaskmovdqu vmaskmovps vmaskmovpd vmaxpd vmaxps vmaxsd vmaxss vminpd vminps vminsd vminss vmovapd vmovaps vmovd vmovq vmovddup vmovdqa vmovqqa vmovdqu vmovqqu vmovhlps vmovhpd vmovhps vmovlhps vmovlpd vmovlps vmovmskpd vmovmskps vmovntdq vmovntqq vmovntdqa vmovntpd vmovntps vmovsd vmovshdup vmovsldup vmovss vmovupd vmovups vmpsadbw vmulpd vmulps vmulsd vmulss vorpd vorps vpabsb vpabsw vpabsd vpacksswb vpackssdw vpackuswb vpackusdw vpaddb vpaddw vpaddd vpaddq vpaddsb vpaddsw vpaddusb vpaddusw vpalignr vpand vpandn vpavgb vpavgw vpblendvb vpblendw vpcmpestri vpcmpestrm vpcmpistri vpcmpistrm vpcmpeqb vpcmpeqw vpcmpeqd vpcmpeqq vpcmpgtb vpcmpgtw vpcmpgtd vpcmpgtq vpermilpd vpermilps vperm2f128 vpextrb vpextrw vpextrd vpextrq vphaddw vphaddd vphaddsw vphminposuw vphsubw vphsubd vphsubsw vpinsrb vpinsrw vpinsrd vpinsrq vpmaddwd vpmaddubsw vpmaxsb vpmaxsw vpmaxsd vpmaxub vpmaxuw vpmaxud vpminsb vpminsw vpminsd vpminub vpminuw vpminud vpmovmskb vpmovsxbw vpmovsxbd vpmovsxbq vpmovsxwd vpmovsxwq vpmovsxdq vpmovzxbw vpmovzxbd vpmovzxbq vpmovzxwd vpmovzxwq vpmovzxdq vpmulhuw vpmulhrsw vpmulhw vpmullw vpmulld vpmuludq vpmuldq vpor vpsadbw vpshufb vpshufd vpshufhw vpshuflw vpsignb vpsignw vpsignd vpslldq vpsrldq vpsllw vpslld vpsllq vpsraw vpsrad vpsrlw vpsrld vpsrlq vptest vpsubb vpsubw vpsubd vpsubq vpsubsb vpsubsw vpsubusb vpsubusw vpunpckhbw vpunpckhwd vpunpckhdq vpunpckhqdq vpunpcklbw vpunpcklwd vpunpckldq vpunpcklqdq vpxor vrcpps vrcpss vrsqrtps vrsqrtss vroundpd vroundps vroundsd vroundss vshufpd vshufps vsqrtpd vsqrtps vsqrtsd vsqrtss vstmxcsr vsubpd vsubps vsubsd vsubss vtestps vtestpd vucomisd vucomiss vunpckhpd vunpckhps vunpcklpd vunpcklps vxorpd vxorps vzeroall vzeroupper pclmullqlqdq pclmulhqlqdq pclmullqhqdq pclmulhqhqdq pclmulqdq vpclmullqlqdq vpclmulhqlqdq vpclmullqhqdq vpclmulhqhqdq vpclmulqdq vfmadd132ps vfmadd132pd vfmadd312ps vfmadd312pd vfmadd213ps vfmadd213pd vfmadd123ps vfmadd123pd vfmadd231ps vfmadd231pd vfmadd321ps vfmadd321pd vfmaddsub132ps vfmaddsub132pd vfmaddsub312ps vfmaddsub312pd vfmaddsub213ps vfmaddsub213pd vfmaddsub123ps vfmaddsub123pd vfmaddsub231ps vfmaddsub231pd vfmaddsub321ps vfmaddsub321pd vfmsub132ps vfmsub132pd vfmsub312ps vfmsub312pd vfmsub213ps vfmsub213pd vfmsub123ps vfmsub123pd vfmsub231ps vfmsub231pd vfmsub321ps vfmsub321pd vfmsubadd132ps vfmsubadd132pd vfmsubadd312ps vfmsubadd312pd vfmsubadd213ps vfmsubadd213pd vfmsubadd123ps vfmsubadd123pd vfmsubadd231ps vfmsubadd231pd vfmsubadd321ps vfmsubadd321pd vfnmadd132ps vfnmadd132pd vfnmadd312ps vfnmadd312pd vfnmadd213ps vfnmadd213pd vfnmadd123ps vfnmadd123pd vfnmadd231ps vfnmadd231pd vfnmadd321ps vfnmadd321pd vfnmsub132ps vfnmsub132pd vfnmsub312ps vfnmsub312pd vfnmsub213ps vfnmsub213pd vfnmsub123ps vfnmsub123pd vfnmsub231ps vfnmsub231pd 
vfnmsub321ps vfnmsub321pd vfmadd132ss vfmadd132sd vfmadd312ss vfmadd312sd vfmadd213ss vfmadd213sd vfmadd123ss vfmadd123sd vfmadd231ss vfmadd231sd vfmadd321ss vfmadd321sd vfmsub132ss vfmsub132sd vfmsub312ss vfmsub312sd vfmsub213ss vfmsub213sd vfmsub123ss vfmsub123sd vfmsub231ss vfmsub231sd vfmsub321ss vfmsub321sd vfnmadd132ss vfnmadd132sd vfnmadd312ss vfnmadd312sd vfnmadd213ss vfnmadd213sd vfnmadd123ss vfnmadd123sd vfnmadd231ss vfnmadd231sd vfnmadd321ss vfnmadd321sd vfnmsub132ss vfnmsub132sd vfnmsub312ss vfnmsub312sd vfnmsub213ss vfnmsub213sd vfnmsub123ss vfnmsub123sd vfnmsub231ss vfnmsub231sd vfnmsub321ss vfnmsub321sd rdfsbase rdgsbase rdrand wrfsbase wrgsbase vcvtph2ps vcvtps2ph adcx adox rdseed clac stac xstore xcryptecb xcryptcbc xcryptctr xcryptcfb xcryptofb montmul xsha1 xsha256 llwpcb slwpcb lwpval lwpins vfmaddpd vfmaddps vfmaddsd vfmaddss vfmaddsubpd vfmaddsubps vfmsubaddpd vfmsubaddps vfmsubpd vfmsubps vfmsubsd vfmsubss vfnmaddpd vfnmaddps vfnmaddsd vfnmaddss vfnmsubpd vfnmsubps vfnmsubsd vfnmsubss vfrczpd vfrczps vfrczsd vfrczss vpcmov vpcomb vpcomd vpcomq vpcomub vpcomud vpcomuq vpcomuw vpcomw vphaddbd vphaddbq vphaddbw vphadddq vphaddubd vphaddubq vphaddubw vphaddudq vphadduwd vphadduwq vphaddwd vphaddwq vphsubbw vphsubdq vphsubwd vpmacsdd vpmacsdqh vpmacsdql vpmacssdd vpmacssdqh vpmacssdql vpmacsswd vpmacssww vpmacswd vpmacsww vpmadcsswd vpmadcswd vpperm vprotb vprotd vprotq vprotw vpshab vpshad vpshaq vpshaw vpshlb vpshld vpshlq vpshlw vbroadcasti128 vpblendd vpbroadcastb vpbroadcastw vpbroadcastd vpbroadcastq vpermd vpermpd vpermps vpermq vperm2i128 vextracti128 vinserti128 vpmaskmovd vpmaskmovq vpsllvd vpsllvq vpsravd vpsrlvd vpsrlvq vgatherdpd vgatherqpd vgatherdps vgatherqps vpgatherdd vpgatherqd vpgatherdq vpgatherqq xabort xbegin xend xtest andn bextr blci blcic blsi blsic blcfill blsfill blcmsk blsmsk blsr blcs bzhi mulx pdep pext rorx sarx shlx shrx tzcnt tzmsk t1mskc valignd valignq vblendmpd vblendmps vbroadcastf32x4 vbroadcastf64x4 vbroadcasti32x4 vbroadcasti64x4 vcompresspd vcompressps vcvtpd2udq vcvtps2udq vcvtsd2usi vcvtss2usi vcvttpd2udq vcvttps2udq vcvttsd2usi vcvttss2usi vcvtudq2pd vcvtudq2ps vcvtusi2sd vcvtusi2ss vexpandpd vexpandps vextractf32x4 vextractf64x4 vextracti32x4 vextracti64x4 vfixupimmpd vfixupimmps vfixupimmsd vfixupimmss vgetexppd vgetexpps vgetexpsd vgetexpss vgetmantpd vgetmantps vgetmantsd vgetmantss vinsertf32x4 vinsertf64x4 vinserti32x4 vinserti64x4 vmovdqa32 vmovdqa64 vmovdqu32 vmovdqu64 vpabsq vpandd vpandnd vpandnq vpandq vpblendmd vpblendmq vpcmpltd vpcmpled vpcmpneqd vpcmpnltd vpcmpnled vpcmpd vpcmpltq vpcmpleq vpcmpneqq vpcmpnltq vpcmpnleq vpcmpq vpcmpequd vpcmpltud vpcmpleud vpcmpnequd vpcmpnltud vpcmpnleud vpcmpud vpcmpequq vpcmpltuq vpcmpleuq vpcmpnequq vpcmpnltuq vpcmpnleuq vpcmpuq vpcompressd vpcompressq vpermi2d vpermi2pd vpermi2ps vpermi2q vpermt2d vpermt2pd vpermt2ps vpermt2q vpexpandd vpexpandq vpmaxsq vpmaxuq vpminsq vpminuq vpmovdb vpmovdw vpmovqb vpmovqd vpmovqw vpmovsdb vpmovsdw vpmovsqb vpmovsqd vpmovsqw vpmovusdb vpmovusdw vpmovusqb vpmovusqd vpmovusqw vpord vporq vprold vprolq vprolvd vprolvq vprord vprorq vprorvd vprorvq vpscatterdd vpscatterdq vpscatterqd vpscatterqq vpsraq vpsravq vpternlogd vpternlogq vptestmd vptestmq vptestnmd vptestnmq vpxord vpxorq vrcp14pd vrcp14ps vrcp14sd vrcp14ss vrndscalepd vrndscaleps vrndscalesd vrndscaless vrsqrt14pd vrsqrt14ps vrsqrt14sd vrsqrt14ss vscalefpd vscalefps vscalefsd vscalefss vscatterdpd vscatterdps vscatterqpd vscatterqps vshuff32x4 vshuff64x2 vshufi32x4 vshufi64x2 kandnw 
kandw kmovw knotw kortestw korw kshiftlw kshiftrw kunpckbw kxnorw kxorw vpbroadcastmb2q vpbroadcastmw2d vpconflictd vpconflictq vplzcntd vplzcntq vexp2pd vexp2ps vrcp28pd vrcp28ps vrcp28sd vrcp28ss vrsqrt28pd vrsqrt28ps vrsqrt28sd vrsqrt28ss vgatherpf0dpd vgatherpf0dps vgatherpf0qpd vgatherpf0qps vgatherpf1dpd vgatherpf1dps vgatherpf1qpd vgatherpf1qps vscatterpf0dpd vscatterpf0dps vscatterpf0qpd vscatterpf0qps vscatterpf1dpd vscatterpf1dps vscatterpf1qpd vscatterpf1qps prefetchwt1 bndmk bndcl bndcu bndcn bndmov bndldx bndstx sha1rnds4 sha1nexte sha1msg1 sha1msg2 sha256rnds2 sha256msg1 sha256msg2 hint_nop0 hint_nop1 hint_nop2 hint_nop3 hint_nop4 hint_nop5 hint_nop6 hint_nop7 hint_nop8 hint_nop9 hint_nop10 hint_nop11 hint_nop12 hint_nop13 hint_nop14 hint_nop15 hint_nop16 hint_nop17 hint_nop18 hint_nop19 hint_nop20 hint_nop21 hint_nop22 hint_nop23 hint_nop24 hint_nop25 hint_nop26 hint_nop27 hint_nop28 hint_nop29 hint_nop30 hint_nop31 hint_nop32 hint_nop33 hint_nop34 hint_nop35 hint_nop36 hint_nop37 hint_nop38 hint_nop39 hint_nop40 hint_nop41 hint_nop42 hint_nop43 hint_nop44 hint_nop45 hint_nop46 hint_nop47 hint_nop48 hint_nop49 hint_nop50 hint_nop51 hint_nop52 hint_nop53 hint_nop54 hint_nop55 hint_nop56 hint_nop57 hint_nop58 hint_nop59 hint_nop60 hint_nop61 hint_nop62 hint_nop63",built_in:"ip eip rip al ah bl bh cl ch dl dh sil dil bpl spl r8b r9b r10b r11b r12b r13b r14b r15b ax bx cx dx si di bp sp r8w r9w r10w r11w r12w r13w r14w r15w eax ebx ecx edx esi edi ebp esp eip r8d r9d r10d r11d r12d r13d r14d r15d rax rbx rcx rdx rsi rdi rbp rsp r8 r9 r10 r11 r12 r13 r14 r15 cs ds es fs gs ss st st0 st1 st2 st3 st4 st5 st6 st7 mm0 mm1 mm2 mm3 mm4 mm5 mm6 mm7 xmm0 xmm1 xmm2 xmm3 xmm4 xmm5 xmm6 xmm7 xmm8 xmm9 xmm10 xmm11 xmm12 xmm13 xmm14 xmm15 xmm16 xmm17 xmm18 xmm19 xmm20 xmm21 xmm22 xmm23 xmm24 xmm25 xmm26 xmm27 xmm28 xmm29 xmm30 xmm31 ymm0 ymm1 ymm2 ymm3 ymm4 ymm5 ymm6 ymm7 ymm8 ymm9 ymm10 ymm11 ymm12 ymm13 ymm14 ymm15 ymm16 ymm17 ymm18 ymm19 ymm20 ymm21 ymm22 ymm23 ymm24 ymm25 ymm26 ymm27 ymm28 ymm29 ymm30 ymm31 zmm0 zmm1 zmm2 zmm3 zmm4 zmm5 zmm6 zmm7 zmm8 zmm9 zmm10 zmm11 zmm12 zmm13 zmm14 zmm15 zmm16 zmm17 zmm18 zmm19 zmm20 zmm21 zmm22 zmm23 zmm24 zmm25 zmm26 zmm27 zmm28 zmm29 zmm30 zmm31 k0 k1 k2 k3 k4 k5 k6 k7 bnd0 bnd1 bnd2 bnd3 cr0 cr1 cr2 cr3 cr4 cr8 dr0 dr1 dr2 dr3 dr8 tr3 tr4 tr5 tr6 tr7 r0 r1 r2 r3 r4 r5 r6 r7 r0b r1b r2b r3b r4b r5b r6b r7b r0w r1w r2w r3w r4w r5w r6w r7w r0d r1d r2d r3d r4d r5d r6d r7d r0h r1h r2h r3h r0l r1l r2l r3l r4l r5l r6l r7l r8l r9l r10l r11l r12l r13l r14l r15l db dw dd dq dt ddq do dy dz resb resw resd resq rest resdq reso resy resz incbin equ times byte word dword qword nosplit rel abs seg wrt strict near far a32 ptr",meta:"%define %xdefine %+ %undef %defstr %deftok %assign %strcat %strlen %substr %rotate %elif %else %endif %if %ifmacro %ifctx %ifidn %ifidni %ifid %ifnum %ifstr %iftoken %ifempty %ifenv %error %warning %fatal %rep %endrep %include %push %pop %repl %pathsearch %depend %use %arg %stacksize %local %line %comment %endcomment .nolist __FILE__ __LINE__ __SECT__ __BITS__ __OUTPUT_FORMAT__ __DATE__ __TIME__ __DATE_NUM__ __TIME_NUM__ __UTC_DATE__ __UTC_TIME__ __UTC_DATE_NUM__ __UTC_TIME_NUM__ __PASS__ struc endstruc istruc at iend align alignb sectalign daz nodaz up down zero default option assume public bits use16 use32 use64 default section segment absolute extern global common cpu float __utf16__ __utf16le__ __utf16be__ __utf32__ __utf32le__ __utf32be__ __float8__ __float16__ __float32__ __float64__ __float80m__ __float80e__ __float128l__ 
__float128h__ __Infinity__ __QNaN__ __SNaN__ Inf NaN QNaN SNaN float8 float16 float32 float64 float80m float80e float128l float128h __FLOAT_DAZ__ __FLOAT_ROUND__ __FLOAT__"},contains:[s.COMMENT(";","$",{relevance:0}),{className:"number",variants:[{begin:"\\b(?:([0-9][0-9_]*)?\\.[0-9_]*(?:[eE][+-]?[0-9_]+)?|(0[Xx])?[0-9][0-9_]*\\.?[0-9_]*(?:[pP](?:[+-]?[0-9_]+)?)?)\\b",relevance:0},{begin:"\\$[0-9][0-9A-Fa-f]*",relevance:0},{begin:"\\b(?:[0-9A-Fa-f][0-9A-Fa-f_]*[Hh]|[0-9][0-9_]*[DdTt]?|[0-7][0-7_]*[QqOo]|[0-1][0-1_]*[BbYy])\\b"},{begin:"\\b(?:0[Xx][0-9A-Fa-f_]+|0[DdTt][0-9_]+|0[QqOo][0-7_]+|0[BbYy][0-1_]+)\\b"}]},s.QUOTE_STRING_MODE,{className:"string",variants:[{begin:"'",end:"[^\\\\]'"},{begin:"`",end:"[^\\\\]`"}],relevance:0},{className:"symbol",variants:[{begin:"^\\s*[A-Za-z._?][A-Za-z0-9_$#@~.?]*(:|\\s+label)"},{begin:"^\\s*%%[A-Za-z0-9_$#@~.?]*:"}],relevance:0},{className:"subst",begin:"%[0-9]+",relevance:0},{className:"subst",begin:"%!S+",relevance:0},{className:"meta",begin:/^\s*\.[\w_-]+/}]}}}());hljs.registerLanguage("kotlin",function(){"use strict";return function(e){var n={keyword:"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual trait volatile transient native default",built_in:"Byte Short Char Int Long Boolean Float Double Void Unit Nothing",literal:"true false null"},a={className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"@"},i={className:"subst",begin:"\\${",end:"}",contains:[e.C_NUMBER_MODE]},s={className:"variable",begin:"\\$"+e.UNDERSCORE_IDENT_RE},t={className:"string",variants:[{begin:'"""',end:'"""(?=[^"])',contains:[s,i]},{begin:"'",end:"'",illegal:/\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"',illegal:/\n/,contains:[e.BACKSLASH_ESCAPE,s,i]}]};i.contains.push(t);var r={className:"meta",begin:"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*"+e.UNDERSCORE_IDENT_RE+")?"},l={className:"meta",begin:"@"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\(/,end:/\)/,contains:[e.inherit(t,{className:"meta-string"})]}]},c=e.COMMENT("/\\*","\\*/",{contains:[e.C_BLOCK_COMMENT_MODE]}),o={variants:[{className:"type",begin:e.UNDERSCORE_IDENT_RE},{begin:/\(/,end:/\)/,contains:[]}]},d=o;return d.variants[1].contains=[o],o.variants[1].contains=[d],{name:"Kotlin",aliases:["kt"],keywords:n,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,c,{className:"keyword",begin:/\b(break|continue|return|this)\b/,starts:{contains:[{className:"symbol",begin:/@\w+/}]}},a,r,l,{className:"function",beginKeywords:"fun",end:"[(]|$",returnBegin:!0,excludeEnd:!0,keywords:n,illegal:/fun\s+(<.*>)?[^\s\(]+(\s+[^\s\(]+)\s*=/,relevance:5,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"type",begin://,keywords:"reified",relevance:0},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\/]/,endsWithParent:!0,contains:[o,e.C_LINE_COMMENT_MODE,c],relevance:0},e.C_LINE_COMMENT_MODE,c,r,l,t,e.C_NUMBER_MODE]},c]},{className:"class",beginKeywords:"class interface trait",end:/[:\{(]|$/,excludeEnd:!0,illegal:"extends implements",contains:[{beginKeywords:"public 
protected internal private constructor"},e.UNDERSCORE_TITLE_MODE,{className:"type",begin://,excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:/[,:]\s*/,end:/[<\(,]|$/,excludeBegin:!0,returnEnd:!0},r,l]},t,{className:"meta",begin:"^#!/usr/bin/env",end:"$",illegal:"\n"},{className:"number",begin:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",relevance:0}]}}}());hljs.registerLanguage("armasm",function(){"use strict";return function(s){const e={variants:[s.COMMENT("^[ \\t]*(?=#)","$",{relevance:0,excludeBegin:!0}),s.COMMENT("[;@]","$",{relevance:0}),s.C_LINE_COMMENT_MODE,s.C_BLOCK_COMMENT_MODE]};return{name:"ARM Assembly",case_insensitive:!0,aliases:["arm"],keywords:{$pattern:"\\.?"+s.IDENT_RE,meta:".2byte .4byte .align .ascii .asciz .balign .byte .code .data .else .end .endif .endm .endr .equ .err .exitm .extern .global .hword .if .ifdef .ifndef .include .irp .long .macro .rept .req .section .set .skip .space .text .word .arm .thumb .code16 .code32 .force_thumb .thumb_func .ltorg ALIAS ALIGN ARM AREA ASSERT ATTR CN CODE CODE16 CODE32 COMMON CP DATA DCB DCD DCDU DCDO DCFD DCFDU DCI DCQ DCQU DCW DCWU DN ELIF ELSE END ENDFUNC ENDIF ENDP ENTRY EQU EXPORT EXPORTAS EXTERN FIELD FILL FUNCTION GBLA GBLL GBLS GET GLOBAL IF IMPORT INCBIN INCLUDE INFO KEEP LCLA LCLL LCLS LTORG MACRO MAP MEND MEXIT NOFP OPT PRESERVE8 PROC QN READONLY RELOC REQUIRE REQUIRE8 RLIST FN ROUT SETA SETL SETS SN SPACE SUBT THUMB THUMBX TTL WHILE WEND ",built_in:"r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10 r11 r12 r13 r14 r15 pc lr sp ip sl sb fp a1 a2 a3 a4 v1 v2 v3 v4 v5 v6 v7 v8 f0 f1 f2 f3 f4 f5 f6 f7 p0 p1 p2 p3 p4 p5 p6 p7 p8 p9 p10 p11 p12 p13 p14 p15 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 q0 q1 q2 q3 q4 q5 q6 q7 q8 q9 q10 q11 q12 q13 q14 q15 cpsr_c cpsr_x cpsr_s cpsr_f cpsr_cx cpsr_cxs cpsr_xs cpsr_xsf cpsr_sf cpsr_cxsf spsr_c spsr_x spsr_s spsr_f spsr_cx spsr_cxs spsr_xs spsr_xsf spsr_sf spsr_cxsf s0 s1 s2 s3 s4 s5 s6 s7 s8 s9 s10 s11 s12 s13 s14 s15 s16 s17 s18 s19 s20 s21 s22 s23 s24 s25 s26 s27 s28 s29 s30 s31 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 d10 d11 d12 d13 d14 d15 d16 d17 d18 d19 d20 d21 d22 d23 d24 d25 d26 d27 d28 d29 d30 d31 {PC} {VAR} {TRUE} {FALSE} {OPT} {CONFIG} {ENDIAN} {CODESIZE} {CPU} {FPU} {ARCHITECTURE} {PCSTOREOFFSET} {ARMASM_VERSION} {INTER} {ROPI} {RWPI} {SWST} {NOSWST} . 
@"},contains:[{className:"keyword",begin:"\\b(adc|(qd?|sh?|u[qh]?)?add(8|16)?|usada?8|(q|sh?|u[qh]?)?(as|sa)x|and|adrl?|sbc|rs[bc]|asr|b[lx]?|blx|bxj|cbn?z|tb[bh]|bic|bfc|bfi|[su]bfx|bkpt|cdp2?|clz|clrex|cmp|cmn|cpsi[ed]|cps|setend|dbg|dmb|dsb|eor|isb|it[te]{0,3}|lsl|lsr|ror|rrx|ldm(([id][ab])|f[ds])?|ldr((s|ex)?[bhd])?|movt?|mvn|mra|mar|mul|[us]mull|smul[bwt][bt]|smu[as]d|smmul|smmla|mla|umlaal|smlal?([wbt][bt]|d)|mls|smlsl?[ds]|smc|svc|sev|mia([bt]{2}|ph)?|mrr?c2?|mcrr2?|mrs|msr|orr|orn|pkh(tb|bt)|rbit|rev(16|sh)?|sel|[su]sat(16)?|nop|pop|push|rfe([id][ab])?|stm([id][ab])?|str(ex)?[bhd]?|(qd?)?sub|(sh?|q|u[qh]?)?sub(8|16)|[su]xt(a?h|a?b(16)?)|srs([id][ab])?|swpb?|swi|smi|tst|teq|wfe|wfi|yield)(eq|ne|cs|cc|mi|pl|vs|vc|hi|ls|ge|lt|gt|le|al|hs|lo)?[sptrx]?(?=\\s)"},e,s.QUOTE_STRING_MODE,{className:"string",begin:"'",end:"[^\\\\]'",relevance:0},{className:"title",begin:"\\|",end:"\\|",illegal:"\\n",relevance:0},{className:"number",variants:[{begin:"[#$=]?0x[0-9a-f]+"},{begin:"[#$=]?0b[01]+"},{begin:"[#$=]\\d+"},{begin:"\\b\\d+"}],relevance:0},{className:"symbol",variants:[{begin:"^[ \\t]*[a-z_\\.\\$][a-z0-9_\\.\\$]+:"},{begin:"^[a-z_\\.\\$][a-z0-9_\\.\\$]+"},{begin:"[=#]\\w+"}],relevance:0}]}}}());hljs.registerLanguage("go",function(){"use strict";return function(e){var n={keyword:"break default func interface select case map struct chan else goto package switch const fallthrough if range type continue for import return var go defer bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string uint8 uint16 uint32 uint64 int uint uintptr rune",literal:"true false iota nil",built_in:"append cap close complex copy imag len make new panic print println real recover delete"};return{name:"Go",aliases:["golang"],keywords:n,illegal:">>|\.\.\.) /},i={className:"subst",begin:/\{/,end:/\}/,keywords:n,illegal:/#/},s={begin:/\{\{/,relevance:0},r={className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:/(u|b)?r?'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(u|b)?r?"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(fr|rf|f)'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a,s,i]},{begin:/(fr|rf|f)"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a,s,i]},{begin:/(u|r|ur)'/,end:/'/,relevance:10},{begin:/(u|r|ur)"/,end:/"/,relevance:10},{begin:/(b|br)'/,end:/'/},{begin:/(b|br)"/,end:/"/},{begin:/(fr|rf|f)'/,end:/'/,contains:[e.BACKSLASH_ESCAPE,s,i]},{begin:/(fr|rf|f)"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,s,i]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},l={className:"number",relevance:0,variants:[{begin:e.BINARY_NUMBER_RE+"[lLjJ]?"},{begin:"\\b(0o[0-7]+)[lLjJ]?"},{begin:e.C_NUMBER_RE+"[lLjJ]?"}]},t={className:"params",variants:[{begin:/\(\s*\)/,skip:!0,className:null},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,contains:["self",a,l,r,e.HASH_COMMENT_MODE]}]};return i.contains=[r,l,a],{name:"Python",aliases:["py","gyp","ipython"],keywords:n,illegal:/(<\/|->|\?)|=>/,contains:[a,l,{beginKeywords:"if",relevance:0},r,e.HASH_COMMENT_MODE,{variants:[{className:"function",beginKeywords:"def"},{className:"class",beginKeywords:"class"}],end:/:/,illegal:/[${=;\n,]/,contains:[e.UNDERSCORE_TITLE_MODE,t,{begin:/->/,endsWithParent:!0,keywords:"None"}]},{className:"meta",begin:/^[\t ]*@/,end:/$/},{begin:/\b(print|exec)\(/}]}}}());hljs.registerLanguage("shell",function(){"use strict";return function(s){return{name:"Shell 
Session",aliases:["console"],contains:[{className:"meta",begin:"^\\s{0,3}[/\\w\\d\\[\\]()@-]*[>%$#]",starts:{end:"$",subLanguage:"bash"}}]}}}());hljs.registerLanguage("scala",function(){"use strict";return function(e){var n={className:"subst",variants:[{begin:"\\$[A-Za-z0-9_]+"},{begin:"\\${",end:"}"}]},a={className:"string",variants:[{begin:'"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:'"""',end:'"""',relevance:10},{begin:'[a-z]+"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE,n]},{className:"string",begin:'[a-z]+"""',end:'"""',contains:[n],relevance:10}]},s={className:"type",begin:"\\b[A-Z][A-Za-z0-9_]*",relevance:0},t={className:"title",begin:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,relevance:0},i={className:"class",beginKeywords:"class object trait type",end:/[:={\[\n;]/,excludeEnd:!0,contains:[{beginKeywords:"extends with",relevance:10},{begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[s]},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[s]},t]},l={className:"function",beginKeywords:"def",end:/[:={\[(\n;]/,excludeEnd:!0,contains:[t]};return{name:"Scala",keywords:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,{className:"symbol",begin:"'\\w[\\w\\d_]*(?!')"},s,l,i,e.C_NUMBER_MODE,{className:"meta",begin:"@[A-Za-z]+"}]}}}());hljs.registerLanguage("julia",function(){"use strict";return function(e){var r="[A-Za-z_\\u00A1-\\uFFFF][A-Za-z_0-9\\u00A1-\\uFFFF]*",t={$pattern:r,keyword:"in isa where baremodule begin break catch ccall const continue do else elseif end export false finally for function global if import importall let local macro module quote return true try using while type immutable abstract bitstype typealias ",literal:"true false ARGS C_NULL DevNull ENDIAN_BOM ENV I Inf Inf16 Inf32 Inf64 InsertionSort JULIA_HOME LOAD_PATH MergeSort NaN NaN16 NaN32 NaN64 PROGRAM_FILE QuickSort RoundDown RoundFromZero RoundNearest RoundNearestTiesAway RoundNearestTiesUp RoundToZero RoundUp STDERR STDIN STDOUT VERSION catalan e|0 eu|0 eulergamma golden im nothing pi γ π φ ",built_in:"ANY AbstractArray AbstractChannel AbstractFloat AbstractMatrix AbstractRNG AbstractSerializer AbstractSet AbstractSparseArray AbstractSparseMatrix AbstractSparseVector AbstractString AbstractUnitRange AbstractVecOrMat AbstractVector Any ArgumentError Array AssertionError Associative Base64DecodePipe Base64EncodePipe Bidiagonal BigFloat BigInt BitArray BitMatrix BitVector Bool BoundsError BufferStream CachingPool CapturedException CartesianIndex CartesianRange Cchar Cdouble Cfloat Channel Char Cint Cintmax_t Clong Clonglong ClusterManager Cmd CodeInfo Colon Complex Complex128 Complex32 Complex64 CompositeException Condition ConjArray ConjMatrix ConjVector Cptrdiff_t Cshort Csize_t Cssize_t Cstring Cuchar Cuint Cuintmax_t Culong Culonglong Cushort Cwchar_t Cwstring DataType Date DateFormat DateTime DenseArray DenseMatrix DenseVecOrMat DenseVector Diagonal Dict DimensionMismatch Dims DirectIndexString Display DivideError DomainError EOFError EachLine Enum Enumerate ErrorException Exception ExponentialBackOff Expr Factorization FileMonitor Float16 Float32 Float64 Function Future GlobalRef GotoNode HTML 
Hermitian IO IOBuffer IOContext IOStream IPAddr IPv4 IPv6 IndexCartesian IndexLinear IndexStyle InexactError InitError Int Int128 Int16 Int32 Int64 Int8 IntSet Integer InterruptException InvalidStateException Irrational KeyError LabelNode LinSpace LineNumberNode LoadError LowerTriangular MIME Matrix MersenneTwister Method MethodError MethodTable Module NTuple NewvarNode NullException Nullable Number ObjectIdDict OrdinalRange OutOfMemoryError OverflowError Pair ParseError PartialQuickSort PermutedDimsArray Pipe PollingFileWatcher ProcessExitedException Ptr QuoteNode RandomDevice Range RangeIndex Rational RawFD ReadOnlyMemoryError Real ReentrantLock Ref Regex RegexMatch RemoteChannel RemoteException RevString RoundingMode RowVector SSAValue SegmentationFault SerializationState Set SharedArray SharedMatrix SharedVector Signed SimpleVector Slot SlotNumber SparseMatrixCSC SparseVector StackFrame StackOverflowError StackTrace StepRange StepRangeLen StridedArray StridedMatrix StridedVecOrMat StridedVector String SubArray SubString SymTridiagonal Symbol Symmetric SystemError TCPSocket Task Text TextDisplay Timer Tridiagonal Tuple Type TypeError TypeMapEntry TypeMapLevel TypeName TypeVar TypedSlot UDPSocket UInt UInt128 UInt16 UInt32 UInt64 UInt8 UndefRefError UndefVarError UnicodeError UniformScaling Union UnionAll UnitRange Unsigned UpperTriangular Val Vararg VecElement VecOrMat Vector VersionNumber Void WeakKeyDict WeakRef WorkerConfig WorkerPool "},a={keywords:t,illegal:/<\//},n={className:"subst",begin:/\$\(/,end:/\)/,keywords:t},o={className:"variable",begin:"\\$"+r},i={className:"string",contains:[e.BACKSLASH_ESCAPE,n,o],variants:[{begin:/\w*"""/,end:/"""\w*/,relevance:10},{begin:/\w*"/,end:/"\w*/}]},l={className:"string",contains:[e.BACKSLASH_ESCAPE,n,o],begin:"`",end:"`"},s={className:"meta",begin:"@"+r};return a.name="Julia",a.contains=[{className:"number",begin:/(\b0x[\d_]*(\.[\d_]*)?|0x\.\d[\d_]*)p[-+]?\d+|\b0[box][a-fA-F0-9][a-fA-F0-9_]*|(\b\d[\d_]*(\.[\d_]*)?|\.\d[\d_]*)([eEfF][-+]?\d+)?/,relevance:0},{className:"string",begin:/'(.|\\[xXuU][a-zA-Z0-9]+)'/},i,l,s,{className:"comment",variants:[{begin:"#=",end:"=#",relevance:10},{begin:"#",end:"$"}]},e.HASH_COMMENT_MODE,{className:"keyword",begin:"\\b(((abstract|primitive)\\s+)type|(mutable\\s+)?struct)\\b"},{begin:/<:/}],n.contains=a.contains,a}}());hljs.registerLanguage("php-template",function(){"use strict";return function(n){return{name:"PHP template",subLanguage:"xml",contains:[{begin:/<\?(php|=)?/,end:/\?>/,subLanguage:"php",contains:[{begin:"/\\*",end:"\\*/",skip:!0},{begin:'b"',end:'"',skip:!0},{begin:"b'",end:"'",skip:!0},n.inherit(n.APOS_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0}),n.inherit(n.QUOTE_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0})]}]}}}());hljs.registerLanguage("scss",function(){"use strict";return function(e){var t={className:"variable",begin:"(\\$[a-zA-Z-][a-zA-Z0-9_-]*)\\b"},i={className:"number",begin:"#[0-9A-Fa-f]+"};return 
e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{name:"SCSS",case_insensitive:!0,illegal:"[=/|']",contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"selector-id",begin:"\\#[A-Za-z0-9_-]+",relevance:0},{className:"selector-class",begin:"\\.[A-Za-z0-9_-]+",relevance:0},{className:"selector-attr",begin:"\\[",end:"\\]",illegal:"$"},{className:"selector-tag",begin:"\\b(a|abbr|acronym|address|area|article|aside|audio|b|base|big|blockquote|body|br|button|canvas|caption|cite|code|col|colgroup|command|datalist|dd|del|details|dfn|div|dl|dt|em|embed|fieldset|figcaption|figure|footer|form|frame|frameset|(h[1-6])|head|header|hgroup|hr|html|i|iframe|img|input|ins|kbd|keygen|label|legend|li|link|map|mark|meta|meter|nav|noframes|noscript|object|ol|optgroup|option|output|p|param|pre|progress|q|rp|rt|ruby|samp|script|section|select|small|span|strike|strong|style|sub|sup|table|tbody|td|textarea|tfoot|th|thead|time|title|tr|tt|ul|var|video)\\b",relevance:0},{className:"selector-pseudo",begin:":(visited|valid|root|right|required|read-write|read-only|out-range|optional|only-of-type|only-child|nth-of-type|nth-last-of-type|nth-last-child|nth-child|not|link|left|last-of-type|last-child|lang|invalid|indeterminate|in-range|hover|focus|first-of-type|first-line|first-letter|first-child|first|enabled|empty|disabled|default|checked|before|after|active)"},{className:"selector-pseudo",begin:"::(after|before|choices|first-letter|first-line|repeat-index|repeat-item|selection|value)"},t,{className:"attribute",begin:"\\b(src|z-index|word-wrap|word-spacing|word-break|width|widows|white-space|visibility|vertical-align|unicode-bidi|transition-timing-function|transition-property|transition-duration|transition-delay|transition|transform-style|transform-origin|transform|top|text-underline-position|text-transform|text-shadow|text-rendering|text-overflow|text-indent|text-decoration-style|text-decoration-line|text-decoration-color|text-decoration|text-align-last|text-align|tab-size|table-layout|right|resize|quotes|position|pointer-events|perspective-origin|perspective|page-break-inside|page-break-before|page-break-after|padding-top|padding-right|padding-left|padding-bottom|padding|overflow-y|overflow-x|overflow-wrap|overflow|outline-width|outline-style|outline-offset|outline-color|outline|orphans|order|opacity|object-position|object-fit|normal|none|nav-up|nav-right|nav-left|nav-index|nav-down|min-width|min-height|max-width|max-height|mask|marks|margin-top|margin-right|margin-left|margin-bottom|margin|list-style-type|list-style-position|list-style-image|list-style|line-height|letter-spacing|left|justify-content|initial|inherit|ime-mode|image-orientation|image-resolution|image-rendering|icon|hyphens|height|font-weight|font-variant-ligatures|font-variant|font-style|font-stretch|font-size-adjust|font-size|font-language-override|font-kerning|font-feature-settings|font-family|font|float|flex-wrap|flex-shrink|flex-grow|flex-flow|flex-direction|flex-basis|flex|filter|empty-cells|display|direction|cursor|counter-reset|counter-increment|content|column-width|column-span|column-rule-width|column-rule-style|column-rule-color|column-rule|column-gap|column-fill|column-count|columns|color|clip-path|clip|clear|caption-side|break-inside|break-before|break-after|box-sizing|box-shadow|box-decoration-break|bottom|border-width|border-top-width|border-top-style|border-top-right-radius|border-top-left-radius|border-top-color|border-top|border-style|border-spacing|border-right-width|border-right-style|border-righ
t-color|border-right|border-radius|border-left-width|border-left-style|border-left-color|border-left|border-image-width|border-image-source|border-image-slice|border-image-repeat|border-image-outset|border-image|border-color|border-collapse|border-bottom-width|border-bottom-style|border-bottom-right-radius|border-bottom-left-radius|border-bottom-color|border-bottom|border|background-size|background-repeat|background-position|background-origin|background-image|background-color|background-clip|background-attachment|background-blend-mode|background|backface-visibility|auto|animation-timing-function|animation-play-state|animation-name|animation-iteration-count|animation-fill-mode|animation-duration|animation-direction|animation-delay|animation|align-self|align-items|align-content)\\b",illegal:"[^\\s]"},{begin:"\\b(whitespace|wait|w-resize|visible|vertical-text|vertical-ideographic|uppercase|upper-roman|upper-alpha|underline|transparent|top|thin|thick|text|text-top|text-bottom|tb-rl|table-header-group|table-footer-group|sw-resize|super|strict|static|square|solid|small-caps|separate|se-resize|scroll|s-resize|rtl|row-resize|ridge|right|repeat|repeat-y|repeat-x|relative|progress|pointer|overline|outside|outset|oblique|nowrap|not-allowed|normal|none|nw-resize|no-repeat|no-drop|newspaper|ne-resize|n-resize|move|middle|medium|ltr|lr-tb|lowercase|lower-roman|lower-alpha|loose|list-item|line|line-through|line-edge|lighter|left|keep-all|justify|italic|inter-word|inter-ideograph|inside|inset|inline|inline-block|inherit|inactive|ideograph-space|ideograph-parenthesis|ideograph-numeric|ideograph-alpha|horizontal|hidden|help|hand|groove|fixed|ellipsis|e-resize|double|dotted|distribute|distribute-space|distribute-letter|distribute-all-lines|disc|disabled|default|decimal|dashed|crosshair|collapse|col-resize|circle|char|center|capitalize|break-word|break-all|bottom|both|bolder|bold|block|bidi-override|below|baseline|auto|always|all-scroll|absolute|table|table-cell)\\b"},{begin:":",end:";",contains:[t,i,e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,{className:"meta",begin:"!important"}]},{begin:"@(page|font-face)",lexemes:"@[a-z-]+",keywords:"@page @font-face"},{begin:"@",end:"[{;]",returnBegin:!0,keywords:"and or not only",contains:[{begin:"@[a-z-]+",className:"keyword"},t,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,i,e.CSS_NUMBER_MODE]}]}}}());hljs.registerLanguage("r",function(){"use strict";return function(e){var n="([a-zA-Z]|\\.[a-zA-Z.])[a-zA-Z0-9._]*";return{name:"R",contains:[e.HASH_COMMENT_MODE,{begin:n,keywords:{$pattern:n,keyword:"function if in break next repeat else for return switch while try tryCatch stop warning require library attach detach source setMethod setGeneric setGroupGeneric setClass ...",literal:"NULL NA TRUE FALSE T F Inf NaN NA_integer_|10 NA_real_|10 NA_character_|10 NA_complex_|10"},relevance:0},{className:"number",begin:"0[xX][0-9a-fA-F]+[Li]?\\b",relevance:0},{className:"number",begin:"\\d+(?:[eE][+\\-]?\\d*)?L\\b",relevance:0},{className:"number",begin:"\\d+\\.(?!\\d)(?:i\\b)?",relevance:0},{className:"number",begin:"\\d+(?:\\.\\d*)?(?:[eE][+\\-]?\\d*)?i?\\b",relevance:0},{className:"number",begin:"\\.\\d+(?:[eE][+\\-]?\\d*)?i?\\b",relevance:0},{begin:"`",end:"`",relevance:0},{className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:'"',end:'"'},{begin:"'",end:"'"}]}]}}}());hljs.registerLanguage("sql",function(){"use strict";return function(e){var t=e.COMMENT("--","$");return{name:"SQL",case_insensitive:!0,illegal:/[<>{}*]/,contains:[{beginKeywords:"begin end 
start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke comment values with",end:/;/,endsWithParent:!0,keywords:{$pattern:/[\w\.]+/,keyword:"as abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias all allocate allow alter always analyze ancillary and anti any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound bucket buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event 
eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain explode export export_set extended extent external external_1 external_2 externally extract failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force foreign form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour hours http id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lateral lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minutes minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notnull notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing 
password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second seconds section securefile security seed segment select self semi sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tablesample tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unnest unpivot 
unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace window with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek",literal:"true false null unknown",built_in:"array bigint binary bit blob bool boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text time timestamp tinyint varchar varchar2 varying void"},contains:[{className:"string",begin:"'",end:"'",contains:[{begin:"''"}]},{className:"string",begin:'"',end:'"',contains:[{begin:'""'}]},{className:"string",begin:"`",end:"`"},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,t,e.HASH_COMMENT_MODE]},e.C_BLOCK_COMMENT_MODE,t,e.HASH_COMMENT_MODE]}}}());hljs.registerLanguage("c",function(){"use strict";return function(e){var n=e.getLanguage("c-like").rawDefinition();return n.name="C",n.aliases=["c","h"],n}}());hljs.registerLanguage("json",function(){"use strict";return function(n){var e={literal:"true false null"},i=[n.C_LINE_COMMENT_MODE,n.C_BLOCK_COMMENT_MODE],t=[n.QUOTE_STRING_MODE,n.C_NUMBER_MODE],a={end:",",endsWithParent:!0,excludeEnd:!0,contains:t,keywords:e},l={begin:"{",end:"}",contains:[{className:"attr",begin:/"/,end:/"/,contains:[n.BACKSLASH_ESCAPE],illegal:"\\n"},n.inherit(a,{begin:/:/})].concat(i),illegal:"\\S"},s={begin:"\\[",end:"\\]",contains:[n.inherit(a)],illegal:"\\S"};return t.push(l,s),i.forEach((function(n){t.push(n)})),{name:"JSON",contains:t,keywords:e,illegal:"\\S"}}}());hljs.registerLanguage("python-repl",function(){"use strict";return function(n){return{aliases:["pycon"],contains:[{className:"meta",starts:{end:/ |$/,starts:{end:"$",subLanguage:"python"}},variants:[{begin:/^>>>(?=[ ]|$)/},{begin:/^\.\.\.(?=[ ]|$)/}]}]}}}());hljs.registerLanguage("markdown",function(){"use strict";return function(n){const e={begin:"<",end:">",subLanguage:"xml",relevance:0},a={begin:"\\[.+?\\][\\(\\[].*?[\\)\\]]",returnBegin:!0,contains:[{className:"string",begin:"\\[",end:"\\]",excludeBegin:!0,returnEnd:!0,relevance:0},{className:"link",begin:"\\]\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0},{className:"symbol",begin:"\\]\\[",end:"\\]",excludeBegin:!0,excludeEnd:!0}],relevance:10},i={className:"strong",contains:[],variants:[{begin:/_{2}/,end:/_{2}/},{begin:/\*{2}/,end:/\*{2}/}]},s={className:"emphasis",contains:[],variants:[{begin:/\*(?!\*)/,end:/\*/},{begin:/_(?!_)/,end:/_/,relevance:0}]};i.contains.push(s),s.contains.push(i);var c=[e,a];return i.contains=i.contains.concat(c),s.contains=s.contains.concat(c),{name:"Markdown",aliases:["md","mkdown","mkd"],contains:[{className:"section",variants:[{begin:"^#{1,6}",end:"$",contains:c=c.concat(i,s)},{begin:"(?=^.+?\\n[=-]{2,}$)",contains:[{begin:"^[=-]*$"},{begin:"^",end:"\\n",contains:c}]}]},e,{className:"bullet",begin:"^[ 
\t]*([*+-]|(\\d+\\.))(?=\\s+)",end:"\\s+",excludeEnd:!0},i,s,{className:"quote",begin:"^>\\s+",contains:c,end:"$"},{className:"code",variants:[{begin:"(`{3,})(.|\\n)*?\\1`*[ ]*"},{begin:"(~{3,})(.|\\n)*?\\1~*[ ]*"},{begin:"```",end:"```+[ ]*$"},{begin:"~~~",end:"~~~+[ ]*$"},{begin:"`.+?`"},{begin:"(?=^( {4}|\\t))",contains:[{begin:"^( {4}|\\t)",end:"(\\n)$"}],relevance:0}]},{begin:"^[-\\*]{3,}",end:"$"},a,{begin:/^\[[^\n]+\]:/,returnBegin:!0,contains:[{className:"symbol",begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0},{className:"link",begin:/:\s*/,end:/$/,excludeBegin:!0}]}]}}}());hljs.registerLanguage("javascript",function(){"use strict";const e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);function s(e){return r("(?=",e,")")}function r(...e){return e.map(e=>(function(e){return e?"string"==typeof e?e:e.source:null})(e)).join("")}return function(t){var i="[A-Za-z$_][0-9A-Za-z$_]*",c={begin:/<[A-Za-z0-9\\._:-]+/,end:/\/[A-Za-z0-9\\._:-]+>|\/>/},o={$pattern:"[A-Za-z$_][0-9A-Za-z$_]*",keyword:e.join(" "),literal:n.join(" "),built_in:a.join(" ")},l={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:t.C_NUMBER_RE+"n?"}],relevance:0},E={className:"subst",begin:"\\$\\{",end:"\\}",keywords:o,contains:[]},d={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[t.BACKSLASH_ESCAPE,E],subLanguage:"xml"}},g={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[t.BACKSLASH_ESCAPE,E],subLanguage:"css"}},u={className:"string",begin:"`",end:"`",contains:[t.BACKSLASH_ESCAPE,E]};E.contains=[t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,d,g,u,l,t.REGEXP_MODE];var b=E.contains.concat([{begin:/\(/,end:/\)/,contains:["self"].concat(E.contains,[t.C_BLOCK_COMMENT_MODE,t.C_LINE_COMMENT_MODE])},t.C_BLOCK_COMMENT_MODE,t.C_LINE_COMMENT_MODE]),_={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,contains:b};return{name:"JavaScript",aliases:["js","jsx","mjs","cjs"],keywords:o,contains:[t.SHEBANG({binary:"node",relevance:5}),{className:"meta",relevance:10,begin:/^\s*['"]use 
(strict|asm)['"]/},t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,d,g,u,t.C_LINE_COMMENT_MODE,t.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+",contains:[{className:"type",begin:"\\{",end:"\\}",relevance:0},{className:"variable",begin:i+"(?=\\s*(-)|$)",endsParent:!0,relevance:0},{begin:/(?=[^\n])\s/,relevance:0}]}]}),t.C_BLOCK_COMMENT_MODE,l,{begin:r(/[{,\n]\s*/,s(r(/(((\/\/.*)|(\/\*(.|\n)*\*\/))\s*)*/,i+"\\s*:"))),relevance:0,contains:[{className:"attr",begin:i+s("\\s*:"),relevance:0}]},{begin:"("+t.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw case",contains:[t.C_LINE_COMMENT_MODE,t.C_BLOCK_COMMENT_MODE,t.REGEXP_MODE,{className:"function",begin:"(\\([^(]*(\\([^(]*(\\([^(]*\\))?\\))?\\)|"+t.UNDERSCORE_IDENT_RE+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:t.UNDERSCORE_IDENT_RE},{className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:o,contains:b}]}]},{begin:/,/,relevance:0},{className:"",begin:/\s/,end:/\s*/,skip:!0},{variants:[{begin:"<>",end:""},{begin:c.begin,end:c.end}],subLanguage:"xml",contains:[{begin:c.begin,end:c.end,skip:!0,contains:["self"]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/\{/,excludeEnd:!0,contains:[t.inherit(t.TITLE_MODE,{begin:i}),_],illegal:/\[|%/},{begin:/\$[(.]/},t.METHOD_GUARD,{className:"class",beginKeywords:"class",end:/[{;=]/,excludeEnd:!0,illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends"},t.UNDERSCORE_TITLE_MODE]},{beginKeywords:"constructor",end:/\{/,excludeEnd:!0},{begin:"(get|set)\\s+(?="+i+"\\()",end:/{/,keywords:"get set",contains:[t.inherit(t.TITLE_MODE,{begin:i}),{begin:/\(\)/},_]}],illegal:/#(?!!)/}}}());hljs.registerLanguage("typescript",function(){"use strict";const e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);return function(r){var t={$pattern:"[A-Za-z$_][0-9A-Za-z$_]*",keyword:e.concat(["type","namespace","typedef","interface","public","private","protected","implements","declare","abstract","readonly"]).join(" "),literal:n.join(" "),built_in:a.concat(["any","void","number","boolean","string","object","never","enum"]).join(" 
")},s={className:"meta",begin:"@[A-Za-z$_][0-9A-Za-z$_]*"},i={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:r.C_NUMBER_RE+"n?"}],relevance:0},o={className:"subst",begin:"\\$\\{",end:"\\}",keywords:t,contains:[]},c={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:"xml"}},l={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:"css"}},E={className:"string",begin:"`",end:"`",contains:[r.BACKSLASH_ESCAPE,o]};o.contains=[r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,i,r.REGEXP_MODE];var d={begin:"\\(",end:/\)/,keywords:t,contains:["self",r.QUOTE_STRING_MODE,r.APOS_STRING_MODE,r.NUMBER_MODE]},u={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,s,d]};return{name:"TypeScript",aliases:["ts"],keywords:t,contains:[r.SHEBANG(),{className:"meta",begin:/^\s*['"]use strict['"]/},r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,i,{begin:"("+r.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw case",contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,r.REGEXP_MODE,{className:"function",begin:"(\\([^(]*(\\([^(]*(\\([^(]*\\))?\\))?\\)|"+r.UNDERSCORE_IDENT_RE+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:r.UNDERSCORE_IDENT_RE},{className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:d.contains}]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/[\{;]/,excludeEnd:!0,keywords:t,contains:["self",r.inherit(r.TITLE_MODE,{begin:"[A-Za-z$_][0-9A-Za-z$_]*"}),u],illegal:/%/,relevance:0},{beginKeywords:"constructor",end:/[\{;]/,excludeEnd:!0,contains:["self",u]},{begin:/module\./,keywords:{built_in:"module"},relevance:0},{beginKeywords:"module",end:/\{/,excludeEnd:!0},{beginKeywords:"interface",end:/\{/,excludeEnd:!0,keywords:"interface extends"},{begin:/\$[(.]/},{begin:"\\."+r.IDENT_RE,relevance:0},s,d]}}}());hljs.registerLanguage("plaintext",function(){"use strict";return function(t){return{name:"Plain text",aliases:["text","txt"],disableAutodetect:!0}}}());hljs.registerLanguage("less",function(){"use strict";return function(e){var n="([\\w-]+|@{[\\w-]+})",a=[],s=[],t=function(e){return{className:"string",begin:"~?"+e+".*?"+e}},r=function(e,n,a){return{className:e,begin:n,relevance:a}},i={begin:"\\(",end:"\\)",contains:s,relevance:0};s.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,t("'"),t('"'),e.CSS_NUMBER_MODE,{begin:"(url|data-uri)\\(",starts:{className:"string",end:"[\\)\\n]",excludeEnd:!0}},r("number","#[0-9A-Fa-f]+\\b"),i,r("variable","@@?[\\w-]+",10),r("variable","@{[\\w-]+}"),r("built_in","~?`[^`]*?`"),{className:"attribute",begin:"[\\w-]+\\s*:",end:":",returnBegin:!0,excludeEnd:!0},{className:"meta",begin:"!important"});var c=s.concat({begin:"{",end:"}",contains:a}),l={beginKeywords:"when",endsWithParent:!0,contains:[{beginKeywords:"and 
not"}].concat(s)},o={begin:n+"\\s*:",returnBegin:!0,end:"[;}]",relevance:0,contains:[{className:"attribute",begin:n,end:":",excludeEnd:!0,starts:{endsWithParent:!0,illegal:"[<=$]",relevance:0,contains:s}}]},g={className:"keyword",begin:"@(import|media|charset|font-face|(-[a-z]+-)?keyframes|supports|document|namespace|page|viewport|host)\\b",starts:{end:"[;{}]",returnEnd:!0,contains:s,relevance:0}},d={className:"variable",variants:[{begin:"@[\\w-]+\\s*:",relevance:15},{begin:"@[\\w-]+"}],starts:{end:"[;}]",returnEnd:!0,contains:c}},b={variants:[{begin:"[\\.#:&\\[>]",end:"[;{}]"},{begin:n,end:"{"}],returnBegin:!0,returnEnd:!0,illegal:"[<='$\"]",relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,l,r("keyword","all\\b"),r("variable","@{[\\w-]+}"),r("selector-tag",n+"%?",0),r("selector-id","#"+n),r("selector-class","\\."+n,0),r("selector-tag","&",0),{className:"selector-attr",begin:"\\[",end:"\\]"},{className:"selector-pseudo",begin:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{begin:"\\(",end:"\\)",contains:c},{begin:"!important"}]};return a.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,g,d,o,b),{name:"Less",case_insensitive:!0,illegal:"[=>'/<($\"]",contains:a}}}());hljs.registerLanguage("lua",function(){"use strict";return function(e){var t={begin:"\\[=*\\[",end:"\\]=*\\]",contains:["self"]},a=[e.COMMENT("--(?!\\[=*\\[)","$"),e.COMMENT("--\\[=*\\[","\\]=*\\]",{contains:[t],relevance:10})];return{name:"Lua",keywords:{$pattern:e.UNDERSCORE_IDENT_RE,literal:"true false nil",keyword:"and break do else elseif end for goto if in local not or repeat return then until while",built_in:"_G _ENV _VERSION __index __newindex __mode __call __metatable __tostring __len __gc __add __sub __mul __div __mod __pow __concat __unm __eq __lt __le assert collectgarbage dofile error getfenv getmetatable ipairs load loadfile loadstring module next pairs pcall print rawequal rawget rawset require select setfenv setmetatable tonumber tostring type unpack xpcall arg self coroutine resume yield status wrap create running debug getupvalue debug sethook getmetatable gethook setmetatable setlocal traceback setfenv getinfo setupvalue getlocal getregistry getfenv io lines write close flush open output type read stderr stdin input stdout popen tmpfile math log max acos huge ldexp pi cos tanh pow deg tan cosh sinh random randomseed frexp ceil floor rad abs sqrt modf asin min mod fmod log10 atan2 exp sin atan os exit setlocale date getenv difftime remove time clock tmpname rename execute package preload loadlib loaded loaders cpath config path seeall string sub upper len gfind rep find match char dump gmatch reverse byte format gsub lower table setn insert getn foreachi maxn foreach concat sort remove"},contains:a.concat([{className:"function",beginKeywords:"function",end:"\\)",contains:[e.inherit(e.TITLE_MODE,{begin:"([_a-zA-Z]\\w*\\.)*([_a-zA-Z]\\w*:)?[_a-zA-Z]\\w*"}),{className:"params",begin:"\\(",endsWithParent:!0,contains:a}].concat(a)},e.C_NUMBER_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"string",begin:"\\[=*\\[",end:"\\]=*\\]",contains:[t],relevance:5}])}}}());
diff --git a/book/theme/index.hbs b/book/theme/index.hbs
index 6e0cce0aaa99..ed27410f6437 100644
--- a/book/theme/index.hbs
+++ b/book/theme/index.hbs
@@ -15,7 +15,6 @@
 {{> head}}
-
@@ -53,18 +52,19 @@
 {{#if mathjax_support}}
-
+
 {{/if}}
+
-
-
-
-
+
 {{> header}}
-