diff --git a/.cargo/config.toml b/.cargo/config.toml index b016eca31aec..af4312dc8790 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,3 +1,17 @@ +# we use tokio_unstable to enable runtime::Handle::id so we can separate +# globals from multiple parallel tests. If that function ever does get removed +# it's possible to replace (with some additional overhead and effort) +# Annoyingly build.rustflags doesn't work here because it gets overwritten +# if people have their own global target.<..> config (for example to enable mold) +# specifying flags this way is more robust as they get merged +# This still gets overwritten by RUSTFLAGS though, luckily it shouldn't be necessary +# to set those most of the time. If downstream does overwrite this it's not a huge +# deal since it will only break tests anyway +[target."cfg(all())"] +rustflags = ["--cfg", "tokio_unstable", "-C", "target-feature=-crt-static"] + + [alias] xtask = "run --package xtask --" integration-test = "test --features integration --profile integration --workspace --test integration" + diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 22151c37865d..3d47c20884b1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -46,7 +46,7 @@ jobs: shared-key: "build" - name: Cache test tree-sitter grammar - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: runtime/grammars key: ${{ runner.os }}-stable-v${{ env.CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }} diff --git a/.github/workflows/cachix.yml b/.github/workflows/cachix.yml index 0620cbf12758..57f0a0db4553 100644 --- a/.github/workflows/cachix.yml +++ b/.github/workflows/cachix.yml @@ -14,10 +14,10 @@ jobs: uses: actions/checkout@v4 - name: Install nix - uses: cachix/install-nix-action@v24 + uses: cachix/install-nix-action@v25 - name: Authenticate with Cachix - uses: cachix/cachix-action@v13 + uses: cachix/cachix-action@v14 with: name: helix authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} diff --git a/.ignore b/.ignore deleted file mode 100644 index 0c4493ee8f41..000000000000 --- a/.ignore +++ /dev/null @@ -1,2 +0,0 @@ -# Things that we don't want ripgrep to search that we do want in git -# https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md#automatic-filtering diff --git a/Cargo.lock b/Cargo.lock index 3bfe61320457..73e548ae5a5f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -62,9 +62,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.76" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59d2a3357dde987206219e78ecfbbb6e8dad06cbb65292758d3270e6254f7355" +checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" [[package]] name = "arc-swap" @@ -101,9 +101,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "bstr" @@ -145,9 +145,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" [[package]] name = "cc" -version = "1.0.84" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f8e7c90afad890484a21653d08b6e209ae34770fb5ee298f9c699fcc1e5c856" +checksum = 
"f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ "libc", ] @@ -171,14 +171,14 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", - "windows-targets 0.48.0", + "windows-targets 0.52.0", ] [[package]] @@ -285,7 +285,7 @@ version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "crossterm_winapi", "filedescriptor", "futures-core", @@ -330,7 +330,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.38", + "syn 2.0.48", ] [[package]] @@ -347,7 +347,7 @@ checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.48", ] [[package]] @@ -358,9 +358,9 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "encoding_rs" @@ -448,6 +448,18 @@ dependencies = [ "winapi", ] +[[package]] +name = "filetime" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", +] + [[package]] name = "flate2" version = "1.0.27" @@ -475,9 +487,9 @@ checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -522,50 +534,57 @@ checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "gix" -version = "0.57.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721c7497ab24b665ed5a1eb2b3526936aa60068e61ba260651f7e77c52feec69" +checksum = "31887c304d9a935f3e5494fb5d6a0106c34e965168ec0db9b457424eedd0c741" dependencies = [ "gix-actor", + "gix-attributes", + "gix-command", "gix-commitgraph", "gix-config", "gix-date", "gix-diff", "gix-discover", "gix-features", + "gix-filter", "gix-fs", "gix-glob", "gix-hash", "gix-hashtable", + "gix-ignore", + "gix-index", "gix-lock", "gix-macros", "gix-object", "gix-odb", "gix-pack", "gix-path", + "gix-pathspec", "gix-ref", "gix-refspec", "gix-revision", "gix-revwalk", "gix-sec", + "gix-submodule", "gix-tempfile", "gix-trace", "gix-traverse", "gix-url", "gix-utils", "gix-validate", + "gix-worktree", "once_cell", "parking_lot", "smallvec", "thiserror", - "unicode-normalization", ] [[package]] name = "gix-actor" -version = "0.29.0" +version = "0.30.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "886014c4865b93ce268f1d3eddd4fd3261242c3f3ee61eb36009f913016a9059" +checksum = "0a7bb9fad6125c81372987c06469601d37e1a2d421511adb69971b9083517a8a" dependencies = [ "bstr", "btoi", @@ -575,20 +594,58 @@ dependencies = [ "winnow 0.5.28", ] +[[package]] +name = "gix-attributes" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "214ee3792e504ee1ce206b36dcafa4f328ca313d1e2ac0b41433d68ef4e14260" +dependencies = [ + "bstr", + "gix-glob", + "gix-path", + "gix-quote", + "gix-trace", + "kstring", + "smallvec", + "thiserror", + "unicode-bom", +] + +[[package]] +name = "gix-bitmap" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b6cd0f246180034ddafac9b00a112f19178135b21eb031b3f79355891f7325" +dependencies = [ + "thiserror", +] + [[package]] name = "gix-chunk" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ade37ef69870de0ed966b97c57a3a947a22ff3482a52c3b99b205f77bcb08fb" +checksum = "003ec6deacf68076a0c157271a127e0bb2c031c1a41f7168cbe5d248d9b85c78" dependencies = [ "thiserror", ] +[[package]] +name = "gix-command" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce1ffc7db3fb50b7dae6ecd937a3527cb725f444614df2ad8988d81806f13f09" +dependencies = [ + "bstr", + "gix-path", + "gix-trace", + "shell-words", +] + [[package]] name = "gix-commitgraph" -version = "0.23.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7559ea9cefee188cd88b05afcc8e3ef7a3cb4a5c647bccf06b981e591b02b77" +checksum = "82dbd7fb959862e3df2583331f0ad032ac93533e8a52f1b0694bc517f5d292bc" dependencies = [ "bstr", "gix-chunk", @@ -600,9 +657,9 @@ dependencies = [ [[package]] name = "gix-config" -version = "0.33.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c35dc7f9c00a42bbc9cfa1ca2ec0a78ad1b76ff0736d3d35dfd612962244467" +checksum = "e62bf2073b6ce3921ffa6d8326f645f30eec5fc4a8e8a4bc0fcb721a2f3f69dc" dependencies = [ "bstr", "gix-config-value", @@ -621,11 +678,11 @@ dependencies = [ [[package]] name = "gix-config-value" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39f388cb2396aee82d6f460a2e7770659bdf8854e9e5478f7d2b1324a9698284" +checksum = "5b8a1e7bfb37a46ed0b8468db37a6d8a0a61d56bdbe4603ae492cb322e5f3958" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "bstr", "gix-path", "libc", @@ -634,9 +691,9 @@ dependencies = [ [[package]] name = "gix-date" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d85f4a01e0d05c3de585e28bae514d4baf01655e3fc3f14ce6f30bf62405345" +checksum = "fb7f3dfb72bebe3449b5e642be64e3c6ccbe9821c8b8f19f487cf5bfbbf4067e" dependencies = [ "bstr", "itoa", @@ -646,9 +703,9 @@ dependencies = [ [[package]] name = "gix-diff" -version = "0.39.0" +version = "0.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac0e75f5afd2f6c47c800b6b0a000a08045739d0450d20482e8faa42543f62d1" +checksum = "cbdcb5e49c4b9729dd1c361040ae5c3cd7c497b2260b18c954f62db3a63e98cf" dependencies = [ "bstr", "gix-hash", @@ -658,12 +715,13 @@ dependencies = [ [[package]] name = "gix-discover" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f0b63ce2ad81632ac1a84ac370f85a5e580c3261580bd85ea17ff35d3a0bba18" +checksum = "b4669218f3ec0cbbf8f16857b32200890f8ca585f36f5817242e4115fe4551af" dependencies = [ "bstr", "dunce", + "gix-fs", "gix-hash", "gix-path", "gix-ref", @@ -673,14 +731,15 @@ dependencies = [ [[package]] name = "gix-features" -version = "0.37.0" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "befe7edea299a824504b5acc96d7a3a538125b38c42f3a8379f6912a29c90c81" +checksum = "184f7f7d4e45db0e2a362aeaf12c06c5e84817d0ef91d08e8e90170dad9f0b07" dependencies = [ "crc32fast", "flate2", "gix-hash", "gix-trace", + "gix-utils", "libc", "once_cell", "prodash", @@ -690,21 +749,43 @@ dependencies = [ ] [[package]] -name = "gix-fs" +name = "gix-filter" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "007017ce93b819ea52c0ec68306f7d72212a2d307c3d70f1548c7141c015d0a1" +checksum = "9240862840fb740d209422937195e129e4ed3da49af212383260134bea8f6c1a" +dependencies = [ + "bstr", + "encoding_rs", + "gix-attributes", + "gix-command", + "gix-hash", + "gix-object", + "gix-packetline-blocking", + "gix-path", + "gix-quote", + "gix-trace", + "gix-utils", + "smallvec", + "thiserror", +] + +[[package]] +name = "gix-fs" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4436e883d5769f9fb18677b8712b49228357815f9e4104174a6fc2d8461a437b" dependencies = [ "gix-features", + "gix-utils", ] [[package]] name = "gix-glob" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61fb116c2516d3a1170e010118f639944a6389872c875a008960cdab2a44ac72" +checksum = "4965a1d06d0ab84a29d4a67697a97352ab14ae1da821084e5afb1fd6d8191ca0" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "bstr", "gix-features", "gix-path", @@ -712,9 +793,9 @@ dependencies = [ [[package]] name = "gix-hash" -version = "0.14.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c52c3170a17b6031833cd2eb4ad7bc23f2755d06e6e70f78dec21d42e8fe1b30" +checksum = "b0ed89cdc1dce26685c80271c4287077901de3c3dd90234d5fa47c22b2268653" dependencies = [ "faster-hex", "thiserror", @@ -722,20 +803,58 @@ dependencies = [ [[package]] name = "gix-hashtable" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40a838e6366a5e5b84668b6997ce0981b833136468e8ba949f424c0ef2927eba" +checksum = "ebe47d8c0887f82355e2e9e16b6cecaa4d5e5346a7a474ca78ff94de1db35a5b" dependencies = [ "gix-hash", "hashbrown 0.14.3", "parking_lot", ] +[[package]] +name = "gix-ignore" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f7069aaca4a05784c4cb44e392f0eaf627c6e57e05d3100c0e2386a37a682f0" +dependencies = [ + "bstr", + "gix-glob", + "gix-path", + "gix-trace", + "unicode-bom", +] + +[[package]] +name = "gix-index" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7152181ba8f0a3addc5075dd612cea31fc3e252b29c8be8c45f4892bf87426" +dependencies = [ + "bitflags 2.4.2", + "bstr", + "btoi", + "filetime", + "gix-bitmap", + "gix-features", + "gix-fs", + "gix-hash", + "gix-lock", + "gix-object", + "gix-traverse", + "itoa", + "libc", + "memmap2", + "rustix", + "smallvec", + "thiserror", +] + [[package]] name = "gix-lock" -version = "12.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6cf112ddee94223c119a8534dad027740dc3aba3365ac5edeef8a7f6660c74db" +checksum = "651e46174dc5e7d18b7b809d31937b6de3681b1debd78618c99162cc30fcf3e1" dependencies = [ "gix-tempfile", "gix-utils", @@ -744,20 +863,20 @@ dependencies = [ [[package]] name = "gix-macros" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc207b64189cf71bcb17fc841ab99a5d63d0845546b611e2703ce467f659323a" +checksum = "d75e7ab728059f595f6ddc1ad8771b8d6a231971ae493d9d5948ecad366ee8bb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.48", ] [[package]] name = "gix-object" -version = "0.40.0" +version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4c77e47ffba92127faf632a841fce23d19547e269bd8b88e68961a70eab4e93" +checksum = "693ce9d30741506cb082ef2d8b797415b48e032cce0ab23eff894c19a7e4777b" dependencies = [ "bstr", "btoi", @@ -774,13 +893,14 @@ dependencies = [ [[package]] name = "gix-odb" -version = "0.56.0" +version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3254f2005cc7553ea78e85e816a09150c6f7a64e6b7627b8d1fdc56721bea73" +checksum = "8ba2fa9e81f2461b78b4d81a807867667326c84cdab48e0aed7b73a593aa1be4" dependencies = [ "arc-swap", "gix-date", "gix-features", + "gix-fs", "gix-hash", "gix-object", "gix-pack", @@ -793,9 +913,9 @@ dependencies = [ [[package]] name = "gix-pack" -version = "0.46.0" +version = "0.47.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02ebc8cd657eec207d82d8f876ca402361308ecd3c87a47935b0299506257b4f" +checksum = "8da5f3e78c96b76c4e6fe5e8e06b76221e4a0ee9a255aa935ed1fdf68988dfd8" dependencies = [ "clru", "gix-chunk", @@ -811,11 +931,23 @@ dependencies = [ "thiserror", ] +[[package]] +name = "gix-packetline-blocking" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca8ef6dd3ea50e26f3bf572e90c034d033c804d340cd1eb386392f184a9ba2f7" +dependencies = [ + "bstr", + "faster-hex", + "gix-trace", + "thiserror", +] + [[package]] name = "gix-path" -version = "0.10.2" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10931652a3da126990ac93bce1b1c600cc99d7c268d712b6360ed52174ce1b68" +checksum = "14a6282621aed1becc3f83d64099a564b3b9063f22783d9a87ea502a3e9f2e40" dependencies = [ "bstr", "gix-trace", @@ -824,11 +956,26 @@ dependencies = [ "thiserror", ] +[[package]] +name = "gix-pathspec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cbd49750edb26b0a691e5246fc635fa554d344da825cd20fa9ee0da9c1b761f" +dependencies = [ + "bitflags 2.4.2", + "bstr", + "gix-attributes", + "gix-config-value", + "gix-glob", + "gix-path", + "thiserror", +] + [[package]] name = "gix-quote" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c43530cb94a7759807e6f8d180e17ac9c65673b891645c6c433831dc0cf4342" +checksum = "9f7dc10303d73a960d10fb82f81188b036ac3e6b11b5795b20b1a60b51d1321f" dependencies = [ "bstr", "btoi", @@ -837,9 +984,9 @@ dependencies = [ [[package]] name = "gix-ref" -version = "0.40.0" +version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baa951b3b850d6d1be4f900768e49af20b76bf9505beac22af723d57249a2f1d" +checksum = "5818958994ad7879fa566f5441ebcc48f0926aa027b28948e6fbf6578894dc31" dependencies = [ "gix-actor", "gix-date", @@ -850,6 +997,7 @@ dependencies = [ "gix-object", 
"gix-path", "gix-tempfile", + "gix-utils", "gix-validate", "memmap2", "thiserror", @@ -858,9 +1006,9 @@ dependencies = [ [[package]] name = "gix-refspec" -version = "0.21.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88c61f849d58c06e3068a0b601cf10127d2a07cdad00a725ed66cf303f76f6b3" +checksum = "613aa4d93034c5791d13bdc635e530f4ddab1412ddfb4a8215f76213177b61c7" dependencies = [ "bstr", "gix-hash", @@ -872,9 +1020,9 @@ dependencies = [ [[package]] name = "gix-revision" -version = "0.25.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcbbf91f4200c5c76802ef5f057b96f5a336827508881fe55a780be71a794d22" +checksum = "288f6549d7666db74dc3f169a9a333694fc28ecd2f5aa7b2c979c89eb556751a" dependencies = [ "bstr", "gix-date", @@ -888,9 +1036,9 @@ dependencies = [ [[package]] name = "gix-revwalk" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab66354d83f70f2730391747d0c75f94ef3c3dd40ebde2e206db9faaf7a0a7" +checksum = "5b9b4d91dfc5c14fee61a28c65113ded720403b65a0f46169c0460f731a5d03c" dependencies = [ "gix-commitgraph", "gix-date", @@ -903,21 +1051,36 @@ dependencies = [ [[package]] name = "gix-sec" -version = "0.10.2" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9963f38a42144253ed4d571882d7db5ef644c12e6726e4b75135597cc9d0e1a" +checksum = "f8d9bf462feaf05f2121cba7399dbc6c34d88a9cad58fc1e95027791d6a3c6d2" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "gix-path", "libc", - "windows 0.52.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "gix-submodule" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73182f6c1f5ed1ed94ba16581ac62593d5e29cd1c028b2af618f836283b8f8d4" +dependencies = [ + "bstr", + "gix-config", + "gix-path", + "gix-pathspec", + "gix-refspec", + "gix-url", + "thiserror", ] [[package]] name = "gix-tempfile" -version = "12.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e76a494bd530e1a1309188ff971825a24f159c76c2db0bf71fa5dfb469a2c915" +checksum = "2d337955b7af00fb87120d053d87cdfb422a80b9ff7a3aa4057a99c79422dc30" dependencies = [ "gix-fs", "libc", @@ -928,15 +1091,15 @@ dependencies = [ [[package]] name = "gix-trace" -version = "0.1.5" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda62acb44dd86a40c7c3762a5403cfc1ac789ea559df54085cedf79864f809e" +checksum = "02b202d766a7fefc596e2cc6a89cda8ad8ad733aed82da635ac120691112a9b1" [[package]] name = "gix-traverse" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8661fab39985c9214e56d81a63ceb5886ac948cec2fba76c39494d1e0e307ea8" +checksum = "bfc30c5b5e4e838683b59e1b0574ce6bc1c35916df9709aaab32bb7751daf08b" dependencies = [ "gix-commitgraph", "gix-date", @@ -950,9 +1113,9 @@ dependencies = [ [[package]] name = "gix-url" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10a0129c1e8b52736d7c5128300a4485dbc85863001371e2771ac1754bd89fd7" +checksum = "26f1981ecc700f4fd73ae62b9ca2da7c8816c8fd267f0185e3f8c21e967984ac" dependencies = [ "bstr", "gix-features", @@ -964,23 +1127,42 @@ dependencies = [ [[package]] name = "gix-utils" -version = "0.1.7" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "ab9277a5e32e85d53f738096d872a4a9b76067ad471894ad31bd99c8fa2da1dc" +checksum = "56e839f3d0798b296411263da6bee780a176ef8008a5dfc31287f7eda9266ab8" dependencies = [ "fastrand", + "unicode-normalization", ] [[package]] name = "gix-validate" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f805ebbdbaa4bfd98e2ee43e6d14099b8a4d9141f5d7b8202fea4d48e44263e7" +checksum = "ac7cc36f496bd5d96cdca0f9289bb684480725d40db60f48194aa7723b883854" dependencies = [ "bstr", "thiserror", ] +[[package]] +name = "gix-worktree" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca36bb3dc54038c66507dc75c4d8edbee2d6d5cc45227b4eb508ad13dd60a006" +dependencies = [ + "bstr", + "gix-attributes", + "gix-features", + "gix-fs", + "gix-glob", + "gix-hash", + "gix-ignore", + "gix-index", + "gix-object", + "gix-path", +] + [[package]] name = "globset" version = "0.4.14" @@ -1053,13 +1235,14 @@ version = "23.10.0" dependencies = [ "ahash", "arc-swap", - "bitflags 2.4.1", + "bitflags 2.4.2", "chrono", "dunce", "encoding_rs", "etcetera", "hashbrown 0.14.3", "helix-loader", + "helix-stdx", "imara-diff", "indoc", "log", @@ -1089,18 +1272,24 @@ dependencies = [ "anyhow", "fern", "helix-core", + "helix-stdx", "log", "serde", "serde_json", "thiserror", "tokio", - "which", ] [[package]] name = "helix-event" version = "23.10.0" dependencies = [ + "ahash", + "anyhow", + "futures-executor", + "hashbrown 0.14.3", + "log", + "once_cell", "parking_lot", "tokio", ] @@ -1113,6 +1302,7 @@ dependencies = [ "cc", "dunce", "etcetera", + "helix-stdx", "libloading", "log", "once_cell", @@ -1121,7 +1311,6 @@ dependencies = [ "threadpool", "toml", "tree-sitter", - "which", ] [[package]] @@ -1135,6 +1324,7 @@ dependencies = [ "helix-core", "helix-loader", "helix-parsec", + "helix-stdx", "log", "lsp-types", "parking_lot", @@ -1143,13 +1333,23 @@ dependencies = [ "thiserror", "tokio", "tokio-stream", - "which", ] [[package]] name = "helix-parsec" version = "23.10.0" +[[package]] +name = "helix-stdx" +version = "23.10.0" +dependencies = [ + "dunce", + "etcetera", + "ropey", + "tempfile", + "which", +] + [[package]] name = "helix-term" version = "23.10.0" @@ -1168,6 +1368,7 @@ dependencies = [ "helix-event", "helix-loader", "helix-lsp", + "helix-stdx", "helix-tui", "helix-vcs", "helix-view", @@ -1189,14 +1390,13 @@ dependencies = [ "tokio-stream", "toml", "url", - "which", ] [[package]] name = "helix-tui" version = "23.10.0" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "cassowary", "crossterm", "helix-core", @@ -1230,7 +1430,7 @@ version = "23.10.0" dependencies = [ "anyhow", "arc-swap", - "bitflags 2.4.1", + "bitflags 2.4.2", "chardetng", "clipboard-win", "crossterm", @@ -1240,6 +1440,7 @@ dependencies = [ "helix-event", "helix-loader", "helix-lsp", + "helix-stdx", "helix-tui", "helix-vcs", "libc", @@ -1254,7 +1455,6 @@ dependencies = [ "tokio-stream", "toml", "url", - "which", ] [[package]] @@ -1268,11 +1468,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.5" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1286,7 +1486,7 @@ dependencies = [ "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows 0.48.0", + "windows", ] [[package]] @@ 
-1311,9 +1511,9 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.21" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060" +checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" dependencies = [ "crossbeam-deque", "globset", @@ -1385,11 +1585,20 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "kstring" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3066350882a1cd6d950d055997f379ac37fd39f81cd4d8ed186032eb3c5747" +dependencies = [ + "static_assertions", +] + [[package]] name = "libc" -version = "0.2.151" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libloading" @@ -1412,9 +1621,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" [[package]] name = "lock_api" @@ -1615,9 +1824,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" dependencies = [ "unicode-ident", ] @@ -1630,11 +1839,11 @@ checksum = "744a264d26b88a6a7e37cbad97953fa233b94d585236310bcbc88474b4092d79" [[package]] name = "pulldown-cmark" -version = "0.9.3" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998" +checksum = "dce76ce678ffc8e5675b22aa1405de0b7037e2fdf8913fea40d1926c6fe1e6e7" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.2", "memchr", "unicase", ] @@ -1650,9 +1859,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.29" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -1717,9 +1926,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.2" +version = "1.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", @@ -1729,9 +1938,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a" dependencies = [ "aho-corasick", "memchr", @@ -1762,11 +1971,11 @@ checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" [[package]] name = "rustix" -version = "0.38.28" +version = 
"0.38.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "errno", "libc", "linux-raw-sys", @@ -1802,29 +2011,29 @@ checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" [[package]] name = "serde" -version = "1.0.193" +version = "1.0.196" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" +checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.193" +version = "1.0.196" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" +checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.48", ] [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" dependencies = [ "itoa", "ryu", @@ -1839,7 +2048,7 @@ checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.48", ] [[package]] @@ -1857,6 +2066,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" +[[package]] +name = "shell-words" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" + [[package]] name = "signal-hook" version = "0.3.17" @@ -1919,9 +2134,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.2" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" [[package]] name = "smartstring" @@ -1975,9 +2190,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.38" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", @@ -1986,15 +2201,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.8.1" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" dependencies = [ "cfg-if", "fastrand", "redox_syscall 0.4.1", "rustix", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -2028,22 +2243,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.52" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83a48fd946b02c0a526b2e9481c8e2a17755e47039164a86c4070446e3a4614d" +checksum = 
"d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.52" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7fbe9b594d6568a6a1443250a7e67d80b74e1e96f6d1715e1e21cc1888291d3" +checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.48", ] [[package]] @@ -2126,7 +2341,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.48", ] [[package]] @@ -2177,7 +2392,7 @@ dependencies = [ [[package]] name = "tree-sitter" version = "0.20.10" -source = "git+https://github.com/tree-sitter/tree-sitter?rev=ab09ae20d640711174b8da8a654f6b3dec93da1a#ab09ae20d640711174b8da8a654f6b3dec93da1a" +source = "git+https://github.com/helix-editor/tree-sitter?rev=660481dbf71413eba5a928b0b0ab8da50c1109e0#660481dbf71413eba5a928b0b0ab8da50c1109e0" dependencies = [ "cc", "regex", @@ -2333,15 +2548,15 @@ checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" [[package]] name = "which" -version = "5.0.0" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bf3ea8596f3a0dd5980b46430f2058dfe2c36a27ccfbb1845d6fbfcd9ba6e14" +checksum = "7fa5e0c10bf77f44aac573e498d1a82d5fbd5e91f6fc0a99e7be4b38e85e101c" dependencies = [ "either", "home", "once_cell", "rustix", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -2384,25 +2599,6 @@ dependencies = [ "windows-targets 0.48.0", ] -[[package]] -name = "windows" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" -dependencies = [ - "windows-core", - "windows-targets 0.52.0", -] - -[[package]] -name = "windows-core" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets 0.52.0", -] - [[package]] name = "windows-sys" version = "0.45.0" @@ -2647,5 +2843,5 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.48", ] diff --git a/Cargo.toml b/Cargo.toml index 6c006fbb4ff4..91f6e7cae498 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,6 +11,7 @@ members = [ "helix-loader", "helix-vcs", "helix-parsec", + "helix-stdx", "xtask", ] @@ -36,7 +37,7 @@ package.helix-tui.opt-level = 2 package.helix-term.opt-level = 2 [workspace.dependencies] -tree-sitter = { version = "0.20", git = "https://github.com/tree-sitter/tree-sitter", rev = "ab09ae20d640711174b8da8a654f6b3dec93da1a" } +tree-sitter = { version = "0.20", git = "https://github.com/helix-editor/tree-sitter", rev = "660481dbf71413eba5a928b0b0ab8da50c1109e0" } nucleo = "0.2.0" [workspace.package] diff --git a/book/src/configuration.md b/book/src/configuration.md index 36e2fee2e420..a43ede76abff 100644 --- a/book/src/configuration.md +++ b/book/src/configuration.md @@ -51,7 +51,8 @@ Its settings will be merged with the configuration directory `config.toml` and t | `auto-completion` | Enable automatic pop up of auto-completion | `true` | | `auto-format` | Enable automatic formatting on save | `true` | | `auto-save` | Enable automatic saving on the focus moving away from Helix. 
Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal | `false` | -| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant | `250` | +| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. | `250` | +| `completion-timeout` | Time in milliseconds after typing a word character before completions are shown, set to 5 for instant. | `250` | | `preview-completion-insert` | Whether to apply completion item instantly when selected | `true` | | `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` | | `completion-replace` | Set to `true` to make completions always replace the entire word and not just the part before the cursor | `false` | diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 8f1364eb865f..42cd737c00db 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -21,10 +21,10 @@ | cpon | ✓ | | ✓ | | | cpp | ✓ | ✓ | ✓ | `clangd` | | crystal | ✓ | ✓ | | `crystalline` | -| css | ✓ | | | `vscode-css-language-server` | +| css | ✓ | | ✓ | `vscode-css-language-server` | | cue | ✓ | | | `cuelsp` | | d | ✓ | ✓ | ✓ | `serve-d` | -| dart | ✓ | | ✓ | `dart` | +| dart | ✓ | ✓ | ✓ | `dart` | | dbml | ✓ | | | | | devicetree | ✓ | | | | | dhall | ✓ | ✓ | | `dhall-lsp-server` | @@ -69,6 +69,7 @@ | hcl | ✓ | | ✓ | `terraform-ls` | | heex | ✓ | ✓ | | `elixir-ls` | | hoon | ✓ | | | | +| hocon | ✓ | | ✓ | | | hosts | ✓ | | | | | html | ✓ | | | `vscode-html-language-server` | | hurl | ✓ | | ✓ | | @@ -97,7 +98,7 @@ | log | ✓ | | | | | lpf | ✓ | | | | | lua | ✓ | ✓ | ✓ | `lua-language-server` | -| make | ✓ | | | | +| make | ✓ | | ✓ | | | markdoc | ✓ | | | `markdoc-ls` | | markdown | ✓ | | | `marksman` | | markdown.inline | ✓ | | | | @@ -123,12 +124,13 @@ | pem | ✓ | | | | | perl | ✓ | ✓ | ✓ | `perlnavigator` | | php | ✓ | ✓ | ✓ | `intelephense` | +| pkl | ✓ | | ✓ | | | po | ✓ | ✓ | | | | pod | ✓ | | | | | ponylang | ✓ | ✓ | ✓ | | | prisma | ✓ | | | `prisma-language-server` | | prolog | | | | `swipl` | -| protobuf | ✓ | | ✓ | `bufls`, `pb` | +| protobuf | ✓ | ✓ | ✓ | `bufls`, `pb` | | prql | ✓ | | | | | purescript | ✓ | ✓ | | `purescript-language-server` | | python | ✓ | ✓ | ✓ | `pylsp` | @@ -145,10 +147,10 @@ | ruby | ✓ | ✓ | ✓ | `solargraph` | | rust | ✓ | ✓ | ✓ | `rust-analyzer` | | sage | ✓ | ✓ | | | -| scala | ✓ | | ✓ | `metals` | +| scala | ✓ | ✓ | ✓ | `metals` | | scheme | ✓ | | ✓ | | | scss | ✓ | | | `vscode-css-language-server` | -| slint | ✓ | | ✓ | `slint-lsp` | +| slint | ✓ | ✓ | ✓ | `slint-lsp` | | smali | ✓ | | ✓ | | | smithy | ✓ | | | `cs` | | sml | ✓ | | | | @@ -162,6 +164,7 @@ | swift | ✓ | | | `sourcekit-lsp` | | t32 | ✓ | | | | | tablegen | ✓ | ✓ | ✓ | | +| tact | ✓ | ✓ | ✓ | | | task | ✓ | | | | | templ | ✓ | | | `templ` | | tfvars | ✓ | | ✓ | `terraform-ls` | @@ -173,7 +176,7 @@ | typescript | ✓ | ✓ | ✓ | `typescript-language-server` | | typst | ✓ | | | `typst-lsp` | | ungrammar | ✓ | | | | -| unison | ✓ | | | | +| unison | ✓ | | ✓ | | | uxntal | ✓ | | | | | v | ✓ | ✓ | ✓ | `v-analyzer` | | vala | ✓ | | | `vala-language-server` | diff --git a/book/src/guides/indent.md b/book/src/guides/indent.md index a65ac5ac1f48..be140384a1fe 100644 --- a/book/src/guides/indent.md +++ b/book/src/guides/indent.md @@ -315,6 +315,10 @@ The first argument (a capture) must/must not be equal to the second argument The first argument (a 
capture) must/must not match the regex given in the second argument (a string). +- `#any-of?`/`#not-any-of?`: +The first argument (a capture) must/must not be one of the other arguments +(strings). + Additionally, we support some custom predicates for indent queries: - `#not-kind-eq?`: @@ -366,4 +370,4 @@ Everything up to and including the closing brace gets an indent level of 1. Then, on the closing brace, we encounter an outdent with a scope of "all", which means the first line is included, and the indent level is cancelled out on this line. (Note these scopes are the defaults for `@indent` and `@outdent`—they are -written explicitly for demonstration.) \ No newline at end of file +written explicitly for demonstration.) diff --git a/book/src/guides/injection.md b/book/src/guides/injection.md index e842ae303ffc..0a1d2c9a280c 100644 --- a/book/src/guides/injection.md +++ b/book/src/guides/injection.md @@ -54,4 +54,7 @@ The first argument (a capture) must be equal to the second argument The first argument (a capture) must match the regex given in the second argument (a string). +- `#any-of?` (standard): +The first argument (a capture) must be one of the other arguments (strings). + [upstream-docs]: http://tree-sitter.github.io/tree-sitter/syntax-highlighting#language-injection diff --git a/book/src/install.md b/book/src/install.md index 8979850132a2..07865e698b84 100644 --- a/book/src/install.md +++ b/book/src/install.md @@ -76,6 +76,15 @@ Releases are available in the `extra` repository: ```sh sudo pacman -S helix ``` + +> 💡 When installed from the `extra` repository, run Helix with `helix` instead of `hx`. +> +> For example: +> ```sh +> helix --health +> ``` +> to check health + Additionally, a [helix-git](https://aur.archlinux.org/packages/helix-git/) package is available in the AUR, which builds the master branch. @@ -204,6 +213,8 @@ RUSTFLAGS="-C target-feature=-crt-static" This command will create the `hx` executable and construct the tree-sitter grammars in the local `runtime` folder. +> 💡 If you do not want to fetch or build grammars, set an environment variable `HELIX_DISABLE_AUTO_GRAMMAR_BUILD` + > 💡 Tree-sitter grammars can be fetched and compiled if not pre-packaged. Fetch > grammars with `hx --grammar fetch` and compile them with > `hx --grammar build`. This will install them in @@ -214,12 +225,12 @@ RUSTFLAGS="-C target-feature=-crt-static" #### Linux and macOS -The **runtime** directory is one below the Helix source, so either set a +The **runtime** directory is one below the Helix source, so either export a `HELIX_RUNTIME` environment variable to point to that directory and add it to your `~/.bashrc` or equivalent: ```sh -HELIX_RUNTIME=~/src/helix/runtime +export HELIX_RUNTIME=~/src/helix/runtime ``` Or, create a symbolic link: diff --git a/book/src/keymap.md b/book/src/keymap.md index c6981b28603c..a3e41666f3fd 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -205,7 +205,7 @@ Jumps to various locations. | ----- | ----------- | ------- | | `g` | Go to line number `` else start of file | `goto_file_start` | | `e` | Go to the end of the file | `goto_last_line` | -| `f` | Go to files in the selection | `goto_file` | +| `f` | Go to files in the selections | `goto_file` | | `h` | Go to the start of the line | `goto_line_start` | | `l` | Go to the end of the line | `goto_line_end` | | `s` | Go to first non-whitespace character of the line | `goto_first_nonwhitespace` | @@ -253,8 +253,8 @@ This layer is similar to Vim keybindings as Kakoune does not support windows. 
| `w`, `Ctrl-w` | Switch to next window | `rotate_view` | | `v`, `Ctrl-v` | Vertical right split | `vsplit` | | `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` | -| `f` | Go to files in the selection in horizontal splits | `goto_file` | -| `F` | Go to files in the selection in vertical splits | `goto_file` | +| `f` | Go to files in the selections in horizontal splits | `goto_file` | +| `F` | Go to files in the selections in vertical splits | `goto_file` | | `h`, `Ctrl-h`, `Left` | Move to left split | `jump_view_left` | | `j`, `Ctrl-j`, `Down` | Move to split below | `jump_view_down` | | `k`, `Ctrl-k`, `Up` | Move to split above | `jump_view_up` | diff --git a/contrib/helix-256p.ico b/contrib/helix-256p.ico new file mode 100644 index 000000000000..16781cc10b53 Binary files /dev/null and b/contrib/helix-256p.ico differ diff --git a/grammars.nix b/grammars.nix index 843fa02ad7dc..5152b5204dd9 100644 --- a/grammars.nix +++ b/grammars.nix @@ -28,7 +28,17 @@ owner = builtins.elemAt match 0; repo = builtins.elemAt match 1; }; - gitGrammars = builtins.filter isGitGrammar languagesConfig.grammar; + # If `use-grammars.only` is set, use only those grammars. + # If `use-grammars.except` is set, use all other grammars. + # Otherwise use all grammars. + useGrammar = grammar: + if languagesConfig?use-grammars.only then + builtins.elem grammar.name languagesConfig.use-grammars.only + else if languagesConfig?use-grammars.except then + !(builtins.elem grammar.name languagesConfig.use-grammars.except) + else true; + grammarsToUse = builtins.filter useGrammar languagesConfig.grammar; + gitGrammars = builtins.filter isGitGrammar grammarsToUse; buildGrammar = grammar: let gh = toGitHubFetcher grammar.source.git; sourceGit = builtins.fetchTree { diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index d7fff6c6f597..8c63af8ef266 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -16,10 +16,11 @@ unicode-lines = ["ropey/unicode_lines"] integration = [] [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-loader = { path = "../helix-loader" } ropey = { version = "1.6.1", default-features = false, features = ["simd"] } -smallvec = "1.11" +smallvec = "1.13" smartstring = "1.0.1" unicode-segmentation = "1.10" unicode-width = "0.1" diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs index 1e90db472f0a..c29bb3a0b0e6 100644 --- a/helix-core/src/indent.rs +++ b/helix-core/src/indent.rs @@ -551,7 +551,7 @@ fn query_indents<'a>( // The row/column position of the optional anchor in this query let mut anchor: Option = None; for capture in m.captures { - let capture_name = query.capture_names()[capture.index as usize].as_str(); + let capture_name = query.capture_names()[capture.index as usize]; let capture_type = match capture_name { "indent" => IndentCaptureType::Indent, "indent.always" => IndentCaptureType::IndentAlways, diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index 0acdb238054c..94802eba9727 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -17,7 +17,6 @@ pub mod macros; pub mod match_brackets; pub mod movement; pub mod object; -pub mod path; mod position; pub mod search; pub mod selection; diff --git a/helix-core/src/match_brackets.rs b/helix-core/src/match_brackets.rs index f6d9885e470d..b8bcc28ca39f 100644 --- a/helix-core/src/match_brackets.rs +++ b/helix-core/src/match_brackets.rs @@ -57,10 +57,10 @@ fn find_pair( pos_: usize, traverse_parents: bool, ) -> Option { - let tree = syntax.tree(); let pos = doc.char_to_byte(pos_); - let mut 
node = tree.root_node().descendant_for_byte_range(pos, pos)?; + let root = syntax.tree_for_byte_range(pos, pos + 1).root_node(); + let mut node = root.descendant_for_byte_range(pos, pos + 1)?; loop { if node.is_named() { @@ -118,7 +118,7 @@ fn find_pair( }; node = parent; } - let node = tree.root_node().named_descendant_for_byte_range(pos, pos)?; + let node = root.named_descendant_for_byte_range(pos, pos + 1)?; if node.child_count() != 0 { return None; } @@ -141,7 +141,7 @@ fn find_pair( #[must_use] pub fn find_matching_bracket_plaintext(doc: RopeSlice, cursor_pos: usize) -> Option { // Don't do anything when the cursor is not on top of a bracket. - let bracket = doc.char(cursor_pos); + let bracket = doc.get_char(cursor_pos)?; if !is_valid_bracket(bracket) { return None; } @@ -265,6 +265,12 @@ fn as_char(doc: RopeSlice, node: &Node) -> Option<(usize, char)> { mod tests { use super::*; + #[test] + fn find_matching_bracket_empty_file() { + let actual = find_matching_bracket_plaintext("".into(), 0); + assert_eq!(actual, None); + } + #[test] fn test_find_matching_bracket_current_line_plaintext() { let assert = |input: &str, pos, expected| { diff --git a/helix-core/src/movement.rs b/helix-core/src/movement.rs index 6c4f3f535f8d..54eb02fd0b19 100644 --- a/helix-core/src/movement.rs +++ b/helix-core/src/movement.rs @@ -573,16 +573,11 @@ pub fn move_parent_node_end( dir: Direction, movement: Movement, ) -> Selection { - let tree = syntax.tree(); - selection.transform(|range| { let start_from = text.char_to_byte(range.from()); let start_to = text.char_to_byte(range.to()); - let mut node = match tree - .root_node() - .named_descendant_for_byte_range(start_from, start_to) - { + let mut node = match syntax.named_descendant_for_byte_range(start_from, start_to) { Some(node) => node, None => { log::debug!( diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index 8d433260e41c..24de1a338758 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -263,7 +263,7 @@ impl Display for LanguageServerFeature { GotoDeclaration => "goto-declaration", GotoDefinition => "goto-definition", GotoTypeDefinition => "goto-type-definition", - GotoReference => "goto-type-definition", + GotoReference => "goto-reference", GotoImplementation => "goto-implementation", SignatureHelp => "signature-help", Hover => "hover", @@ -1338,6 +1338,32 @@ impl Syntax { result } + pub fn tree_for_byte_range(&self, start: usize, end: usize) -> &Tree { + let mut container_id = self.root; + + for (layer_id, layer) in self.layers.iter() { + if layer.depth > self.layers[container_id].depth + && layer.contains_byte_range(start, end) + { + container_id = layer_id; + } + } + + self.layers[container_id].tree() + } + + pub fn named_descendant_for_byte_range(&self, start: usize, end: usize) -> Option> { + self.tree_for_byte_range(start, end) + .root_node() + .named_descendant_for_byte_range(start, end) + } + + pub fn descendant_for_byte_range(&self, start: usize, end: usize) -> Option> { + self.tree_for_byte_range(start, end) + .root_node() + .descendant_for_byte_range(start, end) + } + // Commenting // comment_strings_for_pos // is_commented @@ -1434,6 +1460,32 @@ impl LanguageLayer { self.tree = Some(tree); Ok(()) } + + /// Whether the layer contains the given byte range. + /// + /// If the layer has multiple ranges (i.e. 
combined injections), the + /// given range is considered contained if it is within the start and + /// end bytes of the first and last ranges **and** if the given range + /// starts or ends within any of the layer's ranges. + fn contains_byte_range(&self, start: usize, end: usize) -> bool { + let layer_start = self + .ranges + .first() + .expect("ranges should not be empty") + .start_byte; + let layer_end = self + .ranges + .last() + .expect("ranges should not be empty") + .end_byte; + + layer_start <= start + && layer_end >= end + && self.ranges.iter().any(|range| { + let byte_range = range.start_byte..range.end_byte; + byte_range.contains(&start) || byte_range.contains(&end) + }) + } } pub(crate) fn generate_edits( @@ -1727,7 +1779,7 @@ impl HighlightConfiguration { let mut local_scope_capture_index = None; for (i, name) in query.capture_names().iter().enumerate() { let i = Some(i as u32); - match name.as_str() { + match *name { "local.definition" => local_def_capture_index = i, "local.definition-value" => local_def_value_capture_index = i, "local.reference" => local_ref_capture_index = i, @@ -1738,7 +1790,7 @@ impl HighlightConfiguration { for (i, name) in injections_query.capture_names().iter().enumerate() { let i = Some(i as u32); - match name.as_str() { + match *name { "injection.content" => injection_content_capture_index = i, "injection.language" => injection_language_capture_index = i, "injection.filename" => injection_filename_capture_index = i, @@ -1768,7 +1820,7 @@ impl HighlightConfiguration { } /// Get a slice containing all of the highlight names used in the configuration. - pub fn names(&self) -> &[String] { + pub fn names(&self) -> &[&str] { self.query.capture_names() } @@ -1795,7 +1847,6 @@ impl HighlightConfiguration { let mut best_index = None; let mut best_match_len = 0; for (i, recognized_name) in recognized_names.iter().enumerate() { - let recognized_name = recognized_name; let mut len = 0; let mut matches = true; for (i, part) in recognized_name.split('.').enumerate() { @@ -2264,6 +2315,7 @@ impl<'a> Iterator for HighlightIter<'a> { // highlighting patterns that are disabled for local variables. 
if definition_highlight.is_some() || reference_highlight.is_some() { while layer.config.non_local_variable_patterns[match_.pattern_index] { + match_.remove(); if let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { diff --git a/helix-dap/Cargo.toml b/helix-dap/Cargo.toml index f7acb00323e0..3521f5890aeb 100644 --- a/helix-dap/Cargo.toml +++ b/helix-dap/Cargo.toml @@ -13,6 +13,7 @@ homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } anyhow = "1.0" @@ -21,7 +22,6 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" thiserror = "1.0" tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] } -which = "5.0.0" [dev-dependencies] fern = "0.6" diff --git a/helix-dap/src/client.rs b/helix-dap/src/client.rs index acdfc5b7ec95..18af13ae7831 100644 --- a/helix-dap/src/client.rs +++ b/helix-dap/src/client.rs @@ -9,7 +9,6 @@ use helix_core::syntax::DebuggerQuirks; use serde_json::Value; use anyhow::anyhow; -pub use log::{error, info}; use std::{ collections::HashMap, future::Future, @@ -114,7 +113,7 @@ impl Client { id: usize, ) -> Result<(Self, UnboundedReceiver)> { // Resolve path to the binary - let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?; + let cmd = helix_stdx::env::which(cmd)?; let process = Command::new(cmd) .args(args) diff --git a/helix-dap/src/lib.rs b/helix-dap/src/lib.rs index 21162cb86e72..d0229249d4da 100644 --- a/helix-dap/src/lib.rs +++ b/helix-dap/src/lib.rs @@ -19,6 +19,8 @@ pub enum Error { #[error("server closed the stream")] StreamClosed, #[error(transparent)] + ExecutableNotFound(#[from] helix_stdx::env::ExecutableNotFoundError), + #[error(transparent)] Other(#[from] anyhow::Error), } pub type Result = core::result::Result; diff --git a/helix-event/Cargo.toml b/helix-event/Cargo.toml index c20328246fa3..a5c88e93d0fd 100644 --- a/helix-event/Cargo.toml +++ b/helix-event/Cargo.toml @@ -12,5 +12,18 @@ homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot"] } -parking_lot = { version = "0.12", features = ["send_guard"] } +ahash = "0.8.3" +hashbrown = "0.14.0" +tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } +# the event registry is essentially read only but must be an rwlock so we can +# setup new events on initialization, hardware-lock-elision hugely benefits this case +# as it essentially makes the lock entirely free as long as there is no writes +parking_lot = { version = "0.12", features = ["hardware-lock-elision"] } +once_cell = "1.18" + +anyhow = "1" +log = "0.4" +futures-executor = "0.3.28" + +[features] +integration_test = [] diff --git a/helix-event/src/cancel.rs b/helix-event/src/cancel.rs new file mode 100644 index 000000000000..f027be80e8de --- /dev/null +++ b/helix-event/src/cancel.rs @@ -0,0 +1,19 @@ +use std::future::Future; + +pub use oneshot::channel as cancelation; +use tokio::sync::oneshot; + +pub type CancelTx = oneshot::Sender<()>; +pub type CancelRx = oneshot::Receiver<()>; + +pub async fn cancelable_future(future: impl Future, 
cancel: CancelRx) -> Option { + tokio::select! { + biased; + _ = cancel => { + None + } + res = future => { + Some(res) + } + } +} diff --git a/helix-event/src/debounce.rs b/helix-event/src/debounce.rs new file mode 100644 index 000000000000..30b6f671be71 --- /dev/null +++ b/helix-event/src/debounce.rs @@ -0,0 +1,67 @@ +//! Utilities for declaring an async (usually debounced) hook + +use std::time::Duration; + +use futures_executor::block_on; +use tokio::sync::mpsc::{self, error::TrySendError, Sender}; +use tokio::time::Instant; + +/// Async hooks provide a convenient framework for implementing (debounced) +/// async event handlers. Most synchronous event hooks will likely need to +/// debounce their events, coordinate multiple different hooks and potentially +/// track some state. `AsyncHooks` facilitate these use cases by running as +/// a background tokio task that waits for events (usually an enum) to be +/// sent through a channel. +pub trait AsyncHook: Sync + Send + 'static + Sized { + type Event: Sync + Send + 'static; + /// Called immediately whenever an event is received, this function can + /// consume the event immediately or debounce it. In case of debouncing, + /// it can either define a new debounce timeout or continue the current one + fn handle_event(&mut self, event: Self::Event, timeout: Option) -> Option; + + /// Called whenever the debounce timeline is reached + fn finish_debounce(&mut self); + + fn spawn(self) -> mpsc::Sender { + // the capacity doesn't matter too much here, unless the cpu is totally overwhelmed + // the cap will never be reached since we always immediately drain the channel + // so it should only be reached in case of total CPU overload. + // However, a bounded channel is much more efficient so it's nice to use here + let (tx, rx) = mpsc::channel(128); + tokio::spawn(run(self, rx)); + tx + } +} + +async fn run(mut hook: Hook, mut rx: mpsc::Receiver) { + let mut deadline = None; + loop { + let event = match deadline { + Some(deadline_) => { + let res = tokio::time::timeout_at(deadline_, rx.recv()).await; + match res { + Ok(event) => event, + Err(_) => { + hook.finish_debounce(); + deadline = None; + continue; + } + } + } + None => rx.recv().await, + }; + let Some(event) = event else { + break; + }; + deadline = hook.handle_event(event, deadline); + } +} + +pub fn send_blocking(tx: &Sender, data: T) { + // block_on has some overhead and in practice the channel should basically + // never be full anyway so first try sending without blocking + if let Err(TrySendError::Full(data)) = tx.try_send(data) { + // set a timeout so that we just drop a message instead of freezing the editor in the worst case + let _ = block_on(tx.send_timeout(data, Duration::from_millis(10))); + } +} diff --git a/helix-event/src/hook.rs b/helix-event/src/hook.rs new file mode 100644 index 000000000000..7fb68148368d --- /dev/null +++ b/helix-event/src/hook.rs @@ -0,0 +1,91 @@ +//! rust dynamic dispatch is extremely limited so we have to build our +//! own vtable implementation. Otherwise implementing the event system would not be possible. +//! A nice bonus of this approach is that we can optimize the vtable a bit more. Normally +//! a dyn Trait fat pointer contains two pointers: A pointer to the data itself and a +//! pointer to a global (static) vtable entry which itself contains multiple other pointers +//! (the various functions of the trait, drop, size and align). That makes dynamic +//! dispatch pretty slow (double pointer indirections). 
However, we only have a single function +//! in the hook trait and don't need a drop implementation (event system is global anyway +//! and never dropped) so we can just store the entire vtable inline. + +use anyhow::Result; +use std::ptr::{self, NonNull}; + +use crate::Event; + +/// Opaque handle type that represents an erased type parameter. +/// +/// If extern types were stable, this could be implemented as `extern { pub type Opaque; }` but +/// until then we can use this. +/// +/// Care should be taken that we don't use a concrete instance of this. It should only be used +/// through a reference, so we can maintain something else's lifetime. +struct Opaque(()); + +pub(crate) struct ErasedHook { + data: NonNull, + call: unsafe fn(NonNull, NonNull, NonNull), +} + +impl ErasedHook { + pub(crate) fn new_dynamic Result<()> + 'static + Send + Sync>( + hook: H, + ) -> ErasedHook { + unsafe fn call Result<()> + 'static + Send + Sync>( + hook: NonNull, + _event: NonNull, + result: NonNull, + ) { + let hook: NonNull = hook.cast(); + let result: NonNull> = result.cast(); + let hook: &F = hook.as_ref(); + let res = hook(); + ptr::write(result.as_ptr(), res) + } + + unsafe { + ErasedHook { + data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque), + call: call::, + } + } + } + + pub(crate) fn new Result<()>>(hook: F) -> ErasedHook { + unsafe fn call Result<()>>( + hook: NonNull, + event: NonNull, + result: NonNull, + ) { + let hook: NonNull = hook.cast(); + let mut event: NonNull = event.cast(); + let result: NonNull> = result.cast(); + let hook: &F = hook.as_ref(); + let res = hook(event.as_mut()); + ptr::write(result.as_ptr(), res) + } + + unsafe { + ErasedHook { + data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque), + call: call::, + } + } + } + + pub(crate) unsafe fn call(&self, event: &mut E) -> Result<()> { + let mut res = Ok(()); + + unsafe { + (self.call)( + self.data, + NonNull::from(event).cast(), + NonNull::from(&mut res).cast(), + ); + } + res + } +} + +unsafe impl Sync for ErasedHook {} +unsafe impl Send for ErasedHook {} diff --git a/helix-event/src/lib.rs b/helix-event/src/lib.rs index 9c082b93a474..894de5e8d628 100644 --- a/helix-event/src/lib.rs +++ b/helix-event/src/lib.rs @@ -1,8 +1,203 @@ //! `helix-event` contains systems that allow (often async) communication between -//! different editor components without strongly coupling them. Currently this -//! crate only contains some smaller facilities but the intend is to add more -//! functionality in the future ( like a generic hook system) +//! different editor components without strongly coupling them. Specifically +//! it allows defining synchronous hooks that run when certain editor events +//! occur. +//! +//! The core of the event system are hook callbacks and the [`Event`] trait. A +//! hook is essentially just a closure `Fn(event: &mut impl Event) -> Result<()>` +//! that gets called every time an appropriate event is dispatched. The implementation +//! details of the [`Event`] trait are considered private. The [`events`] macro is +//! provided which automatically declares event types. Similarly the `register_hook` +//! macro should be used to (safely) declare event hooks. +//! +//! Hooks run synchronously which can be advantageous since they can modify the +//! current editor state right away (for example to immediately hide the completion +//! popup). However, they can not contain their own state without locking since +//! they only receive immutable references. 
For handlers that want to track state, do
+//! expensive background computations, or debounce their input, an [`AsyncHook`] is preferable.
+//! Async hooks are based around channels that receive events specific to
+//! that `AsyncHook` (usually an enum). These events can be sent by synchronous
+//! hooks. Due to some limitations around tokio channels the [`send_blocking`]
+//! function exported in this crate should be used instead of the builtin
+//! `blocking_send`.
+//!
+//! In addition to the core event system, this crate contains some message queues
+//! that allow transfer of data back to the main event loop from async hooks and
+//! hooks that may not have access to all application data (for example in helix-view).
+//! This includes the ability to control rendering ([`lock_frame`], [`request_redraw`]) and
+//! display status messages ([`status`]).
+//!
+//! Hooks declared in helix-term can furthermore dispatch synchronous jobs to be run on the
+//! main loop (including access to the compositor). Ideally that queue will be moved
+//! to helix-view in the future if we manage to detach the compositor from its rendering backend.
+use anyhow::Result;
+pub use cancel::{cancelable_future, cancelation, CancelRx, CancelTx};
+pub use debounce::{send_blocking, AsyncHook};
 pub use redraw::{lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard};
+pub use registry::Event;
+mod cancel;
+mod debounce;
+mod hook;
 mod redraw;
+mod registry;
+#[doc(hidden)]
+pub mod runtime;
+pub mod status;
+
+#[cfg(test)]
+mod test;
+
+pub fn register_event<E: Event + 'static>() {
+    registry::with_mut(|registry| registry.register_event::<E>())
+}
+
+/// Registers a hook that will be called when an event of type `E` is dispatched.
+/// This function should usually not be used directly, use the [`register_hook`]
+/// macro instead.
+///
+///
+/// # Safety
+///
+/// `hook` must be totally generic over all lifetime parameters of `E`. For
+/// example if `E` was a known type `Foo<'a, 'b>`, then the correct trait bound
+/// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)`, but there is no way to
+/// express that kind of constraint for a generic type with the Rust type system
+/// as of this writing.
+pub unsafe fn register_hook_raw<E: Event>(
+    hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync,
+) {
+    registry::with_mut(|registry| registry.register_hook(hook))
+}
+
+/// Register a hook solely by event name
+pub fn register_dynamic_hook(
+    hook: impl Fn() -> Result<()> + 'static + Send + Sync,
+    id: &str,
+) -> Result<()> {
+    registry::with_mut(|reg| reg.register_dynamic_hook(hook, id))
+}
+
+pub fn dispatch(e: impl Event) {
+    registry::with(|registry| registry.dispatch(e));
+}
+
+/// Macro to declare events
+///
+/// # Examples
+///
+/// ``` no-compile
+/// events! {
+///     FileWrite(&Path)
+///     ViewScrolled{ view: View, new_pos: ViewOffset }
+///     DocumentChanged<'a> { old_doc: &'a Rope, doc: &'a mut Document, changes: &'a ChangeSet }
+/// }
+///
+/// fn init() {
+///     register_event::<FileWrite>();
+///     register_event::<ViewScrolled>();
+///     register_event::<DocumentChanged>();
+/// }
+///
+/// fn save(path: &Path, content: &str){
+///     std::fs::write(path, content);
+///     dispatch(FileWrite(path));
+/// }
+/// ```
+#[macro_export]
+macro_rules!
events { + ($name: ident<$($lt: lifetime),*> { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => { + pub struct $name<$($lt),*> { $(pub $data: $data_ty),* } + unsafe impl<$($lt),*> $crate::Event for $name<$($lt),*> { + const ID: &'static str = stringify!($name); + const LIFETIMES: usize = $crate::events!(@sum $(1, $lt),*); + type Static = $crate::events!(@replace_lt $name, $('static, $lt),*); + } + $crate::events!{ $($rem)* } + }; + ($name: ident { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => { + pub struct $name { $(pub $data: $data_ty),* } + unsafe impl $crate::Event for $name { + const ID: &'static str = stringify!($name); + const LIFETIMES: usize = 0; + type Static = Self; + } + $crate::events!{ $($rem)* } + }; + () => {}; + (@replace_lt $name: ident, $($lt1: lifetime, $lt2: lifetime),* ) => {$name<$($lt1),*>}; + (@sum $($val: expr, $lt1: lifetime),* ) => {0 $(+ $val)*}; +} + +/// Safely register statically typed event hooks +#[macro_export] +macro_rules! register_hook { + // Safety: this is safe because we fully control the type of the event here and + // ensure all lifetime arguments are fully generic and the correct number of lifetime arguments + // is present + (move |$event:ident: &mut $event_ty: ident<$($lt: lifetime),*>| $body: expr) => { + let val = move |$event: &mut $event_ty<$($lt),*>| $body; + unsafe { + // Lifetimes are a bit of a pain. We want to allow events being + // non-static. Lifetimes don't actually exist at runtime so its + // fine to essentially transmute the lifetimes as long as we can + // prove soundness. The hook must therefore accept any combination + // of lifetimes. In other words fn(&'_ mut Event<'_, '_>) is ok + // but examples like fn(&'_ mut Event<'_, 'static>) or fn<'a>(&'a + // mut Event<'a, 'a>) are not. To make this safe we use a macro to + // forbid the user from specifying lifetimes manually (all lifetimes + // specified are always function generics and passed to the event so + // lifetimes can't be used multiple times and using 'static causes a + // syntax error). + // + // There is one soundness hole tough: Type Aliases allow + // "accidentally" creating these problems. For example: + // + // type Event2 = Event<'static>. + // type Event2<'a> = Event<'a, a>. + // + // These cases can be caught by counting the number of lifetimes + // parameters at the parameter declaration site and then at the hook + // declaration site. By asserting the number of lifetime parameters + // are equal we can catch all bad type aliases under one assumption: + // There are no unused lifetime parameters. Introducing a static + // would reduce the number of arguments of the alias by one in the + // above example Event2 has zero lifetime arguments while the original + // event has one lifetime argument. Similar logic applies to using + // a lifetime argument multiple times. The ASSERT below performs a + // a compile time assertion to ensure exactly this property. + // + // With unused lifetime arguments it is still one way to cause unsound code: + // + // type Event2<'a, 'b> = Event<'a, 'a>; + // + // However, this case will always emit a compiler warning/cause CI + // failures so a user would have to introduce #[allow(unused)] which + // is easily caught in review (and a very theoretical case anyway). + // If we want to be pedantic we can simply compile helix with + // forbid(unused). All of this is just a safety net to prevent + // very theoretical misuse. This won't come up in real code (and is + // easily caught in review). 
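// ----------------------------------------------------------------------
// Illustrative sketch, not one of the diff hunks: what the `events!` macro
// above generates for a hypothetical one-lifetime event, to make the
// LIFETIMES bookkeeping in the safety comment concrete.
//
//     events! { DocChanged<'a> { text: &'a str } }
//
// expands to roughly:
//
//     pub struct DocChanged<'a> { pub text: &'a str }
//     unsafe impl<'a> Event for DocChanged<'a> {
//         const ID: &'static str = "DocChanged";
//         const LIFETIMES: usize = 1;
//         type Static = DocChanged<'static>;
//     }
//
// `register_hook!` re-counts the lifetime arguments it is handed, and the
// const assertion below compares that count against `LIFETIMES`, which is
// what rejects the problematic type aliases described above.
// ----------------------------------------------------------------------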
+ #[allow(unused)] + const ASSERT: () = { + if <$event_ty as $crate::Event>::LIFETIMES != 0 + $crate::events!(@sum $(1, $lt),*){ + panic!("invalid type alias"); + } + }; + $crate::register_hook_raw::<$crate::events!(@replace_lt $event_ty, $('static, $lt),*)>(val); + } + }; + (move |$event:ident: &mut $event_ty: ident| $body: expr) => { + let val = move |$event: &mut $event_ty| $body; + unsafe { + #[allow(unused)] + const ASSERT: () = { + if <$event_ty as $crate::Event>::LIFETIMES != 0{ + panic!("invalid type alias"); + } + }; + $crate::register_hook_raw::<$event_ty>(val); + } + }; +} diff --git a/helix-event/src/redraw.rs b/helix-event/src/redraw.rs index a9915223887b..8fadb8aeaa64 100644 --- a/helix-event/src/redraw.rs +++ b/helix-event/src/redraw.rs @@ -5,16 +5,20 @@ use std::future::Future; use parking_lot::{RwLock, RwLockReadGuard}; use tokio::sync::Notify; -/// A `Notify` instance that can be used to (asynchronously) request -/// the editor the render a new frame. -static REDRAW_NOTIFY: Notify = Notify::const_new(); - -/// A `RwLock` that prevents the next frame from being -/// drawn until an exclusive (write) lock can be acquired. -/// This allows asynchsonous tasks to acquire `non-exclusive` -/// locks (read) to prevent the next frame from being drawn -/// until a certain computation has finished. -static RENDER_LOCK: RwLock<()> = RwLock::new(()); +use crate::runtime_local; + +runtime_local! { + /// A `Notify` instance that can be used to (asynchronously) request + /// the editor to render a new frame. + static REDRAW_NOTIFY: Notify = Notify::const_new(); + + /// A `RwLock` that prevents the next frame from being + /// drawn until an exclusive (write) lock can be acquired. + /// This allows asynchronous tasks to acquire `non-exclusive` + /// locks (read) to prevent the next frame from being drawn + /// until a certain computation has finished. + static RENDER_LOCK: RwLock<()> = RwLock::new(()); +} pub type RenderLockGuard = RwLockReadGuard<'static, ()>; diff --git a/helix-event/src/registry.rs b/helix-event/src/registry.rs new file mode 100644 index 000000000000..d43c48ac4375 --- /dev/null +++ b/helix-event/src/registry.rs @@ -0,0 +1,131 @@ +//! A global registry where events are registered and can be +//! subscribed to by registering hooks. The registry identifies event +//! types using their type name so multiple event with the same type name +//! may not be registered (will cause a panic to ensure soundness) + +use std::any::TypeId; + +use anyhow::{bail, Result}; +use hashbrown::hash_map::Entry; +use hashbrown::HashMap; +use parking_lot::RwLock; + +use crate::hook::ErasedHook; +use crate::runtime_local; + +pub struct Registry { + events: HashMap<&'static str, TypeId, ahash::RandomState>, + handlers: HashMap<&'static str, Vec, ahash::RandomState>, +} + +impl Registry { + pub fn register_event(&mut self) { + let ty = TypeId::of::(); + assert_eq!(ty, TypeId::of::()); + match self.events.entry(E::ID) { + Entry::Occupied(entry) => { + if entry.get() == &ty { + // don't warn during tests to avoid log spam + #[cfg(not(feature = "integration_test"))] + panic!("Event {} was registered multiple times", E::ID); + } else { + panic!("Multiple events with ID {} were registered", E::ID); + } + } + Entry::Vacant(ent) => { + ent.insert(ty); + self.handlers.insert(E::ID, Vec::new()); + } + } + } + + /// # Safety + /// + /// `hook` must be totally generic over all lifetime parameters of `E`. 
For + /// example if `E` was a known type `Foo<'a, 'b> then the correct trait bound + /// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)` but there is no way to + /// express that kind of constraint for a generic type with the rust type system + /// right now. + pub unsafe fn register_hook( + &mut self, + hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync, + ) { + // ensure event type ids match so we can rely on them always matching + let id = E::ID; + let Some(&event_id) = self.events.get(id) else { + panic!("Tried to register handler for unknown event {id}"); + }; + assert!( + TypeId::of::() == event_id, + "Tried to register invalid hook for event {id}" + ); + let hook = ErasedHook::new(hook); + self.handlers.get_mut(id).unwrap().push(hook); + } + + pub fn register_dynamic_hook( + &mut self, + hook: impl Fn() -> Result<()> + 'static + Send + Sync, + id: &str, + ) -> Result<()> { + // ensure event type ids match so we can rely on them always matching + if self.events.get(id).is_none() { + bail!("Tried to register handler for unknown event {id}"); + }; + let hook = ErasedHook::new_dynamic(hook); + self.handlers.get_mut(id).unwrap().push(hook); + Ok(()) + } + + pub fn dispatch(&self, mut event: E) { + let Some(hooks) = self.handlers.get(E::ID) else { + log::error!("Dispatched unknown event {}", E::ID); + return; + }; + let event_id = self.events[E::ID]; + + assert_eq!( + TypeId::of::(), + event_id, + "Tried to dispatch invalid event {}", + E::ID + ); + + for hook in hooks { + // safety: event type is the same + if let Err(err) = unsafe { hook.call(&mut event) } { + log::error!("{} hook failed: {err:#?}", E::ID); + crate::status::report_blocking(err); + } + } + } +} + +runtime_local! { + static REGISTRY: RwLock = RwLock::new(Registry { + // hardcoded random number is good enough here we don't care about DOS resistance + // and avoids the additional complexity of `Option` + events: HashMap::with_hasher(ahash::RandomState::with_seeds(423, 9978, 38322, 3280080)), + handlers: HashMap::with_hasher(ahash::RandomState::with_seeds(423, 99078, 382322, 3282938)), + }); +} + +pub(crate) fn with(f: impl FnOnce(&Registry) -> T) -> T { + f(®ISTRY.read()) +} + +pub(crate) fn with_mut(f: impl FnOnce(&mut Registry) -> T) -> T { + f(&mut REGISTRY.write()) +} + +/// # Safety +/// The number of specified lifetimes and the static type *must* be correct. +/// This is ensured automatically by the [`events`](crate::events) +/// macro. +pub unsafe trait Event: Sized { + /// Globally unique (case sensitive) string that identifies this type. + /// A good candidate is the events type name + const ID: &'static str; + const LIFETIMES: usize; + type Static: Event + 'static; +} diff --git a/helix-event/src/runtime.rs b/helix-event/src/runtime.rs new file mode 100644 index 000000000000..8da465ef345d --- /dev/null +++ b/helix-event/src/runtime.rs @@ -0,0 +1,88 @@ +//! The event system makes use of global to decouple different systems. +//! However, this can cause problems for the integration test system because +//! it runs multiple helix applications in parallel. Making the globals +//! thread-local does not work because a applications can/does have multiple +//! runtime threads. Instead this crate implements a similar notion to a thread +//! local but instead of being local to a single thread, the statics are local to +//! a single tokio-runtime. The implementation requires locking so it's not exactly efficient. +//! +//! Therefore this function is only enabled during integration tests and behaves like +//! 
a normal static otherwise. I would prefer this module to be fully private and to only +//! export the macro but the macro still need to construct these internals so it's marked +//! `doc(hidden)` instead + +use std::ops::Deref; + +#[cfg(not(feature = "integration_test"))] +pub struct RuntimeLocal { + /// inner API used in the macro, not part of public API + #[doc(hidden)] + pub __data: T, +} + +#[cfg(not(feature = "integration_test"))] +impl Deref for RuntimeLocal { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.__data + } +} + +#[cfg(not(feature = "integration_test"))] +#[macro_export] +macro_rules! runtime_local { + ($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => { + $($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal { + __data: $init + };)* + }; +} + +#[cfg(feature = "integration_test")] +pub struct RuntimeLocal { + data: + parking_lot::RwLock>, + init: fn() -> T, +} + +#[cfg(feature = "integration_test")] +impl RuntimeLocal { + /// inner API used in the macro, not part of public API + #[doc(hidden)] + pub const fn __new(init: fn() -> T) -> Self { + Self { + data: parking_lot::RwLock::new(hashbrown::HashMap::with_hasher( + ahash::RandomState::with_seeds(423, 9978, 38322, 3280080), + )), + init, + } + } +} + +#[cfg(feature = "integration_test")] +impl Deref for RuntimeLocal { + type Target = T; + fn deref(&self) -> &T { + let id = tokio::runtime::Handle::current().id(); + let guard = self.data.read(); + match guard.get(&id) { + Some(res) => res, + None => { + drop(guard); + let data = Box::leak(Box::new((self.init)())); + let mut guard = self.data.write(); + guard.insert(id, data); + data + } + } + } +} + +#[cfg(feature = "integration_test")] +#[macro_export] +macro_rules! runtime_local { + ($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => { + $($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal::__new(|| $init);)* + }; +} diff --git a/helix-event/src/status.rs b/helix-event/src/status.rs new file mode 100644 index 000000000000..fdca676244cd --- /dev/null +++ b/helix-event/src/status.rs @@ -0,0 +1,68 @@ +//! A queue of async messages/errors that will be shown in the editor + +use std::borrow::Cow; +use std::time::Duration; + +use crate::{runtime_local, send_blocking}; +use once_cell::sync::OnceCell; +use tokio::sync::mpsc::{Receiver, Sender}; + +/// Describes the severity level of a [`StatusMessage`]. +#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord)] +pub enum Severity { + Hint, + Info, + Warning, + Error, +} + +pub struct StatusMessage { + pub severity: Severity, + pub message: Cow<'static, str>, +} + +impl From for StatusMessage { + fn from(err: anyhow::Error) -> Self { + StatusMessage { + severity: Severity::Error, + message: err.to_string().into(), + } + } +} + +impl From<&'static str> for StatusMessage { + fn from(msg: &'static str) -> Self { + StatusMessage { + severity: Severity::Info, + message: msg.into(), + } + } +} + +runtime_local! 
{ + static MESSAGES: OnceCell> = OnceCell::new(); +} + +pub async fn report(msg: impl Into) { + // if the error channel overflows just ignore it + let _ = MESSAGES + .wait() + .send_timeout(msg.into(), Duration::from_millis(10)) + .await; +} + +pub fn report_blocking(msg: impl Into) { + let messages = MESSAGES.wait(); + send_blocking(messages, msg.into()) +} + +/// Must be called once during editor startup exactly once +/// before any of the messages in this module can be used +/// +/// # Panics +/// If called multiple times +pub fn setup() -> Receiver { + let (tx, rx) = tokio::sync::mpsc::channel(128); + let _ = MESSAGES.set(tx); + rx +} diff --git a/helix-event/src/test.rs b/helix-event/src/test.rs new file mode 100644 index 000000000000..a1283ada1074 --- /dev/null +++ b/helix-event/src/test.rs @@ -0,0 +1,90 @@ +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::Arc; +use std::time::Duration; + +use parking_lot::Mutex; + +use crate::{dispatch, events, register_dynamic_hook, register_event, register_hook}; +#[test] +fn smoke_test() { + events! { + Event1 { content: String } + Event2 { content: usize } + } + register_event::(); + register_event::(); + + // setup hooks + let res1: Arc> = Arc::default(); + let acc = Arc::clone(&res1); + register_hook!(move |event: &mut Event1| { + acc.lock().push_str(&event.content); + Ok(()) + }); + let res2: Arc = Arc::default(); + let acc = Arc::clone(&res2); + register_hook!(move |event: &mut Event2| { + acc.fetch_add(event.content, Ordering::Relaxed); + Ok(()) + }); + + // triggers events + let thread = std::thread::spawn(|| { + for i in 0..1000 { + dispatch(Event2 { content: i }); + } + }); + std::thread::sleep(Duration::from_millis(1)); + dispatch(Event1 { + content: "foo".to_owned(), + }); + dispatch(Event2 { content: 42 }); + dispatch(Event1 { + content: "bar".to_owned(), + }); + dispatch(Event1 { + content: "hello world".to_owned(), + }); + thread.join().unwrap(); + + // check output + assert_eq!(&**res1.lock(), "foobarhello world"); + assert_eq!( + res2.load(Ordering::Relaxed), + 42 + (0..1000usize).sum::() + ); +} + +#[test] +fn dynamic() { + events! 
{ + Event3 {} + Event4 { count: usize } + }; + register_event::(); + register_event::(); + + let count = Arc::new(AtomicUsize::new(0)); + let count1 = count.clone(); + let count2 = count.clone(); + register_dynamic_hook( + move || { + count1.fetch_add(2, Ordering::Relaxed); + Ok(()) + }, + "Event3", + ) + .unwrap(); + register_dynamic_hook( + move || { + count2.fetch_add(3, Ordering::Relaxed); + Ok(()) + }, + "Event4", + ) + .unwrap(); + dispatch(Event3 {}); + dispatch(Event4 { count: 0 }); + dispatch(Event3 {}); + assert_eq!(count.load(Ordering::Relaxed), 7) +} diff --git a/helix-loader/Cargo.toml b/helix-loader/Cargo.toml index c40bf4dbc559..469bedc10f0b 100644 --- a/helix-loader/Cargo.toml +++ b/helix-loader/Cargo.toml @@ -15,6 +15,8 @@ name = "hx-loader" path = "src/main.rs" [dependencies] +helix-stdx = { path = "../helix-stdx" } + anyhow = "1" serde = { version = "1.0", features = ["derive"] } toml = "0.7" @@ -22,14 +24,13 @@ etcetera = "0.8" tree-sitter.workspace = true once_cell = "1.19" log = "0.4" -which = "5.0.0" # TODO: these two should be on !wasm32 only # cloning/compiling tree-sitter grammars cc = { version = "1" } threadpool = { version = "1.0" } -tempfile = "3.8.1" +tempfile = "3.9.0" dunce = "1.0.4" [target.'cfg(not(target_arch = "wasm32"))'.dependencies] diff --git a/helix-loader/src/grammar.rs b/helix-loader/src/grammar.rs index 66111aebb829..7977c6df8a42 100644 --- a/helix-loader/src/grammar.rs +++ b/helix-loader/src/grammar.rs @@ -86,10 +86,8 @@ pub fn get_language(name: &str) -> Result { } fn ensure_git_is_available() -> Result<()> { - match which::which("git") { - Ok(_cmd) => Ok(()), - Err(err) => Err(anyhow::anyhow!("'git' could not be found ({err})")), - } + helix_stdx::env::which("git")?; + Ok(()) } pub fn fetch_grammars() -> Result<()> { diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs index 5337d6027877..f8fac67035e5 100644 --- a/helix-loader/src/lib.rs +++ b/helix-loader/src/lib.rs @@ -1,14 +1,13 @@ pub mod config; pub mod grammar; +use helix_stdx::{env::current_working_dir, path}; + use etcetera::base_strategy::{choose_base_strategy, BaseStrategy}; use std::path::{Path, PathBuf}; -use std::sync::RwLock; pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH"); -static CWD: RwLock> = RwLock::new(None); - static RUNTIME_DIRS: once_cell::sync::Lazy> = once_cell::sync::Lazy::new(prioritize_runtime_dirs); @@ -16,31 +15,6 @@ static CONFIG_FILE: once_cell::sync::OnceCell = once_cell::sync::OnceCe static LOG_FILE: once_cell::sync::OnceCell = once_cell::sync::OnceCell::new(); -// Get the current working directory. -// This information is managed internally as the call to std::env::current_dir -// might fail if the cwd has been deleted. 
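// ----------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: how the new `helix_stdx::env::which`
// replaces the old `which::which(...).map_err(...)` dance, mirroring the
// `ensure_git_is_available` change above. `launch_tool` is a made-up name; in
// the real crates the `?` goes through the new
// `ExecutableNotFound(#[from] helix_stdx::env::ExecutableNotFoundError)`
// variants added to the helix-dap and helix-lsp error enums in this diff.
//
//     fn launch_tool(cmd: &str) -> anyhow::Result<std::process::Command> {
//         let path = helix_stdx::env::which(cmd)?; // absolute PathBuf, or a descriptive error
//         Ok(std::process::Command::new(path))
//     }
// ----------------------------------------------------------------------------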
-pub fn current_working_dir() -> PathBuf { - if let Some(path) = &*CWD.read().unwrap() { - return path.clone(); - } - - let path = std::env::current_dir() - .and_then(dunce::canonicalize) - .expect("Couldn't determine current working directory"); - let mut cwd = CWD.write().unwrap(); - *cwd = Some(path.clone()); - - path -} - -pub fn set_current_working_dir(path: impl AsRef) -> std::io::Result<()> { - let path = dunce::canonicalize(path)?; - std::env::set_current_dir(&path)?; - let mut cwd = CWD.write().unwrap(); - *cwd = Some(path); - Ok(()) -} - pub fn initialize_config_file(specified_file: Option) { let config_file = specified_file.unwrap_or_else(default_config_file); ensure_parent_dir(&config_file); @@ -79,7 +53,8 @@ fn prioritize_runtime_dirs() -> Vec { rt_dirs.push(conf_rt_dir); if let Ok(dir) = std::env::var("HELIX_RUNTIME") { - rt_dirs.push(dir.into()); + let dir = path::expand_tilde(dir); + rt_dirs.push(path::normalize(dir)); } // If this variable is set during build time, it will always be included @@ -280,21 +255,9 @@ fn ensure_parent_dir(path: &Path) { mod merge_toml_tests { use std::str; - use super::{current_working_dir, merge_toml_values, set_current_working_dir}; + use super::merge_toml_values; use toml::Value; - #[test] - fn current_dir_is_set() { - let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap(); - let cwd = current_working_dir(); - assert_ne!(cwd, new_path); - - set_current_working_dir(&new_path).expect("Couldn't set new path"); - - let cwd = current_working_dir(); - assert_eq!(cwd, new_path); - } - #[test] fn language_toml_map_merges() { const USER: &str = r#" diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml index 851351e0e9bc..8e9e3407c860 100644 --- a/helix-lsp/Cargo.toml +++ b/helix-lsp/Cargo.toml @@ -13,6 +13,7 @@ homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } helix-loader = { path = "../helix-loader" } helix-parsec = { path = "../helix-parsec" } @@ -28,5 +29,4 @@ serde_json = "1.0" thiserror = "1.0" tokio = { version = "1.35", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } tokio-stream = "0.1.14" -which = "5.0.0" parking_lot = "0.12.1" diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index 682d4db66488..94bad6faf9d8 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -1,27 +1,29 @@ use crate::{ + file_operations::FileOperationsInterest, find_lsp_workspace, jsonrpc, transport::{Payload, Transport}, Call, Error, OffsetEncoding, Result, }; -use helix_core::{find_workspace, path, syntax::LanguageServerFeature, ChangeSet, Rope}; +use helix_core::{find_workspace, syntax::LanguageServerFeature, ChangeSet, Rope}; use helix_loader::{self, VERSION_AND_GIT_HASH}; +use helix_stdx::path; use lsp::{ notification::DidChangeWorkspaceFolders, CodeActionCapabilityResolveSupport, - DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, WorkspaceFolder, - WorkspaceFoldersChangeEvent, + DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, SignatureHelp, Url, + WorkspaceFolder, WorkspaceFoldersChangeEvent, }; use lsp_types as lsp; use parking_lot::Mutex; use serde::Deserialize; use serde_json::Value; -use std::future::Future; -use std::process::Stdio; use std::sync::{ atomic::{AtomicU64, Ordering}, Arc, }; use std::{collections::HashMap, path::PathBuf}; +use 
std::{future::Future, sync::OnceLock}; +use std::{path::Path, process::Stdio}; use tokio::{ io::{BufReader, BufWriter}, process::{Child, Command}, @@ -50,6 +52,7 @@ pub struct Client { server_tx: UnboundedSender, request_counter: AtomicU64, pub(crate) capabilities: OnceCell, + pub(crate) file_operation_interest: OnceLock, config: Option, root_path: std::path::PathBuf, root_uri: Option, @@ -68,7 +71,7 @@ impl Client { may_support_workspace: bool, ) -> bool { let (workspace, workspace_is_cwd) = find_workspace(); - let workspace = path::get_normalized_path(&workspace); + let workspace = path::normalize(workspace); let root = find_lsp_workspace( doc_path .and_then(|x| x.parent().and_then(|x| x.to_str())) @@ -182,7 +185,7 @@ impl Client { doc_path: Option<&std::path::PathBuf>, ) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc)> { // Resolve path to the binary - let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?; + let cmd = helix_stdx::env::which(cmd)?; let process = Command::new(cmd) .envs(server_environment) @@ -204,7 +207,7 @@ impl Client { let (server_rx, server_tx, initialize_notify) = Transport::start(reader, writer, stderr, id, name.clone()); let (workspace, workspace_is_cwd) = find_workspace(); - let workspace = path::get_normalized_path(&workspace); + let workspace = path::normalize(workspace); let root = find_lsp_workspace( doc_path .and_then(|x| x.parent().and_then(|x| x.to_str())) @@ -232,6 +235,7 @@ impl Client { server_tx, request_counter: AtomicU64::new(0), capabilities: OnceCell::new(), + file_operation_interest: OnceLock::new(), config, req_timeout, root_path, @@ -277,6 +281,11 @@ impl Client { .expect("language server not yet initialized!") } + pub(crate) fn file_operations_intests(&self) -> &FileOperationsInterest { + self.file_operation_interest + .get_or_init(|| FileOperationsInterest::new(self.capabilities())) + } + /// Client has to be initialized otherwise this function panics #[inline] pub fn supports_feature(&self, feature: LanguageServerFeature) -> bool { @@ -716,27 +725,27 @@ impl Client { }) } - pub fn prepare_file_rename( + pub fn will_rename( &self, - old_uri: &lsp::Url, - new_uri: &lsp::Url, + old_path: &Path, + new_path: &Path, + is_dir: bool, ) -> Option>> { - let capabilities = self.capabilities.get().unwrap(); - - // Return early if the server does not support willRename feature - match &capabilities.workspace { - Some(workspace) => match &workspace.file_operations { - Some(op) => { - op.will_rename.as_ref()?; - } - _ => return None, - }, - _ => return None, + let capabilities = self.file_operations_intests(); + if !capabilities.will_rename.has_interest(old_path, is_dir) { + return None; } - + let url_from_path = |path| { + let url = if is_dir { + Url::from_directory_path(path) + } else { + Url::from_file_path(path) + }; + Some(url.ok()?.to_string()) + }; let files = vec![lsp::FileRename { - old_uri: old_uri.to_string(), - new_uri: new_uri.to_string(), + old_uri: url_from_path(old_path)?, + new_uri: url_from_path(new_path)?, }]; let request = self.call_with_timeout::( lsp::RenameFilesParams { files }, @@ -750,27 +759,28 @@ impl Client { }) } - pub fn did_file_rename( + pub fn did_rename( &self, - old_uri: &lsp::Url, - new_uri: &lsp::Url, + old_path: &Path, + new_path: &Path, + is_dir: bool, ) -> Option>> { - let capabilities = self.capabilities.get().unwrap(); - - // Return early if the server does not support DidRename feature - match &capabilities.workspace { - Some(workspace) => match &workspace.file_operations { - Some(op) => { - 
op.did_rename.as_ref()?; - } - _ => return None, - }, - _ => return None, + let capabilities = self.file_operations_intests(); + if !capabilities.did_rename.has_interest(new_path, is_dir) { + return None; } + let url_from_path = |path| { + let url = if is_dir { + Url::from_directory_path(path) + } else { + Url::from_file_path(path) + }; + Some(url.ok()?.to_string()) + }; let files = vec![lsp::FileRename { - old_uri: old_uri.to_string(), - new_uri: new_uri.to_string(), + old_uri: url_from_path(old_path)?, + new_uri: url_from_path(new_path)?, }]; Some(self.notify::(lsp::RenameFilesParams { files })) } @@ -998,6 +1008,7 @@ impl Client { text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, + context: lsp::CompletionContext, ) -> Option>> { let capabilities = self.capabilities.get().unwrap(); @@ -1009,13 +1020,12 @@ impl Client { text_document, position, }, + context: Some(context), // TODO: support these tokens by async receiving and updating the choice list work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, partial_result_params: lsp::PartialResultParams { partial_result_token: None, }, - context: None, - // lsp::CompletionContext { trigger_kind: , trigger_character: Some(), } }; Some(self.call::(params)) @@ -1062,7 +1072,7 @@ impl Client { text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, - ) -> Option>> { + ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support signature help. @@ -1078,7 +1088,8 @@ impl Client { // lsp::SignatureHelpContext }; - Some(self.call::(params)) + let res = self.call::(params); + Some(async move { Ok(serde_json::from_value(res.await?)?) }) } pub fn text_document_range_inlay_hints( diff --git a/helix-lsp/src/file_operations.rs b/helix-lsp/src/file_operations.rs new file mode 100644 index 000000000000..98ac32a40989 --- /dev/null +++ b/helix-lsp/src/file_operations.rs @@ -0,0 +1,105 @@ +use std::path::Path; + +use globset::{GlobBuilder, GlobSet}; + +use crate::lsp; + +#[derive(Default, Debug)] +pub(crate) struct FileOperationFilter { + dir_globs: GlobSet, + file_globs: GlobSet, +} + +impl FileOperationFilter { + fn new(capability: Option<&lsp::FileOperationRegistrationOptions>) -> FileOperationFilter { + let Some(cap) = capability else { + return FileOperationFilter::default(); + }; + let mut dir_globs = GlobSet::builder(); + let mut file_globs = GlobSet::builder(); + for filter in &cap.filters { + // TODO: support other url schemes + let is_non_file_schema = filter + .scheme + .as_ref() + .is_some_and(|schema| schema != "file"); + if is_non_file_schema { + continue; + } + let ignore_case = filter + .pattern + .options + .as_ref() + .and_then(|opts| opts.ignore_case) + .unwrap_or(false); + let mut glob_builder = GlobBuilder::new(&filter.pattern.glob); + glob_builder.case_insensitive(!ignore_case); + let glob = match glob_builder.build() { + Ok(glob) => glob, + Err(err) => { + log::error!("invalid glob send by LS: {err}"); + continue; + } + }; + match filter.pattern.matches { + Some(lsp::FileOperationPatternKind::File) => { + file_globs.add(glob); + } + Some(lsp::FileOperationPatternKind::Folder) => { + dir_globs.add(glob); + } + None => { + file_globs.add(glob.clone()); + dir_globs.add(glob); + } + }; + } + let file_globs = file_globs.build().unwrap_or_else(|err| { + log::error!("invalid globs send by LS: {err}"); + GlobSet::empty() + }); + let dir_globs = dir_globs.build().unwrap_or_else(|err| { + 
log::error!("invalid globs send by LS: {err}"); + GlobSet::empty() + }); + FileOperationFilter { + dir_globs, + file_globs, + } + } + + pub(crate) fn has_interest(&self, path: &Path, is_dir: bool) -> bool { + if is_dir { + self.dir_globs.is_match(path) + } else { + self.file_globs.is_match(path) + } + } +} + +#[derive(Default, Debug)] +pub(crate) struct FileOperationsInterest { + // TODO: support other notifications + // did_create: FileOperationFilter, + // will_create: FileOperationFilter, + pub did_rename: FileOperationFilter, + pub will_rename: FileOperationFilter, + // did_delete: FileOperationFilter, + // will_delete: FileOperationFilter, +} + +impl FileOperationsInterest { + pub fn new(capabilities: &lsp::ServerCapabilities) -> FileOperationsInterest { + let capabilities = capabilities + .workspace + .as_ref() + .and_then(|capabilities| capabilities.file_operations.as_ref()); + let Some(capabilities) = capabilities else { + return FileOperationsInterest::default(); + }; + FileOperationsInterest { + did_rename: FileOperationFilter::new(capabilities.did_rename.as_ref()), + will_rename: FileOperationFilter::new(capabilities.will_rename.as_ref()), + } + } +} diff --git a/helix-lsp/src/lib.rs b/helix-lsp/src/lib.rs index b6a990659aaa..4ce445aeed0c 100644 --- a/helix-lsp/src/lib.rs +++ b/helix-lsp/src/lib.rs @@ -1,5 +1,6 @@ mod client; pub mod file_event; +mod file_operations; pub mod jsonrpc; pub mod snippet; mod transport; @@ -11,10 +12,10 @@ pub use lsp::{Position, Url}; pub use lsp_types as lsp; use futures_util::stream::select_all::SelectAll; -use helix_core::{ - path, - syntax::{LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures}, +use helix_core::syntax::{ + LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures, }; +use helix_stdx::path; use tokio::sync::mpsc::UnboundedReceiver; use std::{ @@ -44,6 +45,8 @@ pub enum Error { #[error("Unhandled")] Unhandled, #[error(transparent)] + ExecutableNotFound(#[from] helix_stdx::env::ExecutableNotFoundError), + #[error(transparent)] Other(#[from] anyhow::Error), } @@ -549,6 +552,7 @@ pub enum MethodCall { WorkspaceConfiguration(lsp::ConfigurationParams), RegisterCapability(lsp::RegistrationParams), UnregisterCapability(lsp::UnregistrationParams), + ShowDocument(lsp::ShowDocumentParams), } impl MethodCall { @@ -576,6 +580,10 @@ impl MethodCall { let params: lsp::UnregistrationParams = params.parse()?; Self::UnregisterCapability(params) } + lsp::request::ShowDocument::METHOD => { + let params: lsp::ShowDocumentParams = params.parse()?; + Self::ShowDocument(params) + } _ => { return Err(Error::Unhandled); } @@ -915,10 +923,17 @@ fn start_client( } // next up, notify - _client + let notification_result = _client .notify::(lsp::InitializedParams {}) - .await - .unwrap(); + .await; + + if let Err(e) = notification_result { + log::error!( + "failed to notify language server of its initialization: {}", + e + ); + return; + } initialize_notify.notify_one(); }); @@ -946,10 +961,10 @@ pub fn find_lsp_workspace( let mut file = if file.is_absolute() { file.to_path_buf() } else { - let current_dir = helix_loader::current_working_dir(); + let current_dir = helix_stdx::env::current_working_dir(); current_dir.join(file) }; - file = path::get_normalized_path(&file); + file = path::normalize(&file); if !file.starts_with(workspace) { return None; @@ -966,7 +981,7 @@ pub fn find_lsp_workspace( if root_dirs .iter() - .any(|root_dir| path::get_normalized_path(&workspace.join(root_dir)) == ancestor) + .any(|root_dir| 
path::normalize(workspace.join(root_dir)) == ancestor) { // if the worskapce is the cwd do not search any higher for workspaces // but specify diff --git a/helix-lsp/src/transport.rs b/helix-lsp/src/transport.rs index 9fdd30aa01cf..f2f35d6abf4b 100644 --- a/helix-lsp/src/transport.rs +++ b/helix-lsp/src/transport.rs @@ -270,7 +270,14 @@ impl Transport { } }; } - Err(Error::StreamClosed) => { + Err(err) => { + if !matches!(err, Error::StreamClosed) { + error!( + "Exiting {} after unexpected error: {err:?}", + &transport.name + ); + } + // Close any outstanding requests. for (id, tx) in transport.pending_requests.lock().await.drain() { match tx.send(Err(Error::StreamClosed)).await { @@ -300,10 +307,6 @@ impl Transport { } break; } - Err(err) => { - error!("{} err: <- {err:?}", transport.name); - break; - } } } } diff --git a/helix-stdx/Cargo.toml b/helix-stdx/Cargo.toml new file mode 100644 index 000000000000..e77f8b91fb65 --- /dev/null +++ b/helix-stdx/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "helix-stdx" +description = "Standard library extensions" +include = ["src/**/*", "README.md"] +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true +categories.workspace = true +repository.workspace = true +homepage.workspace = true + +[dependencies] +dunce = "1.0" +etcetera = "0.8" +ropey = { version = "1.6.1", default-features = false } +which = "6.0" + +[dev-dependencies] +tempfile = "3.9" diff --git a/helix-stdx/src/env.rs b/helix-stdx/src/env.rs new file mode 100644 index 000000000000..90a0aee8737d --- /dev/null +++ b/helix-stdx/src/env.rs @@ -0,0 +1,80 @@ +use std::{ + ffi::OsStr, + path::{Path, PathBuf}, + sync::RwLock, +}; + +static CWD: RwLock> = RwLock::new(None); + +// Get the current working directory. +// This information is managed internally as the call to std::env::current_dir +// might fail if the cwd has been deleted. 
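// ----------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the calling pattern the cached
// cwd is designed for. After the first successful lookup (or an explicit
// `set_current_working_dir`), later calls return the cached value even if the
// directory has since been deleted from disk.
//
//     helix_stdx::env::set_current_working_dir(std::env::temp_dir())?;
//     let cwd = helix_stdx::env::current_working_dir(); // served from the CWD cache
// ----------------------------------------------------------------------------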
+pub fn current_working_dir() -> PathBuf { + if let Some(path) = &*CWD.read().unwrap() { + return path.clone(); + } + + let path = std::env::current_dir() + .map(crate::path::normalize) + .expect("Couldn't determine current working directory"); + let mut cwd = CWD.write().unwrap(); + *cwd = Some(path.clone()); + + path +} + +pub fn set_current_working_dir(path: impl AsRef) -> std::io::Result<()> { + let path = crate::path::canonicalize(path); + std::env::set_current_dir(&path)?; + let mut cwd = CWD.write().unwrap(); + *cwd = Some(path); + Ok(()) +} + +pub fn env_var_is_set(env_var_name: &str) -> bool { + std::env::var_os(env_var_name).is_some() +} + +pub fn binary_exists>(binary_name: T) -> bool { + which::which(binary_name).is_ok() +} + +pub fn which>( + binary_name: T, +) -> Result { + which::which(binary_name.as_ref()).map_err(|err| ExecutableNotFoundError { + command: binary_name.as_ref().to_string_lossy().into_owned(), + inner: err, + }) +} + +#[derive(Debug)] +pub struct ExecutableNotFoundError { + command: String, + inner: which::Error, +} + +impl std::fmt::Display for ExecutableNotFoundError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "command '{}' not found: {}", self.command, self.inner) + } +} + +impl std::error::Error for ExecutableNotFoundError {} + +#[cfg(test)] +mod tests { + use super::{current_working_dir, set_current_working_dir}; + + #[test] + fn current_dir_is_set() { + let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap(); + let cwd = current_working_dir(); + assert_ne!(cwd, new_path); + + set_current_working_dir(&new_path).expect("Couldn't set new path"); + + let cwd = current_working_dir(); + assert_eq!(cwd, new_path); + } +} diff --git a/helix-stdx/src/lib.rs b/helix-stdx/src/lib.rs new file mode 100644 index 000000000000..68fe3ec37702 --- /dev/null +++ b/helix-stdx/src/lib.rs @@ -0,0 +1,3 @@ +pub mod env; +pub mod path; +pub mod rope; diff --git a/helix-core/src/path.rs b/helix-stdx/src/path.rs similarity index 56% rename from helix-core/src/path.rs rename to helix-stdx/src/path.rs index ede37e044e05..5746657c3c7b 100644 --- a/helix-core/src/path.rs +++ b/helix-stdx/src/path.rs @@ -1,6 +1,9 @@ -use etcetera::home_dir; +pub use etcetera::home_dir; + use std::path::{Component, Path, PathBuf}; +use crate::env::current_working_dir; + /// Replaces users home directory from `path` with tilde `~` if the directory /// is available, otherwise returns the path unchanged. pub fn fold_home_dir(path: &Path) -> PathBuf { @@ -16,7 +19,8 @@ pub fn fold_home_dir(path: &Path) -> PathBuf { /// Expands tilde `~` into users home directory if available, otherwise returns the path /// unchanged. The tilde will only be expanded when present as the first component of the path /// and only slash follows it. -pub fn expand_tilde(path: &Path) -> PathBuf { +pub fn expand_tilde(path: impl AsRef) -> PathBuf { + let path = path.as_ref(); let mut components = path.components().peekable(); if let Some(Component::Normal(c)) = components.peek() { if c == &"~" { @@ -30,32 +34,11 @@ pub fn expand_tilde(path: &Path) -> PathBuf { path.to_path_buf() } -/// Normalize a path, removing things like `.` and `..`. -/// -/// CAUTION: This does not resolve symlinks (unlike -/// [`std::fs::canonicalize`]). This may cause incorrect or surprising -/// behavior at times. This should be used carefully. Unfortunately, -/// [`std::fs::canonicalize`] can be hard to use correctly, since it can often -/// fail, or on Windows returns annoying device paths. 
This is a problem Cargo -/// needs to improve on. -/// Copied from cargo: -pub fn get_normalized_path(path: &Path) -> PathBuf { - // normalization strategy is to canonicalize first ancestor path that exists (i.e., canonicalize as much as possible), - // then run handrolled normalization on the non-existent remainder - let (base, path) = path - .ancestors() - .find_map(|base| { - let canonicalized_base = dunce::canonicalize(base).ok()?; - let remainder = path.strip_prefix(base).ok()?.into(); - Some((canonicalized_base, remainder)) - }) - .unwrap_or_else(|| (PathBuf::new(), PathBuf::from(path))); - - if path.as_os_str().is_empty() { - return base; - } - - let mut components = path.components().peekable(); +/// Normalize a path without resolving symlinks. +// Strategy: start from the first component and move up. Cannonicalize previous path, +// join component, cannonicalize new path, strip prefix and join to the final result. +pub fn normalize(path: impl AsRef) -> PathBuf { + let mut components = path.as_ref().components().peekable(); let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { components.next(); PathBuf::from(c.as_os_str()) @@ -70,37 +53,77 @@ pub fn get_normalized_path(path: &Path) -> PathBuf { ret.push(component.as_os_str()); } Component::CurDir => {} + #[cfg(not(windows))] Component::ParentDir => { ret.pop(); } + #[cfg(windows)] + Component::ParentDir => { + if let Some(head) = ret.components().next_back() { + match head { + Component::Prefix(_) | Component::RootDir => {} + Component::CurDir => unreachable!(), + // If we left previous component as ".." it means we met a symlink before and we can't pop path. + Component::ParentDir => { + ret.push(".."); + } + Component::Normal(_) => { + if ret.is_symlink() { + ret.push(".."); + } else { + ret.pop(); + } + } + } + } + } + #[cfg(not(windows))] Component::Normal(c) => { ret.push(c); } + #[cfg(windows)] + Component::Normal(c) => 'normal: { + use std::fs::canonicalize; + + let new_path = ret.join(c); + if new_path.is_symlink() { + ret = new_path; + break 'normal; + } + let (can_new, can_old) = (canonicalize(&new_path), canonicalize(&ret)); + match (can_new, can_old) { + (Ok(can_new), Ok(can_old)) => { + let striped = can_new.strip_prefix(can_old); + ret.push(striped.unwrap_or_else(|_| c.as_ref())); + } + _ => ret.push(c), + } + } } } - base.join(ret) + dunce::simplified(&ret).to_path_buf() } /// Returns the canonical, absolute form of a path with all intermediate components normalized. /// -/// This function is used instead of `std::fs::canonicalize` because we don't want to verify +/// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify /// here if the path exists, just normalize it's components. 
-pub fn get_canonicalized_path(path: &Path) -> PathBuf { +pub fn canonicalize(path: impl AsRef) -> PathBuf { let path = expand_tilde(path); let path = if path.is_relative() { - helix_loader::current_working_dir().join(path) + current_working_dir().join(path) } else { path }; - get_normalized_path(path.as_path()) + normalize(path) } -pub fn get_relative_path(path: &Path) -> PathBuf { - let path = PathBuf::from(path); +pub fn get_relative_path(path: impl AsRef) -> PathBuf { + let path = PathBuf::from(path.as_ref()); let path = if path.is_absolute() { - let cwdir = get_normalized_path(&helix_loader::current_working_dir()); - get_normalized_path(&path) + let cwdir = normalize(current_working_dir()); + normalize(&path) .strip_prefix(cwdir) .map(PathBuf::from) .unwrap_or(path) @@ -116,8 +139,8 @@ pub fn get_relative_path(path: &Path) -> PathBuf { /// Also strip the current working directory from the beginning of the path. /// Note that this function does not check if the truncated path is unambiguous. /// -/// ``` -/// use helix_core::path::get_truncated_path; +/// ``` +/// use helix_stdx::path::get_truncated_path; /// use std::path::Path; /// /// assert_eq!( @@ -139,8 +162,8 @@ pub fn get_relative_path(path: &Path) -> PathBuf { /// assert_eq!(get_truncated_path("").as_path(), Path::new("")); /// ``` /// -pub fn get_truncated_path>(path: P) -> PathBuf { - let cwd = helix_loader::current_working_dir(); +pub fn get_truncated_path(path: impl AsRef) -> PathBuf { + let cwd = current_working_dir(); let path = path .as_ref() .strip_prefix(cwd) diff --git a/helix-stdx/src/rope.rs b/helix-stdx/src/rope.rs new file mode 100644 index 000000000000..4ee39d4a897a --- /dev/null +++ b/helix-stdx/src/rope.rs @@ -0,0 +1,26 @@ +use ropey::RopeSlice; + +pub trait RopeSliceExt: Sized { + fn ends_with(self, text: &str) -> bool; + fn starts_with(self, text: &str) -> bool; +} + +impl RopeSliceExt for RopeSlice<'_> { + fn ends_with(self, text: &str) -> bool { + let len = self.len_bytes(); + if len < text.len() { + return false; + } + self.get_byte_slice(len - text.len()..) + .map_or(false, |end| end == text) + } + + fn starts_with(self, text: &str) -> bool { + let len = self.len_bytes(); + if len < text.len() { + return false; + } + self.get_byte_slice(..len - text.len()) + .map_or(false, |start| start == text) + } +} diff --git a/helix-stdx/tests/path.rs b/helix-stdx/tests/path.rs new file mode 100644 index 000000000000..cc3c15cba65c --- /dev/null +++ b/helix-stdx/tests/path.rs @@ -0,0 +1,124 @@ +#![cfg(windows)] + +use std::{ + env::set_current_dir, + error::Error, + path::{Component, Path, PathBuf}, +}; + +use helix_stdx::path; +use tempfile::Builder; + +// Paths on Windows are almost always case-insensitive. +// Normalization should return the original path. +// E.g. mkdir `CaSe`, normalize(`case`) = `CaSe`. +#[test] +fn test_case_folding_windows() -> Result<(), Box> { + // tmp/root/case + let tmp_prefix = std::env::temp_dir(); + set_current_dir(&tmp_prefix)?; + + let root = Builder::new().prefix("root-").tempdir()?; + let case = Builder::new().prefix("CaSe-").tempdir_in(&root)?; + + let root_without_prefix = root.path().strip_prefix(&tmp_prefix)?; + + let lowercase_case = format!( + "case-{}", + case.path() + .file_name() + .unwrap() + .to_string_lossy() + .split_at(5) + .1 + ); + let test_path = root_without_prefix.join(lowercase_case); + assert_eq!( + path::normalize(&test_path), + case.path().strip_prefix(&tmp_prefix)? 
+ ); + + Ok(()) +} + +#[test] +fn test_normalize_path() -> Result<(), Box> { + /* + tmp/root/ + ├── link -> dir1/orig_file + ├── dir1/ + │ └── orig_file + └── dir2/ + └── dir_link -> ../dir1/ + */ + + let tmp_prefix = std::env::temp_dir(); + set_current_dir(&tmp_prefix)?; + + // Create a tree structure as shown above + let root = Builder::new().prefix("root-").tempdir()?; + let dir1 = Builder::new().prefix("dir1-").tempdir_in(&root)?; + let orig_file = Builder::new().prefix("orig_file-").tempfile_in(&dir1)?; + let dir2 = Builder::new().prefix("dir2-").tempdir_in(&root)?; + + // Create path and delete existing file + let dir_link = Builder::new() + .prefix("dir_link-") + .tempfile_in(&dir2)? + .path() + .to_owned(); + let link = Builder::new() + .prefix("link-") + .tempfile_in(&root)? + .path() + .to_owned(); + + use std::os::windows; + windows::fs::symlink_dir(&dir1, &dir_link)?; + windows::fs::symlink_file(&orig_file, &link)?; + + // root/link + let path = link.strip_prefix(&tmp_prefix)?; + assert_eq!( + path::normalize(path), + path, + "input {:?} and symlink last component shouldn't be resolved", + path + ); + + // root/dir2/dir_link/orig_file/../.. + let path = dir_link + .strip_prefix(&tmp_prefix) + .unwrap() + .join(orig_file.path().file_name().unwrap()) + .join(Component::ParentDir) + .join(Component::ParentDir); + let expected = dir_link + .strip_prefix(&tmp_prefix) + .unwrap() + .join(Component::ParentDir); + assert_eq!( + path::normalize(&path), + expected, + "input {:?} and \"..\" should not erase the simlink that goes ahead", + &path + ); + + // root/link/.././../dir2/../ + let path = link + .strip_prefix(&tmp_prefix) + .unwrap() + .join(Component::ParentDir) + .join(Component::CurDir) + .join(Component::ParentDir) + .join(dir2.path().file_name().unwrap()) + .join(Component::ParentDir); + let expected = link + .strip_prefix(&tmp_prefix) + .unwrap() + .join(Component::ParentDir) + .join(Component::ParentDir); + assert_eq!(path::normalize(&path), expected, "input {:?}", &path); + + Ok(()) +} diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index 4ff7fc0b1798..a0d6754d0722 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -15,7 +15,7 @@ homepage.workspace = true [features] default = ["git"] unicode-lines = ["helix-core/unicode-lines"] -integration = [] +integration = ["helix-event/integration_test"] git = ["helix-vcs/git"] [[bin]] @@ -23,6 +23,7 @@ name = "hx" path = "src/main.rs" [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } helix-event = { path = "../helix-event" } helix-view = { path = "../helix-view" } @@ -34,8 +35,6 @@ helix-loader = { path = "../helix-loader" } anyhow = "1" once_cell = "1.19" -which = "5.0.0" - tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] } crossterm = { version = "0.27", features = ["event-stream"] } @@ -53,7 +52,7 @@ log = "0.4" nucleo.workspace = true ignore = "0.4" # markdown doc rendering -pulldown-cmark = { version = "0.9", default-features = false } +pulldown-cmark = { version = "0.10", default-features = false } # file type detection content_inspector = "0.2.4" @@ -73,7 +72,7 @@ grep-searcher = "0.1.13" [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] } -libc = "0.2.151" 
+libc = "0.2.153" [target.'cfg(target_os = "macos")'.dependencies] crossterm = { version = "0.27", features = ["event-stream", "use-dev-tty"] } @@ -82,6 +81,6 @@ crossterm = { version = "0.27", features = ["event-stream", "use-dev-tty"] } helix-loader = { path = "../helix-loader" } [dev-dependencies] -smallvec = "1.11" +smallvec = "1.13" indoc = "2.0.4" -tempfile = "3.8.1" +tempfile = "3.9.0" diff --git a/helix-term/build.rs b/helix-term/build.rs index b47dae8ef653..6bebf00c6152 100644 --- a/helix-term/build.rs +++ b/helix-term/build.rs @@ -6,4 +6,150 @@ fn main() { build_grammars(Some(std::env::var("TARGET").unwrap())) .expect("Failed to compile tree-sitter grammars"); } + + #[cfg(windows)] + windows_rc::link_icon_in_windows_exe("../contrib/helix-256p.ico"); +} + +#[cfg(windows)] +mod windows_rc { + use std::io::prelude::Write; + use std::{env, io, path::Path, path::PathBuf, process}; + + pub(crate) fn link_icon_in_windows_exe(icon_path: &str) { + let rc_exe = find_rc_exe().expect("Windows SDK is to be installed along with MSVC"); + + let output = env::var("OUT_DIR").expect("Env var OUT_DIR should have been set by compiler"); + let output_dir = PathBuf::from(output); + + let rc_path = output_dir.join("resource.rc"); + write_resource_file(&rc_path, icon_path).unwrap(); + + let resource_file = PathBuf::from(&output_dir).join("resource.lib"); + compile_with_toolkit_msvc(rc_exe, resource_file, rc_path); + + println!("cargo:rustc-link-search=native={}", output_dir.display()); + println!("cargo:rustc-link-lib=dylib=resource"); + } + + fn compile_with_toolkit_msvc(rc_exe: PathBuf, output: PathBuf, input: PathBuf) { + let mut command = process::Command::new(rc_exe); + let command = command.arg(format!( + "/I{}", + env::var("CARGO_MANIFEST_DIR") + .expect("CARGO_MANIFEST_DIR should have been set by Cargo") + )); + + let status = command + .arg(format!("/fo{}", output.display())) + .arg(format!("{}", input.display())) + .output() + .unwrap(); + + println!( + "RC Output:\n{}\n------", + String::from_utf8_lossy(&status.stdout) + ); + println!( + "RC Error:\n{}\n------", + String::from_utf8_lossy(&status.stderr) + ); + } + + fn find_rc_exe() -> io::Result { + let find_reg_key = process::Command::new("reg") + .arg("query") + .arg(r"HKLM\SOFTWARE\Microsoft\Windows Kits\Installed Roots") + .arg("/reg:32") + .arg("/v") + .arg("KitsRoot10") + .output(); + + match find_reg_key { + Err(find_reg_key) => { + return Err(io::Error::new( + io::ErrorKind::Other, + format!("Failed to run registry query: {}", find_reg_key), + )) + } + Ok(find_reg_key) => { + if find_reg_key.status.code().unwrap() != 0 { + return Err(io::Error::new( + io::ErrorKind::Other, + "Can not find Windows SDK", + )); + } else { + let lines = String::from_utf8(find_reg_key.stdout) + .expect("Should be able to parse the output"); + let mut lines: Vec<&str> = lines.lines().collect(); + let mut rc_exe_paths: Vec = Vec::new(); + lines.reverse(); + for line in lines { + if line.trim().starts_with("KitsRoot") { + let kit: String = line + .chars() + .skip(line.find("REG_SZ").unwrap() + 6) + .skip_while(|c| c.is_whitespace()) + .collect(); + + let p = PathBuf::from(&kit); + let rc = if cfg!(target_arch = "x86_64") { + p.join(r"bin\x64\rc.exe") + } else { + p.join(r"bin\x86\rc.exe") + }; + + if rc.exists() { + println!("{:?}", rc); + rc_exe_paths.push(rc.to_owned()); + } + + if let Ok(bin) = p.join("bin").read_dir() { + for e in bin.filter_map(|e| e.ok()) { + let p = if cfg!(target_arch = "x86_64") { + e.path().join(r"x64\rc.exe") + } else { + 
e.path().join(r"x86\rc.exe") + }; + if p.exists() { + println!("{:?}", p); + rc_exe_paths.push(p.to_owned()); + } + } + } + } + } + if rc_exe_paths.is_empty() { + return Err(io::Error::new( + io::ErrorKind::Other, + "Can not find Windows SDK", + )); + } + + println!("{:?}", rc_exe_paths); + let rc_path = rc_exe_paths.pop().unwrap(); + + let rc_exe = if !rc_path.exists() { + if cfg!(target_arch = "x86_64") { + PathBuf::from(rc_path.parent().unwrap()).join(r"bin\x64\rc.exe") + } else { + PathBuf::from(rc_path.parent().unwrap()).join(r"bin\x86\rc.exe") + } + } else { + rc_path + }; + + println!("Selected RC path: '{}'", rc_exe.display()); + Ok(rc_exe) + } + } + } + } + + fn write_resource_file(rc_path: &Path, icon_path: &str) -> io::Result<()> { + let mut f = std::fs::File::create(rc_path)?; + writeln!(f, "{} ICON \"{}\"", 1, icon_path)?; + + Ok(()) + } } diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index 3abe9cae54ac..b5150a13af70 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -1,16 +1,12 @@ use arc_swap::{access::Map, ArcSwap}; use futures_util::Stream; -use helix_core::{ - chars::char_is_word, - diagnostic::{DiagnosticTag, NumberOrString}, - path::get_relative_path, - pos_at_coords, syntax, Selection, -}; +use helix_core::{diagnostic::Severity, pos_at_coords, syntax, Selection}; use helix_lsp::{ lsp::{self, notification::Notification}, - util::lsp_pos_to_pos, + util::lsp_range_to_range, LspProgressMap, }; +use helix_stdx::path::get_relative_path; use helix_view::{ align_view, document::DocumentSavedEventResult, @@ -25,15 +21,15 @@ use tui::backend::Backend; use crate::{ args::Args, - commands::apply_workspace_edit, compositor::{Compositor, Event}, config::Config, + handlers, job::Jobs, keymap::Keymaps, ui::{self, overlay::overlaid}, }; -use log::{debug, error, warn}; +use log::{debug, error, info, warn}; #[cfg(not(feature = "integration"))] use std::io::stdout; use std::{collections::btree_map::Entry, io::stdin, path::Path, sync::Arc}; @@ -142,6 +138,7 @@ impl Application { let area = terminal.size().expect("couldn't get terminal size"); let mut compositor = Compositor::new(area); let config = Arc::new(ArcSwap::from_pointee(config)); + let handlers = handlers::setup(config.clone()); let mut editor = Editor::new( area, theme_loader.clone(), @@ -149,6 +146,7 @@ impl Application { Arc::new(Map::new(Arc::clone(&config), |config: &Config| { &config.editor })), + handlers, ); let keys = Box::new(Map::new(Arc::clone(&config), |config: &Config| { @@ -325,10 +323,21 @@ impl Application { Some(event) = input_stream.next() => { self.handle_terminal_events(event).await; } - Some(callback) = self.jobs.futures.next() => { - self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback); + Some(callback) = self.jobs.callbacks.recv() => { + self.jobs.handle_callback(&mut self.editor, &mut self.compositor, Ok(Some(callback))); self.render().await; } + Some(msg) = self.jobs.status_messages.recv() => { + let severity = match msg.severity{ + helix_event::status::Severity::Hint => Severity::Hint, + helix_event::status::Severity::Info => Severity::Info, + helix_event::status::Severity::Warning => Severity::Warning, + helix_event::status::Severity::Error => Severity::Error, + }; + // TODO: show multiple status messages at once to avoid clobbering + self.editor.status_msg = Some((msg.message, severity)); + helix_event::request_redraw(); + } Some(callback) = self.jobs.wait_futures.next() => { self.jobs.handle_callback(&mut self.editor, 
&mut self.compositor, callback); self.render().await; @@ -392,6 +401,12 @@ impl Application { self.editor.syn_loader = self.syn_loader.clone(); for document in self.editor.documents.values_mut() { document.detect_language(self.syn_loader.clone()); + let diagnostics = Editor::doc_diagnostics( + &self.editor.language_servers, + &self.editor.diagnostics, + document, + ); + document.replace_diagnostics(diagnostics, &[], None); } Ok(()) @@ -557,18 +572,8 @@ impl Application { let lines = doc_save_event.text.len_lines(); let bytes = doc_save_event.text.len_bytes(); - if doc.path() != Some(&doc_save_event.path) { - doc.set_path(Some(&doc_save_event.path)); - - let loader = self.editor.syn_loader.clone(); - - // borrowing the same doc again to get around the borrow checker - let doc = doc_mut!(self.editor, &doc_save_event.doc_id); - let id = doc.id(); - doc.detect_language(loader); - self.editor.refresh_language_servers(id); - } - + self.editor + .set_doc_path(doc_save_event.doc_id, &doc_save_event.path); // TODO: fix being overwritten by lsp self.editor.set_status(format!( "'{}' written, {}L {}B", @@ -675,9 +680,13 @@ impl Application { Call::Notification(helix_lsp::jsonrpc::Notification { method, params, .. }) => { let notification = match Notification::parse(&method, params) { Ok(notification) => notification, + Err(helix_lsp::Error::Unhandled) => { + info!("Ignoring Unhandled notification from Language Server"); + return; + } Err(err) => { - log::error!( - "received malformed notification from Language Server: {}", + error!( + "Ignoring unknown notification from Language Server: {}", err ); return; @@ -731,7 +740,6 @@ impl Application { log::error!("Discarding publishDiagnostic notification sent by an uninitialized server: {}", language_server.name()); return; } - let offset_encoding = language_server.offset_encoding(); // have to inline the function because of borrow checking... 
let doc = self.editor.documents.values_mut() .find(|doc| doc.path().map(|p| p == &path).unwrap_or(false)) @@ -745,11 +753,10 @@ impl Application { true }); - if let Some(doc) = doc { + let mut unchanged_diag_sources = Vec::new(); + if let Some(doc) = &doc { let lang_conf = doc.language.clone(); - let text = doc.text().clone(); - let mut unchaged_diag_sources_ = Vec::new(); if let Some(lang_conf) = &lang_conf { if let Some(old_diagnostics) = self.editor.diagnostics.get(¶ms.uri) @@ -774,118 +781,11 @@ impl Application { }) .map(|(d, _)| d); if new_diagnostics.eq(old_diagnostics) { - unchaged_diag_sources_.push(source.clone()) + unchanged_diag_sources.push(source.clone()) } } } } - - let unchaged_diag_sources = &unchaged_diag_sources_; - let diagnostics = - params.diagnostics.iter().filter_map(move |diagnostic| { - use helix_core::diagnostic::{Diagnostic, Range, Severity::*}; - use lsp::DiagnosticSeverity; - - if diagnostic.source.as_ref().map_or(false, |source| { - unchaged_diag_sources.contains(source) - }) { - return None; - } - - // TODO: convert inside server - let start = if let Some(start) = lsp_pos_to_pos( - &text, - diagnostic.range.start, - offset_encoding, - ) { - start - } else { - log::warn!("lsp position out of bounds - {:?}", diagnostic); - return None; - }; - - let end = if let Some(end) = - lsp_pos_to_pos(&text, diagnostic.range.end, offset_encoding) - { - end - } else { - log::warn!("lsp position out of bounds - {:?}", diagnostic); - return None; - }; - let severity = - diagnostic.severity.map(|severity| match severity { - DiagnosticSeverity::ERROR => Error, - DiagnosticSeverity::WARNING => Warning, - DiagnosticSeverity::INFORMATION => Info, - DiagnosticSeverity::HINT => Hint, - severity => unreachable!( - "unrecognized diagnostic severity: {:?}", - severity - ), - }); - - if let Some(lang_conf) = &lang_conf { - if let Some(severity) = severity { - if severity < lang_conf.diagnostic_severity { - return None; - } - } - }; - - let code = match diagnostic.code.clone() { - Some(x) => match x { - lsp::NumberOrString::Number(x) => { - Some(NumberOrString::Number(x)) - } - lsp::NumberOrString::String(x) => { - Some(NumberOrString::String(x)) - } - }, - None => None, - }; - - let tags = if let Some(tags) = &diagnostic.tags { - let new_tags = tags - .iter() - .filter_map(|tag| match *tag { - lsp::DiagnosticTag::DEPRECATED => { - Some(DiagnosticTag::Deprecated) - } - lsp::DiagnosticTag::UNNECESSARY => { - Some(DiagnosticTag::Unnecessary) - } - _ => None, - }) - .collect(); - - new_tags - } else { - Vec::new() - }; - - let ends_at_word = start != end - && end != 0 - && text.get_char(end - 1).map_or(false, char_is_word); - let starts_at_word = start != end - && text.get_char(start).map_or(false, char_is_word); - - Some(Diagnostic { - range: Range { start, end }, - ends_at_word, - starts_at_word, - zero_width: start == end, - line: diagnostic.range.start.line as usize, - message: diagnostic.message.clone(), - severity, - code, - tags, - source: diagnostic.source.clone(), - data: diagnostic.data.clone(), - language_server_id: server_id, - }) - }); - - doc.replace_diagnostics(diagnostics, unchaged_diag_sources, server_id); } let diagnostics = params.diagnostics.into_iter().map(|d| (d, server_id)); @@ -910,6 +810,27 @@ impl Application { diagnostics.sort_unstable_by_key(|(d, server_id)| { (d.severity, d.range.start, *server_id) }); + + if let Some(doc) = doc { + let diagnostic_of_language_server_and_not_in_unchanged_sources = + |diagnostic: &lsp::Diagnostic, ls_id| { + ls_id == server_id + 
&& diagnostic.source.as_ref().map_or(true, |source| { + !unchanged_diag_sources.contains(source) + }) + }; + let diagnostics = Editor::doc_diagnostics_with_filter( + &self.editor.language_servers, + &self.editor.diagnostics, + doc, + diagnostic_of_language_server_and_not_in_unchanged_sources, + ); + doc.replace_diagnostics( + diagnostics, + &unchanged_diag_sources, + Some(server_id), + ); + } } Notification::ShowMessage(params) => { log::warn!("unhandled window/showMessage: {:?}", params); @@ -1017,7 +938,7 @@ impl Application { // Clear any diagnostics for documents with this server open. for doc in self.editor.documents_mut() { - doc.clear_diagnostics(server_id); + doc.clear_diagnostics(Some(server_id)); } // Remove the language server from the registry. @@ -1071,11 +992,9 @@ impl Application { let language_server = language_server!(); if language_server.is_initialized() { let offset_encoding = language_server.offset_encoding(); - let res = apply_workspace_edit( - &mut self.editor, - offset_encoding, - ¶ms.edit, - ); + let res = self + .editor + .apply_workspace_edit(offset_encoding, ¶ms.edit); Ok(json!(lsp::ApplyWorkspaceEditResponse { applied: res.is_ok(), @@ -1176,6 +1095,13 @@ impl Application { } Ok(serde_json::Value::Null) } + Ok(MethodCall::ShowDocument(params)) => { + let language_server = language_server!(); + let offset_encoding = language_server.offset_encoding(); + + let result = self.handle_show_document(params, offset_encoding); + Ok(json!(result)) + } }; tokio::spawn(language_server!().reply(id, reply)); @@ -1184,6 +1110,68 @@ impl Application { } } + fn handle_show_document( + &mut self, + params: lsp::ShowDocumentParams, + offset_encoding: helix_lsp::OffsetEncoding, + ) -> lsp::ShowDocumentResult { + if let lsp::ShowDocumentParams { + external: Some(true), + uri, + .. + } = params + { + self.jobs.callback(crate::open_external_url_callback(uri)); + return lsp::ShowDocumentResult { success: true }; + }; + + let lsp::ShowDocumentParams { + uri, + selection, + take_focus, + .. + } = params; + + let path = match uri.to_file_path() { + Ok(path) => path, + Err(err) => { + log::error!("unsupported file URI: {}: {:?}", uri, err); + return lsp::ShowDocumentResult { success: false }; + } + }; + + let action = match take_focus { + Some(true) => helix_view::editor::Action::Replace, + _ => helix_view::editor::Action::VerticalSplit, + }; + + let doc_id = match self.editor.open(&path, action) { + Ok(id) => id, + Err(err) => { + log::error!("failed to open path: {:?}: {:?}", uri, err); + return lsp::ShowDocumentResult { success: false }; + } + }; + + let doc = doc_mut!(self.editor, &doc_id); + if let Some(range) = selection { + // TODO: convert inside server + if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding) { + let view = view_mut!(self.editor); + + // we flip the range so that the cursor sits on the start of the symbol + // (for example start of the function). 
+ doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor)); + if action.align_view(view, doc.id()) { + align_view(doc, view, Align::Center); + } + } else { + log::warn!("lsp position out of bounds - {:?}", range); + }; + }; + lsp::ShowDocumentResult { success: true } + } + async fn claim_term(&mut self) -> std::io::Result<()> { let terminal_config = self.config.load().editor.clone().into(); self.terminal.claim(terminal_config) diff --git a/helix-term/src/args.rs b/helix-term/src/args.rs index 6a49889b678a..0b1c9cde08da 100644 --- a/helix-term/src/args.rs +++ b/helix-term/src/args.rs @@ -90,10 +90,9 @@ impl Args { } } arg if arg.starts_with('+') => { - let arg = &arg[1..]; - line_number = match arg.parse::() { - Ok(n) => n.saturating_sub(1), - _ => anyhow::bail!("bad line number after +"), + match arg[1..].parse::() { + Ok(n) => line_number = n.saturating_sub(1), + _ => args.files.push(parse_file(arg)), }; } arg => args.files.push(parse_file(arg)), diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 1b8f9e1f5949..d44f477b7376 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -5,7 +5,6 @@ pub(crate) mod typed; pub use dap::*; use helix_vcs::Hunk; pub use lsp::*; -use tokio::sync::oneshot; use tui::widgets::Row; pub use typed::*; @@ -33,7 +32,7 @@ use helix_core::{ }; use helix_view::{ document::{FormatterError, Mode, SCRATCH_BUFFER_NAME}, - editor::{Action, CompleteAction}, + editor::Action, info::Info, input::KeyEvent, keyboard::KeyCode, @@ -52,14 +51,10 @@ use crate::{ filter_picker_entry, job::Callback, keymap::ReverseKeymap, - ui::{ - self, editor::InsertEvent, lsp::SignatureHelp, overlay::overlaid, CompletionItem, Picker, - Popup, Prompt, PromptEvent, - }, + ui::{self, overlay::overlaid, Picker, Popup, Prompt, PromptEvent}, }; use crate::job::{self, Jobs}; -use futures_util::{stream::FuturesUnordered, TryStreamExt}; use std::{ collections::{HashMap, HashSet}, fmt, @@ -88,7 +83,7 @@ pub struct Context<'a> { pub count: Option, pub editor: &'a mut Editor, - pub callback: Option, + pub callback: Vec, pub on_next_key_callback: Option, pub jobs: &'a mut Jobs, } @@ -96,16 +91,18 @@ pub struct Context<'a> { impl<'a> Context<'a> { /// Push a new component onto the compositor. 
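// Illustrative, std-only sketch of the new `+` argument handling above (assumed to mirror
// the change in helix-term/src/args.rs): `hx +10 foo.rs` still jumps to line 10, but an
// argument such as `+server.rs` that does not parse as a number is now opened as a file
// instead of aborting with "bad line number after +". The helper name is hypothetical.
fn classify_plus_arg(arg: &str) -> Result<usize, &str> {
    match arg[1..].parse::<usize>() {
        Ok(n) => Ok(n.saturating_sub(1)), // line numbers are stored zero-based
        Err(_) => Err(arg),               // fall back to treating the argument as a path
    }
}

fn main() {
    assert_eq!(classify_plus_arg("+10"), Ok(9));
    assert_eq!(classify_plus_arg("+server.rs"), Err("+server.rs"));
}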
pub fn push_layer(&mut self, component: Box) { - self.callback = Some(Box::new(|compositor: &mut Compositor, _| { - compositor.push(component) - })); + self.callback + .push(Box::new(|compositor: &mut Compositor, _| { + compositor.push(component) + })); } /// Call `replace_or_push` on the Compositor pub fn replace_or_push_layer(&mut self, id: &'static str, component: T) { - self.callback = Some(Box::new(move |compositor: &mut Compositor, _| { - compositor.replace_or_push(id, component); - })); + self.callback + .push(Box::new(move |compositor: &mut Compositor, _| { + compositor.replace_or_push(id, component); + })); } #[inline] @@ -337,9 +334,9 @@ impl MappableCommand { goto_implementation, "Goto implementation", goto_file_start, "Goto line number else file start", goto_file_end, "Goto file end", - goto_file, "Goto files/URLs in selection", - goto_file_hsplit, "Goto files in selection (hsplit)", - goto_file_vsplit, "Goto files in selection (vsplit)", + goto_file, "Goto files/URLs in selections", + goto_file_hsplit, "Goto files in selections (hsplit)", + goto_file_vsplit, "Goto files in selections (vsplit)", goto_reference, "Goto references", goto_window_top, "Goto window top", goto_window_center, "Goto window center", @@ -795,7 +792,7 @@ fn goto_buffer(editor: &mut Editor, direction: Direction) { let iter = editor.documents.keys(); let mut iter = iter.rev().skip_while(|id| *id != ¤t); iter.next(); // skip current item - iter.next().or_else(|| editor.documents.keys().rev().next()) + iter.next().or_else(|| editor.documents.keys().next_back()) } } .unwrap(); @@ -1227,7 +1224,7 @@ fn open_url(cx: &mut Context, url: Url, action: Action) { .unwrap_or_default(); if url.scheme() != "file" { - return open_external_url(cx, url); + return cx.jobs.callback(crate::open_external_url_callback(url)); } let content_type = std::fs::File::open(url.path()).and_then(|file| { @@ -1240,7 +1237,9 @@ fn open_url(cx: &mut Context, url: Url, action: Action) { // we attempt to open binary files - files that can't be open in helix - using external // program as well, e.g. pdf files or images match content_type { - Ok(content_inspector::ContentType::BINARY) => open_external_url(cx, url), + Ok(content_inspector::ContentType::BINARY) => { + cx.jobs.callback(crate::open_external_url_callback(url)) + } Ok(_) | Err(_) => { let path = &rel_path.join(url.path()); if path.is_dir() { @@ -1253,23 +1252,6 @@ fn open_url(cx: &mut Context, url: Url, action: Action) { } } -/// Opens URL in external program. 
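// Hedged, std-only illustration of why `Context::callback` changes from an Option to a
// Vec in this hunk: with a single Option, the second command in a macro or multi-command
// binding overwrites the compositor callback queued by the first, while pushing onto a
// Vec keeps both and they run in order. The closure type below is a simplified stand-in,
// not helix's real compositor callback type.
type DemoCallback = Box<dyn FnOnce(&mut Vec<&'static str>)>;

fn main() {
    let mut callbacks: Vec<DemoCallback> = Vec::new();
    callbacks.push(Box::new(|log| log.push("push_layer")));
    callbacks.push(Box::new(|log| log.push("replace_or_push_layer")));

    let mut log = Vec::new();
    for cb in callbacks {
        cb(&mut log); // both callbacks survive; an Option would only have kept the last one
    }
    assert_eq!(log, ["push_layer", "replace_or_push_layer"]);
}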
-fn open_external_url(cx: &mut Context, url: Url) { - let commands = open::commands(url.as_str()); - cx.jobs.callback(async { - for cmd in commands { - let mut command = tokio::process::Command::new(cmd.get_program()); - command.args(cmd.get_args()); - if command.output().await.is_ok() { - return Ok(job::Callback::Editor(Box::new(|_| {}))); - } - } - Ok(job::Callback::Editor(Box::new(move |editor| { - editor.set_error("Opening URL in external program failed") - }))) - }); -} - fn extend_word_impl(cx: &mut Context, extend_fn: F) where F: Fn(RopeSlice, Range, usize) -> Range, @@ -2184,7 +2166,7 @@ fn global_search(cx: &mut Context) { type Data = Option; fn format(&self, current_path: &Self::Data) -> Row { - let relative_path = helix_core::path::get_relative_path(&self.path) + let relative_path = helix_stdx::path::get_relative_path(&self.path) .to_string_lossy() .into_owned(); if current_path @@ -2233,7 +2215,7 @@ fn global_search(cx: &mut Context) { .case_smart(smart_case) .build(regex.as_str()) { - let search_root = helix_loader::current_working_dir(); + let search_root = helix_stdx::env::current_working_dir(); if !search_root.exists() { cx.editor .set_error("Current working directory does not exist"); @@ -2606,7 +2588,6 @@ fn delete_by_selection_insert_mode( ); } doc.apply(&transaction, view.id); - lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } fn delete_selection(cx: &mut Context) { @@ -2680,10 +2661,6 @@ fn insert_mode(cx: &mut Context) { .transform(|range| Range::new(range.to(), range.from())); doc.set_selection(view.id, selection); - - // [TODO] temporary workaround until we're not using the idle timer to - // trigger auto completions any more - cx.editor.clear_idle_timer(); } // inserts at the end of each selection @@ -2746,7 +2723,7 @@ fn file_picker_in_current_buffer_directory(cx: &mut Context) { } fn file_picker_in_current_directory(cx: &mut Context) { - let cwd = helix_loader::current_working_dir(); + let cwd = helix_stdx::env::current_working_dir(); if !cwd.exists() { cx.editor .set_error("Current working directory does not exist"); @@ -2774,7 +2751,7 @@ fn buffer_picker(cx: &mut Context) { let path = self .path .as_deref() - .map(helix_core::path::get_relative_path); + .map(helix_stdx::path::get_relative_path); let path = match path.as_deref().and_then(Path::to_str) { Some(path) => path, None => SCRATCH_BUFFER_NAME, @@ -2804,7 +2781,7 @@ fn buffer_picker(cx: &mut Context) { .editor .documents .values() - .map(|doc| new_meta(doc)) + .map(new_meta) .collect::>(); // mru @@ -2841,7 +2818,7 @@ fn jumplist_picker(cx: &mut Context) { let path = self .path .as_deref() - .map(helix_core::path::get_relative_path); + .map(helix_stdx::path::get_relative_path); let path = match path.as_deref().and_then(Path::to_str) { Some(path) => path, None => SCRATCH_BUFFER_NAME, @@ -2949,7 +2926,7 @@ pub fn command_palette(cx: &mut Context) { let register = cx.register; let count = cx.count; - cx.callback = Some(Box::new( + cx.callback.push(Box::new( move |compositor: &mut Compositor, cx: &mut compositor::Context| { let keymap = compositor.find::().unwrap().keymaps.map() [&cx.editor.mode] @@ -2969,7 +2946,7 @@ pub fn command_palette(cx: &mut Context) { register, count, editor: cx.editor, - callback: None, + callback: Vec::new(), on_next_key_callback: None, jobs: cx.jobs, }; @@ -2997,7 +2974,7 @@ pub fn command_palette(cx: &mut Context) { fn last_picker(cx: &mut Context) { // TODO: last picker does not seem to work well with buffer_picker - cx.callback = Some(Box::new(|compositor, cx| { 
+ cx.callback.push(Box::new(|compositor, cx| { if let Some(picker) = compositor.last_picker.take() { compositor.push(picker); } else { @@ -3350,7 +3327,7 @@ fn exit_select_mode(cx: &mut Context) { fn goto_first_diag(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let selection = match doc.shown_diagnostics().next() { + let selection = match doc.diagnostics().first() { Some(diag) => Selection::single(diag.range.start, diag.range.end), None => return, }; @@ -3359,7 +3336,7 @@ fn goto_first_diag(cx: &mut Context) { fn goto_last_diag(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let selection = match doc.shown_diagnostics().last() { + let selection = match doc.diagnostics().last() { Some(diag) => Selection::single(diag.range.start, diag.range.end), None => return, }; @@ -3375,9 +3352,10 @@ fn goto_next_diag(cx: &mut Context) { .cursor(doc.text().slice(..)); let diag = doc - .shown_diagnostics() + .diagnostics() + .iter() .find(|diag| diag.range.start > cursor_pos) - .or_else(|| doc.shown_diagnostics().next()); + .or_else(|| doc.diagnostics().first()); let selection = match diag { Some(diag) => Selection::single(diag.range.start, diag.range.end), @@ -3395,10 +3373,11 @@ fn goto_prev_diag(cx: &mut Context) { .cursor(doc.text().slice(..)); let diag = doc - .shown_diagnostics() + .diagnostics() + .iter() .rev() .find(|diag| diag.range.start < cursor_pos) - .or_else(|| doc.shown_diagnostics().last()); + .or_else(|| doc.diagnostics().last()); let selection = match diag { // NOTE: the selection is reversed because we're jumping to the @@ -3507,9 +3486,10 @@ fn hunk_range(hunk: Hunk, text: RopeSlice) -> Range { } pub mod insert { + use crate::events::PostInsertChar; + use super::*; pub type Hook = fn(&Rope, &Selection, char) -> Option; - pub type PostHook = fn(&mut Context, char); /// Exclude the cursor in range. fn exclude_cursor(text: RopeSlice, range: Range, cursor: Range) -> Range { @@ -3523,88 +3503,6 @@ pub mod insert { } } - // It trigger completion when idle timer reaches deadline - // Only trigger completion if the word under cursor is longer than n characters - pub fn idle_completion(cx: &mut Context) { - let config = cx.editor.config(); - let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - let cursor = doc.selection(view.id).primary().cursor(text); - - use helix_core::chars::char_is_word; - let mut iter = text.chars_at(cursor); - iter.reverse(); - for _ in 0..config.completion_trigger_len { - match iter.next() { - Some(c) if char_is_word(c) => {} - _ => return, - } - } - super::completion(cx); - } - - fn language_server_completion(cx: &mut Context, ch: char) { - let config = cx.editor.config(); - if !config.auto_completion { - return; - } - - use helix_lsp::lsp; - // if ch matches completion char, trigger completion - let doc = doc_mut!(cx.editor); - let trigger_completion = doc - .language_servers_with_feature(LanguageServerFeature::Completion) - .any(|ls| { - // TODO: what if trigger is multiple chars long - matches!(&ls.capabilities().completion_provider, Some(lsp::CompletionOptions { - trigger_characters: Some(triggers), - .. 
- }) if triggers.iter().any(|trigger| trigger.contains(ch))) - }); - - if trigger_completion { - cx.editor.clear_idle_timer(); - super::completion(cx); - } - } - - fn signature_help(cx: &mut Context, ch: char) { - use helix_lsp::lsp; - // if ch matches signature_help char, trigger - let doc = doc_mut!(cx.editor); - // TODO support multiple language servers (not just the first that is found), likely by merging UI somehow - let Some(language_server) = doc - .language_servers_with_feature(LanguageServerFeature::SignatureHelp) - .next() - else { - return; - }; - - let capabilities = language_server.capabilities(); - - if let lsp::ServerCapabilities { - signature_help_provider: - Some(lsp::SignatureHelpOptions { - trigger_characters: Some(triggers), - // TODO: retrigger_characters - .. - }), - .. - } = capabilities - { - // TODO: what if trigger is multiple chars long - let is_trigger = triggers.iter().any(|trigger| trigger.contains(ch)); - // lsp doesn't tell us when to close the signature help, so we request - // the help information again after common close triggers which should - // return None, which in turn closes the popup. - let close_triggers = &[')', ';', '.']; - - if is_trigger || close_triggers.contains(&ch) { - super::signature_help_impl(cx, SignatureHelpInvoked::Automatic); - } - } - } - // The default insert hook: simply insert the character #[allow(clippy::unnecessary_wraps)] // need to use Option<> because of the Hook signature fn insert(doc: &Rope, selection: &Selection, ch: char) -> Option { @@ -3634,12 +3532,7 @@ pub mod insert { doc.apply(&t, view.id); } - // TODO: need a post insert hook too for certain triggers (autocomplete, signature help, etc) - // this could also generically look at Transaction, but it's a bit annoying to look at - // Operation instead of Change. 
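// Small, self-contained sketch of the trigger-character test the removed
// language_server_completion hook performed; the replacement handler in
// helix-term/src/handlers/completion.rs matches the text before the cursor against each
// server's trigger strings instead (see request_completion further down in this diff).
fn is_trigger_char(ch: char, triggers: &[String]) -> bool {
    triggers.iter().any(|trigger| trigger.contains(ch))
}

fn main() {
    let triggers = vec![".".to_string(), "::".to_string()];
    assert!(is_trigger_char('.', &triggers));
    assert!(is_trigger_char(':', &triggers));
    assert!(!is_trigger_char('(', &triggers));
}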
- for hook in &[language_server_completion, signature_help] { - hook(cx, c); - } + helix_event::dispatch(PostInsertChar { c, cx }); } pub fn smart_tab(cx: &mut Context) { @@ -3758,7 +3651,7 @@ pub mod insert { (pos, pos, local_offs) }; - let new_range = if doc.restore_cursor { + let new_range = if range.cursor(text) > range.anchor { // when appending, extend the range by local_offs Range::new( range.anchor + global_offs, @@ -3864,8 +3757,6 @@ pub mod insert { }); let (view, doc) = current!(cx.editor); doc.apply(&transaction, view.id); - - lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } pub fn delete_char_forward(cx: &mut Context) { @@ -4185,9 +4076,13 @@ fn replace_with_yanked(cx: &mut Context) { } fn replace_with_yanked_impl(editor: &mut Editor, register: char, count: usize) { - let Some(values) = editor.registers + let Some(values) = editor + .registers .read(register, editor) - .filter(|values| values.len() > 0) else { return }; + .filter(|values| values.len() > 0) + else { + return; + }; let values: Vec<_> = values.map(|value| value.to_string()).collect(); let (view, doc) = current!(editor); @@ -4224,7 +4119,9 @@ fn replace_selections_with_primary_clipboard(cx: &mut Context) { } fn paste(editor: &mut Editor, register: char, pos: Paste, count: usize) { - let Some(values) = editor.registers.read(register, editor) else { return }; + let Some(values) = editor.registers.read(register, editor) else { + return; + }; let values: Vec<_> = values.map(|value| value.to_string()).collect(); let (view, doc) = current!(editor); @@ -4513,151 +4410,14 @@ fn remove_primary_selection(cx: &mut Context) { } pub fn completion(cx: &mut Context) { - use helix_lsp::{lsp, util::pos_to_lsp_pos}; - let (view, doc) = current!(cx.editor); + let range = doc.selection(view.id).primary(); + let text = doc.text().slice(..); + let cursor = range.cursor(text); - let savepoint = if let Some(CompleteAction::Selected { savepoint }) = &cx.editor.last_completion - { - savepoint.clone() - } else { - doc.savepoint(view) - }; - - let text = savepoint.text.clone(); - let cursor = savepoint.cursor(); - - let mut seen_language_servers = HashSet::new(); - - let mut futures: FuturesUnordered<_> = doc - .language_servers_with_feature(LanguageServerFeature::Completion) - .filter(|ls| seen_language_servers.insert(ls.id())) - .map(|language_server| { - let language_server_id = language_server.id(); - let offset_encoding = language_server.offset_encoding(); - let pos = pos_to_lsp_pos(&text, cursor, offset_encoding); - let doc_id = doc.identifier(); - let completion_request = language_server.completion(doc_id, pos, None).unwrap(); - - async move { - let json = completion_request.await?; - let response: Option = serde_json::from_value(json)?; - - let items = match response { - Some(lsp::CompletionResponse::Array(items)) => items, - // TODO: do something with is_incomplete - Some(lsp::CompletionResponse::List(lsp::CompletionList { - is_incomplete: _is_incomplete, - items, - })) => items, - None => Vec::new(), - } - .into_iter() - .map(|item| CompletionItem { - item, - language_server_id, - resolved: false, - }) - .collect(); - - anyhow::Ok(items) - } - }) - .collect(); - - // setup a channel that allows the request to be canceled - let (tx, rx) = oneshot::channel(); - // set completion_request so that this request can be canceled - // by setting completion_request, the old channel stored there is dropped - // and the associated request is automatically dropped - cx.editor.completion_request_handle = Some(tx); - let future = 
async move { - let items_future = async move { - let mut items = Vec::new(); - // TODO if one completion request errors, all other completion requests are discarded (even if they're valid) - while let Some(mut lsp_items) = futures.try_next().await? { - items.append(&mut lsp_items); - } - anyhow::Ok(items) - }; - tokio::select! { - biased; - _ = rx => { - Ok(Vec::new()) - } - res = items_future => { - res - } - } - }; - - let trigger_offset = cursor; - - // TODO: trigger_offset should be the cursor offset but we also need a starting offset from where we want to apply - // completion filtering. For example logger.te| should filter the initial suggestion list with "te". - - use helix_core::chars; - let mut iter = text.chars_at(cursor); - iter.reverse(); - let offset = iter.take_while(|ch| chars::char_is_word(*ch)).count(); - let start_offset = cursor.saturating_sub(offset); - - let trigger_doc = doc.id(); - let trigger_view = view.id; - - // FIXME: The commands Context can only have a single callback - // which means it gets overwritten when executing keybindings - // with multiple commands or macros. This would mean that completion - // might be incorrectly applied when repeating the insertmode action - // - // TODO: to solve this either make cx.callback a Vec of callbacks or - // alternatively move `last_insert` to `helix_view::Editor` - cx.callback = Some(Box::new( - move |compositor: &mut Compositor, _cx: &mut compositor::Context| { - let ui = compositor.find::().unwrap(); - ui.last_insert.1.push(InsertEvent::RequestCompletion); - }, - )); - - cx.jobs.callback(async move { - let items = future.await?; - let call = move |editor: &mut Editor, compositor: &mut Compositor| { - let (view, doc) = current_ref!(editor); - // check if the completion request is stale. - // - // Completions are completed asynchronously and therefore the user could - //switch document/view or leave insert mode. In all of thoise cases the - // completion should be discarded - if editor.mode != Mode::Insert || view.id != trigger_view || doc.id() != trigger_doc { - return; - } - - if items.is_empty() { - // editor.set_error("No completion available"); - return; - } - let size = compositor.size(); - let ui = compositor.find::().unwrap(); - let completion_area = ui.set_completion( - editor, - savepoint, - items, - start_offset, - trigger_offset, - size, - ); - let size = compositor.size(); - let signature_help_area = compositor - .find_id::>(SignatureHelp::ID) - .map(|signature_help| signature_help.area(size, editor)); - // Delete the signature help popup if they intersect. 
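// Hedged sketch of the cancellation pattern used by the removed completion request above
// (the new handler reaches the same effect through helix_event::cancelable_future):
// dropping the stored oneshot sender makes the `rx` arm win, so stale results are thrown
// away. Requires tokio with the "rt", "macros" and "sync" features.
use tokio::sync::oneshot;

#[tokio::main(flavor = "current_thread")]
async fn main() {
    let (tx, rx) = oneshot::channel::<()>();
    let request = async { 42 }; // stand-in for the LSP completion future

    drop(tx); // a newer trigger replaces the stored handle, dropping the sender

    let result = tokio::select! {
        biased;
        _ = rx => None,               // canceled before the request finished
        items = request => Some(items),
    };
    assert_eq!(result, None);
}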
- if matches!((completion_area, signature_help_area),(Some(a), Some(b)) if a.intersects(b)) - { - compositor.remove(SignatureHelp::ID); - } - }; - Ok(Callback::EditorCompositor(Box::new(call))) - }); + cx.editor + .handlers + .trigger_completions(cursor, doc.id(), view.id); } // comments @@ -4836,10 +4596,6 @@ fn move_node_bound_impl(cx: &mut Context, dir: Direction, movement: Movement) { ); doc.set_selection(view.id, selection); - - // [TODO] temporary workaround until we're not using the idle timer to - // trigger auto completions any more - editor.clear_idle_timer(); } }; @@ -5827,7 +5583,7 @@ fn replay_macro(cx: &mut Context) { cx.editor.macro_replaying.push(reg); let count = cx.count(); - cx.callback = Some(Box::new(move |compositor, cx| { + cx.callback.push(Box::new(move |compositor, cx| { for _ in 0..count { for &key in keys.iter() { compositor.handle_event(&compositor::Event::Key(key), cx); diff --git a/helix-term/src/commands/dap.rs b/helix-term/src/commands/dap.rs index e9fde4767642..d62b0a4e5b4e 100644 --- a/helix-term/src/commands/dap.rs +++ b/helix-term/src/commands/dap.rs @@ -78,7 +78,7 @@ fn thread_picker( }) .with_preview(move |editor, thread| { let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?; - let frame = frames.get(0)?; + let frame = frames.first()?; let path = frame.source.as_ref()?.path.clone()?; let pos = Some(( frame.line.saturating_sub(1), @@ -166,7 +166,7 @@ pub fn dap_start_impl( // TODO: avoid refetching all of this... pass a config in let template = match name { Some(name) => config.templates.iter().find(|t| t.name == name), - None => config.templates.get(0), + None => config.templates.first(), } .ok_or_else(|| anyhow!("No debug config with given name"))?; @@ -217,7 +217,7 @@ pub fn dap_start_impl( } } - args.insert("cwd", to_value(helix_loader::current_working_dir())?); + args.insert("cwd", to_value(helix_stdx::env::current_working_dir())?); let args = to_value(args).unwrap(); diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index ac6a1a2134cf..a1f7bf17dc88 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -1,4 +1,4 @@ -use futures_util::{future::BoxFuture, stream::FuturesUnordered, FutureExt}; +use futures_util::{stream::FuturesUnordered, FutureExt}; use helix_lsp::{ block_on, lsp::{ @@ -8,22 +8,21 @@ use helix_lsp::{ util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range}, Client, OffsetEncoding, }; -use serde_json::Value; use tokio_stream::StreamExt; use tui::{ text::{Span, Spans}, widgets::Row, }; -use super::{align_view, push_jump, Align, Context, Editor, Open}; +use super::{align_view, push_jump, Align, Context, Editor}; -use helix_core::{ - path, syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection, -}; +use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection}; +use helix_stdx::path; use helix_view::{ - document::{DocumentInlayHints, DocumentInlayHintsId, Mode}, + document::{DocumentInlayHints, DocumentInlayHintsId}, editor::Action, graphics::Margin, + handlers::lsp::SignatureHelpInvoked, theme::Style, Document, View, }; @@ -31,10 +30,7 @@ use helix_view::{ use crate::{ compositor::{self, Compositor}, job::Callback, - ui::{ - self, lsp::SignatureHelp, overlay::overlaid, DynamicPicker, FileLocation, Picker, Popup, - PromptEvent, - }, + ui::{self, overlay::overlaid, DynamicPicker, FileLocation, Picker, Popup, PromptEvent}, }; use std::{ @@ -43,7 +39,6 @@ use std::{ fmt::Write, future::Future, 
path::PathBuf, - sync::Arc, }; /// Gets the first language server that is attached to a document which supports a specific feature. @@ -731,8 +726,7 @@ pub fn code_action(cx: &mut Context) { resolved_code_action.as_ref().unwrap_or(code_action); if let Some(ref workspace_edit) = resolved_code_action.edit { - log::debug!("edit: {:?}", workspace_edit); - let _ = apply_workspace_edit(editor, offset_encoding, workspace_edit); + let _ = editor.apply_workspace_edit(offset_encoding, workspace_edit); } // if code action provides both edit and command first the edit @@ -792,63 +786,6 @@ pub fn execute_lsp_command(editor: &mut Editor, language_server_id: usize, cmd: }); } -pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> { - use lsp::ResourceOp; - use std::fs; - match op { - ResourceOp::Create(op) => { - let path = op.uri.to_file_path().unwrap(); - let ignore_if_exists = op.options.as_ref().map_or(false, |options| { - !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) - }); - if ignore_if_exists && path.exists() { - Ok(()) - } else { - // Create directory if it does not exist - if let Some(dir) = path.parent() { - if !dir.is_dir() { - fs::create_dir_all(dir)?; - } - } - - fs::write(&path, []) - } - } - ResourceOp::Delete(op) => { - let path = op.uri.to_file_path().unwrap(); - if path.is_dir() { - let recursive = op - .options - .as_ref() - .and_then(|options| options.recursive) - .unwrap_or(false); - - if recursive { - fs::remove_dir_all(&path) - } else { - fs::remove_dir(&path) - } - } else if path.is_file() { - fs::remove_file(&path) - } else { - Ok(()) - } - } - ResourceOp::Rename(op) => { - let from = op.old_uri.to_file_path().unwrap(); - let to = op.new_uri.to_file_path().unwrap(); - let ignore_if_exists = op.options.as_ref().map_or(false, |options| { - !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) - }); - if ignore_if_exists && to.exists() { - Ok(()) - } else { - fs::rename(from, &to) - } - } - } -} - #[derive(Debug)] pub struct ApplyEditError { pub kind: ApplyEditErrorKind, @@ -876,168 +813,20 @@ impl ToString for ApplyEditErrorKind { } } -///TODO make this transactional (and set failureMode to transactional) -pub fn apply_workspace_edit( - editor: &mut Editor, - offset_encoding: OffsetEncoding, - workspace_edit: &lsp::WorkspaceEdit, -) -> Result<(), ApplyEditError> { - let mut apply_edits = |uri: &helix_lsp::Url, - version: Option, - text_edits: Vec| - -> Result<(), ApplyEditErrorKind> { - let path = match uri.to_file_path() { - Ok(path) => path, - Err(_) => { - let err = format!("unable to convert URI to filepath: {}", uri); - log::error!("{}", err); - editor.set_error(err); - return Err(ApplyEditErrorKind::UnknownURISchema); - } - }; - - let current_view_id = view!(editor).id; - let doc_id = match editor.open(&path, Action::Load) { - Ok(doc_id) => doc_id, - Err(err) => { - let err = format!("failed to open document: {}: {}", uri, err); - log::error!("{}", err); - editor.set_error(err); - return Err(ApplyEditErrorKind::FileNotFound); - } - }; - - let doc = doc_mut!(editor, &doc_id); - if let Some(version) = version { - if version != doc.version() { - let err = format!("outdated workspace edit for {path:?}"); - log::error!("{err}, expected {} but got {version}", doc.version()); - editor.set_error(err); - return Err(ApplyEditErrorKind::DocumentChanged); - } - } - - // Need to determine a view for apply/append_changes_to_history - let selections = doc.selections(); - let view_id = if 
selections.contains_key(¤t_view_id) { - // use current if possible - current_view_id - } else { - // Hack: we take the first available view_id - selections - .keys() - .next() - .copied() - .expect("No view_id available") - }; - - let transaction = helix_lsp::util::generate_transaction_from_edits( - doc.text(), - text_edits, - offset_encoding, - ); - let view = view_mut!(editor, view_id); - doc.apply(&transaction, view.id); - doc.append_changes_to_history(view); - Ok(()) - }; - - if let Some(ref document_changes) = workspace_edit.document_changes { - match document_changes { - lsp::DocumentChanges::Edits(document_edits) => { - for (i, document_edit) in document_edits.iter().enumerate() { - let edits = document_edit - .edits - .iter() - .map(|edit| match edit { - lsp::OneOf::Left(text_edit) => text_edit, - lsp::OneOf::Right(annotated_text_edit) => { - &annotated_text_edit.text_edit - } - }) - .cloned() - .collect(); - apply_edits( - &document_edit.text_document.uri, - document_edit.text_document.version, - edits, - ) - .map_err(|kind| ApplyEditError { - kind, - failed_change_idx: i, - })?; - } - } - lsp::DocumentChanges::Operations(operations) => { - log::debug!("document changes - operations: {:?}", operations); - for (i, operation) in operations.iter().enumerate() { - match operation { - lsp::DocumentChangeOperation::Op(op) => { - apply_document_resource_op(op).map_err(|io| ApplyEditError { - kind: ApplyEditErrorKind::IoError(io), - failed_change_idx: i, - })?; - } - - lsp::DocumentChangeOperation::Edit(document_edit) => { - let edits = document_edit - .edits - .iter() - .map(|edit| match edit { - lsp::OneOf::Left(text_edit) => text_edit, - lsp::OneOf::Right(annotated_text_edit) => { - &annotated_text_edit.text_edit - } - }) - .cloned() - .collect(); - apply_edits( - &document_edit.text_document.uri, - document_edit.text_document.version, - edits, - ) - .map_err(|kind| ApplyEditError { - kind, - failed_change_idx: i, - })?; - } - } - } - } - } - - return Ok(()); - } - - if let Some(ref changes) = workspace_edit.changes { - log::debug!("workspace changes: {:?}", changes); - for (i, (uri, text_edits)) in changes.iter().enumerate() { - let text_edits = text_edits.to_vec(); - apply_edits(uri, None, text_edits).map_err(|kind| ApplyEditError { - kind, - failed_change_idx: i, - })?; - } - } - - Ok(()) -} - +/// Precondition: `locations` should be non-empty. 
fn goto_impl( editor: &mut Editor, compositor: &mut Compositor, locations: Vec, offset_encoding: OffsetEncoding, ) { - let cwdir = helix_loader::current_working_dir(); + let cwdir = helix_stdx::env::current_working_dir(); match locations.as_slice() { [location] => { jump_to_location(editor, location, offset_encoding, Action::Replace); } - [] => { - editor.set_error("No definition found."); - } + [] => unreachable!("`locations` should be non-empty for `goto_impl`"), _locations => { let picker = Picker::new(locations, cwdir, move |cx, location, action| { jump_to_location(cx.editor, location, offset_encoding, action) @@ -1079,7 +868,11 @@ where future, move |editor, compositor, response: Option| { let items = to_locations(response); - goto_impl(editor, compositor, items, offset_encoding); + if items.is_empty() { + editor.set_error("No definition found."); + } else { + goto_impl(editor, compositor, items, offset_encoding); + } }, ); } @@ -1139,151 +932,19 @@ pub fn goto_reference(cx: &mut Context) { future, move |editor, compositor, response: Option>| { let items = response.unwrap_or_default(); - goto_impl(editor, compositor, items, offset_encoding); + if items.is_empty() { + editor.set_error("No references found."); + } else { + goto_impl(editor, compositor, items, offset_encoding); + } }, ); } -#[derive(PartialEq, Eq, Clone, Copy)] -pub enum SignatureHelpInvoked { - Manual, - Automatic, -} - pub fn signature_help(cx: &mut Context) { - signature_help_impl(cx, SignatureHelpInvoked::Manual) -} - -pub fn signature_help_impl(cx: &mut Context, invoked: SignatureHelpInvoked) { - let (view, doc) = current!(cx.editor); - - // TODO merge multiple language server signature help into one instead of just taking the first language server that supports it - let future = doc - .language_servers_with_feature(LanguageServerFeature::SignatureHelp) - .find_map(|language_server| { - let pos = doc.position(view.id, language_server.offset_encoding()); - language_server.text_document_signature_help(doc.identifier(), pos, None) - }); - - let Some(future) = future else { - // Do not show the message if signature help was invoked - // automatically on backspace, trigger characters, etc. - if invoked == SignatureHelpInvoked::Manual { - cx.editor - .set_error("No configured language server supports signature-help"); - } - return; - }; - signature_help_impl_with_future(cx, future.boxed(), invoked); -} - -pub fn signature_help_impl_with_future( - cx: &mut Context, - future: BoxFuture<'static, helix_lsp::Result>, - invoked: SignatureHelpInvoked, -) { - cx.callback( - future, - move |editor, compositor, response: Option| { - let config = &editor.config(); - - if !(config.lsp.auto_signature_help - || SignatureHelp::visible_popup(compositor).is_some() - || invoked == SignatureHelpInvoked::Manual) - { - return; - } - - // If the signature help invocation is automatic, don't show it outside of Insert Mode: - // it very probably means the server was a little slow to respond and the user has - // already moved on to something else, making a signature help popup will just be an - // annoyance, see https://github.com/helix-editor/helix/issues/3112 - if invoked == SignatureHelpInvoked::Automatic && editor.mode != Mode::Insert { - return; - } - - let response = match response { - // According to the spec the response should be None if there - // are no signatures, but some servers don't follow this. 
- Some(s) if !s.signatures.is_empty() => s, - _ => { - compositor.remove(SignatureHelp::ID); - return; - } - }; - let doc = doc!(editor); - let language = doc.language_name().unwrap_or(""); - - let signature = match response - .signatures - .get(response.active_signature.unwrap_or(0) as usize) - { - Some(s) => s, - None => return, - }; - let mut contents = SignatureHelp::new( - signature.label.clone(), - language.to_string(), - Arc::clone(&editor.syn_loader), - ); - - let signature_doc = if config.lsp.display_signature_help_docs { - signature.documentation.as_ref().map(|doc| match doc { - lsp::Documentation::String(s) => s.clone(), - lsp::Documentation::MarkupContent(markup) => markup.value.clone(), - }) - } else { - None - }; - - contents.set_signature_doc(signature_doc); - - let active_param_range = || -> Option<(usize, usize)> { - let param_idx = signature - .active_parameter - .or(response.active_parameter) - .unwrap_or(0) as usize; - let param = signature.parameters.as_ref()?.get(param_idx)?; - match ¶m.label { - lsp::ParameterLabel::Simple(string) => { - let start = signature.label.find(string.as_str())?; - Some((start, start + string.len())) - } - lsp::ParameterLabel::LabelOffsets([start, end]) => { - // LS sends offsets based on utf-16 based string representation - // but highlighting in helix is done using byte offset. - use helix_core::str_utils::char_to_byte_idx; - let from = char_to_byte_idx(&signature.label, *start as usize); - let to = char_to_byte_idx(&signature.label, *end as usize); - Some((from, to)) - } - } - }; - contents.set_active_param_range(active_param_range()); - - let old_popup = compositor.find_id::>(SignatureHelp::ID); - let mut popup = Popup::new(SignatureHelp::ID, contents) - .position(old_popup.and_then(|p| p.get_position())) - .position_bias(Open::Above) - .ignore_escape_key(true); - - // Don't create a popup if it intersects the auto-complete menu. 
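// Std-only illustration of the offset conversion the removed code above relies on: the
// language server reports parameter label offsets in UTF-16 code units, while Rust string
// slicing wants byte offsets. The helper below is hypothetical; the removed code used
// helix_core::str_utils::char_to_byte_idx for the same purpose.
fn utf16_to_byte_idx(s: &str, utf16_idx: usize) -> usize {
    let mut units = 0;
    for (byte_idx, ch) in s.char_indices() {
        if units >= utf16_idx {
            return byte_idx;
        }
        units += ch.len_utf16();
    }
    s.len()
}

fn main() {
    let label = "fn größe(x: u32)";
    // "x" sits at UTF-16 offset 9 but at byte offset 11, because 'ö' and 'ß' take two bytes each
    let byte = utf16_to_byte_idx(label, 9);
    assert_eq!(byte, 11);
    assert_eq!(&label[byte..byte + 1], "x");
}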
- let size = compositor.size(); - if compositor - .find::() - .unwrap() - .completion - .as_mut() - .map(|completion| completion.area(size, editor)) - .filter(|area| area.intersects(popup.area(size, editor))) - .is_some() - { - return; - } - - compositor.replace_or_push(SignatureHelp::ID, popup); - }, - ); + cx.editor + .handlers + .trigger_signature_help(SignatureHelpInvoked::Manual, cx.editor) } pub fn hover(cx: &mut Context) { @@ -1408,7 +1069,7 @@ pub fn rename_symbol(cx: &mut Context) { match block_on(future) { Ok(edits) => { - let _ = apply_workspace_edit(cx.editor, offset_encoding, &edits); + let _ = cx.editor.apply_workspace_edit(offset_encoding, &edits); } Err(err) => cx.editor.set_error(err.to_string()), } @@ -1421,6 +1082,16 @@ pub fn rename_symbol(cx: &mut Context) { let (view, doc) = current_ref!(cx.editor); + if doc + .language_servers_with_feature(LanguageServerFeature::RenameSymbol) + .next() + .is_none() + { + cx.editor + .set_error("No configured language server supports symbol renaming"); + return; + } + let language_server_with_prepare_rename_support = doc .language_servers_with_feature(LanguageServerFeature::RenameSymbol) .find(|ls| { diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index f530ce10dc2f..b7ceeba59a18 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -7,8 +7,7 @@ use super::*; use helix_core::fuzzy::fuzzy_match; use helix_core::indent::MAX_INDENT; -use helix_core::{encoding, line_ending, path::get_canonicalized_path, shellwords::Shellwords}; -use helix_lsp::{OffsetEncoding, Url}; +use helix_core::{encoding, line_ending, shellwords::Shellwords}; use helix_view::document::DEFAULT_LANGUAGE_NAME; use helix_view::editor::{Action, CloseError, ConfigEvent}; use serde_json::Value; @@ -111,7 +110,7 @@ fn open(cx: &mut compositor::Context, args: &[Cow], event: PromptEvent) -> ensure!(!args.is_empty(), "wrong argument count"); for arg in args { let (path, pos) = args::parse_file(arg); - let path = helix_core::path::expand_tilde(&path); + let path = helix_stdx::path::expand_tilde(&path); // If the path is a directory, open a file picker on that directory and update the status // message if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) { @@ -483,7 +482,7 @@ fn set_indent_style( } // Attempt to parse argument as an indent style. - let style = match args.get(0) { + let style = match args.first() { Some(arg) if "tabs".starts_with(&arg.to_lowercase()) => Some(Tabs), Some(Cow::Borrowed("0")) => Some(Tabs), Some(arg) => arg @@ -535,7 +534,7 @@ fn set_line_ending( } let arg = args - .get(0) + .first() .context("argument missing")? .to_ascii_lowercase(); @@ -674,13 +673,15 @@ pub fn write_all_impl( let mut errors: Vec<&'static str> = Vec::new(); let config = cx.editor.config(); let jobs = &mut cx.jobs; - let current_view = view!(cx.editor); - let saves: Vec<_> = cx .editor .documents - .values_mut() - .filter_map(|doc| { + .keys() + .cloned() + .collect::>() + .into_iter() + .filter_map(|id| { + let doc = doc!(cx.editor, &id); if !doc.is_modified() { return None; } @@ -691,22 +692,9 @@ pub fn write_all_impl( return None; } - // Look for a view to apply the formatting change to. If the document - // is in the current view, just use that. Otherwise, since we don't - // have any other metric available for better selection, just pick - // the first view arbitrarily so that we still commit the document - // state for undos. 
If somehow we have a document that has not been - // initialized with any view, initialize it with the current view. - let target_view = if doc.selections().contains_key(¤t_view.id) { - current_view.id - } else if let Some(view) = doc.selections().keys().next() { - *view - } else { - doc.ensure_view_init(current_view.id); - current_view.id - }; - - Some((doc.id(), target_view)) + // Look for a view to apply the formatting change to. + let target_view = cx.editor.get_synced_view_id(doc.id()); + Some((id, target_view)) }) .collect(); @@ -1090,18 +1078,17 @@ fn change_current_directory( return Ok(()); } - let dir = helix_core::path::expand_tilde( + let dir = helix_stdx::path::expand_tilde( args.first() .context("target directory not provided")? - .as_ref() .as_ref(), ); - helix_loader::set_current_working_dir(dir)?; + helix_stdx::env::set_current_working_dir(dir)?; cx.editor.set_status(format!( "Current working directory is now {}", - helix_loader::current_working_dir().display() + helix_stdx::env::current_working_dir().display() )); Ok(()) } @@ -1115,7 +1102,7 @@ fn show_current_directory( return Ok(()); } - let cwd = helix_loader::current_working_dir(); + let cwd = helix_stdx::env::current_working_dir(); let message = format!("Current working directory is {}", cwd.display()); if cwd.exists() { @@ -1502,7 +1489,7 @@ fn lsp_stop( for doc in cx.editor.documents_mut() { if let Some(client) = doc.remove_language_server_by_name(ls_name) { - doc.clear_diagnostics(client.id()); + doc.clear_diagnostics(Some(client.id())); } } } @@ -1558,10 +1545,7 @@ fn tree_sitter_highlight_name( let text = doc.text().slice(..); let cursor = doc.selection(view.id).primary().cursor(text); let byte = text.char_to_byte(cursor); - let node = syntax - .tree() - .root_node() - .descendant_for_byte_range(byte, byte)?; + let node = syntax.descendant_for_byte_range(byte, byte)?; // Query the same range as the one used in syntax highlighting. let range = { // Calculate viewport byte ranges: @@ -2008,6 +1992,10 @@ fn language( let id = doc.id(); cx.editor.refresh_language_servers(id); + let doc = doc_mut!(cx.editor); + let diagnostics = + Editor::doc_diagnostics(&cx.editor.language_servers, &cx.editor.diagnostics, doc); + doc.replace_diagnostics(diagnostics, &[], None); Ok(()) } @@ -2085,7 +2073,7 @@ fn reflow( // - The configured text-width for this language in languages.toml // - The configured text-width in the config.toml let text_width: usize = args - .get(0) + .first() .map(|num| num.parse::()) .transpose()? .or_else(|| doc.language_config().and_then(|config| config.text_width)) @@ -2124,11 +2112,7 @@ fn tree_sitter_subtree( let text = doc.text(); let from = text.char_to_byte(primary_selection.from()); let to = text.char_to_byte(primary_selection.to()); - if let Some(selected_node) = syntax - .tree() - .root_node() - .descendant_for_byte_range(from, to) - { + if let Some(selected_node) = syntax.descendant_for_byte_range(from, to) { let mut contents = String::from("```tsq\n"); helix_core::syntax::pretty_print_tree(&mut contents, selected_node)?; contents.push_str("\n```"); @@ -2419,66 +2403,14 @@ fn move_buffer( ensure!(args.len() == 1, format!(":move takes one argument")); let doc = doc!(cx.editor); - - let new_path = get_canonicalized_path(&PathBuf::from(args.first().unwrap().to_string())); let old_path = doc .path() - .ok_or_else(|| anyhow!("Scratch buffer cannot be moved. Use :write instead"))? + .context("Scratch buffer cannot be moved. Use :write instead")? 
.clone(); - let old_path_as_url = doc.url().unwrap(); - let new_path_as_url = Url::from_file_path(&new_path).unwrap(); - - let edits: Vec<( - helix_lsp::Result, - OffsetEncoding, - String, - )> = doc - .language_servers() - .map(|lsp| { - ( - lsp.prepare_file_rename(&old_path_as_url, &new_path_as_url), - lsp.offset_encoding(), - lsp.name().to_owned(), - ) - }) - .filter(|(f, _, _)| f.is_some()) - .map(|(f, encoding, name)| (helix_lsp::block_on(f.unwrap()), encoding, name)) - .collect(); - - for (lsp_reply, encoding, name) in edits { - match lsp_reply { - Ok(edit) => { - if let Err(e) = apply_workspace_edit(cx.editor, encoding, &edit) { - log::error!( - ":move command failed to apply edits from lsp {}: {:?}", - name, - e - ); - }; - } - Err(e) => { - log::error!("LSP {} failed to treat willRename request: {:?}", name, e); - } - }; + let new_path = args.first().unwrap().to_string(); + if let Err(err) = cx.editor.move_path(&old_path, new_path.as_ref()) { + bail!("Could not move file: {err}"); } - - let doc = doc_mut!(cx.editor); - - doc.set_path(Some(new_path.as_path())); - if let Err(e) = std::fs::rename(&old_path, &new_path) { - doc.set_path(Some(old_path.as_path())); - bail!("Could not move file: {}", e); - }; - - doc.language_servers().for_each(|lsp| { - lsp.did_file_rename(&old_path_as_url, &new_path_as_url); - }); - - cx.editor - .language_servers - .file_event_handler - .file_changed(new_path); - Ok(()) } diff --git a/helix-term/src/events.rs b/helix-term/src/events.rs new file mode 100644 index 000000000000..49b44f775088 --- /dev/null +++ b/helix-term/src/events.rs @@ -0,0 +1,20 @@ +use helix_event::{events, register_event}; +use helix_view::document::Mode; +use helix_view::events::{DocumentDidChange, SelectionDidChange}; + +use crate::commands; +use crate::keymap::MappableCommand; + +events! 
{ + OnModeSwitch<'a, 'cx> { old_mode: Mode, new_mode: Mode, cx: &'a mut commands::Context<'cx> } + PostInsertChar<'a, 'cx> { c: char, cx: &'a mut commands::Context<'cx> } + PostCommand<'a, 'cx> { command: & 'a MappableCommand, cx: &'a mut commands::Context<'cx> } +} + +pub fn register() { + register_event::(); + register_event::(); + register_event::(); + register_event::(); + register_event::(); +} diff --git a/helix-term/src/handlers.rs b/helix-term/src/handlers.rs new file mode 100644 index 000000000000..ef5369f8505e --- /dev/null +++ b/helix-term/src/handlers.rs @@ -0,0 +1,30 @@ +use std::sync::Arc; + +use arc_swap::ArcSwap; +use helix_event::AsyncHook; + +use crate::config::Config; +use crate::events; +use crate::handlers::completion::CompletionHandler; +use crate::handlers::signature_help::SignatureHelpHandler; + +pub use completion::trigger_auto_completion; +pub use helix_view::handlers::lsp::SignatureHelpInvoked; +pub use helix_view::handlers::Handlers; + +mod completion; +mod signature_help; + +pub fn setup(config: Arc>) -> Handlers { + events::register(); + + let completions = CompletionHandler::new(config).spawn(); + let signature_hints = SignatureHelpHandler::new().spawn(); + let handlers = Handlers { + completions, + signature_hints, + }; + completion::register_hooks(&handlers); + signature_help::register_hooks(&handlers); + handlers +} diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs new file mode 100644 index 000000000000..d71fd24fc4ea --- /dev/null +++ b/helix-term/src/handlers/completion.rs @@ -0,0 +1,465 @@ +use std::collections::HashSet; +use std::sync::Arc; +use std::time::Duration; + +use arc_swap::ArcSwap; +use futures_util::stream::FuturesUnordered; +use helix_core::chars::char_is_word; +use helix_core::syntax::LanguageServerFeature; +use helix_event::{ + cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx, +}; +use helix_lsp::lsp; +use helix_lsp::util::pos_to_lsp_pos; +use helix_stdx::rope::RopeSliceExt; +use helix_view::document::{Mode, SavePoint}; +use helix_view::handlers::lsp::CompletionEvent; +use helix_view::{DocumentId, Editor, ViewId}; +use tokio::sync::mpsc::Sender; +use tokio::time::Instant; +use tokio_stream::StreamExt; + +use crate::commands; +use crate::compositor::Compositor; +use crate::config::Config; +use crate::events::{OnModeSwitch, PostCommand, PostInsertChar}; +use crate::job::{dispatch, dispatch_blocking}; +use crate::keymap::MappableCommand; +use crate::ui::editor::InsertEvent; +use crate::ui::lsp::SignatureHelp; +use crate::ui::{self, CompletionItem, Popup}; + +use super::Handlers; + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +enum TriggerKind { + Auto, + TriggerChar, + Manual, +} + +#[derive(Debug, Clone, Copy)] +struct Trigger { + pos: usize, + view: ViewId, + doc: DocumentId, + kind: TriggerKind, +} + +#[derive(Debug)] +pub(super) struct CompletionHandler { + /// currently active trigger which will cause a + /// completion request after the timeout + trigger: Option, + /// A handle for currently active completion request. 
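// Hedged sketch of how the new event plumbing fits together, based only on the calls
// visible in this diff (`events!`, `register_event`, `register_hook!`,
// `helix_event::dispatch`); the event name and field below are made up for illustration,
// and the exact macro signatures live in the helix-event crate.
use helix_event::{events, register_event, register_hook};

events! {
    DocumentSaved<'a> { path: &'a std::path::Path }
}

fn register() {
    register_event::<DocumentSaved>();
    // hooks run whenever the event is dispatched
    register_hook!(move |event: &mut DocumentSaved<'_>| {
        log::info!("saved {}", event.path.display());
        Ok(())
    });
}

fn notify(path: &std::path::Path) {
    helix_event::dispatch(DocumentSaved { path });
}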
+ /// This can be used to determine whether the current + /// request is still active (and new triggers should be + /// ignored) and can also be used to abort the current + /// request (by dropping the handle) + request: Option, + config: Arc>, +} + +impl CompletionHandler { + pub fn new(config: Arc>) -> CompletionHandler { + Self { + config, + request: None, + trigger: None, + } + } +} + +impl helix_event::AsyncHook for CompletionHandler { + type Event = CompletionEvent; + + fn handle_event( + &mut self, + event: Self::Event, + _old_timeout: Option, + ) -> Option { + match event { + CompletionEvent::AutoTrigger { + cursor: trigger_pos, + doc, + view, + } => { + // techically it shouldn't be possible to switch views/documents in insert mode + // but people may create weird keymaps/use the mouse so lets be extra careful + if self + .trigger + .as_ref() + .map_or(true, |trigger| trigger.doc != doc || trigger.view != view) + { + self.trigger = Some(Trigger { + pos: trigger_pos, + view, + doc, + kind: TriggerKind::Auto, + }); + } + } + CompletionEvent::TriggerChar { cursor, doc, view } => { + // immediately request completions and drop all auto completion requests + self.request = None; + self.trigger = Some(Trigger { + pos: cursor, + view, + doc, + kind: TriggerKind::TriggerChar, + }); + } + CompletionEvent::ManualTrigger { cursor, doc, view } => { + // immediately request completions and drop all auto completion requests + self.request = None; + self.trigger = Some(Trigger { + pos: cursor, + view, + doc, + kind: TriggerKind::Manual, + }); + // stop debouncing immediately and request the completion + self.finish_debounce(); + return None; + } + CompletionEvent::Cancel => { + self.trigger = None; + self.request = None; + } + CompletionEvent::DeleteText { cursor } => { + // if we deleted the original trigger, abort the completion + if matches!(self.trigger, Some(Trigger{ pos, .. }) if cursor < pos) { + self.trigger = None; + self.request = None; + } + } + } + self.trigger.map(|trigger| { + // if the current request was closed forget about it + // otherwise immediately restart the completion request + let cancel = self.request.take().map_or(false, |req| !req.is_closed()); + let timeout = if trigger.kind == TriggerKind::Auto && !cancel { + self.config.load().editor.completion_timeout + } else { + // we want almost instant completions for trigger chars + // and restarting completion requests. The small timeout here mainly + // serves to better handle cases where the completion handler + // may fall behind (so multiple events in the channel) and macros + Duration::from_millis(5) + }; + Instant::now() + timeout + }) + } + + fn finish_debounce(&mut self) { + let trigger = self.trigger.take().expect("debounce always has a trigger"); + let (tx, rx) = cancelation(); + self.request = Some(tx); + dispatch_blocking(move |editor, compositor| { + request_completion(trigger, rx, editor, compositor) + }); + } +} + +fn request_completion( + mut trigger: Trigger, + cancel: CancelRx, + editor: &mut Editor, + compositor: &mut Compositor, +) { + let (view, doc) = current!(editor); + + if compositor + .find::() + .unwrap() + .completion + .is_some() + || editor.mode != Mode::Insert + { + return; + } + + let text = doc.text(); + let cursor = doc.selection(view.id).primary().cursor(text.slice(..)); + if trigger.view != view.id || trigger.doc != doc.id() || cursor < trigger.pos { + return; + } + // this looks odd... Why are we not using the trigger position from + // the `trigger` here? 
Won't that mean that the trigger char doesn't get + // send to the LS if we type fast enougn? Yes that is true but it's + // not actually a problem. The LSP will resolve the completion to the identifier + // anyway (in fact sending the later position is necessary to get the right results + // from LSPs that provide incomplete completion list). We rely on trigger offset + // and primary cursor matching for multi-cursor completions so this is definitely + // necessary from our side too. + trigger.pos = cursor; + let trigger_text = text.slice(..cursor); + + let mut seen_language_servers = HashSet::new(); + let mut futures: FuturesUnordered<_> = doc + .language_servers_with_feature(LanguageServerFeature::Completion) + .filter(|ls| seen_language_servers.insert(ls.id())) + .map(|ls| { + let language_server_id = ls.id(); + let offset_encoding = ls.offset_encoding(); + let pos = pos_to_lsp_pos(text, cursor, offset_encoding); + let doc_id = doc.identifier(); + let context = if trigger.kind == TriggerKind::Manual { + lsp::CompletionContext { + trigger_kind: lsp::CompletionTriggerKind::INVOKED, + trigger_character: None, + } + } else { + let trigger_char = + ls.capabilities() + .completion_provider + .as_ref() + .and_then(|provider| { + provider + .trigger_characters + .as_deref()? + .iter() + .find(|&trigger| trigger_text.ends_with(trigger)) + }); + lsp::CompletionContext { + trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER, + trigger_character: trigger_char.cloned(), + } + }; + + let completion_response = ls.completion(doc_id, pos, None, context).unwrap(); + async move { + let json = completion_response.await?; + let response: Option = serde_json::from_value(json)?; + let items = match response { + Some(lsp::CompletionResponse::Array(items)) => items, + // TODO: do something with is_incomplete + Some(lsp::CompletionResponse::List(lsp::CompletionList { + is_incomplete: _is_incomplete, + items, + })) => items, + None => Vec::new(), + } + .into_iter() + .map(|item| CompletionItem { + item, + language_server_id, + resolved: false, + }) + .collect(); + anyhow::Ok(items) + } + }) + .collect(); + + let future = async move { + let mut items = Vec::new(); + while let Some(lsp_items) = futures.next().await { + match lsp_items { + Ok(mut lsp_items) => items.append(&mut lsp_items), + Err(err) => { + log::debug!("completion request failed: {err:?}"); + } + }; + } + items + }; + + let savepoint = doc.savepoint(view); + + let ui = compositor.find::().unwrap(); + ui.last_insert.1.push(InsertEvent::RequestCompletion); + tokio::spawn(async move { + let items = cancelable_future(future, cancel).await.unwrap_or_default(); + if items.is_empty() { + return; + } + dispatch(move |editor, compositor| { + show_completion(editor, compositor, items, trigger, savepoint) + }) + .await + }); +} + +fn show_completion( + editor: &mut Editor, + compositor: &mut Compositor, + items: Vec, + trigger: Trigger, + savepoint: Arc, +) { + let (view, doc) = current_ref!(editor); + // check if the completion request is stale. + // + // Completions are completed asynchronously and therefore the user could + //switch document/view or leave insert mode. 
In all of thoise cases the + // completion should be discarded + if editor.mode != Mode::Insert || view.id != trigger.view || doc.id() != trigger.doc { + return; + } + + let size = compositor.size(); + let ui = compositor.find::().unwrap(); + if ui.completion.is_some() { + return; + } + + let completion_area = ui.set_completion(editor, savepoint, items, trigger.pos, size); + let signature_help_area = compositor + .find_id::>(SignatureHelp::ID) + .map(|signature_help| signature_help.area(size, editor)); + // Delete the signature help popup if they intersect. + if matches!((completion_area, signature_help_area),(Some(a), Some(b)) if a.intersects(b)) { + compositor.remove(SignatureHelp::ID); + } +} + +pub fn trigger_auto_completion( + tx: &Sender, + editor: &Editor, + trigger_char_only: bool, +) { + let config = editor.config.load(); + if !config.auto_completion { + return; + } + let (view, doc): (&helix_view::View, &helix_view::Document) = current_ref!(editor); + let mut text = doc.text().slice(..); + let cursor = doc.selection(view.id).primary().cursor(text); + text = doc.text().slice(..cursor); + + let is_trigger_char = doc + .language_servers_with_feature(LanguageServerFeature::Completion) + .any(|ls| { + matches!(&ls.capabilities().completion_provider, Some(lsp::CompletionOptions { + trigger_characters: Some(triggers), + .. + }) if triggers.iter().any(|trigger| text.ends_with(trigger))) + }); + if is_trigger_char { + send_blocking( + tx, + CompletionEvent::TriggerChar { + cursor, + doc: doc.id(), + view: view.id, + }, + ); + return; + } + + let is_auto_trigger = !trigger_char_only + && doc + .text() + .chars_at(cursor) + .reversed() + .take(config.completion_trigger_len as usize) + .all(char_is_word); + + if is_auto_trigger { + send_blocking( + tx, + CompletionEvent::AutoTrigger { + cursor, + doc: doc.id(), + view: view.id, + }, + ); + } +} + +fn update_completions(cx: &mut commands::Context, c: Option) { + cx.callback.push(Box::new(move |compositor, cx| { + let editor_view = compositor.find::().unwrap(); + if let Some(completion) = &mut editor_view.completion { + completion.update_filter(c); + if completion.is_empty() { + editor_view.clear_completion(cx.editor); + // clearing completions might mean we want to immediately rerequest them (usually + // this occurs if typing a trigger char) + if c.is_some() { + trigger_auto_completion(&cx.editor.handlers.completions, cx.editor, false); + } + } + } + })) +} + +fn clear_completions(cx: &mut commands::Context) { + cx.callback.push(Box::new(|compositor, cx| { + let editor_view = compositor.find::().unwrap(); + editor_view.clear_completion(cx.editor); + })) +} + +fn completion_post_command_hook( + tx: &Sender, + PostCommand { command, cx }: &mut PostCommand<'_, '_>, +) -> anyhow::Result<()> { + if cx.editor.mode == Mode::Insert { + if cx.editor.last_completion.is_some() { + match command { + MappableCommand::Static { + name: "delete_word_forward" | "delete_char_forward" | "completion", + .. + } => (), + MappableCommand::Static { + name: "delete_char_backward", + .. + } => update_completions(cx, None), + _ => clear_completions(cx), + } + } else { + let event = match command { + MappableCommand::Static { + name: "delete_char_backward" | "delete_word_forward" | "delete_char_forward", + .. 
+ } => { + let (view, doc) = current!(cx.editor); + let primary_cursor = doc + .selection(view.id) + .primary() + .cursor(doc.text().slice(..)); + CompletionEvent::DeleteText { + cursor: primary_cursor, + } + } + // hacks: some commands are handeled elsewhere and we don't want to + // cancel in that case + MappableCommand::Static { + name: "completion" | "insert_mode" | "append_mode", + .. + } => return Ok(()), + _ => CompletionEvent::Cancel, + }; + send_blocking(tx, event); + } + } + Ok(()) +} + +pub(super) fn register_hooks(handlers: &Handlers) { + let tx = handlers.completions.clone(); + register_hook!(move |event: &mut PostCommand<'_, '_>| completion_post_command_hook(&tx, event)); + + let tx = handlers.completions.clone(); + register_hook!(move |event: &mut OnModeSwitch<'_, '_>| { + if event.old_mode == Mode::Insert { + send_blocking(&tx, CompletionEvent::Cancel); + clear_completions(event.cx); + } else if event.new_mode == Mode::Insert { + trigger_auto_completion(&tx, event.cx.editor, false) + } + Ok(()) + }); + + let tx = handlers.completions.clone(); + register_hook!(move |event: &mut PostInsertChar<'_, '_>| { + if event.cx.editor.last_completion.is_some() { + update_completions(event.cx, Some(event.c)) + } else { + trigger_auto_completion(&tx, event.cx.editor, false); + } + Ok(()) + }); +} diff --git a/helix-term/src/handlers/signature_help.rs b/helix-term/src/handlers/signature_help.rs new file mode 100644 index 000000000000..3c746548ac8c --- /dev/null +++ b/helix-term/src/handlers/signature_help.rs @@ -0,0 +1,335 @@ +use std::sync::Arc; +use std::time::Duration; + +use helix_core::syntax::LanguageServerFeature; +use helix_event::{ + cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx, +}; +use helix_lsp::lsp; +use helix_stdx::rope::RopeSliceExt; +use helix_view::document::Mode; +use helix_view::events::{DocumentDidChange, SelectionDidChange}; +use helix_view::handlers::lsp::{SignatureHelpEvent, SignatureHelpInvoked}; +use helix_view::Editor; +use tokio::sync::mpsc::Sender; +use tokio::time::Instant; + +use crate::commands::Open; +use crate::compositor::Compositor; +use crate::events::{OnModeSwitch, PostInsertChar}; +use crate::handlers::Handlers; +use crate::ui::lsp::SignatureHelp; +use crate::ui::Popup; +use crate::{job, ui}; + +#[derive(Debug)] +enum State { + Open, + Closed, + Pending { request: CancelTx }, +} + +/// debounce timeout in ms, value taken from VSCode +/// TODO: make this configurable? 
+const TIMEOUT: u64 = 120; + +#[derive(Debug)] +pub(super) struct SignatureHelpHandler { + trigger: Option, + state: State, +} + +impl SignatureHelpHandler { + pub fn new() -> SignatureHelpHandler { + SignatureHelpHandler { + trigger: None, + state: State::Closed, + } + } +} + +impl helix_event::AsyncHook for SignatureHelpHandler { + type Event = SignatureHelpEvent; + + fn handle_event( + &mut self, + event: Self::Event, + timeout: Option, + ) -> Option { + match event { + SignatureHelpEvent::Invoked => { + self.trigger = Some(SignatureHelpInvoked::Manual); + self.state = State::Closed; + self.finish_debounce(); + return None; + } + SignatureHelpEvent::Trigger => {} + SignatureHelpEvent::ReTrigger => { + // don't retrigger if we aren't open/pending yet + if matches!(self.state, State::Closed) { + return timeout; + } + } + SignatureHelpEvent::Cancel => { + self.state = State::Closed; + return None; + } + SignatureHelpEvent::RequestComplete { open } => { + // don't cancel rerequest that was already triggered + if let State::Pending { request } = &self.state { + if !request.is_closed() { + return timeout; + } + } + self.state = if open { State::Open } else { State::Closed }; + return timeout; + } + } + if self.trigger.is_none() { + self.trigger = Some(SignatureHelpInvoked::Automatic) + } + Some(Instant::now() + Duration::from_millis(TIMEOUT)) + } + + fn finish_debounce(&mut self) { + let invocation = self.trigger.take().unwrap(); + let (tx, rx) = cancelation(); + self.state = State::Pending { request: tx }; + job::dispatch_blocking(move |editor, _| request_signature_help(editor, invocation, rx)) + } +} + +pub fn request_signature_help( + editor: &mut Editor, + invoked: SignatureHelpInvoked, + cancel: CancelRx, +) { + let (view, doc) = current!(editor); + + // TODO merge multiple language server signature help into one instead of just taking the first language server that supports it + let future = doc + .language_servers_with_feature(LanguageServerFeature::SignatureHelp) + .find_map(|language_server| { + let pos = doc.position(view.id, language_server.offset_encoding()); + language_server.text_document_signature_help(doc.identifier(), pos, None) + }); + + let Some(future) = future else { + // Do not show the message if signature help was invoked + // automatically on backspace, trigger characters, etc. 
+ if invoked == SignatureHelpInvoked::Manual { + editor + .set_error("No configured language server supports signature-help"); + } + return; + }; + + tokio::spawn(async move { + match cancelable_future(future, cancel).await { + Some(Ok(res)) => { + job::dispatch(move |editor, compositor| { + show_signature_help(editor, compositor, invoked, res) + }) + .await + } + Some(Err(err)) => log::error!("signature help request failed: {err}"), + None => (), + } + }); +} + +pub fn show_signature_help( + editor: &mut Editor, + compositor: &mut Compositor, + invoked: SignatureHelpInvoked, + response: Option, +) { + let config = &editor.config(); + + if !(config.lsp.auto_signature_help + || SignatureHelp::visible_popup(compositor).is_some() + || invoked == SignatureHelpInvoked::Manual) + { + return; + } + + // If the signature help invocation is automatic, don't show it outside of Insert Mode: + // it very probably means the server was a little slow to respond and the user has + // already moved on to something else, making a signature help popup will just be an + // annoyance, see https://github.com/helix-editor/helix/issues/3112 + // For the most part this should not be needed as the request gets canceled automatically now + // but it's technically possible for the mode change to just preempt this callback so better safe than sorry + if invoked == SignatureHelpInvoked::Automatic && editor.mode != Mode::Insert { + return; + } + + let response = match response { + // According to the spec the response should be None if there + // are no signatures, but some servers don't follow this. + Some(s) if !s.signatures.is_empty() => s, + _ => { + send_blocking( + &editor.handlers.signature_hints, + SignatureHelpEvent::RequestComplete { open: false }, + ); + compositor.remove(SignatureHelp::ID); + return; + } + }; + send_blocking( + &editor.handlers.signature_hints, + SignatureHelpEvent::RequestComplete { open: true }, + ); + + let doc = doc!(editor); + let language = doc.language_name().unwrap_or(""); + + let signature = match response + .signatures + .get(response.active_signature.unwrap_or(0) as usize) + { + Some(s) => s, + None => return, + }; + let mut contents = SignatureHelp::new( + signature.label.clone(), + language.to_string(), + Arc::clone(&editor.syn_loader), + ); + + let signature_doc = if config.lsp.display_signature_help_docs { + signature.documentation.as_ref().map(|doc| match doc { + lsp::Documentation::String(s) => s.clone(), + lsp::Documentation::MarkupContent(markup) => markup.value.clone(), + }) + } else { + None + }; + + contents.set_signature_doc(signature_doc); + + let active_param_range = || -> Option<(usize, usize)> { + let param_idx = signature + .active_parameter + .or(response.active_parameter) + .unwrap_or(0) as usize; + let param = signature.parameters.as_ref()?.get(param_idx)?; + match ¶m.label { + lsp::ParameterLabel::Simple(string) => { + let start = signature.label.find(string.as_str())?; + Some((start, start + string.len())) + } + lsp::ParameterLabel::LabelOffsets([start, end]) => { + // LS sends offsets based on utf-16 based string representation + // but highlighting in helix is done using byte offset. 
+ use helix_core::str_utils::char_to_byte_idx; + let from = char_to_byte_idx(&signature.label, *start as usize); + let to = char_to_byte_idx(&signature.label, *end as usize); + Some((from, to)) + } + } + }; + contents.set_active_param_range(active_param_range()); + + let old_popup = compositor.find_id::>(SignatureHelp::ID); + let mut popup = Popup::new(SignatureHelp::ID, contents) + .position(old_popup.and_then(|p| p.get_position())) + .position_bias(Open::Above) + .ignore_escape_key(true); + + // Don't create a popup if it intersects the auto-complete menu. + let size = compositor.size(); + if compositor + .find::() + .unwrap() + .completion + .as_mut() + .map(|completion| completion.area(size, editor)) + .filter(|area| area.intersects(popup.area(size, editor))) + .is_some() + { + return; + } + + compositor.replace_or_push(SignatureHelp::ID, popup); +} + +fn signature_help_post_insert_char_hook( + tx: &Sender, + PostInsertChar { cx, .. }: &mut PostInsertChar<'_, '_>, +) -> anyhow::Result<()> { + if !cx.editor.config().lsp.auto_signature_help { + return Ok(()); + } + let (view, doc) = current!(cx.editor); + // TODO support multiple language servers (not just the first that is found), likely by merging UI somehow + let Some(language_server) = doc + .language_servers_with_feature(LanguageServerFeature::SignatureHelp) + .next() + else { + return Ok(()); + }; + + let capabilities = language_server.capabilities(); + + if let lsp::ServerCapabilities { + signature_help_provider: + Some(lsp::SignatureHelpOptions { + trigger_characters: Some(triggers), + // TODO: retrigger_characters + .. + }), + .. + } = capabilities + { + let mut text = doc.text().slice(..); + let cursor = doc.selection(view.id).primary().cursor(text); + text = text.slice(..cursor); + if triggers.iter().any(|trigger| text.ends_with(trigger)) { + send_blocking(tx, SignatureHelpEvent::Trigger) + } + } + Ok(()) +} + +pub(super) fn register_hooks(handlers: &Handlers) { + let tx = handlers.signature_hints.clone(); + register_hook!(move |event: &mut OnModeSwitch<'_, '_>| { + match (event.old_mode, event.new_mode) { + (Mode::Insert, _) => { + send_blocking(&tx, SignatureHelpEvent::Cancel); + event.cx.callback.push(Box::new(|compositor, _| { + compositor.remove(SignatureHelp::ID); + })); + } + (_, Mode::Insert) => { + if event.cx.editor.config().lsp.auto_signature_help { + send_blocking(&tx, SignatureHelpEvent::Trigger); + } + } + _ => (), + } + Ok(()) + }); + + let tx = handlers.signature_hints.clone(); + register_hook!( + move |event: &mut PostInsertChar<'_, '_>| signature_help_post_insert_char_hook(&tx, event) + ); + + let tx = handlers.signature_hints.clone(); + register_hook!(move |event: &mut DocumentDidChange<'_>| { + if event.doc.config.load().lsp.auto_signature_help { + send_blocking(&tx, SignatureHelpEvent::ReTrigger); + } + Ok(()) + }); + + let tx = handlers.signature_hints.clone(); + register_hook!(move |event: &mut SelectionDidChange<'_>| { + if event.doc.config.load().lsp.auto_signature_help { + send_blocking(&tx, SignatureHelpEvent::ReTrigger); + } + Ok(()) + }); +} diff --git a/helix-term/src/health.rs b/helix-term/src/health.rs index dff9031929c3..5f2019265a3e 100644 --- a/helix-term/src/health.rs +++ b/helix-term/src/health.rs @@ -145,7 +145,7 @@ pub fn languages_all() -> std::io::Result<()> { } }; - let mut headings = vec!["Language", "LSP", "DAP"]; + let mut headings = vec!["Language", "LSP", "DAP", "Formatter"]; for feat in TsFeature::all() { headings.push(feat.short_title()) @@ -182,7 +182,7 @@ pub fn 
languages_all() -> std::io::Result<()> { .sort_unstable_by_key(|l| l.language_id.clone()); let check_binary = |cmd: Option<&str>| match cmd { - Some(cmd) => match which::which(cmd) { + Some(cmd) => match helix_stdx::env::which(cmd) { Ok(_) => column(&format!("✓ {}", cmd), Color::Green), Err(_) => column(&format!("✘ {}", cmd), Color::Red), }, @@ -203,6 +203,12 @@ pub fn languages_all() -> std::io::Result<()> { let dap = lang.debugger.as_ref().map(|dap| dap.command.as_str()); check_binary(dap); + let formatter = lang + .formatter + .as_ref() + .map(|formatter| formatter.command.as_str()); + check_binary(formatter); + for ts_feat in TsFeature::all() { match load_runtime_file(&lang.language_id, ts_feat.runtime_filename()).is_ok() { true => column("✓", Color::Green), @@ -285,6 +291,13 @@ pub fn language(lang_str: String) -> std::io::Result<()> { lang.debugger.as_ref().map(|dap| dap.command.to_string()), )?; + probe_protocol( + "formatter", + lang.formatter + .as_ref() + .map(|formatter| formatter.command.to_string()), + )?; + for ts_feat in TsFeature::all() { probe_treesitter_feature(&lang_str, *ts_feat)? } @@ -309,7 +322,7 @@ fn probe_protocols<'a, I: Iterator + 'a>( writeln!(stdout)?; for cmd in server_cmds { - let (path, icon) = match which::which(cmd) { + let (path, icon) = match helix_stdx::env::which(cmd) { Ok(path) => (path.display().to_string().green(), "✓".green()), Err(_) => (format!("'{}' not found in $PATH", cmd).red(), "✘".red()), }; @@ -331,7 +344,7 @@ fn probe_protocol(protocol_name: &str, server_cmd: Option) -> std::io::R writeln!(stdout, "Configured {}: {}", protocol_name, cmd_name)?; if let Some(cmd) = server_cmd { - let path = match which::which(&cmd) { + let path = match helix_stdx::env::which(&cmd) { Ok(path) => path.display().to_string().green(), Err(_) => format!("'{}' not found in $PATH", cmd).red(), }; diff --git a/helix-term/src/job.rs b/helix-term/src/job.rs index 19f2521a5231..72ed892ddf9a 100644 --- a/helix-term/src/job.rs +++ b/helix-term/src/job.rs @@ -1,13 +1,37 @@ +use helix_event::status::StatusMessage; +use helix_event::{runtime_local, send_blocking}; use helix_view::Editor; +use once_cell::sync::OnceCell; use crate::compositor::Compositor; use futures_util::future::{BoxFuture, Future, FutureExt}; use futures_util::stream::{FuturesUnordered, StreamExt}; +use tokio::sync::mpsc::{channel, Receiver, Sender}; pub type EditorCompositorCallback = Box; pub type EditorCallback = Box; +runtime_local! { + static JOB_QUEUE: OnceCell> = OnceCell::new(); +} + +pub async fn dispatch_callback(job: Callback) { + let _ = JOB_QUEUE.wait().send(job).await; +} + +pub async fn dispatch(job: impl FnOnce(&mut Editor, &mut Compositor) + Send + 'static) { + let _ = JOB_QUEUE + .wait() + .send(Callback::EditorCompositor(Box::new(job))) + .await; +} + +pub fn dispatch_blocking(job: impl FnOnce(&mut Editor, &mut Compositor) + Send + 'static) { + let jobs = JOB_QUEUE.wait(); + send_blocking(jobs, Callback::EditorCompositor(Box::new(job))) +} + pub enum Callback { EditorCompositor(EditorCompositorCallback), Editor(EditorCallback), @@ -21,11 +45,11 @@ pub struct Job { pub wait: bool, } -#[derive(Default)] pub struct Jobs { - pub futures: FuturesUnordered, - /// These are the ones that need to complete before we exit. + /// jobs that need to complete before we exit. 
pub wait_futures: FuturesUnordered, + pub callbacks: Receiver, + pub status_messages: Receiver, } impl Job { @@ -52,8 +76,16 @@ impl Job { } impl Jobs { + #[allow(clippy::new_without_default)] pub fn new() -> Self { - Self::default() + let (tx, rx) = channel(1024); + let _ = JOB_QUEUE.set(tx); + let status_messages = helix_event::status::setup(); + Self { + wait_futures: FuturesUnordered::new(), + callbacks: rx, + status_messages, + } } pub fn spawn> + Send + 'static>(&mut self, f: F) { @@ -85,18 +117,17 @@ impl Jobs { } } - pub async fn next_job(&mut self) -> Option>> { - tokio::select! { - event = self.futures.next() => { event } - event = self.wait_futures.next() => { event } - } - } - pub fn add(&self, j: Job) { if j.wait { self.wait_futures.push(j.future); } else { - self.futures.push(j.future); + tokio::spawn(async move { + match j.future.await { + Ok(Some(cb)) => dispatch_callback(cb).await, + Ok(None) => (), + Err(err) => helix_event::status::report(err).await, + } + }); } } diff --git a/helix-term/src/keymap.rs b/helix-term/src/keymap.rs index 598be55b5ff4..d9297e08dc34 100644 --- a/helix-term/src/keymap.rs +++ b/helix-term/src/keymap.rs @@ -319,7 +319,7 @@ impl Keymaps { self.sticky = None; } - let first = self.state.get(0).unwrap_or(&key); + let first = self.state.first().unwrap_or(&key); let trie_node = match self.sticky { Some(ref trie) => Cow::Owned(KeyTrie::Node(trie.clone())), None => Cow::Borrowed(keymap), diff --git a/helix-term/src/lib.rs b/helix-term/src/lib.rs index 2f6ec12b13fd..b1413ed0d972 100644 --- a/helix-term/src/lib.rs +++ b/helix-term/src/lib.rs @@ -6,13 +6,20 @@ pub mod args; pub mod commands; pub mod compositor; pub mod config; +pub mod events; pub mod health; pub mod job; pub mod keymap; pub mod ui; + use std::path::Path; +use futures_util::Future; +mod handlers; + use ignore::DirEntry; +use url::Url; + pub use keymap::macros::*; #[cfg(not(windows))] @@ -47,3 +54,22 @@ fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> b true } + +/// Opens URL in external program. +fn open_external_url_callback( + url: Url, +) -> impl Future> + Send + 'static { + let commands = open::commands(url.as_str()); + async { + for cmd in commands { + let mut command = tokio::process::Command::new(cmd.get_program()); + command.args(cmd.get_args()); + if command.output().await.is_ok() { + return Ok(job::Callback::Editor(Box::new(|_| {}))); + } + } + Ok(job::Callback::Editor(Box::new(move |editor| { + editor.set_error("Opening URL in external program failed") + }))) + } +} diff --git a/helix-term/src/main.rs b/helix-term/src/main.rs index a62c54a40ef3..132ee796f391 100644 --- a/helix-term/src/main.rs +++ b/helix-term/src/main.rs @@ -118,16 +118,16 @@ FLAGS: // Before setting the working directory, resolve all the paths in args.files for (path, _) in args.files.iter_mut() { - *path = helix_core::path::get_canonicalized_path(path); + *path = helix_stdx::path::canonicalize(&path); } // NOTE: Set the working directory early so the correct configuration is loaded. Be aware that // Application::new() depends on this logic so it must be updated if this changes. 
if let Some(path) = &args.working_directory { - helix_loader::set_current_working_dir(path)?; + helix_stdx::env::set_current_working_dir(path)?; } else if let Some((path, _)) = args.files.first().filter(|p| p.0.is_dir()) { // If the first file is a directory, it will be the working directory unless -w was specified - helix_loader::set_current_working_dir(path)?; + helix_stdx::env::set_current_working_dir(path)?; } let config = match Config::load_default() { diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 7c6a0055ea48..48d97fbd8e23 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -1,8 +1,12 @@ -use crate::compositor::{Component, Context, Event, EventResult}; +use crate::{ + compositor::{Component, Context, Event, EventResult}, + handlers::trigger_auto_completion, +}; use helix_view::{ document::SavePoint, editor::CompleteAction, graphics::Margin, + handlers::lsp::SignatureHelpInvoked, theme::{Modifier, Style}, ViewId, }; @@ -10,7 +14,7 @@ use tui::{buffer::Buffer as Surface, text::Span}; use std::{borrow::Cow, sync::Arc}; -use helix_core::{Change, Transaction}; +use helix_core::{chars, Change, Transaction}; use helix_view::{graphics::Rect, Document, Editor}; use crate::commands; @@ -95,10 +99,9 @@ pub struct CompletionItem { /// Wraps a Menu. pub struct Completion { popup: Popup>, - start_offset: usize, #[allow(dead_code)] trigger_offset: usize, - // TODO: maintain a completioncontext with trigger kind & trigger char + filter: String, } impl Completion { @@ -108,7 +111,6 @@ impl Completion { editor: &Editor, savepoint: Arc, mut items: Vec, - start_offset: usize, trigger_offset: usize, ) -> Self { let preview_completion_insert = editor.config().preview_completion_insert; @@ -246,7 +248,7 @@ impl Completion { // (also without sending the transaction to the LS) *before any further transaction is applied*. // Otherwise incremental sync breaks (since the state of the LS doesn't match the state the transaction // is applied to). - if editor.last_completion.is_none() { + if matches!(editor.last_completion, Some(CompleteAction::Triggered)) { editor.last_completion = Some(CompleteAction::Selected { savepoint: doc.savepoint(view), }) @@ -324,8 +326,18 @@ impl Completion { doc.apply(&transaction, view.id); } } + // we could have just inserted a trigger char (like a `crate::` completion for rust + // so we want to retrigger immediately when accepting a completion. + trigger_auto_completion(&editor.handlers.completions, editor, true); } }; + + // In case the popup was deleted because of an intersection w/ the auto-complete menu. 
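// Re-request only once the completion interaction is finished, not on every
// intermediate menu update while the completion popup is still open.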
+ if event != PromptEvent::Update { + editor + .handlers + .trigger_signature_help(SignatureHelpInvoked::Automatic, editor); + } }); let margin = if editor.menu_border() { @@ -339,14 +351,30 @@ impl Completion { .ignore_escape_key(true) .margin(margin); + let (view, doc) = current_ref!(editor); + let text = doc.text().slice(..); + let cursor = doc.selection(view.id).primary().cursor(text); + let offset = text + .chars_at(cursor) + .reversed() + .take_while(|ch| chars::char_is_word(*ch)) + .count(); + let start_offset = cursor.saturating_sub(offset); + + let fragment = doc.text().slice(start_offset..cursor); let mut completion = Self { popup, - start_offset, trigger_offset, + // TODO: expand nucleo api to allow moving straight to a Utf32String here + // and avoid allocation during matching + filter: String::from(fragment), }; // need to recompute immediately in case start_offset != trigger_offset - completion.recompute_filter(editor); + completion + .popup + .contents_mut() + .score(&completion.filter, false); completion } @@ -366,39 +394,22 @@ impl Completion { } } - pub fn recompute_filter(&mut self, editor: &Editor) { + /// Appends (`c: Some(c)`) or removes (`c: None`) a character to/from the filter + /// this should be called whenever the user types or deletes a character in insert mode. + pub fn update_filter(&mut self, c: Option) { // recompute menu based on matches let menu = self.popup.contents_mut(); - let (view, doc) = current_ref!(editor); - - // cx.hooks() - // cx.add_hook(enum type, ||) - // cx.trigger_hook(enum type, &str, ...) <-- there has to be enough to identify doc/view - // callback with editor & compositor - // - // trigger_hook sends event into channel, that's consumed in the global loop and - // triggers all registered callbacks - // TODO: hooks should get processed immediately so maybe do it after select!(), before - // looping? 
- - let cursor = doc - .selection(view.id) - .primary() - .cursor(doc.text().slice(..)); - if self.trigger_offset <= cursor { - let fragment = doc.text().slice(self.start_offset..cursor); - let text = Cow::from(fragment); - // TODO: logic is same as ui/picker - menu.score(&text); - } else { - // we backspaced before the start offset, clear the menu - // this will cause the editor to remove the completion popup - menu.clear(); + match c { + Some(c) => self.filter.push(c), + None => { + self.filter.pop(); + if self.filter.is_empty() { + menu.clear(); + return; + } + } } - } - - pub fn update(&mut self, cx: &mut commands::Context) { - self.recompute_filter(cx.editor) + menu.score(&self.filter, c.is_some()); } pub fn is_empty(&self) -> bool { diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index c808be175e0b..fef62a292910 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -1,7 +1,7 @@ use crate::{ commands::{self, OnKeyCallback}, compositor::{Component, Context, Event, EventResult}, - job::{self, Callback}, + events::{OnModeSwitch, PostCommand}, key, keymap::{KeymapResult, Keymaps}, ui::{ @@ -33,8 +33,8 @@ use std::{mem::take, num::NonZeroUsize, path::PathBuf, rc::Rc, sync::Arc}; use tui::{buffer::Buffer as Surface, text::Span}; +use super::document::LineDecoration; use super::{completion::CompletionItem, statusline}; -use super::{document::LineDecoration, lsp::SignatureHelp}; pub struct EditorView { pub keymaps: Keymaps, @@ -386,7 +386,7 @@ impl EditorView { let mut warning_vec = Vec::new(); let mut error_vec = Vec::new(); - for diagnostic in doc.shown_diagnostics() { + for diagnostic in doc.diagnostics() { // Separate diagnostics into different Vecs by severity. let (vec, scope) = match diagnostic.severity { Some(Severity::Info) => (&mut info_vec, info), @@ -684,7 +684,7 @@ impl EditorView { .primary() .cursor(doc.text().slice(..)); - let diagnostics = doc.shown_diagnostics().filter(|diagnostic| { + let diagnostics = doc.diagnostics().iter().filter(|diagnostic| { diagnostic.range.start <= cursor && diagnostic.range.end >= cursor }); @@ -835,35 +835,26 @@ impl EditorView { let mut execute_command = |command: &commands::MappableCommand| { command.execute(cxt); + helix_event::dispatch(PostCommand { command, cx: cxt }); + let current_mode = cxt.editor.mode(); - match (last_mode, current_mode) { - (Mode::Normal, Mode::Insert) => { - // HAXX: if we just entered insert mode from normal, clear key buf - // and record the command that got us into this mode. + if current_mode != last_mode { + helix_event::dispatch(OnModeSwitch { + old_mode: last_mode, + new_mode: current_mode, + cx: cxt, + }); + // HAXX: if we just entered insert mode from normal, clear key buf + // and record the command that got us into this mode. + if current_mode == Mode::Insert { // how we entered insert mode is important, and we should track that so // we can repeat the side effect. 
self.last_insert.0 = command.clone(); self.last_insert.1.clear(); - - commands::signature_help_impl(cxt, commands::SignatureHelpInvoked::Automatic); } - (Mode::Insert, Mode::Normal) => { - // if exiting insert mode, remove completion - self.clear_completion(cxt.editor); - cxt.editor.completion_request_handle = None; - - // TODO: Use an on_mode_change hook to remove signature help - cxt.jobs.callback(async { - let call: job::Callback = - Callback::EditorCompositor(Box::new(|_editor, compositor| { - compositor.remove(SignatureHelp::ID); - })); - Ok(call) - }); - } - _ => (), } + last_mode = current_mode; }; @@ -991,12 +982,10 @@ impl EditorView { editor: &mut Editor, savepoint: Arc, items: Vec, - start_offset: usize, trigger_offset: usize, size: Rect, ) -> Option { - let mut completion = - Completion::new(editor, savepoint, items, start_offset, trigger_offset); + let mut completion = Completion::new(editor, savepoint, items, trigger_offset); if completion.is_empty() { // skip if we got no completion results @@ -1004,7 +993,7 @@ impl EditorView { } let area = completion.area(size, editor); - editor.last_completion = None; + editor.last_completion = Some(CompleteAction::Triggered); self.last_insert.1.push(InsertEvent::TriggerCompletion); // TODO : propagate required size on resize to completion too @@ -1017,6 +1006,7 @@ impl EditorView { self.completion = None; if let Some(last_completion) = editor.last_completion.take() { match last_completion { + CompleteAction::Triggered => (), CompleteAction::Applied { trigger_offset, changes, @@ -1030,9 +1020,6 @@ impl EditorView { } } } - - // Clear any savepoints - editor.clear_idle_timer(); // don't retrigger } pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult { @@ -1046,13 +1033,7 @@ impl EditorView { }; } - if cx.editor.mode != Mode::Insert || !cx.editor.config().auto_completion { - return EventResult::Ignored(None); - } - - crate::commands::insert::idle_completion(cx); - - EventResult::Consumed(None) + EventResult::Ignored(None) } } @@ -1265,7 +1246,7 @@ impl Component for EditorView { editor: context.editor, count: None, register: None, - callback: None, + callback: Vec::new(), on_next_key_callback: None, jobs: context.jobs, }; @@ -1302,8 +1283,6 @@ impl Component for EditorView { cx.editor.status_msg = None; let mode = cx.editor.mode(); - let (view, _) = current!(cx.editor); - let focus = view.id; if let Some(on_next_key) = self.on_next_key.take() { // if there's a command waiting input, do that first @@ -1340,12 +1319,6 @@ impl Component for EditorView { if callback.is_some() { // assume close_fn self.clear_completion(cx.editor); - - // In case the popup was deleted because of an intersection w/ the auto-complete menu. - commands::signature_help_impl( - &mut cx, - commands::SignatureHelpInvoked::Automatic, - ); } } } @@ -1356,14 +1329,6 @@ impl Component for EditorView { // record last_insert key self.last_insert.1.push(InsertEvent::Key(key)); - - // lastly we recalculate completion - if let Some(completion) = &mut self.completion { - completion.update(&mut cx); - if completion.is_empty() { - self.clear_completion(cx.editor); - } - } } } mode => self.command_mode(mode, &mut cx, key), @@ -1377,7 +1342,7 @@ impl Component for EditorView { } // appease borrowck - let callback = cx.callback.take(); + let callbacks = take(&mut cx.callback); // if the command consumed the last view, skip the render. // on the next loop cycle the Application will then terminate. 
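The hunk above replaces the hard-coded handling of mode transitions in `EditorView` with `helix_event::dispatch` calls for `PostCommand` and `OnModeSwitch`, so completion and signature-help behaviour moves into registered hooks (see helix-term/src/handlers/*.rs earlier in this patch). The standalone sketch below shows that dispatch-and-hook shape in plain Rust with hypothetical names (`HOOKS`, `register_hook`, `dispatch`); it is not the actual `helix_event` implementation, which uses typed registries and async channels.

use std::sync::Mutex;

#[derive(PartialEq, Clone, Copy)]
enum Mode {
    Normal,
    Insert,
}

// Event payload mirroring the shape of `OnModeSwitch` from the patch.
struct OnModeSwitch {
    old_mode: Mode,
    new_mode: Mode,
}

// A global list of hooks; the real crate keeps one registry per event type.
static HOOKS: Mutex<Vec<Box<dyn Fn(&OnModeSwitch) + Send>>> = Mutex::new(Vec::new());

fn register_hook(hook: impl Fn(&OnModeSwitch) + Send + 'static) {
    HOOKS.lock().unwrap().push(Box::new(hook));
}

fn dispatch(event: OnModeSwitch) {
    for hook in HOOKS.lock().unwrap().iter() {
        hook(&event);
    }
}

fn main() {
    // Comparable to the completion hook: cancel in-flight requests on leaving insert mode.
    register_hook(|event: &OnModeSwitch| {
        if event.old_mode == Mode::Insert && event.new_mode != Mode::Insert {
            println!("cancel in-flight completion request");
        }
    });

    // Command execution dispatches the event instead of matching on transitions inline.
    dispatch(OnModeSwitch {
        old_mode: Mode::Insert,
        new_mode: Mode::Normal,
    });
}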
@@ -1385,21 +1350,27 @@ impl Component for EditorView { return EventResult::Ignored(None); } - // if the focused view still exists and wasn't closed - if cx.editor.tree.contains(focus) { - let config = cx.editor.config(); - let mode = cx.editor.mode(); - let view = view_mut!(cx.editor, focus); - let doc = doc_mut!(cx.editor, &view.doc); + let config = cx.editor.config(); + let mode = cx.editor.mode(); + let (view, doc) = current!(cx.editor); - view.ensure_cursor_in_view(doc, config.scrolloff); + view.ensure_cursor_in_view(doc, config.scrolloff); - // Store a history state if not in insert mode. This also takes care of - // committing changes when leaving insert mode. - if mode != Mode::Insert { - doc.append_changes_to_history(view); - } + // Store a history state if not in insert mode. This also takes care of + // committing changes when leaving insert mode. + if mode != Mode::Insert { + doc.append_changes_to_history(view); } + let callback = if callbacks.is_empty() { + None + } else { + let callback: crate::compositor::Callback = Box::new(move |compositor, cx| { + for callback in callbacks { + callback(compositor, cx) + } + }); + Some(callback) + }; EventResult::Consumed(callback) } diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 4d0c0d4a5aa6..5cf530ad812e 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -6,7 +6,7 @@ use tui::{ use std::sync::Arc; -use pulldown_cmark::{CodeBlockKind, Event, HeadingLevel, Options, Parser, Tag}; +use pulldown_cmark::{CodeBlockKind, Event, HeadingLevel, Options, Parser, Tag, TagEnd}; use helix_core::{ syntax::{self, HighlightEvent, InjectionLanguageMarker, Syntax}, @@ -209,7 +209,7 @@ impl Markdown { list_stack.push(list); } - Event::End(Tag::List(_)) => { + Event::End(TagEnd::List(_)) => { list_stack.pop(); // whenever top-level list closes, empty line @@ -249,7 +249,10 @@ impl Markdown { Event::End(tag) => { tags.pop(); match tag { - Tag::Heading(_, _, _) | Tag::Paragraph | Tag::CodeBlock(_) | Tag::Item => { + TagEnd::Heading(_) + | TagEnd::Paragraph + | TagEnd::CodeBlock + | TagEnd::Item => { push_line(&mut spans, &mut lines); } _ => (), @@ -257,7 +260,7 @@ impl Markdown { // whenever heading, code block or paragraph closes, empty line match tag { - Tag::Heading(_, _, _) | Tag::Paragraph | Tag::CodeBlock(_) => { + TagEnd::Heading(_) | TagEnd::Paragraph | TagEnd::CodeBlock => { lines.push(Spans::default()); } _ => (), @@ -279,7 +282,7 @@ impl Markdown { lines.extend(tui_text.lines.into_iter()); } else { let style = match tags.last() { - Some(Tag::Heading(level, ..)) => match level { + Some(Tag::Heading { level, .. 
}) => match level { HeadingLevel::H1 => heading_styles[0], HeadingLevel::H2 => heading_styles[1], HeadingLevel::H3 => heading_styles[2], diff --git a/helix-term/src/ui/menu.rs b/helix-term/src/ui/menu.rs index 0ee64ce9e031..c0e60b33e344 100644 --- a/helix-term/src/ui/menu.rs +++ b/helix-term/src/ui/menu.rs @@ -96,20 +96,34 @@ impl Menu { } } - pub fn score(&mut self, pattern: &str) { - // reuse the matches allocation - self.matches.clear(); + pub fn score(&mut self, pattern: &str, incremental: bool) { let mut matcher = MATCHER.lock(); matcher.config = Config::DEFAULT; let pattern = Atom::new(pattern, CaseMatching::Ignore, AtomKind::Fuzzy, false); let mut buf = Vec::new(); - let matches = self.options.iter().enumerate().filter_map(|(i, option)| { - let text = option.filter_text(&self.editor_data); - pattern - .score(Utf32Str::new(&text, &mut buf), &mut matcher) - .map(|score| (i as u32, score as u32)) - }); - self.matches.extend(matches); + if incremental { + self.matches.retain_mut(|(index, score)| { + let option = &self.options[*index as usize]; + let text = option.filter_text(&self.editor_data); + let new_score = pattern.score(Utf32Str::new(&text, &mut buf), &mut matcher); + match new_score { + Some(new_score) => { + *score = new_score as u32; + true + } + None => false, + } + }) + } else { + self.matches.clear(); + let matches = self.options.iter().enumerate().filter_map(|(i, option)| { + let text = option.filter_text(&self.editor_data); + pattern + .score(Utf32Str::new(&text, &mut buf), &mut matcher) + .map(|score| (i as u32, score as u32)) + }); + self.matches.extend(matches); + } self.matches .sort_unstable_by_key(|&(i, score)| (Reverse(score), i)); @@ -413,6 +427,7 @@ impl Component for Menu { cell.set_fg(scroll_style.fg.unwrap_or(helix_view::theme::Color::Reset)); } else if !render_borders { // Draw scroll track + cell.set_symbol(half_block); cell.set_fg(scroll_style.bg.unwrap_or(helix_view::theme::Color::Reset)); } } diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 660bbfea363d..efa2473e01ed 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -409,7 +409,7 @@ pub mod completers { use std::path::Path; let is_tilde = input == "~"; - let path = helix_core::path::expand_tilde(Path::new(input)); + let path = helix_stdx::path::expand_tilde(Path::new(input)); let (dir, file_name) = if input.ends_with(std::path::MAIN_SEPARATOR) { (path, None) @@ -430,7 +430,7 @@ pub mod completers { match path.parent() { Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(), // Path::new("h")'s parent is Some("")... 
- _ => helix_loader::current_working_dir(), + _ => helix_stdx::env::current_working_dir(), } }; diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index 9ba45335777f..4be5a11ef647 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -63,7 +63,7 @@ impl PathOrId { fn get_canonicalized(self) -> Self { use PathOrId::*; match self { - Path(path) => Path(helix_core::path::get_canonicalized_path(&path)), + Path(path) => Path(helix_stdx::path::canonicalize(path)), Id(id) => Id(id), } } @@ -480,8 +480,7 @@ impl Picker { .find::>>() .map(|overlay| &mut overlay.content.file_picker), }; - let Some(picker) = picker - else { + let Some(picker) = picker else { log::info!("picker closed before syntax highlighting finished"); return; }; @@ -489,7 +488,15 @@ impl Picker { let doc = match current_file { PathOrId::Id(doc_id) => doc_mut!(editor, &doc_id), PathOrId::Path(path) => match picker.preview_cache.get_mut(&path) { - Some(CachedPreview::Document(ref mut doc)) => doc, + Some(CachedPreview::Document(ref mut doc)) => { + let diagnostics = Editor::doc_diagnostics( + &editor.language_servers, + &editor.diagnostics, + doc, + ); + doc.replace_diagnostics(diagnostics, &[], None); + doc + } _ => return, }, }; diff --git a/helix-term/src/ui/popup.rs b/helix-term/src/ui/popup.rs index 7a6ffe9dd6d2..b38b8b6e3fbc 100644 --- a/helix-term/src/ui/popup.rs +++ b/helix-term/src/ui/popup.rs @@ -303,6 +303,7 @@ impl Component for Popup { cell.set_fg(scroll_style.fg.unwrap_or(helix_view::theme::Color::Reset)); } else if !render_borders { // Draw scroll track + cell.set_symbol(half_block); cell.set_fg(scroll_style.bg.unwrap_or(helix_view::theme::Color::Reset)); } } diff --git a/helix-term/src/ui/prompt.rs b/helix-term/src/ui/prompt.rs index 702a6e6714ad..3764bba60c64 100644 --- a/helix-term/src/ui/prompt.rs +++ b/helix-term/src/ui/prompt.rs @@ -393,7 +393,7 @@ impl Prompt { height, ); - if !self.completion.is_empty() { + if completion_area.height > 0 && !self.completion.is_empty() { let area = completion_area; let background = theme.get("ui.menu"); diff --git a/helix-term/src/ui/spinner.rs b/helix-term/src/ui/spinner.rs index 68965469d32e..379c4489f318 100644 --- a/helix-term/src/ui/spinner.rs +++ b/helix-term/src/ui/spinner.rs @@ -11,7 +11,7 @@ impl ProgressSpinners { } pub fn get_or_create(&mut self, id: usize) -> &mut Spinner { - self.inner.entry(id).or_insert_with(Spinner::default) + self.inner.entry(id).or_default() } } diff --git a/helix-term/src/ui/statusline.rs b/helix-term/src/ui/statusline.rs index 52dd49f9e212..9871828ee3d0 100644 --- a/helix-term/src/ui/statusline.rs +++ b/helix-term/src/ui/statusline.rs @@ -227,7 +227,8 @@ where { let (warnings, errors) = context .doc - .shown_diagnostics() + .diagnostics() + .iter() .fold((0, 0), |mut counts, diag| { use helix_core::diagnostic::Severity; match diag.severity { diff --git a/helix-term/tests/test/commands/write.rs b/helix-term/tests/test/commands/write.rs index 376ba5e7b1e7..adc721c5f1d0 100644 --- a/helix-term/tests/test/commands/write.rs +++ b/helix-term/tests/test/commands/write.rs @@ -3,7 +3,8 @@ use std::{ ops::RangeInclusive, }; -use helix_core::{diagnostic::Severity, path::get_normalized_path}; +use helix_core::diagnostic::Severity; +use helix_stdx::path; use helix_view::doc; use super::*; @@ -23,7 +24,7 @@ async fn test_write_quit_fail() -> anyhow::Result<()> { assert_eq!(1, docs.len()); let doc = docs.pop().unwrap(); - assert_eq!(Some(&get_normalized_path(file.path())), doc.path()); + 
assert_eq!(Some(&path::normalize(file.path())), doc.path()); assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1); }), false, @@ -269,7 +270,7 @@ async fn test_write_scratch_to_new_path() -> anyhow::Result<()> { assert_eq!(1, docs.len()); let doc = docs.pop().unwrap(); - assert_eq!(Some(&get_normalized_path(file.path())), doc.path()); + assert_eq!(Some(&path::normalize(file.path())), doc.path()); }), false, ) @@ -341,7 +342,7 @@ async fn test_write_new_path() -> anyhow::Result<()> { Some(&|app| { let doc = doc!(app.editor); assert!(!app.editor.is_err()); - assert_eq!(&get_normalized_path(file1.path()), doc.path().unwrap()); + assert_eq!(&path::normalize(file1.path()), doc.path().unwrap()); }), ), ( @@ -349,7 +350,7 @@ async fn test_write_new_path() -> anyhow::Result<()> { Some(&|app| { let doc = doc!(app.editor); assert!(!app.editor.is_err()); - assert_eq!(&get_normalized_path(file2.path()), doc.path().unwrap()); + assert_eq!(&path::normalize(file2.path()), doc.path().unwrap()); assert!(app.editor.document_by_path(file1.path()).is_none()); }), ), diff --git a/helix-term/tests/test/splits.rs b/helix-term/tests/test/splits.rs index f010c86ba4ab..3b66c0486421 100644 --- a/helix-term/tests/test/splits.rs +++ b/helix-term/tests/test/splits.rs @@ -1,6 +1,6 @@ use super::*; -use helix_core::path::get_normalized_path; +use helix_stdx::path; #[tokio::test(flavor = "multi_thread")] async fn test_split_write_quit_all() -> anyhow::Result<()> { @@ -27,21 +27,21 @@ async fn test_split_write_quit_all() -> anyhow::Result<()> { let doc1 = docs .iter() - .find(|doc| doc.path().unwrap() == &get_normalized_path(file1.path())) + .find(|doc| doc.path().unwrap() == &path::normalize(file1.path())) .unwrap(); assert_eq!("hello1", doc1.text().to_string()); let doc2 = docs .iter() - .find(|doc| doc.path().unwrap() == &get_normalized_path(file2.path())) + .find(|doc| doc.path().unwrap() == &path::normalize(file2.path())) .unwrap(); assert_eq!("hello2", doc2.text().to_string()); let doc3 = docs .iter() - .find(|doc| doc.path().unwrap() == &get_normalized_path(file3.path())) + .find(|doc| doc.path().unwrap() == &path::normalize(file3.path())) .unwrap(); assert_eq!("hello3", doc3.text().to_string()); diff --git a/helix-tui/src/backend/crossterm.rs b/helix-tui/src/backend/crossterm.rs index c55ab6bbd0bd..88e70f32e684 100644 --- a/helix-tui/src/backend/crossterm.rs +++ b/helix-tui/src/backend/crossterm.rs @@ -79,6 +79,7 @@ pub struct CrosstermBackend { capabilities: Capabilities, supports_keyboard_enhancement_protocol: OnceCell, mouse_capture_enabled: bool, + supports_bracketed_paste: bool, } impl CrosstermBackend @@ -91,6 +92,7 @@ where capabilities: Capabilities::from_env_or_default(config), supports_keyboard_enhancement_protocol: OnceCell::new(), mouse_capture_enabled: false, + supports_bracketed_paste: true, } } @@ -134,9 +136,16 @@ where execute!( self.buffer, terminal::EnterAlternateScreen, - EnableBracketedPaste, EnableFocusChange )?; + match execute!(self.buffer, EnableBracketedPaste,) { + Err(err) if err.kind() == io::ErrorKind::Unsupported => { + log::warn!("Bracketed paste is not supported on this terminal."); + self.supports_bracketed_paste = false; + } + Err(err) => return Err(err), + Ok(_) => (), + }; execute!(self.buffer, terminal::Clear(terminal::ClearType::All))?; if config.enable_mouse_capture { execute!(self.buffer, EnableMouseCapture)?; @@ -177,9 +186,11 @@ where if self.supports_keyboard_enhancement_protocol() { execute!(self.buffer, PopKeyboardEnhancementFlags)?; } + if 
self.supports_bracketed_paste { + execute!(self.buffer, DisableBracketedPaste,)?; + } execute!( self.buffer, - DisableBracketedPaste, DisableFocusChange, terminal::LeaveAlternateScreen )?; @@ -195,12 +206,8 @@ where // disable without calling enable previously let _ = execute!(stdout, DisableMouseCapture); let _ = execute!(stdout, PopKeyboardEnhancementFlags); - execute!( - stdout, - DisableBracketedPaste, - DisableFocusChange, - terminal::LeaveAlternateScreen - )?; + let _ = execute!(stdout, DisableBracketedPaste); + execute!(stdout, DisableFocusChange, terminal::LeaveAlternateScreen)?; terminal::disable_raw_mode() } diff --git a/helix-vcs/Cargo.toml b/helix-vcs/Cargo.toml index dae4c0237ef0..6aa50dcf752e 100644 --- a/helix-vcs/Cargo.toml +++ b/helix-vcs/Cargo.toml @@ -19,7 +19,7 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "p parking_lot = "0.12" arc-swap = { version = "1.6.0" } -gix = { version = "0.57.0", default-features = false , optional = true } +gix = { version = "0.58.0", features = ["attributes"], default-features = false, optional = true } imara-diff = "0.1.5" anyhow = "1" @@ -29,4 +29,4 @@ log = "0.4" git = ["gix"] [dev-dependencies] -tempfile = "3.8" +tempfile = "3.9" diff --git a/helix-vcs/src/git.rs b/helix-vcs/src/git.rs index e4d45301a756..995bade06e0d 100644 --- a/helix-vcs/src/git.rs +++ b/helix-vcs/src/git.rs @@ -1,5 +1,7 @@ use anyhow::{bail, Context, Result}; use arc_swap::ArcSwap; +use gix::filter::plumbing::driver::apply::Delay; +use std::io::Read; use std::path::Path; use std::sync::Arc; @@ -76,29 +78,21 @@ impl DiffProvider for Git { let file_oid = find_file_in_commit(&repo, &head, file)?; let file_object = repo.find_object(file_oid)?; - let mut data = file_object.detach().data; - // convert LF to CRLF if configured to avoid showing every line as changed - if repo - .config_snapshot() - .boolean("core.autocrlf") - .unwrap_or(false) - { - let mut normalized_file = Vec::with_capacity(data.len()); - let mut at_cr = false; - for &byte in &data { - if byte == b'\n' { - // if this is a LF instead of a CRLF (last byte was not a CR) - // insert a new CR to generate a CRLF - if !at_cr { - normalized_file.push(b'\r'); - } - } - at_cr = byte == b'\r'; - normalized_file.push(byte) - } - data = normalized_file + let data = file_object.detach().data; + // Get the actual data that git would make out of the git object. + // This will apply the user's git config or attributes like crlf conversions. 
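// Run the object data through gix's worktree filter pipeline when a working
// directory exists; bare repositories have nothing to filter against, so the
// raw object data is returned unchanged.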
+ if let Some(work_dir) = repo.work_dir() { + let rela_path = file.strip_prefix(work_dir)?; + let rela_path = gix::path::try_into_bstr(rela_path)?; + let (mut pipeline, _) = repo.filter_pipeline(None)?; + let mut worktree_outcome = + pipeline.convert_to_worktree(&data, rela_path.as_ref(), Delay::Forbid)?; + let mut buf = Vec::with_capacity(data.len()); + worktree_outcome.read_to_end(&mut buf)?; + Ok(buf) + } else { + Ok(data) } - Ok(data) } fn get_current_head_name(&self, file: &Path) -> Result>>> { diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index db53b54cc907..2e689341452a 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -15,6 +15,7 @@ default = [] term = ["crossterm"] [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } helix-event = { path = "../helix-event" } helix-loader = { path = "../helix-loader" } @@ -45,7 +46,6 @@ serde_json = "1.0" toml = "0.7" log = "~0.4" -which = "5.0.0" parking_lot = "0.12.1" diff --git a/helix-view/src/clipboard.rs b/helix-view/src/clipboard.rs index 812c803e99a3..9ff2fd788126 100644 --- a/helix-view/src/clipboard.rs +++ b/helix-view/src/clipboard.rs @@ -73,7 +73,7 @@ pub fn get_clipboard_provider() -> Box { #[cfg(target_os = "macos")] pub fn get_clipboard_provider() -> Box { - use crate::env::{binary_exists, env_var_is_set}; + use helix_stdx::env::{binary_exists, env_var_is_set}; if env_var_is_set("TMUX") && binary_exists("tmux") { command_provider! { @@ -98,7 +98,7 @@ pub fn get_clipboard_provider() -> Box { #[cfg(not(any(windows, target_os = "wasm32", target_os = "macos")))] pub fn get_clipboard_provider() -> Box { - use crate::env::{binary_exists, env_var_is_set}; + use helix_stdx::env::{binary_exists, env_var_is_set}; use provider::command::is_exit_success; // TODO: support for user-defined provider, probably when we have plugin support by setting a // variable? diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index af950a3fc283..33137c6c94fb 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -4,10 +4,12 @@ use arc_swap::ArcSwap; use futures_util::future::BoxFuture; use futures_util::FutureExt; use helix_core::auto_pairs::AutoPairs; +use helix_core::chars::char_is_word; use helix_core::doc_formatter::TextFormat; use helix_core::encoding::Encoding; use helix_core::syntax::{Highlight, LanguageServerFeature}; use helix_core::text_annotations::{InlineAnnotation, TextAnnotations}; +use helix_lsp::util::lsp_pos_to_pos; use helix_vcs::{DiffHandle, DiffProviderRegistry}; use ::parking_lot::Mutex; @@ -34,6 +36,7 @@ use helix_core::{ }; use crate::editor::Config; +use crate::events::{DocumentDidChange, SelectionDidChange}; use crate::{DocumentId, Editor, Theme, View, ViewId}; /// 8kB of buffer space for encoding and decoding `Rope`s. 
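Several call sites in this patch drop the external `which` crate in favour of `helix_stdx::env::which` (the health-check output above and the formatter lookup in the next hunk below). The sketch that follows is a minimal stand-in for such a lookup, assuming a Unix-style `PATH` and an `Option` return type; the actual `helix_stdx` function may differ (for instance it appears to return a `Result`, given the `Ok`/`Err` matches at the call sites).

use std::env;
use std::path::{Path, PathBuf};

// Hypothetical `which`-style helper: resolve a command name to the first
// matching executable file on $PATH. Paths containing a separator are
// checked directly instead of being searched for.
fn which(command: impl AsRef<Path>) -> Option<PathBuf> {
    let command = command.as_ref();
    if command.components().count() > 1 {
        return command.is_file().then(|| command.to_path_buf());
    }
    env::var_os("PATH").and_then(|paths| {
        env::split_paths(&paths)
            .map(|dir| dir.join(command))
            .find(|candidate| candidate.is_file())
    })
}

fn main() {
    // Mirrors the ✓/✘ output used by the health checks in the hunks above.
    match which("rust-analyzer") {
        Some(path) => println!("✓ {}", path.display()),
        None => println!("✘ 'rust-analyzer' not found in $PATH"),
    }
}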
@@ -112,19 +115,6 @@ pub struct SavePoint { /// The view this savepoint is associated with pub view: ViewId, revert: Mutex, - pub text: Rope, -} - -impl SavePoint { - pub fn cursor(&self) -> usize { - // we always create transactions with selections - self.revert - .lock() - .selection() - .unwrap() - .primary() - .cursor(self.text.slice(..)) - } } pub struct Document { @@ -736,7 +726,12 @@ impl Document { if let Some((fmt_cmd, fmt_args)) = self .language_config() .and_then(|c| c.formatter.as_ref()) - .and_then(|formatter| Some((which::which(&formatter.command).ok()?, &formatter.args))) + .and_then(|formatter| { + Some(( + helix_stdx::env::which(&formatter.command).ok()?, + &formatter.args, + )) + }) { use std::process::Stdio; let text = self.text().clone(); @@ -853,7 +848,7 @@ impl Document { let text = self.text().clone(); let path = match path { - Some(path) => helix_core::path::get_canonicalized_path(&path), + Some(path) => helix_stdx::path::canonicalize(path), None => { if self.path.is_none() { bail!("Can't save with no path set!"); @@ -1046,8 +1041,11 @@ impl Document { self.encoding } + /// sets the document path without sending events to various + /// observers (like LSP), in most cases `Editor::set_doc_path` + /// should be used instead pub fn set_path(&mut self, path: Option<&Path>) { - let path = path.map(helix_core::path::get_canonicalized_path); + let path = path.map(helix_stdx::path::canonicalize); // if parent doesn't exist we still want to open the document // and error out when document is saved @@ -1075,14 +1073,6 @@ impl Document { }; } - /// Set the programming language for the file if you know the name (scope) but don't have the - /// [`syntax::LanguageConfiguration`] for it. - pub fn set_language2(&mut self, scope: &str, config_loader: Arc) { - let language_config = config_loader.language_config_for_scope(scope); - - self.set_language(language_config, Some(config_loader)); - } - /// Set the programming language for the file if you know the language but don't have the /// [`syntax::LanguageConfiguration`] for it. pub fn set_language_by_language_id( @@ -1102,6 +1092,10 @@ impl Document { // TODO: use a transaction? self.selections .insert(view_id, selection.ensure_invariants(self.text().slice(..))); + helix_event::dispatch(SelectionDidChange { + doc: self, + view: view_id, + }) } /// Find the origin selection of the text in a document, i.e. 
where @@ -1155,6 +1149,14 @@ impl Document { let success = transaction.changes().apply(&mut self.text); if success { + if emit_lsp_notification { + helix_event::dispatch(DocumentDidChange { + doc: self, + view: view_id, + old_text: &old_doc, + }); + } + for selection in self.selections.values_mut() { *selection = selection .clone() @@ -1170,6 +1172,10 @@ impl Document { view_id, selection.clone().ensure_invariants(self.text.slice(..)), ); + helix_event::dispatch(SelectionDidChange { + doc: self, + view: view_id, + }); } self.modified_since_accessed = true; @@ -1222,18 +1228,23 @@ impl Document { }; (&mut diagnostic.range.start, assoc) })); - changes.update_positions(self.diagnostics.iter_mut().map(|diagnostic| { + changes.update_positions(self.diagnostics.iter_mut().filter_map(|diagnostic| { + if diagnostic.zero_width { + // for zero width diagnostics treat the diagnostic as a point + // rather than a range + return None; + } let assoc = if diagnostic.ends_at_word { Assoc::AfterWord } else { Assoc::Before }; - (&mut diagnostic.range.end, assoc) + Some((&mut diagnostic.range.end, assoc)) })); self.diagnostics.retain_mut(|diagnostic| { - if diagnostic.range.start > diagnostic.range.end - || (!diagnostic.zero_width && diagnostic.range.start == diagnostic.range.end) - { + if diagnostic.zero_width { + diagnostic.range.end = diagnostic.range.start + } else if diagnostic.range.start >= diagnostic.range.end { return false; } diagnostic.line = self.text.char_to_line(diagnostic.range.start); @@ -1277,6 +1288,7 @@ impl Document { } if emit_lsp_notification { + // TODO: move to hook // emit lsp notification for language_server in self.language_servers() { let notify = language_server.text_document_did_change( @@ -1387,7 +1399,6 @@ impl Document { let savepoint = Arc::new(SavePoint { view: view.id, revert: Mutex::new(revert), - text: self.text.clone(), }); self.savepoints.push(Arc::downgrade(&savepoint)); savepoint @@ -1673,7 +1684,7 @@ impl Document { pub fn relative_path(&self) -> Option { self.path .as_deref() - .map(helix_core::path::get_relative_path) + .map(helix_stdx::path::get_relative_path) } pub fn display_name(&self) -> Cow<'static, str> { @@ -1709,29 +1720,107 @@ impl Document { ) } + pub fn lsp_diagnostic_to_diagnostic( + text: &Rope, + language_config: Option<&LanguageConfiguration>, + diagnostic: &helix_lsp::lsp::Diagnostic, + language_server_id: usize, + offset_encoding: helix_lsp::OffsetEncoding, + ) -> Option { + use helix_core::diagnostic::{Range, Severity::*}; + + // TODO: convert inside server + let start = + if let Some(start) = lsp_pos_to_pos(text, diagnostic.range.start, offset_encoding) { + start + } else { + log::warn!("lsp position out of bounds - {:?}", diagnostic); + return None; + }; + + let end = if let Some(end) = lsp_pos_to_pos(text, diagnostic.range.end, offset_encoding) { + end + } else { + log::warn!("lsp position out of bounds - {:?}", diagnostic); + return None; + }; + + let severity = diagnostic.severity.map(|severity| match severity { + lsp::DiagnosticSeverity::ERROR => Error, + lsp::DiagnosticSeverity::WARNING => Warning, + lsp::DiagnosticSeverity::INFORMATION => Info, + lsp::DiagnosticSeverity::HINT => Hint, + severity => unreachable!("unrecognized diagnostic severity: {:?}", severity), + }); + + if let Some(lang_conf) = language_config { + if let Some(severity) = severity { + if severity < lang_conf.diagnostic_severity { + return None; + } + } + }; + use helix_core::diagnostic::{DiagnosticTag, NumberOrString}; + + let code = match diagnostic.code.clone() { + 
Some(x) => match x { + lsp::NumberOrString::Number(x) => Some(NumberOrString::Number(x)), + lsp::NumberOrString::String(x) => Some(NumberOrString::String(x)), + }, + None => None, + }; + + let tags = if let Some(tags) = &diagnostic.tags { + let new_tags = tags + .iter() + .filter_map(|tag| match *tag { + lsp::DiagnosticTag::DEPRECATED => Some(DiagnosticTag::Deprecated), + lsp::DiagnosticTag::UNNECESSARY => Some(DiagnosticTag::Unnecessary), + _ => None, + }) + .collect(); + + new_tags + } else { + Vec::new() + }; + + let ends_at_word = + start != end && end != 0 && text.get_char(end - 1).map_or(false, char_is_word); + let starts_at_word = start != end && text.get_char(start).map_or(false, char_is_word); + + Some(Diagnostic { + range: Range { start, end }, + ends_at_word, + starts_at_word, + zero_width: start == end, + line: diagnostic.range.start.line as usize, + message: diagnostic.message.clone(), + severity, + code, + tags, + source: diagnostic.source.clone(), + data: diagnostic.data.clone(), + language_server_id, + }) + } + #[inline] pub fn diagnostics(&self) -> &[Diagnostic] { &self.diagnostics } - pub fn shown_diagnostics(&self) -> impl Iterator + DoubleEndedIterator { - self.diagnostics.iter().filter(|d| { - self.language_servers_with_feature(LanguageServerFeature::Diagnostics) - .any(|ls| ls.id() == d.language_server_id) - }) - } - pub fn replace_diagnostics( &mut self, diagnostics: impl IntoIterator, unchanged_sources: &[String], - language_server_id: usize, + language_server_id: Option, ) { if unchanged_sources.is_empty() { self.clear_diagnostics(language_server_id); } else { self.diagnostics.retain(|d| { - if d.language_server_id != language_server_id { + if language_server_id.map_or(false, |id| id != d.language_server_id) { return true; } @@ -1752,9 +1841,13 @@ impl Document { }); } - pub fn clear_diagnostics(&mut self, language_server_id: usize) { - self.diagnostics - .retain(|d| d.language_server_id != language_server_id); + /// clears diagnostics for a given language server id if set, otherwise all diagnostics are cleared + pub fn clear_diagnostics(&mut self, language_server_id: Option) { + if let Some(id) = language_server_id { + self.diagnostics.retain(|d| d.language_server_id != id); + } else { + self.diagnostics.clear(); + } } /// Get the document's auto pairs. 
If the document has a recognized diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 76429a876fc3..0fa6d67c9876 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -2,6 +2,7 @@ use crate::{ align_view, document::{DocumentSavedEventFuture, DocumentSavedEventResult, Mode, SavePoint}, graphics::{CursorKind, Rect}, + handlers::Handlers, info::Info, input::KeyEvent, register::Registers, @@ -22,7 +23,8 @@ use std::{ borrow::Cow, cell::Cell, collections::{BTreeMap, HashMap}, - io::stdin, + fs, + io::{self, stdin}, num::NonZeroUsize, path::{Path, PathBuf}, pin::Pin, @@ -30,10 +32,7 @@ use std::{ }; use tokio::{ - sync::{ - mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, - oneshot, - }, + sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, time::{sleep, Duration, Instant, Sleep}, }; @@ -42,11 +41,12 @@ use anyhow::{anyhow, bail, Error}; pub use helix_core::diagnostic::Severity; use helix_core::{ auto_pairs::AutoPairs, - syntax::{self, AutoPairConfig, IndentationHeuristic, SoftWrap}, + syntax::{self, AutoPairConfig, IndentationHeuristic, LanguageServerFeature, SoftWrap}, Change, LineEnding, Position, Selection, NATIVE_LINE_ENDING, }; use helix_dap as dap; use helix_lsp::lsp; +use helix_stdx::path::canonicalize; use serde::{ser::SerializeMap, Deserialize, Deserializer, Serialize, Serializer}; @@ -243,12 +243,19 @@ pub struct Config { /// Set a global text_width pub text_width: usize, /// Time in milliseconds since last keypress before idle timers trigger. - /// Used for autocompletion, set to 0 for instant. Defaults to 250ms. + /// Used for various UI timeouts. Defaults to 250ms. #[serde( serialize_with = "serialize_duration_millis", deserialize_with = "deserialize_duration_millis" )] pub idle_timeout: Duration, + /// Time in milliseconds after typing a word character before auto completions + /// are shown, set to 5 for instant. Defaults to 250ms. + #[serde( + serialize_with = "serialize_duration_millis", + deserialize_with = "deserialize_duration_millis" + )] + pub completion_timeout: Duration, /// Whether to insert the completion suggestion on hover. Defaults to true. pub preview_completion_insert: bool, pub completion_trigger_len: u8, @@ -324,7 +331,7 @@ pub struct TerminalConfig { #[cfg(windows)] pub fn get_terminal_provider() -> Option { - use crate::env::binary_exists; + use helix_stdx::env::binary_exists; if binary_exists("wt") { return Some(TerminalConfig { @@ -347,7 +354,7 @@ pub fn get_terminal_provider() -> Option { #[cfg(not(any(windows, target_os = "wasm32")))] pub fn get_terminal_provider() -> Option { - use crate::env::{binary_exists, env_var_is_set}; + use helix_stdx::env::{binary_exists, env_var_is_set}; if env_var_is_set("TMUX") && binary_exists("tmux") { return Some(TerminalConfig { @@ -828,6 +835,7 @@ impl Default for Config { auto_format: true, auto_save: false, idle_timeout: Duration::from_millis(250), + completion_timeout: Duration::from_millis(250), preview_completion_insert: true, completion_trigger_len: 2, auto_info: true, @@ -952,14 +960,7 @@ pub struct Editor { /// avoid calculating the cursor position multiple /// times during rendering and should not be set by other functions. pub cursor_cache: Cell>>, - /// When a new completion request is sent to the server old - /// unfinished request must be dropped. Each completion - /// request is associated with a channel that cancels - /// when the channel is dropped. That channel is stored - /// here. 
When a new completion request is sent this - /// field is set and any old requests are automatically - /// canceled as a result - pub completion_request_handle: Option>, + pub handlers: Handlers, } pub type Motion = Box; @@ -987,13 +988,16 @@ enum ThemeAction { #[derive(Debug, Clone)] pub enum CompleteAction { + Triggered, + /// A savepoint of the currently selected completion. The savepoint + /// MUST be restored before sending any event to the LSP + Selected { + savepoint: Arc, + }, Applied { trigger_offset: usize, changes: Vec, }, - /// A savepoint of the currently selected completion. The savepoint - /// MUST be restored before sending any event to the LSP - Selected { savepoint: Arc }, } #[derive(Debug, Copy, Clone)] @@ -1027,6 +1031,7 @@ impl Editor { theme_loader: Arc, syn_loader: Arc, config: Arc>, + handlers: Handlers, ) -> Self { let language_servers = helix_lsp::Registry::new(syn_loader.clone()); let conf = config.load(); @@ -1071,7 +1076,7 @@ impl Editor { config_events: unbounded_channel(), needs_redraw: false, cursor_cache: Cell::new(None), - completion_request_handle: None, + handlers, } } @@ -1144,7 +1149,7 @@ impl Editor { #[inline] pub fn set_error>>(&mut self, error: T) { let error = error.into(); - log::error!("editor error: {}", error); + log::debug!("editor error: {}", error); self.status_msg = Some((error, Severity::Error)); } @@ -1212,6 +1217,90 @@ impl Editor { self.launch_language_servers(doc_id) } + /// moves/renames a path, invoking any event handlers (currently only lsp) + /// and calling `set_doc_path` if the file is open in the editor + pub fn move_path(&mut self, old_path: &Path, new_path: &Path) -> io::Result<()> { + let new_path = canonicalize(new_path); + // sanity check + if old_path == new_path { + return Ok(()); + } + let is_dir = old_path.is_dir(); + let language_servers: Vec<_> = self + .language_servers + .iter_clients() + .filter(|client| client.is_initialized()) + .cloned() + .collect(); + for language_server in language_servers { + let Some(request) = language_server.will_rename(old_path, &new_path, is_dir) else { + continue; + }; + let edit = match helix_lsp::block_on(request) { + Ok(edit) => edit, + Err(err) => { + log::error!("invalid willRename response: {err:?}"); + continue; + } + }; + if let Err(err) = self.apply_workspace_edit(language_server.offset_encoding(), &edit) { + log::error!("failed to apply workspace edit: {err:?}") + } + } + fs::rename(old_path, &new_path)?; + if let Some(doc) = self.document_by_path(old_path) { + self.set_doc_path(doc.id(), &new_path); + } + let is_dir = new_path.is_dir(); + for ls in self.language_servers.iter_clients() { + if let Some(notification) = ls.did_rename(old_path, &new_path, is_dir) { + tokio::spawn(notification); + }; + } + self.language_servers + .file_event_handler + .file_changed(old_path.to_owned()); + self.language_servers + .file_event_handler + .file_changed(new_path); + Ok(()) + } + + pub fn set_doc_path(&mut self, doc_id: DocumentId, path: &Path) { + let doc = doc_mut!(self, &doc_id); + let old_path = doc.path(); + + if let Some(old_path) = old_path { + // sanity check, should not occur but some callers (like an LSP) may + // create bogus calls + if old_path == path { + return; + } + // if we are open in LSPs send did_close notification + for language_server in doc.language_servers() { + tokio::spawn(language_server.text_document_did_close(doc.identifier())); + } + } + // we need to clear the list of language servers here so that + // refresh_doc_language/refresh_language_servers doesn't 
resend + // text_document_did_close. Since we called `text_document_did_close` + // we have fully unregistered this document from its LS + doc.language_servers.clear(); + doc.set_path(Some(path)); + self.refresh_doc_language(doc_id) + } + + pub fn refresh_doc_language(&mut self, doc_id: DocumentId) { + let loader = self.syn_loader.clone(); + let doc = doc_mut!(self, &doc_id); + doc.detect_language(loader); + doc.detect_indent_and_line_ending(); + self.refresh_language_servers(doc_id); + let doc = doc_mut!(self, &doc_id); + let diagnostics = Editor::doc_diagnostics(&self.language_servers, &self.diagnostics, doc); + doc.replace_diagnostics(diagnostics, &[], None); + } + /// Launch a language server for a given document fn launch_language_servers(&mut self, doc_id: DocumentId) { if !self.config().lsp.enable { @@ -1235,19 +1324,26 @@ impl Editor { .filter_map(|(lang, client)| match client { Ok(client) => Some((lang, client)), Err(err) => { - log::error!( - "Failed to initialize the language servers for `{}` - `{}` {{ {} }}", - language.scope(), - lang, - err - ); + if let helix_lsp::Error::ExecutableNotFound(err) = err { + // Silence by default since some language servers might just not be installed + log::debug!( + "Language server not found for `{}` {} {}", language.scope(), lang, err, + ); + } else { + log::error!( + "Failed to initialize the language servers for `{}` - `{}` {{ {} }}", + language.scope(), + lang, + err + ); + } None } }) .collect::>() }); - if language_servers.is_empty() { + if language_servers.is_empty() && doc.language_servers.is_empty() { return; } @@ -1464,7 +1560,7 @@ impl Editor { // ??? possible use for integration tests pub fn open(&mut self, path: &Path, action: Action) -> Result { - let path = helix_core::path::get_canonicalized_path(path); + let path = helix_stdx::path::canonicalize(path); let id = self.document_by_path(&path).map(|doc| doc.id); let id = if let Some(id) = id { @@ -1477,6 +1573,10 @@ impl Editor { self.config.clone(), )?; + let diagnostics = + Editor::doc_diagnostics(&self.language_servers, &self.diagnostics, &doc); + doc.replace_diagnostics(diagnostics, &[], None); + if let Some(diff_base) = self.diff_providers.get_diff_base(&path) { doc.set_diff_base(diff_base); } @@ -1706,6 +1806,60 @@ impl Editor { .find(|doc| doc.path().map(|p| p == path.as_ref()).unwrap_or(false)) } + /// Returns all supported diagnostics for the document + pub fn doc_diagnostics<'a>( + language_servers: &'a helix_lsp::Registry, + diagnostics: &'a BTreeMap>, + document: &Document, + ) -> impl Iterator + 'a { + Editor::doc_diagnostics_with_filter(language_servers, diagnostics, document, |_, _| true) + } + + /// Returns all supported diagnostics for the document + /// filtered by `filter` which is invocated with the raw `lsp::Diagnostic` and the language server id it came from + pub fn doc_diagnostics_with_filter<'a>( + language_servers: &'a helix_lsp::Registry, + diagnostics: &'a BTreeMap>, + + document: &Document, + filter: impl Fn(&lsp::Diagnostic, usize) -> bool + 'a, + ) -> impl Iterator + 'a { + let text = document.text().clone(); + let language_config = document.language.clone(); + document + .path() + .and_then(|path| url::Url::from_file_path(path).ok()) // TODO log error? 
+ .and_then(|uri| diagnostics.get(&uri)) + .map(|diags| { + diags.iter().filter_map(move |(diagnostic, lsp_id)| { + let ls = language_servers.get_by_id(*lsp_id)?; + language_config + .as_ref() + .and_then(|c| { + c.language_servers.iter().find(|features| { + features.name == ls.name() + && features.has_feature(LanguageServerFeature::Diagnostics) + }) + }) + .and_then(|_| { + if filter(diagnostic, *lsp_id) { + Document::lsp_diagnostic_to_diagnostic( + &text, + language_config.as_deref(), + diagnostic, + *lsp_id, + ls.offset_encoding(), + ) + } else { + None + } + }) + }) + }) + .into_iter() + .flatten() + } + /// Gets the primary cursor position in screen coordinates, /// or `None` if the primary cursor is not visible on screen. pub fn cursor(&self) -> (Option, CursorKind) { @@ -1836,10 +1990,12 @@ impl Editor { if doc.restore_cursor { let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { - Range::new( - range.from(), - graphemes::prev_grapheme_boundary(text, range.to()), - ) + let mut head = range.to(); + if range.head > range.anchor { + head = graphemes::prev_grapheme_boundary(text, head); + } + + Range::new(range.from(), head) }); doc.set_selection(view.id, selection); @@ -1852,6 +2008,30 @@ impl Editor { .as_ref() .and_then(|debugger| debugger.current_stack_frame()) } + + /// Returns the id of a view that this doc contains a selection for, + /// making sure it is synced with the current changes + /// if possible or there are no selections returns current_view + /// otherwise uses an arbitrary view + pub fn get_synced_view_id(&mut self, id: DocumentId) -> ViewId { + let current_view = view_mut!(self); + let doc = self.documents.get_mut(&id).unwrap(); + if doc.selections().contains_key(¤t_view.id) { + // only need to sync current view if this is not the current doc + if current_view.doc != id { + current_view.sync_changes(doc); + } + current_view.id + } else if let Some(view_id) = doc.selections().keys().next() { + let view_id = *view_id; + let view = self.tree.get_mut(view_id); + view.sync_changes(doc); + view_id + } else { + doc.ensure_view_init(current_view.id); + current_view.id + } + } } fn try_restore_indent(doc: &mut Document, view: &mut View) { diff --git a/helix-view/src/env.rs b/helix-view/src/env.rs deleted file mode 100644 index c68cc609a3b0..000000000000 --- a/helix-view/src/env.rs +++ /dev/null @@ -1,8 +0,0 @@ -pub fn binary_exists(binary_name: &str) -> bool { - which::which(binary_name).is_ok() -} - -#[cfg(not(windows))] -pub fn env_var_is_set(env_var_name: &str) -> bool { - std::env::var_os(env_var_name).is_some() -} diff --git a/helix-view/src/events.rs b/helix-view/src/events.rs new file mode 100644 index 000000000000..8b789cc0d21e --- /dev/null +++ b/helix-view/src/events.rs @@ -0,0 +1,9 @@ +use helix_core::Rope; +use helix_event::events; + +use crate::{Document, ViewId}; + +events! 
{ + DocumentDidChange<'a> { doc: &'a mut Document, view: ViewId, old_text: &'a Rope } + SelectionDidChange<'a> { doc: &'a mut Document, view: ViewId } +} diff --git a/helix-view/src/handlers.rs b/helix-view/src/handlers.rs new file mode 100644 index 000000000000..724e7b1921ca --- /dev/null +++ b/helix-view/src/handlers.rs @@ -0,0 +1,41 @@ +use helix_event::send_blocking; +use tokio::sync::mpsc::Sender; + +use crate::handlers::lsp::SignatureHelpInvoked; +use crate::{DocumentId, Editor, ViewId}; + +pub mod dap; +pub mod lsp; + +pub struct Handlers { + // only public because most of the actual implementation is in helix-term right now :/ + pub completions: Sender, + pub signature_hints: Sender, +} + +impl Handlers { + /// Manually trigger completion (c-x) + pub fn trigger_completions(&self, trigger_pos: usize, doc: DocumentId, view: ViewId) { + send_blocking( + &self.completions, + lsp::CompletionEvent::ManualTrigger { + cursor: trigger_pos, + doc, + view, + }, + ); + } + + pub fn trigger_signature_help(&self, invocation: SignatureHelpInvoked, editor: &Editor) { + let event = match invocation { + SignatureHelpInvoked::Automatic => { + if !editor.config().lsp.auto_signature_help { + return; + } + lsp::SignatureHelpEvent::Trigger + } + SignatureHelpInvoked::Manual => lsp::SignatureHelpEvent::Invoked, + }; + send_blocking(&self.signature_hints, event) + } +} diff --git a/helix-view/src/handlers/lsp.rs b/helix-view/src/handlers/lsp.rs index 8b137891791f..beb106b2bf83 100644 --- a/helix-view/src/handlers/lsp.rs +++ b/helix-view/src/handlers/lsp.rs @@ -1 +1,270 @@ +use crate::editor::Action; +use crate::Editor; +use crate::{DocumentId, ViewId}; +use helix_lsp::util::generate_transaction_from_edits; +use helix_lsp::{lsp, OffsetEncoding}; +pub enum CompletionEvent { + /// Auto completion was triggered by typing a word char + AutoTrigger { + cursor: usize, + doc: DocumentId, + view: ViewId, + }, + /// Auto completion was triggered by typing a trigger char + /// specified by the LSP + TriggerChar { + cursor: usize, + doc: DocumentId, + view: ViewId, + }, + /// A completion was manually requested (c-x) + ManualTrigger { + cursor: usize, + doc: DocumentId, + view: ViewId, + }, + /// Some text was deleted and the cursor is now at `pos` + DeleteText { cursor: usize }, + /// Invalidate the current auto completion trigger + Cancel, +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum SignatureHelpInvoked { + Automatic, + Manual, +} + +pub enum SignatureHelpEvent { + Invoked, + Trigger, + ReTrigger, + Cancel, + RequestComplete { open: bool }, +} + +#[derive(Debug)] +pub struct ApplyEditError { + pub kind: ApplyEditErrorKind, + pub failed_change_idx: usize, +} + +#[derive(Debug)] +pub enum ApplyEditErrorKind { + DocumentChanged, + FileNotFound, + UnknownURISchema, + IoError(std::io::Error), + // TODO: check edits before applying and propagate failure + // InvalidEdit, +} + +impl ToString for ApplyEditErrorKind { + fn to_string(&self) -> String { + match self { + ApplyEditErrorKind::DocumentChanged => "document has changed".to_string(), + ApplyEditErrorKind::FileNotFound => "file not found".to_string(), + ApplyEditErrorKind::UnknownURISchema => "URI schema not supported".to_string(), + ApplyEditErrorKind::IoError(err) => err.to_string(), + } + } +} + +impl Editor { + fn apply_text_edits( + &mut self, + uri: &helix_lsp::Url, + version: Option, + text_edits: Vec, + offset_encoding: OffsetEncoding, + ) -> Result<(), ApplyEditErrorKind> { + let path = match uri.to_file_path() { + Ok(path) => path, + Err(_) 
=> { + let err = format!("unable to convert URI to filepath: {}", uri); + log::error!("{}", err); + self.set_error(err); + return Err(ApplyEditErrorKind::UnknownURISchema); + } + }; + + let doc_id = match self.open(&path, Action::Load) { + Ok(doc_id) => doc_id, + Err(err) => { + let err = format!("failed to open document: {}: {}", uri, err); + log::error!("{}", err); + self.set_error(err); + return Err(ApplyEditErrorKind::FileNotFound); + } + }; + + let doc = doc_mut!(self, &doc_id); + if let Some(version) = version { + if version != doc.version() { + let err = format!("outdated workspace edit for {path:?}"); + log::error!("{err}, expected {} but got {version}", doc.version()); + self.set_error(err); + return Err(ApplyEditErrorKind::DocumentChanged); + } + } + + // Need to determine a view for apply/append_changes_to_history + let view_id = self.get_synced_view_id(doc_id); + let doc = doc_mut!(self, &doc_id); + + let transaction = generate_transaction_from_edits(doc.text(), text_edits, offset_encoding); + let view = view_mut!(self, view_id); + doc.apply(&transaction, view.id); + doc.append_changes_to_history(view); + Ok(()) + } + + // TODO make this transactional (and set failureMode to transactional) + pub fn apply_workspace_edit( + &mut self, + offset_encoding: OffsetEncoding, + workspace_edit: &lsp::WorkspaceEdit, + ) -> Result<(), ApplyEditError> { + if let Some(ref document_changes) = workspace_edit.document_changes { + match document_changes { + lsp::DocumentChanges::Edits(document_edits) => { + for (i, document_edit) in document_edits.iter().enumerate() { + let edits = document_edit + .edits + .iter() + .map(|edit| match edit { + lsp::OneOf::Left(text_edit) => text_edit, + lsp::OneOf::Right(annotated_text_edit) => { + &annotated_text_edit.text_edit + } + }) + .cloned() + .collect(); + self.apply_text_edits( + &document_edit.text_document.uri, + document_edit.text_document.version, + edits, + offset_encoding, + ) + .map_err(|kind| ApplyEditError { + kind, + failed_change_idx: i, + })?; + } + } + lsp::DocumentChanges::Operations(operations) => { + log::debug!("document changes - operations: {:?}", operations); + for (i, operation) in operations.iter().enumerate() { + match operation { + lsp::DocumentChangeOperation::Op(op) => { + self.apply_document_resource_op(op).map_err(|io| { + ApplyEditError { + kind: ApplyEditErrorKind::IoError(io), + failed_change_idx: i, + } + })?; + } + + lsp::DocumentChangeOperation::Edit(document_edit) => { + let edits = document_edit + .edits + .iter() + .map(|edit| match edit { + lsp::OneOf::Left(text_edit) => text_edit, + lsp::OneOf::Right(annotated_text_edit) => { + &annotated_text_edit.text_edit + } + }) + .cloned() + .collect(); + self.apply_text_edits( + &document_edit.text_document.uri, + document_edit.text_document.version, + edits, + offset_encoding, + ) + .map_err(|kind| { + ApplyEditError { + kind, + failed_change_idx: i, + } + })?; + } + } + } + } + } + + return Ok(()); + } + + if let Some(ref changes) = workspace_edit.changes { + log::debug!("workspace changes: {:?}", changes); + for (i, (uri, text_edits)) in changes.iter().enumerate() { + let text_edits = text_edits.to_vec(); + self.apply_text_edits(uri, None, text_edits, offset_encoding) + .map_err(|kind| ApplyEditError { + kind, + failed_change_idx: i, + })?; + } + } + + Ok(()) + } + + fn apply_document_resource_op(&mut self, op: &lsp::ResourceOp) -> std::io::Result<()> { + use lsp::ResourceOp; + use std::fs; + match op { + ResourceOp::Create(op) => { + let path = 
op.uri.to_file_path().unwrap(); + let ignore_if_exists = op.options.as_ref().map_or(false, |options| { + !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) + }); + if !ignore_if_exists || !path.exists() { + // Create directory if it does not exist + if let Some(dir) = path.parent() { + if !dir.is_dir() { + fs::create_dir_all(dir)?; + } + } + + fs::write(&path, [])?; + self.language_servers.file_event_handler.file_changed(path); + } + } + ResourceOp::Delete(op) => { + let path = op.uri.to_file_path().unwrap(); + if path.is_dir() { + let recursive = op + .options + .as_ref() + .and_then(|options| options.recursive) + .unwrap_or(false); + + if recursive { + fs::remove_dir_all(&path)? + } else { + fs::remove_dir(&path)? + } + self.language_servers.file_event_handler.file_changed(path); + } else if path.is_file() { + fs::remove_file(&path)?; + } + } + ResourceOp::Rename(op) => { + let from = op.old_uri.to_file_path().unwrap(); + let to = op.new_uri.to_file_path().unwrap(); + let ignore_if_exists = op.options.as_ref().map_or(false, |options| { + !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) + }); + if !ignore_if_exists || !to.exists() { + self.move_path(&from, &to)?; + } + } + } + Ok(()) + } +} diff --git a/helix-view/src/input.rs b/helix-view/src/input.rs index 0f4ffaacf9cc..5f5067eac9f8 100644 --- a/helix-view/src/input.rs +++ b/helix-view/src/input.rs @@ -325,7 +325,7 @@ impl std::str::FromStr for KeyEvent { fn from_str(s: &str) -> Result { let mut tokens: Vec<_> = s.split('-').collect(); - let code = match tokens.pop().ok_or_else(|| anyhow!("Missing key code"))? { + let mut code = match tokens.pop().ok_or_else(|| anyhow!("Missing key code"))? { keys::BACKSPACE => KeyCode::Backspace, keys::ENTER => KeyCode::Enter, keys::LEFT => KeyCode::Left, @@ -405,6 +405,18 @@ impl std::str::FromStr for KeyEvent { modifiers.insert(flag); } + // Normalize character keys so that characters like C-S-r and C-R + // are represented by equal KeyEvents. + match code { + KeyCode::Char(ch) + if ch.is_ascii_lowercase() && modifiers.contains(KeyModifiers::SHIFT) => + { + code = KeyCode::Char(ch.to_ascii_uppercase()); + modifiers.remove(KeyModifiers::SHIFT); + } + _ => (), + } + Ok(KeyEvent { code, modifiers }) } } @@ -684,6 +696,19 @@ mod test { modifiers: KeyModifiers::ALT | KeyModifiers::CONTROL } ); + + assert_eq!( + str::parse::("C-S-r").unwrap(), + str::parse::("C-R").unwrap(), + ); + + assert_eq!( + str::parse::("S-w").unwrap(), + KeyEvent { + code: KeyCode::Char('W'), + modifiers: KeyModifiers::NONE + } + ); } #[test] diff --git a/helix-view/src/lib.rs b/helix-view/src/lib.rs index 6a68e7d6f9bd..14b6e1ce8138 100644 --- a/helix-view/src/lib.rs +++ b/helix-view/src/lib.rs @@ -1,17 +1,14 @@ #[macro_use] pub mod macros; +pub mod base64; pub mod clipboard; pub mod document; pub mod editor; -pub mod env; +pub mod events; pub mod graphics; pub mod gutter; -pub mod handlers { - pub mod dap; - pub mod lsp; -} -pub mod base64; +pub mod handlers; pub mod info; pub mod input; pub mod keyboard; diff --git a/languages.toml b/languages.toml index 63bb2e4c0da3..9bf1f8b70f05 100644 --- a/languages.toml +++ b/languages.toml @@ -1,6 +1,8 @@ # Language support configuration. 
# See the languages documentation: https://docs.helix-editor.com/master/languages.html +use-grammars = { except = [ "hare", "wren", "gemini" ] } + [language-server] als = { command = "als" } @@ -324,6 +326,29 @@ comment-token = "//" language-servers = [ "mint" ] indent = { tab-width = 2, unit = " " } +[[language]] +name = "janet" +scope = "source.janet" +injection-regex = "janet" +file-types = ["cgen", "janet", "jdn"] +shebangs = ["janet"] +roots = ["project.janet"] +comment-token = "#" +indent = { tab-width = 2, unit = " " } +formatter = { command = "janet-format" } +grammar = "janet-simple" + +[language.auto-pairs] +'"' = '"' +'(' = ')' +'[' = ']' +'{' = '}' +"`" = "`" + +[[grammar]] +name = "janet-simple" +source = { git = "https://github.com/sogaiu/tree-sitter-janet-simple", rev = "51271e260346878e1a1aa6c506ce6a797b7c25e2" } + [[language]] name = "json" scope = "source.json" @@ -348,7 +373,8 @@ file-types = [ ".vuerc", "composer.lock", ".watchmanconfig", - "avsc" + "avsc", + ".prettierrc" ] language-servers = [ "vscode-json-language-server" ] auto-format = true @@ -793,6 +819,7 @@ file-types = [ "sh", "bash", "zsh", + ".bash_history", ".bash_login", ".bash_logout", ".bash_profile", @@ -1116,7 +1143,7 @@ name = "purescript" scope = "source.purescript" injection-regex = "purescript" file-types = ["purs"] -roots = ["spago.dhall", "bower.json"] +roots = ["spago.yaml", "spago.dhall", "bower.json"] comment-token = "--" language-servers = [ "purescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -1245,7 +1272,7 @@ file-types = ["pod"] [[grammar]] name = "pod" -source = { git = "https://github.com/tree-sitter-perl/tree-sitter-pod", rev = "d466b84009a63986834498073ec05d58d727d55f" } +source = { git = "https://github.com/tree-sitter-perl/tree-sitter-pod", rev = "39da859947b94abdee43e431368e1ae975c0a424" } [[language]] name = "racket" @@ -1281,7 +1308,7 @@ injection-regex = "comment" [[grammar]] name = "comment" -source = { git = "https://github.com/stsewd/tree-sitter-comment", rev = "a37ca370310ac6f89b6e0ebf2b86b2219780494e" } +source = { git = "https://github.com/stsewd/tree-sitter-comment", rev = "aefcc2813392eb6ffe509aa0fc8b4e9b57413ee1" } [[language]] name = "wgsl" @@ -1390,7 +1417,7 @@ language-servers = [ "metals" ] [[grammar]] name = "scala" -source = { git = "https://github.com/tree-sitter/tree-sitter-scala", rev = "23d21310fe4ab4b3273e7a6810e781224a3e7fe1" } +source = { git = "https://github.com/tree-sitter/tree-sitter-scala", rev = "7891815f42dca9ed6aeb464c2edc39d479ab965c" } [[language]] name = "dockerfile" @@ -1480,7 +1507,7 @@ source = { git = "https://github.com/mtoohey31/tree-sitter-gitattributes", rev = [[language]] name = "git-ignore" scope = "source.gitignore" -file-types = [".gitignore", ".gitignore_global", ".ignore", ".prettierignore", ".eslintignore", ".npmignore", "CODEOWNERS"] +file-types = [".gitignore", ".gitignore_global", ".ignore", ".prettierignore", ".eslintignore", ".npmignore", "CODEOWNERS", { suffix = ".config/helix/ignore" }, { suffix = ".helix/ignore" }] injection-regex = "git-ignore" comment-token = "#" grammar = "gitignore" @@ -1805,7 +1832,7 @@ language-servers = [ "nu-lsp" ] [[grammar]] name = "nu" -source = { git = "https://github.com/nushell/tree-sitter-nu", rev = "98c11c491e3405c75affa1cf004097692da3dda2" } +source = { git = "https://github.com/nushell/tree-sitter-nu", rev = "358c4f509eb97f0148bbd25ad36acc729819b9c1" } [[language]] name = "vala" @@ -1913,6 +1940,12 @@ shebangs = ["scheme", "guile", "chicken"] comment-token = ";" indent = 
{ tab-width = 2, unit = " " } +[language.auto-pairs] +'(' = ')' +'{' = '}' +'[' = ']' +'"' = '"' + [[grammar]] name = "scheme" source = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af3af6c9356b936f8a515a1e449c32e804c2b1a8" } @@ -2106,7 +2139,7 @@ language-servers = [ "slint-lsp" ] [[grammar]] name = "slint" -source = { git = "https://github.com/jrmoulton/tree-sitter-slint", rev = "00c8a2d3645766f68c0d0460086c0a994e5b0d85" } +source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "15618215b79b9db08f824a5c97a12d073dcc1c00" } [[language]] name = "task" @@ -2249,7 +2282,7 @@ grammar = "vhs" [[grammar]] name = "vhs" -source = { git = "https://github.com/charmbracelet/tree-sitter-vhs", rev = "c6d81f34c011c29ee86dd73b45a8ecc9f2e2bdaf" } +source = { git = "https://github.com/charmbracelet/tree-sitter-vhs", rev = "9534865e614c95eb9418e5e73f061c32fa4d9540" } [[language]] name = "kdl" @@ -2656,7 +2689,7 @@ language-servers = [ "cs" ] [[grammar]] name = "smithy" -source = { git = "https://github.com/indoorvivants/tree-sitter-smithy", rev = "cf8c7eb9faf7c7049839585eac19c94af231e6a0" } +source = { git = "https://github.com/indoorvivants/tree-sitter-smithy", rev = "8327eb84d55639ffbe08c9dc82da7fff72a1ad07" } [[language]] name = "vhdl" @@ -2855,7 +2888,7 @@ indent = { tab-width = 2, unit = " " } [[grammar]] name = "typst" -source = { git = "https://github.com/uben0/tree-sitter-typst", rev = "e35aa22395fdde82bbc4b5700c324ce346dfc9e5" } +source = { git = "https://github.com/uben0/tree-sitter-typst", rev = "ecf8596336857adfcd5f7cbb3b2aa11a67badc37" } [[language]] name = "nunjucks" @@ -2879,7 +2912,7 @@ source = { git = "https://github.com/varpeti/tree-sitter-jinja2", rev = "a533cd3 [[grammar]] name = "wren" -source = { git = "https://git.sr.ht/~jummit/tree-sitter-wren", rev = "793d58266924e6efcc40e411663393e9d72bec87"} +source = { git = "https://git.sr.ht/~jummit/tree-sitter-wren", rev = "6748694be32f11e7ec6b5faeb1b48ca6156d4e06" } [[language]] name = "wren" @@ -2907,7 +2940,7 @@ indent = { tab-width = 4, unit = " " } [[grammar]] name = "unison" -source = { git = "https://github.com/kylegoetz/tree-sitter-unison", rev = "98c4e8bc5c9f5989814a720457cf36963cf4043d" } +source = { git = "https://github.com/kylegoetz/tree-sitter-unison", rev = "1f505e2447fa876a87aee47ff3d70b9e141c744f" } [[language]] name = "todotxt" @@ -2998,15 +3031,6 @@ file-types = ["log"] name = "log" source = { git = "https://github.com/Tudyx/tree-sitter-log", rev = "62cfe307e942af3417171243b599cc7deac5eab9" } -[[language]] -name = "janet" -scope = "source.janet" -injection-regex = "janet" -file-types = ["janet"] -comment-token = "#" -indent = { tab-width = 2, unit = " " } -grammar = "clojure" - [[language]] name = "hoon" scope = "source.hoon" @@ -3018,3 +3042,45 @@ indent = {tab-width = 2, unit = " "} [[grammar]] name = "hoon" source = { git = "https://github.com/urbit-pilled/tree-sitter-hoon", rev = "1d5df35af3e0afe592832a67b9fb3feeeba1f7b6" } + +[[language]] +name = "hocon" +scope = "source.conf" +file-types = ["conf"] +comment-token = "#" +auto-format = true +indent = { tab-width = 2, unit = " " } + +[[grammar]] +name = "hocon" +source = { git = "https://github.com/antosha417/tree-sitter-hocon", rev = "c390f10519ae69fdb03b3e5764f5592fb6924bcc" } + +[[language]] +name = "tact" +scope = "source.tact" +injection-regex = "tact" +file-types = ["tact"] +comment-token = "//" +indent = { tab-width = 4, unit = " " } + +[language.auto-pairs] +'"' = '"' +'{' = '}' +'(' = ')' +'<' = '>' + +[[grammar]] +name = "tact" 
+source = { git = "https://github.com/tact-lang/tree-sitter-tact", rev = "ec57ab29c86d632639726631fb2bb178d23e1c91" } + +[[language]] +name = "pkl" +scope = "source.pkl" +injection-regex = "pkl" +file-types = ["pkl", "pcf"] +comment-token = "//" +indent = { tab-width = 2, unit = " " } + +[[grammar]] +name = "pkl" +source = { git = "https://github.com/apple/tree-sitter-pkl", rev = "c03f04a313b712f8ab00a2d862c10b37318699ae" } diff --git a/logo.svg b/logo.svg index a2d1c11097ea..1408f897f9ba 100644 --- a/logo.svg +++ b/logo.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/logo_dark.svg b/logo_dark.svg index f6e94f1b4c39..30ff77963e32 100644 --- a/logo_dark.svg +++ b/logo_dark.svg @@ -1,115 +1 @@ - - + \ No newline at end of file diff --git a/logo_light.svg b/logo_light.svg index cdd5ddb8b0a6..014443bceca8 100644 --- a/logo_light.svg +++ b/logo_light.svg @@ -1,115 +1 @@ - - + \ No newline at end of file diff --git a/runtime/queries/bash/highlights.scm b/runtime/queries/bash/highlights.scm index 3f2df63867d6..92d61e8b83e0 100644 --- a/runtime/queries/bash/highlights.scm +++ b/runtime/queries/bash/highlights.scm @@ -10,23 +10,37 @@ (variable_name) @variable.other.member [ + "if" + "then" + "else" + "elif" + "fi" "case" + "in" + "esac" +] @keyword.control.conditional + +[ + "for" "do" "done" - "elif" - "else" - "esac" + "select" + "until" + "while" +] @keyword.control.repeat + +[ + "declare" + "typeset" "export" - "fi" - "for" - "function" - "if" - "in" + "readonly" + "local" "unset" - "while" - "then" + "unsetenv" ] @keyword +"function" @keyword.function + (comment) @comment (function_definition name: (word) @function) diff --git a/runtime/queries/comment/highlights.scm b/runtime/queries/comment/highlights.scm index 9583f9c53414..4cefcdf74d8d 100644 --- a/runtime/queries/comment/highlights.scm +++ b/runtime/queries/comment/highlights.scm @@ -44,3 +44,5 @@ ; User mention (@user) ("text" @tag (#match? @tag "^[@][a-zA-Z0-9_-]+$")) + +(uri) @markup.link.url diff --git a/runtime/queries/css/indents.scm b/runtime/queries/css/indents.scm new file mode 100644 index 000000000000..1dfd977d98d7 --- /dev/null +++ b/runtime/queries/css/indents.scm @@ -0,0 +1,7 @@ +[ + (block) +] @indent + +[ + "}" +] @outdent diff --git a/runtime/queries/dart/textobjects.scm b/runtime/queries/dart/textobjects.scm new file mode 100644 index 000000000000..028276156725 --- /dev/null +++ b/runtime/queries/dart/textobjects.scm @@ -0,0 +1,68 @@ +(class_definition + body: (_) @class.inside) @class.around + +(mixin_declaration + (class_body) @class.inside) @class.around + +(extension_declaration + (extension_body) @class.inside) @class.around + +(enum_declaration + body: (_) @class.inside) @class.around + +(type_alias) @class.around + +(_ + ( + [ + (getter_signature) + (setter_signature) + (function_signature) + (method_signature) + (constructor_signature) + ] + . 
+ (function_body) @function.inside @function.around + ) @function.around +) + +(declaration + [ + (constant_constructor_signature) + (constructor_signature) + (factory_constructor_signature) + (redirecting_factory_constructor_signature) + (getter_signature) + (setter_signature) + (operator_signature) + (function_signature) + ] +) @function.around + +(lambda_expression + body: (_) @function.inside +) @function.around + +(function_expression + body: (_) @function.inside +) @function.around + +[ + (comment) + (documentation_comment) +] @comment.inside + +(comment)+ @comment.around + +(documentation_comment)+ @comment.around + +(formal_parameter) @parameter.inside + +(formal_parameter_list) @parameter.around + +(expression_statement + ((identifier) @_name (#any-of? @_name "test" "testWidgets")) + . + (selector (argument_part (arguments . (_) . (argument) @test.inside))) +) @test.around + diff --git a/runtime/queries/gdscript/textobjects.scm b/runtime/queries/gdscript/textobjects.scm index 089544682978..47512bba785d 100644 --- a/runtime/queries/gdscript/textobjects.scm +++ b/runtime/queries/gdscript/textobjects.scm @@ -13,5 +13,7 @@ (typed_default_parameter) ] @parameter.inside @parameter.around) +(arguments (_expression) @parameter.inside @parameter.around) + (comment) @comment.inside (comment)+ @comment.around diff --git a/runtime/queries/go/highlights.scm b/runtime/queries/go/highlights.scm index fba2df99eafa..8eed12afbdb3 100644 --- a/runtime/queries/go/highlights.scm +++ b/runtime/queries/go/highlights.scm @@ -183,9 +183,12 @@ [ (int_literal) +] @constant.numeric.integer + +[ (float_literal) (imaginary_literal) -] @constant.numeric.integer +] @constant.numeric.float [ (true) @@ -197,4 +200,31 @@ (iota) ] @constant.builtin +; Comments + (comment) @comment + +; Doc Comments +(source_file + . + (comment)+ @comment.block.documentation) + +(source_file + (comment)+ @comment.block.documentation + . + (const_declaration)) + +(source_file + (comment)+ @comment.block.documentation + . + (function_declaration)) + +(source_file + (comment)+ @comment.block.documentation + . + (type_declaration)) + +(source_file + (comment)+ @comment.block.documentation + . + (var_declaration)) diff --git a/runtime/queries/go/injections.scm b/runtime/queries/go/injections.scm index 321c90add371..d7b03da35630 100644 --- a/runtime/queries/go/injections.scm +++ b/runtime/queries/go/injections.scm @@ -1,2 +1,14 @@ ((comment) @injection.content (#set! injection.language "comment")) + + +(call_expression + (selector_expression) @_function + (#any-of? @_function "regexp.Match" "regexp.MatchReader" "regexp.MatchString" "regexp.Compile" "regexp.CompilePOSIX" "regexp.MustCompile" "regexp.MustCompilePOSIX") + (argument_list + . + [ + (raw_string_literal) + (interpreted_string_literal) + ] @injection.content + (#set! injection.language "regex"))) diff --git a/runtime/queries/hocon/highlights.scm b/runtime/queries/hocon/highlights.scm new file mode 100644 index 000000000000..d1aa38a27dbc --- /dev/null +++ b/runtime/queries/hocon/highlights.scm @@ -0,0 +1,31 @@ +(comment) @comment + +(null) @constant.builtin +[(true) (false)] @constant.builtin.boolean +(number) @constant.numeric +(string) @string +(multiline_string) @string +(string (escape_sequence) @constant.character.escape) +(unquoted_string) @string + +(value [":" "=" "+=" ] @operator) + +(substitution (_) @string) +(substitution ["${" "${?" 
"}"] @punctuation.special) + +[ + "url" + "file" + "classpath" + "required" +] @function.builtin + +(include) @keyword.directive + +[ "(" ")" "[" "]" "{" "}" ] @punctuation.bracket + +(unit) @keyword +(path (_) @keyword) +(unquoted_path "." @punctuation.delimiter) +[ "," ] @punctuation.delimiter + diff --git a/runtime/queries/hocon/indents.scm b/runtime/queries/hocon/indents.scm new file mode 100644 index 000000000000..27c2c988f7ef --- /dev/null +++ b/runtime/queries/hocon/indents.scm @@ -0,0 +1,10 @@ +[ + (object) + (array) +] @indent + +[ + "]" + "}" +] @outdent + diff --git a/runtime/queries/janet/highlights.scm b/runtime/queries/janet/highlights.scm index a036368a1208..1335594392f0 100644 --- a/runtime/queries/janet/highlights.scm +++ b/runtime/queries/janet/highlights.scm @@ -1 +1,66 @@ -; inherits: clojure +(kwd_lit) @string.special.symbol + +(str_lit) @string + +(long_str_lit) @string + +(buf_lit) @string + +(long_buf_lit) @string + +(num_lit) @constant.numeric + +[(bool_lit) (nil_lit)] @constant.builtin + +(comment) @comment + +((sym_lit) @variable + (#match? @variable "^\\*.+\\*$")) + +(short_fn_lit + . + (sym_lit) @function) + +;; special forms +(par_tup_lit + . + (sym_lit) @function.macro + (#match? @function.macro + "^(break|def|do|fn|if|quasiquote|quote|set|splice|unquote|upscope|var|while)$")) + +;; for macros +;; +;; (each name (all-bindings) +;; (when-let [info (dyn (symbol name))] +;; (when (info :macro) +;; (print name)))) +(par_tup_lit + . + (sym_lit) @function.macro + (#match? @function.macro + "^(%=|\\*=|\\+\\+|\\+=|\\-\\-|\\-=|\\->|\\->>|\\-\\?>|\\-\\?>>|/=|and|as\\->|as\\-macro|as\\?\\->|assert|case|catseq|chr|comment|compif|comptime|compwhen|cond|coro|def\\-|default|defdyn|defer|defmacro|defmacro\\-|defn|defn\\-|delay|doc|each|eachk|eachp|edefer|ev/do\\-thread|ev/gather|ev/spawn|ev/spawn\\-thread|ev/with\\-deadline|ffi/defbind|fiber\\-fn|for|forever|forv|generate|if\\-let|if\\-not|if\\-with|import|juxt|label|let|loop|match|or|prompt|protect|repeat|seq|short\\-fn|tabseq|toggle|tracev|try|unless|use|var\\-|varfn|when|when\\-let|when\\-with|with|with\\-dyns|with\\-syms|with\\-vars)$")) + +;; builtin functions +;; +;; (each name (all-bindings) +;; (when-let [info (dyn (symbol name))] +;; (when (and (nil? (info :macro)) +;; (or (function? (info :value)) +;; (cfunction? (info :value)))) +;; (print name)))) +((sym_lit) @function.builtin + (#match? 
@function.builtin + "^(%|\\*|\\+|\\-|/|<|<=|=|>|>=|\\.break|\\.breakall|\\.bytecode|\\.clear|\\.clearall|\\.disasm|\\.fiber|\\.fn|\\.frame|\\.locals|\\.next|\\.nextc|\\.ppasm|\\.signal|\\.slot|\\.slots|\\.source|\\.stack|\\.step|abstract\\?|accumulate|accumulate2|all|all\\-bindings|all\\-dynamics|any\\?|apply|array|array/clear|array/concat|array/ensure|array/fill|array/insert|array/new|array/new\\-filled|array/peek|array/pop|array/push|array/remove|array/slice|array/trim|array/weak|array\\?|asm|bad\\-compile|bad\\-parse|band|blshift|bnot|boolean\\?|bor|brshift|brushift|buffer|buffer/bit|buffer/bit\\-clear|buffer/bit\\-set|buffer/bit\\-toggle|buffer/blit|buffer/clear|buffer/fill|buffer/format|buffer/from\\-bytes|buffer/new|buffer/new\\-filled|buffer/popn|buffer/push|buffer/push\\-at|buffer/push\\-byte|buffer/push\\-string|buffer/push\\-word|buffer/slice|buffer/trim|buffer\\?|bxor|bytes\\?|cancel|cfunction\\?|cli\\-main|cmp|comp|compare|compare<|compare<=|compare=|compare>|compare>=|compile|complement|count|curenv|debug|debug/arg\\-stack|debug/break|debug/fbreak|debug/lineage|debug/stack|debug/stacktrace|debug/step|debug/unbreak|debug/unfbreak|debugger|debugger\\-on\\-status|dec|deep\\-not=|deep=|defglobal|describe|dictionary\\?|disasm|distinct|div|doc\\*|doc\\-format|doc\\-of|dofile|drop|drop\\-until|drop\\-while|dyn|eflush|empty\\?|env\\-lookup|eprin|eprinf|eprint|eprintf|error|errorf|ev/acquire\\-lock|ev/acquire\\-rlock|ev/acquire\\-wlock|ev/all\\-tasks|ev/call|ev/cancel|ev/capacity|ev/chan|ev/chan\\-close|ev/chunk|ev/close|ev/count|ev/deadline|ev/full|ev/give|ev/give\\-supervisor|ev/go|ev/lock|ev/read|ev/release\\-lock|ev/release\\-rlock|ev/release\\-wlock|ev/rselect|ev/rwlock|ev/select|ev/sleep|ev/take|ev/thread|ev/thread\\-chan|ev/write|eval|eval\\-string|even\\?|every\\?|extreme|false\\?|ffi/align|ffi/call|ffi/calling\\-conventions|ffi/close|ffi/context|ffi/free|ffi/jitfn|ffi/lookup|ffi/malloc|ffi/native|ffi/pointer\\-buffer|ffi/pointer\\-cfunction|ffi/read|ffi/signature|ffi/size|ffi/struct|ffi/trampoline|ffi/write|fiber/can\\-resume\\?|fiber/current|fiber/getenv|fiber/last\\-value|fiber/maxstack|fiber/new|fiber/root|fiber/setenv|fiber/setmaxstack|fiber/status|fiber\\?|file/close|file/flush|file/lines|file/open|file/read|file/seek|file/tell|file/temp|file/write|filter|find|find\\-index|first|flatten|flatten\\-into|flush|flycheck|freeze|frequencies|from\\-pairs|function\\?|gccollect|gcinterval|gcsetinterval|gensym|get|get\\-in|getline|getproto|group\\-by|has\\-key\\?|has\\-value\\?|hash|idempotent\\?|identity|import\\*|in|inc|index\\-of|indexed\\?|int/s64|int/to\\-bytes|int/to\\-number|int/u64|int\\?|interleave|interpose|invert|juxt\\*|keep|keep\\-syntax|keep\\-syntax!|keys|keyword|keyword/slice|keyword\\?|kvs|last|length|lengthable\\?|load\\-image|macex|macex1|maclintf|make\\-env|make\\-image|map|mapcat|marshal|math/abs|math/acos|math/acosh|math/asin|math/asinh|math/atan|math/atan2|math/atanh|math/cbrt|math/ceil|math/cos|math/cosh|math/erf|math/erfc|math/exp|math/exp2|math/expm1|math/floor|math/gamma|math/gcd|math/hypot|math/lcm|math/log|math/log\\-gamma|math/log10|math/log1p|math/log2|math/next|math/pow|math/random|math/rng|math/rng\\-buffer|math/rng\\-int|math/rng\\-uniform|math/round|math/seedrandom|math/sin|math/sinh|math/sqrt|math/tan|math/tanh|math/trunc|max|max\\-of|mean|memcmp|merge|merge\\-into|merge\\-module|min|min\\-of|mod|module/add\\-paths|module/expand\\-path|module/find|module/value|nan\\?|nat\\?|native|neg\\?|net/accept|net/accept\\-loop|net/address|net/address\\-unpac
k|net/chunk|net/close|net/connect|net/flush|net/listen|net/localname|net/peername|net/read|net/recv\\-from|net/send\\-to|net/server|net/setsockopt|net/shutdown|net/write|next|nil\\?|not|not=|number\\?|odd\\?|one\\?|os/arch|os/cd|os/chmod|os/clock|os/compiler|os/cpu\\-count|os/cryptorand|os/cwd|os/date|os/dir|os/environ|os/execute|os/exit|os/getenv|os/isatty|os/link|os/lstat|os/mkdir|os/mktime|os/open|os/perm\\-int|os/perm\\-string|os/pipe|os/posix\\-exec|os/posix\\-fork|os/proc\\-close|os/proc\\-kill|os/proc\\-wait|os/readlink|os/realpath|os/rename|os/rm|os/rmdir|os/setenv|os/shell|os/sigaction|os/sleep|os/spawn|os/stat|os/strftime|os/symlink|os/time|os/touch|os/umask|os/which|pairs|parse|parse\\-all|parser/byte|parser/clone|parser/consume|parser/eof|parser/error|parser/flush|parser/has\\-more|parser/insert|parser/new|parser/produce|parser/state|parser/status|parser/where|partial|partition|partition\\-by|peg/compile|peg/find|peg/find\\-all|peg/match|peg/replace|peg/replace\\-all|pos\\?|postwalk|pp|prewalk|prin|prinf|print|printf|product|propagate|put|put\\-in|quit|range|reduce|reduce2|repl|require|resume|return|reverse|reverse!|run\\-context|sandbox|scan\\-number|setdyn|signal|slice|slurp|some|sort|sort\\-by|sorted|sorted\\-by|spit|string|string/ascii\\-lower|string/ascii\\-upper|string/bytes|string/check\\-set|string/find|string/find\\-all|string/format|string/from\\-bytes|string/has\\-prefix\\?|string/has\\-suffix\\?|string/join|string/repeat|string/replace|string/replace\\-all|string/reverse|string/slice|string/split|string/trim|string/triml|string/trimr|string\\?|struct|struct/getproto|struct/proto\\-flatten|struct/to\\-table|struct/with\\-proto|struct\\?|sum|symbol|symbol/slice|symbol\\?|table|table/clear|table/clone|table/getproto|table/new|table/proto\\-flatten|table/rawget|table/setproto|table/to\\-struct|table/weak|table/weak\\-keys|table/weak\\-values|table\\?|take|take\\-until|take\\-while|tarray/buffer|tarray/copy\\-bytes|tarray/length|tarray/new|tarray/properties|tarray/slice|tarray/swap\\-bytes|thread/close|thread/current|thread/exit|thread/new|thread/receive|thread/send|thaw|trace|true\\?|truthy\\?|tuple|tuple/brackets|tuple/setmap|tuple/slice|tuple/sourcemap|tuple/type|tuple\\?|type|unmarshal|untrace|update|update\\-in|values|varglobal|walk|warn\\-compile|xprin|xprinf|xprint|xprintf|yield|zero\\?|zipcoll)$")) + +;; other calls +(par_tup_lit + . 
+ (sym_lit) @function) + +(sym_lit) @variable + +["{" "@{" "}" + "[" "@[" "]" + "(" "@(" ")"] @punctuation.bracket + +["~" "'" "|" ";" ","] @operator diff --git a/runtime/queries/make/indents.scm b/runtime/queries/make/indents.scm new file mode 100644 index 000000000000..42b2c60e65d7 --- /dev/null +++ b/runtime/queries/make/indents.scm @@ -0,0 +1,8 @@ +[ + (define_directive) + (rule) +] @indent + +[ + "endef" +] @outdent diff --git a/runtime/queries/nu/highlights.scm b/runtime/queries/nu/highlights.scm index 746c502510c5..66a305840508 100644 --- a/runtime/queries/nu/highlights.scm +++ b/runtime/queries/nu/highlights.scm @@ -2,7 +2,6 @@ ;;; keywords [ "def" - "def-env" "alias" "export-env" "export" @@ -73,7 +72,6 @@ "tb" "tB" "Tb" "TB" "pb" "pB" "Pb" "PB" "eb" "eB" "Eb" "EB" - "zb" "zB" "Zb" "ZB" "kib" "kiB" "kIB" "kIb" "Kib" "KIb" "KIB" "mib" "miB" "mIB" "mIb" "Mib" "MIb" "MIB" @@ -81,7 +79,6 @@ "tib" "tiB" "tIB" "tIb" "Tib" "TIb" "TIB" "pib" "piB" "pIB" "pIb" "Pib" "PIb" "PIB" "eib" "eiB" "eIB" "eIb" "Eib" "EIb" "EIB" - "zib" "ziB" "zIB" "zIb" "Zib" "ZIb" "ZIB" ] @variable.parameter ) (val_binary diff --git a/runtime/queries/pkl/highlights.scm b/runtime/queries/pkl/highlights.scm new file mode 100644 index 000000000000..501c9485945b --- /dev/null +++ b/runtime/queries/pkl/highlights.scm @@ -0,0 +1,179 @@ +; Copyright © 2024 Apple Inc. and the Pkl project authors. All rights reserved. +; +; Licensed under the Apache License, Version 2.0 (the "License"); +; you may not use this file except in compliance with the License. +; You may obtain a copy of the License at +; +; https://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, software +; distributed under the License is distributed on an "AS IS" BASIS, +; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +; See the License for the specific language governing permissions and +; limitations under the License. + +; this definition is imprecise in that +; * any qualified or unqualified call to a method named "Regex" is considered a regex +; * string delimiters are considered part of the regex + +; Operators + +[ + "??" + "@" + "=" + "<" + ">" + "!" + "==" + "!=" + "<=" + ">=" + "&&" + "||" + "+" + "-" + "**" + "*" + "/" + "~/" + "%" + "|>" +] @keyword.operator + +[ + "?" + "|" + "->" +] @operator.type + +[ + "," + ":" + "." + "?." +] @punctuation.delimiter + +[ + "(" + ")" + "]" + "{" + "}" + ; "[" @punctuation.bracket TODO: FIGURE OUT HOW TO REFER TO CUSTOM TOKENS +] @punctuation.bracket + +; Keywords + +[ + "abstract" + "amends" + "as" + "class" + "extends" + "external" + "function" + "hidden" + "import" + "import*" + "in" + "let" + "local" + "module" + "new" + "open" + "out" + "typealias" + "when" +] @keyword + +[ + "if" + "is" + "else" +] @keyword.control.conditional + +[ + "for" +] @keyword.control.repeat + +(importExpr "import" @keyword.control.import) +(importGlobExpr "import*" @keyword.control.import) + +"read" @function.builtin +"read?" 
@function.builtin +"read*" @function.builtin +"throw" @function.builtin +"trace" @function.builtin + +(moduleExpr "module" @type.builtin) +"nothing" @type.builtin +"unknown" @type.builtin + +(outerExpr) @variable.builtin +"super" @variable.builtin +(thisExpr) @variable.builtin + +[ + (falseLiteral) + (nullLiteral) + (trueLiteral) +] @constant.builtin + +; Literals + +(stringConstant) @string +(slStringLiteral) @string +(mlStringLiteral) @string + +(escapeSequence) @constent.character.escape + +(intLiteral) @constant.numeric.integer +(floatLiteral) @constant.numeric.float + +(interpolationExpr + "\\(" @punctuation.special + ")" @punctuation.special) @embedded + +(interpolationExpr + "\\#(" @punctuation.special + ")" @punctuation.special) @embedded + +(interpolationExpr + "\\##(" @punctuation.special + ")" @punctuation.special) @embedded + +(lineComment) @comment +(blockComment) @comment +(docComment) @comment + +; Identifiers + +(classProperty (identifier) @variable.other.member) +(objectProperty (identifier) @variable.other.member) + +(parameterList (typedIdentifier (identifier) @variable.parameter)) +(objectBodyParameters (typedIdentifier (identifier) @variable.parameter)) + +(identifier) @variable + +; Method definitions + +(classMethod (methodHeader (identifier)) @function.method) +(objectMethod (methodHeader (identifier)) @function.method) + +; Method calls + +(methodCallExpr + (identifier) @function.method) + +; Types + +(clazz (identifier) @type) +(typeAlias (identifier) @type) +((identifier) @type + (match? @type "^[A-Z]")) + +(typeArgumentList + "<" @punctuation.bracket + ">" @punctuation.bracket) diff --git a/runtime/queries/pkl/indents.scm b/runtime/queries/pkl/indents.scm new file mode 100644 index 000000000000..d2a9be1ab180 --- /dev/null +++ b/runtime/queries/pkl/indents.scm @@ -0,0 +1,23 @@ +; Copyright © 2024 Apple Inc. and the Pkl project authors. All rights reserved. +; +; Licensed under the Apache License, Version 2.0 (the "License"); +; you may not use this file except in compliance with the License. +; You may obtain a copy of the License at +; +; https://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, software +; distributed under the License is distributed on an "AS IS" BASIS, +; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +; See the License for the specific language governing permissions and +; limitations under the License. + +; this definition is imprecise in that +; * any qualified or unqualified call to a method named "Regex" is considered a regex +; * string delimiters are considered part of the regex +[ + (objectBody) + (classBody) + (ifExpr) + (mlStringLiteral) ; This isn't perfect; newlines are too indented but it's better than if omitted. +] @indent diff --git a/runtime/queries/pkl/injections.scm b/runtime/queries/pkl/injections.scm new file mode 100644 index 000000000000..15867f35e647 --- /dev/null +++ b/runtime/queries/pkl/injections.scm @@ -0,0 +1,30 @@ +; Copyright © 2024 Apple Inc. and the Pkl project authors. All rights reserved. +; +; Licensed under the Apache License, Version 2.0 (the "License"); +; you may not use this file except in compliance with the License. +; You may obtain a copy of the License at +; +; https://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, software +; distributed under the License is distributed on an "AS IS" BASIS, +; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+; See the License for the specific language governing permissions and +; limitations under the License. + +; this definition is imprecise in that +; * any qualified or unqualified call to a method named "Regex" is considered a regex +; * string delimiters are considered part of the regex +( + ((methodCallExpr (identifier) @methodName (argumentList (slStringLiteral) @injection.content)) + (#set! injection.language "regex")) + (eq? @methodName "Regex")) + +((lineComment) @injection.content + (#set! injection.language "comment")) + +((blockComment) @injection.content + (#set! injection.language "comment")) + +((docComment) @injection.content + (#set! injection.language "markdown")) diff --git a/runtime/queries/pod/highlights.scm b/runtime/queries/pod/highlights.scm index e8bd4b54615c..d88d9ffa75c7 100644 --- a/runtime/queries/pod/highlights.scm +++ b/runtime/queries/pod/highlights.scm @@ -1,61 +1,97 @@ -[(pod_directive) - (head_directive) - (over_directive) - (item_directive) - (back_directive) - (encoding_directive) - (cut_directive)] @tag - -(head_paragraph - (head_directive) @directive - (#eq? @directive "=head1") +; A highlight file for nvim-treesitter to use + +[(pod_command) + (command) + (cut_command)] @keyword + +(command_paragraph + (command) @keyword + (#eq? @keyword "=head1") (content) @markup.heading.1) -(head_paragraph - (head_directive) @directive - (#eq? @directive "=head2") + +(command_paragraph + (command) @keyword + (#eq? @keyword "=head2") (content) @markup.heading.2) -(head_paragraph - (head_directive) @directive - (#eq? @directive "=head3") + +(command_paragraph + (command) @keyword + (#eq? @keyword "=head3") (content) @markup.heading.3) -(head_paragraph - (head_directive) @directive - (#eq? @directive "=head4") + +(command_paragraph + (command) @keyword + (#eq? @keyword "=head4") (content) @markup.heading.4) -(head_paragraph - (head_directive) @directive - (#eq? @directive "=head5") + +(command_paragraph + (command) @keyword + (#eq? @keyword "=head5") (content) @markup.heading.5) -(head_paragraph - (head_directive) @directive - (#eq? @directive "=head6") + +(command_paragraph + (command) @keyword + (#eq? @keyword "=head6") (content) @markup.heading.6) -(over_paragraph (content) @constant.numeric.integer) -(item_paragraph (content) @markup.list) -(encoding_paragraph (content) @string) +(command_paragraph + (command) @keyword + (#match? @keyword "^=over") + (content) @constant.numeric) + +(command_paragraph + (command) @keyword + (#match? @keyword "^=item") + (content) @markup) + +(command_paragraph + (command) @keyword + (#match? @keyword "^=encoding") + (content) @string.special) + +(command_paragraph + (command) @keyword + (#not-match? @keyword "^=(head|over|item|encoding)") + (content) @string) (verbatim_paragraph (content) @markup.raw) -(interior_sequence) @tag +(interior_sequence + (sequence_letter) @constant.character + ["<" ">"] @punctuation.delimiter +) (interior_sequence - (sequence_letter) @letter - (#eq? @letter "B") + (sequence_letter) @character + (#eq? @character "B") (content) @markup.bold) + (interior_sequence - (sequence_letter) @letter - (#eq? @letter "C") - (content) @markup.raw) + (sequence_letter) @character + (#eq? @character "C") + (content) @markup.literal) + (interior_sequence - (sequence_letter) @letter - (#eq? @letter "F") - (content) @markup.italic) + (sequence_letter) @character + (#eq? @character "F") + (content) @markup.underline @string.special) + (interior_sequence - (sequence_letter) @letter - (#eq? 
@letter "I") - (content) @markup.italic) + (sequence_letter) @character + (#eq? @character "I") + (content) @markup.bold) + (interior_sequence - (sequence_letter) @letter - (#eq? @letter "L") + (sequence_letter) @character + (#eq? @character "L") (content) @markup.link.url) + +(interior_sequence + (sequence_letter) @character + (#eq? @character "X") + (content) @markup.reference) + +(interior_sequence + (sequence_letter) @character + (#eq? @character "E") + (content) @string.special.escape) diff --git a/runtime/queries/protobuf/textobjects.scm b/runtime/queries/protobuf/textobjects.scm new file mode 100644 index 000000000000..6f06b13498b3 --- /dev/null +++ b/runtime/queries/protobuf/textobjects.scm @@ -0,0 +1,9 @@ +(message (messageBody) @class.inside) @class.around +(enum (enumBody) @class.inside) @class.around +(service (serviceBody) @class.inside) @class.around + +(rpc (enumMessageType) @parameter.inside) @function.inside +(rpc (enumMessageType) @parameter.around) @function.around + +(comment) @comment.inside +(comment)+ @comment.around diff --git a/runtime/queries/rust/highlights.scm b/runtime/queries/rust/highlights.scm index 47e57e8008d6..3cda9d52083c 100644 --- a/runtime/queries/rust/highlights.scm +++ b/runtime/queries/rust/highlights.scm @@ -189,6 +189,33 @@ ; TODO: variable.mut to highlight mutable identifiers via locals.scm +; ------- +; Constructors +; ------- +; TODO: this is largely guesswork, remove it once we get actual info from locals.scm or r-a + +(struct_expression + name: (type_identifier) @constructor) + +(tuple_struct_pattern + type: [ + (identifier) @constructor + (scoped_identifier + name: (identifier) @constructor) + ]) +(struct_pattern + type: [ + ((type_identifier) @constructor) + (scoped_type_identifier + name: (type_identifier) @constructor) + ]) +(match_pattern + ((identifier) @constructor) (#match? @constructor "^[A-Z]")) +(or_pattern + ((identifier) @constructor) + ((identifier) @constructor) + (#match? @constructor "^[A-Z]")) + ; ------- ; Guess Other Types ; ------- @@ -203,33 +230,28 @@ (call_expression function: [ - ((identifier) @type.enum.variant - (#match? @type.enum.variant "^[A-Z]")) + ((identifier) @constructor + (#match? @constructor "^[A-Z]")) (scoped_identifier - name: ((identifier) @type.enum.variant - (#match? @type.enum.variant "^[A-Z]"))) + name: ((identifier) @constructor + (#match? @constructor "^[A-Z]"))) ]) ; --- -; Assume that types in match arms are enums and not -; tuple structs. Same for `if let` expressions. +; PascalCase identifiers under a path which is also PascalCase +; are assumed to be constructors if they have methods or fields. ; --- -(match_pattern - (scoped_identifier - name: (identifier) @constructor)) -(tuple_struct_pattern - type: [ - ((identifier) @constructor) - (scoped_identifier - name: (identifier) @constructor) - ]) -(struct_pattern - type: [ - ((type_identifier) @constructor) - (scoped_type_identifier - name: (type_identifier) @constructor) - ]) +(field_expression + value: (scoped_identifier + path: [ + (identifier) @type + (scoped_identifier + name: (identifier) @type) + ] + name: (identifier) @constructor + (#match? @type "^[A-Z]") + (#match? @constructor "^[A-Z]"))) ; --- ; Other PascalCase identifiers are assumed to be structs. diff --git a/runtime/queries/rust/injections.scm b/runtime/queries/rust/injections.scm index ae9e587fd468..b05b9d9750a4 100644 --- a/runtime/queries/rust/injections.scm +++ b/runtime/queries/rust/injections.scm @@ -2,11 +2,29 @@ (#set! 
injection.language "comment")) ((macro_invocation - macro: (identifier) @_html (#eq? @_html "html") + macro: + [ + (scoped_identifier + name: (_) @_macro_name) + (identifier) @_macro_name + ] (token_tree) @injection.content) + (#eq? @_macro_name "html") (#set! injection.language "html") (#set! injection.include-children)) +((macro_invocation + macro: + [ + (scoped_identifier + name: (_) @_macro_name) + (identifier) @_macro_name + ] + (token_tree) @injection.content) + (#eq? @_macro_name "slint") + (#set! injection.language "slint") + (#set! injection.include-children)) + ((macro_invocation (token_tree) @injection.content) (#set! injection.language "rust") diff --git a/runtime/queries/scala/highlights.scm b/runtime/queries/scala/highlights.scm index 67603fdda3ca..e21a3909d1cf 100644 --- a/runtime/queries/scala/highlights.scm +++ b/runtime/queries/scala/highlights.scm @@ -53,20 +53,13 @@ (var_declaration name: (identifier) @variable) -; method definition +; function definitions/declarations -(class_definition - body: (template_body - (function_definition - name: (identifier) @function.method))) -(object_definition - body: (template_body - (function_definition - name: (identifier) @function.method))) -(trait_definition - body: (template_body - (function_definition - name: (identifier) @function.method))) +(function_declaration + name: (identifier) @function.method) + +(function_definition + name: (identifier) @function.method) ; imports/exports @@ -263,7 +256,7 @@ "return" @keyword.control.return -(comment) @comment +[(comment) (block_comment)] @comment ;; `case` is a conditional keyword in case_block diff --git a/runtime/queries/scala/injections.scm b/runtime/queries/scala/injections.scm index 321c90add371..1ad68557e2fe 100644 --- a/runtime/queries/scala/injections.scm +++ b/runtime/queries/scala/injections.scm @@ -1,2 +1,16 @@ -((comment) @injection.content +([(comment) (block_comment)] @injection.content (#set! injection.language "comment")) + + +; TODO for some reason multiline string (triple quotes) interpolation works only if it contains interpolated value +; Matches these SQL interpolators: +; - Doobie: 'sql', 'fr' +; - Quill: 'sql', 'infix' +; - Slick: 'sql', 'sqlu' +(interpolated_string_expression + interpolator: + ((identifier) @interpolator + (#any-of? @interpolator "fr" "infix" "sql" "sqlu")) + (interpolated_string) @injection.content + (#set! injection.language "sql")) + diff --git a/runtime/queries/scala/textobjects.scm b/runtime/queries/scala/textobjects.scm new file mode 100644 index 000000000000..21286b3ef4f2 --- /dev/null +++ b/runtime/queries/scala/textobjects.scm @@ -0,0 +1,65 @@ +; Function queries + +(function_definition + body: (_) @function.inside) @function.around ; Does not include end marker + +(lambda_expression + (_) @function.inside) @function.around + +; Scala 3 braceless lambda +(colon_argument + (_) @function.inside) @function.around + + +; Class queries + +(object_definition + body: (_)? @class.inside) @class.around + +(class_definition + body: (_)? @class.inside) @class.around + +(trait_definition + body: (_)? @class.inside) @class.around + +(type_definition) @class.around + +(enum_case_definitions) @class.around + +(enum_definition + body: (_)? @class.inside) @class.around + + +; Parameter queries + +(parameters + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +(class_parameters + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +(parameter_types + ((_) @parameter.inside . ","? 
@parameter.around) @parameter.around) + +(bindings + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +; Does not match context bounds or higher-kinded types +(type_parameters + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +(arguments + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +(type_arguments + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + + +; Comment queries + +[(comment) (block_comment)] @comment.inside +[(comment) (block_comment)] @comment.around ; Does not match consecutive block comments + + +; Test queries +; Not supported diff --git a/runtime/queries/slint/highlights.scm b/runtime/queries/slint/highlights.scm index c0ef3dd889f4..06d82a413ca7 100644 --- a/runtime/queries/slint/highlights.scm +++ b/runtime/queries/slint/highlights.scm @@ -1,122 +1,109 @@ +(comment) @comment + +; Different types: +(string_value) @string +(bool_value) @constant.builtin.boolean + +; Constants + +(escape_sequence) @constant.character.escape + +(color_value) @constant -(identifier) @variable [ - (type_identifier) - (units) -]@type + (children_identifier) + (easing_kind_identifier) +] @constant.builtin -(array_literal - (identifier) @type) +[ + (int_value) + (physical_length_value) +] @constant.numeric.integer -(function_identifier) @function [ - (image_macro) - (children_macro) - (radial_grad_macro) - (linear_grad_macro) -] @function.macro + (float_value) + (percent_value) + (length_value) + (duration_value) + (angle_value) + (relative_font_size_value) +] @constant.numeric.float -(call_expression - function: (identifier) @function) -(call_expression - function: (field_expression - field: (identifier) @function)) +(purity) @keyword.storage.modifier -(vis) @keyword.control.import +(function_visibility) @keyword.storage.modifier -(transition_statement state: (identifier) @variable.other.member) -(state_expression state: (identifier) @variable.other.member) -(struct_block_definition field: (identifier) @variable.other.member) -(assign_property (identifier) @attribute) +(property_visibility) @keyword.storage.modifier -(comment) @comment +(builtin_type_identifier) @type.builtin -(string_literal) @string -(int_literal) @constant.numeric.integer -(float_literal) @constant.numeric.float +(reference_identifier) @variable.builtin -[ - "in" - "in-out" - "for" -] @keyword.control.repeat +(type + [ + (type_list) + (user_type_identifier) + (anon_struct_block) + ]) @type -[ - "import" - "export" - "from" -] @keyword.control.import +(user_type_identifier) @type -[ - "if" - "else" - "when" -] @keyword.control.conditional +; Functions and callbacks +(argument) @variable.parameter -[ - "struct" - "property" -] @keyword.storage.type +(function_call + name: (_) @function.call) -[ - "global" -] @keyword.storage.modifier +; definitions +(callback + name: (_) @function) +(callback_alias + name: (_) @function) -[ - "root" - "parent" - "duration" - "easing" -] @variable.builtin +(callback_event + name: (simple_identifier) @function.call) +(enum_definition + name: (_) @type.enum) -[ - "callback" - "animate" - "states" - "out" - "transitions" - "component" - "inherits" -] @keyword +(function_definition + name: (_) @function) -[ - "black" - "transparent" - "blue" - "ease" - "ease_in" - "ease-in" - "ease_in_out" - "ease-in-out" - "ease_out" - "ease-out" - "end" - "green" - "red" - "start" - "yellow" - "white" - "gray" - ] @constant.builtin +(struct_definition + name: (_) @type) + +(typed_identifier + type: (_) @type) + +; Operators 
+(binary_expression + op: (_) @operator) + +(unary_expression + op: (_) @operator) [ - "true" - "false" -] @constant.builtin.boolean + (comparison_operator) + (mult_prec_operator) + (add_prec_operator) + (unary_prec_operator) + (assignment_prec_operator) +] @operator -"@" @keyword +[ + ":=" + "=>" + "->" + "<=>" +] @operator -; ; Punctuation [ - "," - "." ";" - ":" + "." + "," ] @punctuation.delimiter -; ; Brackets [ "(" ")" @@ -126,46 +113,136 @@ "}" ] @punctuation.bracket -(define_property ["<" ">"] @punctuation.bracket) +(property + [ + "<" + ">" + ] @punctuation.bracket) -[ - "angle" - "bool" - "brush" - "color" - "duration" - "easing" - "float" - "image" - "int" - "length" - "percent" - "physical-length" - "physical_length" - "string" -] @type.builtin +; Properties, constants and variables +(component + id: (simple_identifier) @constant) + +(property + name: (simple_identifier) @variable) + +(binding_alias + name: (simple_identifier) @variable) + +(binding + name: (simple_identifier) @variable) + +(struct_block + (simple_identifier) @variable.other.member) + +(anon_struct_block + (simple_identifier) @variable.other.member) + +(property_assignment + property: (simple_identifier) @variable) + +(states_definition + name: (simple_identifier) @variable) + +(callback + name: (simple_identifier) @variable) + +(typed_identifier + name: (_) @variable) + +(simple_indexed_identifier + (simple_identifier) @variable) + +(expression + (simple_identifier) @variable) +; Attributes [ - ":=" - "<=>" - "!" - "-" - "+" - "*" - "/" - "&&" - "||" - ">" - "<" - ">=" - "<=" - "=" - ":" - "+=" - "-=" - "*=" - "/=" - "?" - "=>" ] @operator - -(ternary_expression [":" "?"] @keyword.control.conditional) \ No newline at end of file + (linear_gradient_identifier) + (radial_gradient_identifier) + (radial_gradient_kind) +] @attribute + +(image_call + "@image-url" @attribute) + +(tr + "@tr" @attribute) + +; Keywords +(animate_option_identifier) @keyword + +(export) @keyword.control.import + +(if_statement + "if" @keyword.control.conditional) + +(if_expr + [ + "if" + "else" + ] @keyword.control.conditional) + +(ternary_expression + [ + "?" 
+ ":" + ] @keyword.control.conditional) + +(animate_statement + "animate" @keyword) + +(callback + "callback" @keyword.function) + +(component_definition + [ + "component" + "inherits" + ] @keyword.storage.type) + +(enum_definition + "enum" @keyword.storage.type) + +(for_loop + [ + "for" + "in" + ] @keyword.control.repeat) + +(function_definition + "function" @keyword.function) + +(global_definition + "global" @keyword.storage.type) + +(imperative_block + "return" @keyword.control.return) + +(import_statement + [ + "import" + "from" + ] @keyword.control.import) + +(import_type + "as" @keyword.control.import) + +(property + "property" @keyword.storage.type) + +(states_definition + [ + "states" + "when" + ] @keyword) + +(struct_definition + "struct" @keyword.storage.type) + +(transitions_definition + [ + "transitions" + "in" + "out" + ] @keyword) diff --git a/runtime/queries/slint/indents.scm b/runtime/queries/slint/indents.scm index 4b5ce41b8530..189f8a0e5ac4 100644 --- a/runtime/queries/slint/indents.scm +++ b/runtime/queries/slint/indents.scm @@ -1,12 +1,11 @@ [ - (comp_body) - (state_statement) - (transition_statement) - (handler_body) - (consequence_body) - (global_single) + (anon_struct_block) + (assignment_block) + (block) + (enum_block) + (global_block) + (imperative_block) + (struct_block) ] @indent -[ - "}" -] @outdent +"}" @outdent diff --git a/runtime/queries/slint/locals.scm b/runtime/queries/slint/locals.scm index a115f0c69f4e..06601b05dabc 100644 --- a/runtime/queries/slint/locals.scm +++ b/runtime/queries/slint/locals.scm @@ -1,3 +1,6 @@ -; locals.scm - -(component_item) @local.scope +[ + (component) + (component_definition) + (function_definition) + (imperative_block) +] @local.scope diff --git a/runtime/queries/slint/textobjects.scm b/runtime/queries/slint/textobjects.scm new file mode 100644 index 000000000000..7e2f36096c71 --- /dev/null +++ b/runtime/queries/slint/textobjects.scm @@ -0,0 +1,35 @@ +(function_definition + (imperative_block) @funtion.inside) @function.around + +(callback_event + (imperative_block) @function.inside) @function.around + +(property + (imperative_block) @function.inside) @function.around + +(struct_definition + (struct_block) @class.inside) @class.around + +(enum_definition + (enum_block) @class.inside) @class.around + +(global_definition + (global_block) @class.inside) @class.around + +(component_definition + (block) @class.inside) @class.around + +(component_definition + (block) @class.inside) @class.around + +(comment) @comment.around + +(typed_identifier + name: (_) @parameter.inside) @parameter.around + +(callback + arguments: (_) @parameter.inside) + +(string_value + "\"" . (_) @text.inside . "\"") @text.around + diff --git a/runtime/queries/tact/highlights.scm b/runtime/queries/tact/highlights.scm new file mode 100644 index 000000000000..53bf985b54b8 --- /dev/null +++ b/runtime/queries/tact/highlights.scm @@ -0,0 +1,298 @@ +; See: https://docs.helix-editor.com/master/themes.html#syntax-highlighting +; ------------------------------------------------------------------------- + +; attribute +; --------- + +[ + "@name" + "@interface" +] @attribute + +; comment.line +; ------------ + +((comment) @comment.line + (#match? @comment.line "^//")) + +; comment.block +; ------------- + +(comment) @comment.block + +; function.builtin +; ---------------- + +((identifier) @function.builtin + (#any-of? 
@function.builtin + "send" "sender" "require" "now" + "myBalance" "myAddress" "newAddress" + "contractAddress" "contractAddressExt" + "emit" "cell" "ton" + "beginString" "beginComment" "beginTailString" "beginStringFromBuilder" "beginCell" "emptyCell" + "randomInt" "random" + "checkSignature" "checkDataSignature" "sha256" + "min" "max" "abs" "pow" + "throw" "dump" "getConfigParam" + "nativeThrowWhen" "nativeThrowUnless" "nativeReserve" + "nativeRandomize" "nativeRandomizeLt" "nativePrepareRandom" "nativeRandom" "nativeRandomInterval") + (#is-not? local)) + +; function.method +; --------------- + +(method_call_expression + name: (identifier) @function.method) + +; function +; -------- + +(func_identifier) @function + +(native_function + name: (identifier) @function) + +(static_function + name: (identifier) @function) + +(static_call_expression + name: (identifier) @function) + +(init_function + "init" @function.method) + +(receive_function + "receive" @function.method) + +(bounced_function + "bounced" @function.method) + +(external_function + "external" @function.method) + +(function + name: (identifier) @function.method) + +; keyword.control.conditional +; --------------------------- + +[ + "if" "else" +] @keyword.control.conditional + +; keyword.control.repeat +; ---------------------- + +[ + "while" "repeat" "do" "until" +] @keyword.control.repeat + +; keyword.control.import +; ---------------------- + +"import" @keyword.control.import + +; keyword.control.return +; ---------------------- + +"return" @keyword.control.return + +; keyword.operator +; ---------------- + +"initOf" @keyword.operator + +; keyword.directive +; ----------------- + +"primitive" @keyword.directive + +; keyword.function +; ---------------- + +[ + "fun" + "native" +] @keyword.function + +; keyword.storage.type +; -------------------- + +[ + "contract" "trait" "struct" "message" "with" + "const" "let" +] @keyword.storage.type + +; keyword.storage.modifier +; ------------------------ + +[ + "get" "mutates" "extends" "virtual" "override" "inline" "abstract" +] @keyword.storage.modifier + +; keyword +; ------- + +[ + "with" + ; "public" ; -- not used, but declared in grammar.ohm + ; "extend" ; -- not used, but declared in grammar.ohm +] @keyword + +; constant.builtin.boolean +; ------------------------ + +(boolean) @constant.builtin.boolean + +; constant.builtin +; ---------------- + +((identifier) @constant.builtin + (#any-of? @constant.builtin + "SendPayGasSeparately" + "SendIgnoreErrors" + "SendDestroyIfZero" + "SendRemainingValue" + "SendRemainingBalance") + (#is-not? local)) + +(null) @constant.builtin + +; constant.numeric.integer +; ------------------------ + +(integer) @constant.numeric.integer + +; constant +; -------- + +(constant + name: (identifier) @constant) + +; string.special.path +; ------------------- + +(import_statement + library: (string) @string.special.path) + +; string +; ------ + +(string) @string + +; type.builtin +; ------------ + +(tlb_serialization + "as" @keyword + type: (identifier) @type.builtin + (#any-of? @type.builtin + "int8" "int16" "int32" "int64" "int128" "int256" "int257" + "uint8" "uint16" "uint32" "uint64" "uint128" "uint256" + "coins" "remaining" "bytes32" "bytes64")) + +((type_identifier) @type.builtin + (#any-of? 
@type.builtin + "Address" "Bool" "Builder" "Cell" "Int" "Slice" "String" "StringBuilder")) + +(map_type + "map" @type.builtin + "<" @punctuation.bracket + ">" @punctuation.bracket) + +(bounced_type + "bounced" @type.builtin + "<" @punctuation.bracket + ">" @punctuation.bracket) + +((identifier) @type.builtin + (#eq? @type.builtin "SendParameters") + (#is-not? local)) + +; type +; ---- + +(type_identifier) @type + +; constructor +; ----------- + +(instance_expression + name: (identifier) @constructor) + +(initOf + name: (identifier) @constructor) + +; operator +; -------- + +[ + "-" "-=" + "+" "+=" + "*" "*=" + "/" "/=" + "%" "%=" + "=" "==" + "!" "!=" "!!" + "<" "<=" "<<" + ">" ">=" ">>" + "&" "|" + "&&" "||" +] @operator + +; punctuation.bracket +; ------------------- + +[ + "(" ")" + "{" "}" +] @punctuation.bracket + +; punctuation.delimiter +; --------------------- + +[ + ";" + "," + "." + ":" + "?" +] @punctuation.delimiter + +; variable.other.member +; --------------------- + +(field + name: (identifier) @variable.other.member) + +(contract_body + (constant + name: (identifier) @variable.other.member)) + +(trait_body + (constant + name: (identifier) @variable.other.member)) + +(field_access_expression + name: (identifier) @variable.other.member) + +(lvalue (_) (_) @variable.other.member) + +(instance_argument + name: (identifier) @variable.other.member) + +; variable.parameter +; ------------------ + +(parameter + name: (identifier) @variable.parameter) + +; variable.builtin +; ---------------- + +(self) @variable.builtin + +; variable +; -------- + +(identifier) @variable diff --git a/runtime/queries/tact/indents.scm b/runtime/queries/tact/indents.scm new file mode 100644 index 000000000000..62c532b22304 --- /dev/null +++ b/runtime/queries/tact/indents.scm @@ -0,0 +1,38 @@ +; indent +; ------ + +[ + ; (..., ...) + (parameter_list) + (argument_list) + + ; {..., ...} + (instance_argument_list) + + ; {...; ...} + (message_body) + (struct_body) + (contract_body) + (trait_body) + (function_body) + (block_statement) + + ; misc. + (binary_expression) + (return_statement) +] @indent + +; outdent +; ------- + +[ + "}" + ")" + ">" +] @outdent + +; indent.always +; outdent.always +; align +; extend +; extend.prevent-once \ No newline at end of file diff --git a/runtime/queries/tact/injections.scm b/runtime/queries/tact/injections.scm new file mode 100644 index 000000000000..e61db3a56a4d --- /dev/null +++ b/runtime/queries/tact/injections.scm @@ -0,0 +1,5 @@ +; See: https://docs.helix-editor.com/guides/injection.html + +((comment) @injection.content + (#set! injection.language "comment") + (#match? 
@injection.content "^//")) \ No newline at end of file diff --git a/runtime/queries/tact/locals.scm b/runtime/queries/tact/locals.scm new file mode 100644 index 000000000000..f1b3e8de5fd0 --- /dev/null +++ b/runtime/queries/tact/locals.scm @@ -0,0 +1,35 @@ +; See: https://tree-sitter.github.io/tree-sitter/syntax-highlighting#local-variables + +; Scopes @local.scope +; ------------------------- + +[ + (static_function) + (init_function) + (bounced_function) + (receive_function) + (external_function) + (function) + (block_statement) +] @local.scope + +; Definitions @local.definition +; ------------------------------ + +(let_statement + name: (identifier) @local.definition) + +(parameter + name: (identifier) @local.definition) + +(constant + name: (identifier) @local.definition) + +; References @local.reference +; ----------------------------- + +(self) @local.reference + +(value_expression (identifier) @local.reference) + +(lvalue (identifier) @local.reference) diff --git a/runtime/queries/tact/textobjects.scm b/runtime/queries/tact/textobjects.scm new file mode 100644 index 000000000000..54d07014e06e --- /dev/null +++ b/runtime/queries/tact/textobjects.scm @@ -0,0 +1,58 @@ +; function.inside & around +; ------------------------ + +(static_function + body: (_) @function.inside) @function.around + +(init_function + body: (_) @function.inside) @function.around + +(bounced_function + body: (_) @function.inside) @function.around + +(receive_function + body: (_) @function.inside) @function.around + +(external_function + body: (_) @function.inside) @function.around + +(function + body: (_) @function.inside) @function.around + +; class.inside & around +; --------------------- + +(struct + body: (_) @class.inside) @class.around + +(message + body: (_) @class.inside) @class.around + +(contract + body: (_) @class.inside) @class.around + +; NOTE: Marked as @definition.interface in tags, as it's semantically correct +(trait + body: (_) @class.inside) @class.around + +; parameter.inside & around +; ------------------------- + +(parameter_list + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +(argument_list + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +(instance_argument_list + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +; comment.inside +; -------------- + +(comment) @comment.inside + +; comment.around +; -------------- + +(comment)+ @comment.around \ No newline at end of file diff --git a/runtime/queries/tsq/highlights.scm b/runtime/queries/tsq/highlights.scm index b59514bc2d44..5ef6bf4c8c1a 100644 --- a/runtime/queries/tsq/highlights.scm +++ b/runtime/queries/tsq/highlights.scm @@ -41,7 +41,7 @@ (capture) @label ((predicate_name) @function - (#match? @function "^#(eq\\?|match\\?|is\\?|is-not\\?|not-same-line\\?|not-kind-eq\\?|set!|select-adjacent!|strip!)$")) + (#any-of? @function "#eq?" "#match?" "#any-of?" "#not-any-of?" "#is?" "#is-not?" "#not-same-line?" "#not-kind-eq?" "#set!" "#select-adjacent!" 
"#strip!")) (predicate_name) @error (escape_sequence) @constant.character.escape diff --git a/runtime/queries/typst/highlights.scm b/runtime/queries/typst/highlights.scm index b422e05b3e1d..0bbccede0564 100644 --- a/runtime/queries/typst/highlights.scm +++ b/runtime/queries/typst/highlights.scm @@ -55,7 +55,12 @@ ; MARKUP (item "-" @markup.list) (term ["/" ":"] @markup.list) -(heading ["=" "==" "===" "====" "====="] @markup.heading.marker) @markup.heading +(heading "=" @markup.heading.marker) @markup.heading.1 +(heading "==" @markup.heading.marker) @markup.heading.2 +(heading "===" @markup.heading.marker) @markup.heading.3 +(heading "====" @markup.heading.marker) @markup.heading.4 +(heading "=====" @markup.heading.marker) @markup.heading.5 +(heading "======" @markup.heading.marker) @markup.heading.6 (url) @tag (emph) @markup.italic (strong) @markup.bold diff --git a/runtime/queries/typst/injections.scm b/runtime/queries/typst/injections.scm index 8039b4cab7ca..06a25097930e 100644 --- a/runtime/queries/typst/injections.scm +++ b/runtime/queries/typst/injections.scm @@ -3,4 +3,5 @@ (raw_blck lang: (ident) @injection.language - (blob) @injection.content) \ No newline at end of file + (blob) @injection.content) + diff --git a/runtime/queries/unison/highlights.scm b/runtime/queries/unison/highlights.scm index 956dc5824eeb..711779295f26 100644 --- a/runtime/queries/unison/highlights.scm +++ b/runtime/queries/unison/highlights.scm @@ -9,8 +9,6 @@ ;; Keywords [ (kw_forall) - (unique_kw) - (structural_kw) (type_kw) (kw_equals) (do) @@ -51,7 +49,7 @@ (blank_pattern) @variable.builtin ;; Types -(record_field name: (wordy_id) @variable.other.member type: (wordy_id) @type) +(record_field name: (wordy_id) @variable.other.member type: (_) @type) (type_constructor (type_name (wordy_id) @constructor)) (ability_declaration type_name: (wordy_id) @type type_arg: (wordy_id) @variable.parameter) (effect (wordy_id) @special) ;; NOTE: an effect is just like a type, but in signature we special case it @@ -63,7 +61,7 @@ ;; Terms (type_signature term_name: (path)? 
@variable term_name: (wordy_id) @variable) (type_signature (wordy_id) @type) -(type_signature (delayed (wordy_id)) @type) +(type_signature (term_type(delayed(wordy_id))) @type) (term_definition param: (wordy_id) @variable.parameter) diff --git a/runtime/queries/unison/indents.scm b/runtime/queries/unison/indents.scm new file mode 100644 index 000000000000..6cb15517cfdc --- /dev/null +++ b/runtime/queries/unison/indents.scm @@ -0,0 +1,15 @@ +[ + (term_definition) + (type_declaration) + (pattern) + (tuple_or_parenthesized) + (literal_list) + (tuple_pattern) + (function_application) + (exp_if) + (constructor) + (delay_block) + (type_signature) +] @indent + +[(kw_then) (kw_else) (cases)] @indent.always @extend diff --git a/runtime/queries/vhs/highlights.scm b/runtime/queries/vhs/highlights.scm index 9a2d05cf4635..a7e1af301711 100644 --- a/runtime/queries/vhs/highlights.scm +++ b/runtime/queries/vhs/highlights.scm @@ -1,4 +1,4 @@ -[ +[ "Output" "Backspace" "Down" @@ -15,22 +15,31 @@ "Hide" "Show" ] @keyword -[ "FontFamily" +[ "Shell" + "FontFamily" "FontSize" "Framerate" + "PlaybackSpeed" "Height" "LetterSpacing" "TypingSpeed" "LineHeight" "Padding" "Theme" - "Width" ] @type + "LoopOffset" + "Width" + "BorderRadius" + "Margin" + "MarginFill" + "WindowBar" + "WindowBarSize" + "CursorBlink" ] @type [ "@" ] @operator (control) @function.macro (float) @constant.numeric.float (integer) @constant.numeric.integer (comment) @comment -(path) @string.special.path -[(string) (json)] @string -(time) @string.special.symbol \ No newline at end of file +[(path) (string) (json)] @string.special.path +(time) @string.special.symbol +(boolean) @constant.builtin.boolean diff --git a/runtime/themes/cyan_light.toml b/runtime/themes/cyan_light.toml index e18c46a9f6fb..45cb6539dbfb 100644 --- a/runtime/themes/cyan_light.toml +++ b/runtime/themes/cyan_light.toml @@ -95,7 +95,8 @@ "ui.text" = "shade05" "ui.text.focus" = { fg = "shade07", bg = "light_blue" } "ui.virtual" = "shade03" -"ui.virtual.ruler" = { bg = "shade04" } +"ui.virtual.ruler" = { bg = "shade01" } +"ui.virtual.inlay-hint" = { fg = "shade03_darker" } "ui.menu" = { fg = "shade05", bg = "shade01" } "ui.menu.selected" = { fg = "shade07", bg = "light_blue" } @@ -119,6 +120,9 @@ shade05 = "#434b6c" shade06 = "#343a54" shade07 = "#25293c" +shade03_darker = "#9199bb" +shade04_lighter = "#616d9d" + background = "#f2f3f7" foreground = "#25293c" @@ -133,7 +137,6 @@ blue = "#0073E6" dark_blue = "#185b93" darker_blue = "#000080" - purple = "#660E7A" light_purple = "#ED9CFF" @@ -142,7 +145,6 @@ green = "#00733B" light_green = "#5DCE87" green_blue = "#458383" - yellow = "#808000" dark_yellow = "#7A7A43" diff --git a/runtime/themes/dracula.toml b/runtime/themes/dracula.toml index 1253544f2ed3..1473c1a835e5 100644 --- a/runtime/themes/dracula.toml +++ b/runtime/themes/dracula.toml @@ -60,10 +60,10 @@ "diff.minus" = { fg = "red" } "ui.background" = { fg = "foreground", bg = "background" } "ui.cursor.match" = { fg = "foreground", bg = "grey" } -"ui.cursor" = { fg = "background", bg = "purple", modifiers = ["dim"] } -"ui.cursor.normal" = { fg = "background", bg = "purple", modifiers = ["dim"] } -"ui.cursor.insert" = { fg = "background", bg = "green", modifiers = ["dim"] } -"ui.cursor.select" = { fg = "background", bg = "cyan", modifiers = ["dim"] } +"ui.cursor" = { fg = "background", bg = "purple", modifiers = ["dim"] } +"ui.cursor.normal" = { fg = "background", bg = "purple", modifiers = ["dim"] } +"ui.cursor.insert" = { fg = "background", bg = "green", modifiers = ["dim"] 
} +"ui.cursor.select" = { fg = "background", bg = "cyan", modifiers = ["dim"] } "ui.cursor.primary.normal" = { fg = "background", bg = "purple" } "ui.cursor.primary.insert" = { fg = "background", bg = "green" } "ui.cursor.primary.select" = { fg = "background", bg = "cyan" } @@ -74,16 +74,16 @@ "ui.linenr" = { fg = "comment" } "ui.linenr.selected" = { fg = "foreground" } "ui.menu" = { fg = "foreground", bg = "current_line" } -"ui.menu.selected" = { fg = "current_line", bg = "purple", modifiers = ["dim"] } +"ui.menu.selected" = { fg = "current_line", bg = "purple", modifiers = ["dim"] } "ui.menu.scroll" = { fg = "foreground", bg = "current_line" } "ui.popup" = { fg = "foreground", bg = "black" } "ui.selection.primary" = { bg = "current_line" } "ui.selection" = { bg = "selection" } "ui.statusline" = { fg = "foreground", bg = "darker" } "ui.statusline.inactive" = { fg = "comment", bg = "darker" } -"ui.statusline.normal" = { fg = "black", bg = "purple" } -"ui.statusline.insert" = { fg = "black", bg = "green" } -"ui.statusline.select" = { fg = "black", bg = "cyan" } +"ui.statusline.normal" = { fg = "black", bg = "purple", modifiers = ["bold"] } +"ui.statusline.insert" = { fg = "black", bg = "green", modifiers = ["bold"] } +"ui.statusline.select" = { fg = "black", bg = "cyan", modifiers = ["bold"] } "ui.text" = { fg = "foreground" } "ui.text.focus" = { fg = "cyan" } "ui.window" = { fg = "foreground" } @@ -133,3 +133,4 @@ green = "#50fa7b" purple = "#BD93F9" cyan = "#8be9fd" pink = "#ff79c6" + diff --git a/runtime/themes/github_dark.toml b/runtime/themes/github_dark.toml index 4f9aa562b0ed..6b3354848c97 100644 --- a/runtime/themes/github_dark.toml +++ b/runtime/themes/github_dark.toml @@ -60,6 +60,7 @@ label = "scale.red.3" "ui.text.focus" = { fg = "fg.default" } "ui.text.inactive" = "fg.subtle" "ui.virtual" = { fg = "scale.gray.6" } +"ui.virtual.ruler" = { bg = "canvas.subtle" } "ui.selection" = { bg = "scale.blue.8" } "ui.selection.primary" = { bg = "scale.blue.7" } diff --git a/runtime/themes/github_light.toml b/runtime/themes/github_light.toml index 3e2269698339..e6912a987c60 100644 --- a/runtime/themes/github_light.toml +++ b/runtime/themes/github_light.toml @@ -60,6 +60,7 @@ label = "scale.red.5" "ui.text.focus" = { fg = "fg.default" } "ui.text.inactive" = "fg.subtle" "ui.virtual" = { fg = "scale.gray.2" } +"ui.virtual.ruler" = { bg = "canvas.subtle" } "ui.selection" = { bg = "scale.blue.0" } "ui.selection.primary" = { bg = "scale.blue.1" } diff --git a/runtime/themes/gruvbox_light_hard.toml b/runtime/themes/gruvbox_light_hard.toml new file mode 100644 index 000000000000..4a48116ed7d8 --- /dev/null +++ b/runtime/themes/gruvbox_light_hard.toml @@ -0,0 +1,7 @@ +# Author : Twinkle +# The theme uses the gruvbox light palette with hard contrast: github.com/morhetz/gruvbox + +inherits = "gruvbox_light" + +[palette] +bg0 = "#f9f5d7" # main background diff --git a/runtime/themes/gruvbox_light_soft.toml b/runtime/themes/gruvbox_light_soft.toml new file mode 100644 index 000000000000..a29b23734b43 --- /dev/null +++ b/runtime/themes/gruvbox_light_soft.toml @@ -0,0 +1,7 @@ +# Author : Twinkle +# The theme uses the gruvbox light palette with soft contrast: github.com/morhetz/gruvbox + +inherits = "gruvbox_light" + +[palette] +bg0 = "#f2e5bc" # main background diff --git a/runtime/themes/monokai_pro.toml b/runtime/themes/monokai_pro.toml index 57bede94aefb..a898671f64bd 100644 --- a/runtime/themes/monokai_pro.toml +++ b/runtime/themes/monokai_pro.toml @@ -30,6 +30,11 @@ "ui.cursor.match" = { bg = "base4" 
} "ui.cursorline" = { bg = "base1" } +# bufferline, inlay hints +"ui.bufferline" = { fg = "base6", bg = "base8x0c" } +"ui.bufferline.active" = { fg = "base8", bg = "base4" } +"ui.virtual.inlay-hint" = { fg = "base6" } + # comments, nord3 based lighter color "comment" = { fg = "base5", modifiers = ["italic"] } "ui.linenr" = { fg = "base5" } diff --git a/runtime/themes/monokai_pro_machine.toml b/runtime/themes/monokai_pro_machine.toml index b292e6b02517..519c9ed3999c 100644 --- a/runtime/themes/monokai_pro_machine.toml +++ b/runtime/themes/monokai_pro_machine.toml @@ -27,6 +27,11 @@ "ui.cursor.match" = { bg = "base4" } "ui.cursorline" = { bg = "base1" } +# bufferline, inlay hints +"ui.bufferline" = { fg = "base6", bg = "base8x0c" } +"ui.bufferline.active" = { fg = "base8", bg = "base4" } +"ui.virtual.inlay-hint" = { fg = "base6" } + # comments, nord3 based lighter color "comment" = { fg = "base5", modifiers = ["italic"] } "ui.linenr" = { fg = "base5" } diff --git a/runtime/themes/monokai_pro_octagon.toml b/runtime/themes/monokai_pro_octagon.toml index 3236fc167407..9be6bdc00e2d 100644 --- a/runtime/themes/monokai_pro_octagon.toml +++ b/runtime/themes/monokai_pro_octagon.toml @@ -30,6 +30,11 @@ "ui.cursor.match" = { bg = "base4" } "ui.cursorline" = { bg = "base1" } +# bufferline, inlay hints +"ui.bufferline" = { fg = "base6", bg = "base8x0c" } +"ui.bufferline.active" = { fg = "base8", bg = "base4" } +"ui.virtual.inlay-hint" = { fg = "base6" } + # comments, nord3 based lighter color "comment" = { fg = "base5", modifiers = ["italic"] } "ui.linenr" = { fg = "base5" } diff --git a/runtime/themes/monokai_pro_ristretto.toml b/runtime/themes/monokai_pro_ristretto.toml index f897bddbd66b..d002d13fa949 100644 --- a/runtime/themes/monokai_pro_ristretto.toml +++ b/runtime/themes/monokai_pro_ristretto.toml @@ -27,6 +27,11 @@ "ui.cursor.match" = { bg = "base4" } "ui.cursorline" = { bg = "base1" } +# bufferline, inlay hints +"ui.bufferline" = { fg = "base6", bg = "base8x0c" } +"ui.bufferline.active" = { fg = "base8", bg = "base4" } +"ui.virtual.inlay-hint" = { fg = "base6" } + # comments, nord3 based lighter color "comment" = { fg = "base5", modifiers = ["italic"] } "ui.linenr" = { fg = "base5" } diff --git a/runtime/themes/monokai_pro_spectrum.toml b/runtime/themes/monokai_pro_spectrum.toml index 74533404eb06..3cec2f1a3f10 100644 --- a/runtime/themes/monokai_pro_spectrum.toml +++ b/runtime/themes/monokai_pro_spectrum.toml @@ -27,6 +27,11 @@ "ui.cursor.match" = { bg = "base4" } "ui.cursorline" = { bg = "base1" } +# bufferline, inlay hints +"ui.bufferline" = { fg = "base6", bg = "base8x0c" } +"ui.bufferline.active" = { fg = "base8", bg = "base4" } +"ui.virtual.inlay-hint" = { fg = "base6" } + # comments, nord3 based lighter color "comment" = { fg = "base5", modifiers = ["italic"] } "ui.linenr" = { fg = "base5" } diff --git a/runtime/themes/onedark.toml b/runtime/themes/onedark.toml index 1db2aff862aa..eae11172e809 100644 --- a/runtime/themes/onedark.toml +++ b/runtime/themes/onedark.toml @@ -73,9 +73,13 @@ "ui.statusline" = { fg = "white", bg = "light-black" } "ui.statusline.inactive" = { fg = "light-gray", bg = "light-black" } -"ui.statusline.normal" = { fg = "light-black", bg = "blue" } -"ui.statusline.insert" = { fg = "light-black", bg = "green" } -"ui.statusline.select" = { fg = "light-black", bg = "purple" } +"ui.statusline.normal" = { fg = "light-black", bg = "blue", modifiers = ["bold"] } +"ui.statusline.insert" = { fg = "light-black", bg = "green", modifiers = ["bold"] } +"ui.statusline.select" = { 
fg = "light-black", bg = "purple", modifiers = ["bold"] } + +"ui.bufferline" = { fg = "light-gray", bg = "light-black" } +"ui.bufferline.active" = { fg = "light-black", bg = "blue", underline = { color = "light-black", style = "line" } } +"ui.bufferline.background" = { bg = "light-black" } "ui.text" = { fg = "white" } "ui.text.focus" = { fg = "white", bg = "light-black", modifiers = ["bold"] } diff --git a/runtime/themes/onedarker.toml b/runtime/themes/onedarker.toml index 88b2871aba2b..307871445716 100644 --- a/runtime/themes/onedarker.toml +++ b/runtime/themes/onedarker.toml @@ -75,6 +75,11 @@ "ui.statusline.normal" = { fg = "light-black", bg = "purple" } "ui.statusline.insert" = { fg = "light-black", bg = "green" } "ui.statusline.select" = { fg = "light-black", bg = "cyan" } + +"ui.bufferline" = { fg = "light-gray", bg = "light-black" } +"ui.bufferline.active" = { fg = "light-black", bg = "blue", underline = { color = "light-black", style = "line" } } +"ui.bufferline.background" = { bg = "light-black" } + "ui.text" = { fg = "white" } "ui.text.focus" = { fg = "white", bg = "light-black", modifiers = ["bold"] } diff --git a/runtime/themes/sonokai.toml b/runtime/themes/sonokai.toml index f60bd4dbf55b..c7c9adc06e1f 100644 --- a/runtime/themes/sonokai.toml +++ b/runtime/themes/sonokai.toml @@ -72,6 +72,7 @@ "ui.menu.selected" = { fg = "bg0", bg = "green" } "ui.virtual.whitespace" = { fg = "grey_dim" } "ui.virtual.ruler" = { bg = "grey_dim" } +"ui.virtual.inlay-hint" = { fg = "grey_dim" } info = { fg = 'green', bg = 'bg2' } hint = { fg = 'blue', bg = 'bg2', modifiers = ['bold'] } diff --git a/runtime/themes/term16_dark.toml b/runtime/themes/term16_dark.toml new file mode 100644 index 000000000000..b34a4b4e22f2 --- /dev/null +++ b/runtime/themes/term16_dark.toml @@ -0,0 +1,80 @@ +# Author: dgkf + +"ui.background" = { } +"ui.background.separator" = { fg = "red" } +"ui.cursor" = { fg = "light-gray", modifiers = ["reversed"] } +"ui.cursor.match" = { fg = "light-yellow", modifiers = ["reversed"] } +"ui.cursor.primary" = { fg = "light-gray", modifiers = ["reversed"] } +"ui.cursor.secondary" = { fg = "gray", modifiers = ["reversed"] } +"ui.cursorline.primary" = { bg = "black" } +"ui.gutter" = { } +"ui.gutter.selected" = { bg = "black" } +"ui.help" = { fg = "white", bg = "black" } +"ui.linenr" = { fg = "gray", modifiers = ["bold"] } +"ui.linenr.selected" = { fg = "white", modifiers = ["bold"] } +"ui.menu" = { fg = "light-gray", bg = "gray" } +"ui.menu.selected" = { modifiers = ["reversed"] } +"ui.menu.scroll" = { fg = "light-blue" } +"ui.popup" = { bg = "black" } +"ui.selection" = { bg = "gray" } +"ui.statusline" = { fg = "light-gray", bg = "gray" } +"ui.statusline.inactive" = { bg = "black" } +"ui.virtual" = { bg = "black" } +"ui.virtual.indent-guide" = { fg = "gray" } +"ui.virtual.whitespace" = {} +"ui.virtual.wrap" = { fg = "gray" } +"ui.virtual.inlay-hint" = { fg = "light-gray", modifiers = ["dim", "italic"] } +"ui.virtual.inlay-hint.parameter" = { fg = "yellow", modifiers = ["dim", "italic"] } +"ui.virtual.inlay-hint.type" = { fg = "blue", modifiers = ["dim", "italic"] } +"ui.window" = { fg = "gray", modifiers = ["dim"] } + +"comment" = { fg = "light-gray", modifiers = ["italic", "dim"] } + +"attribute" = "light-yellow" +"constant" = { fg = "light-yellow", modifiers = ["bold", "dim"] } +"constant.numeric" = "light-yellow" +"constant.character.escape" = "light-cyan" +"constructor" = "light-blue" +"function" = "light-blue" +"function.macro" = "light-red" +"function.builtin" = { fg = "light-blue", 
modifiers = ["bold"] } +"tag" = { fg = "light-magenta", modifiers = ["dim"] } +"type" = "blue" +"type.builtin" = { fg = "blue", modifiers = ["bold"] } +"type.enum.variant" = { fg = "light-magenta", modifiers = ["dim"] } +"string" = "light-green" +"special" = "light-red" +"variable" = "white" +"variable.parameter" = { fg = "light-yellow", modifiers = ["italic"] } +"variable.other.member" = "light-green" +"keyword" = "light-magenta" +"keyword.control.exception" = "light-red" +"keyword.directive" = { fg = "light-yellow", modifiers = ["bold"] } +"keyword.operator" = { fg = "light-blue", modifiers = ["bold"] } +"label" = "light-green" +"namespace" = { fg = "blue", modifiers = ["dim"] } + +"markup.heading" = "light-blue" +"markup.list" = "light-red" +"markup.bold" = { fg = "light-cyan", modifiers = ["bold"] } +"markup.italic" = { fg = "light-blue", modifiers = ["italic"] } +"markup.strikethrough" = { modifiers = ["crossed_out"] } +"markup.link.url" = { fg = "magenta", modifiers = ["dim"] } +"markup.link.text" = "light-magenta" +"markup.quote" = "light-cyan" +"markup.raw" = "light-green" + +"diff.plus" = "light-green" +"diff.delta" = "light-yellow" +"diff.minus" = "light-red" + +"diagnostic.hint" = { underline = { color = "gray", style = "curl" } } +"diagnostic.info" = { underline = { color = "light-cyan", style = "curl" } } +"diagnostic.warning" = { underline = { color = "light-yellow", style = "curl" } } +"diagnostic.error" = { underline = { color = "light-red", style = "curl" } } + +"info" = "light-cyan" +"hint" = { fg = "light-gray", modifiers = ["dim"] } +"debug" = "white" +"warning" = "yellow" +"error" = "light-red" diff --git a/runtime/themes/term16_light.toml b/runtime/themes/term16_light.toml new file mode 100644 index 000000000000..a02784b25072 --- /dev/null +++ b/runtime/themes/term16_light.toml @@ -0,0 +1,85 @@ +# Author: dgkf +# Modified from base16_terminal, Author: NNB + +inherits = "term16_dark" + +"ui.background.separator" = "light-gray" +"ui.cursor" = { fg = "gray", modifiers = ["reversed"] } +"ui.cursor.match" = { fg = "yellow", modifiers = ["reversed"] } +"ui.cursor.primary" = { fg = "black", modifiers = ["reversed"] } +"ui.cursor.secondary" = { fg = "gray", modifiers = ["reversed"] } +"ui.cursorline.primary" = { bg = "white" } +"ui.cursorline.secondary" = { bg = "white" } +"ui.cursorcolumn.primary" = { bg = "white" } +"ui.cursorcolumn.secondary" = { bg = "white" } +"ui.gutter" = { } +"ui.gutter.selected" = { bg = "white" } +"ui.linenr" = { fg = "gray", modifiers = ["dim"] } +"ui.linenr.selected" = { fg = "black", modifiers = ["bold"] } +"ui.menu" = { bg = "light-gray" } +"ui.menu.selected" = { fg = "white", bg = "gray", modifiers = ["bold"] } +"ui.menu.scroll" = { fg = "light-blue" } +"ui.help" = { } +"ui.text" = { } +"ui.text.focus" = { } +"ui.popup" = { bg = "white" } +"ui.selection" = { bg = "light-gray" } +"ui.statusline" = { bg = "white" } +"ui.statusline.inactive" = { fg = "gray", modifiers = ["underlined"] } +"ui.statusline.insert" = { fg = "white", bg = "blue" } +"ui.statusline.select" = { fg = "white", bg = "magenta" } +"ui.virtual" = { fg = "light-gray" } +"ui.virtual.indent-guide" = { fg = "light-gray", modifiers = ["dim"] } +"ui.virtual.ruler" = { bg = "white" } +"ui.virtual.wrap" = { fg = "light-gray" } +"ui.window" = { fg = "gray", modifiers = ["dim"] } + +"comment" = { fg = "gray", modifiers = ["italic", "dim"] } + +"attribute" = "yellow" +"constant" = { fg = "yellow", modifiers = ["bold"] } +"constant.numeric" = { fg = "yellow", modifiers = ["bold"] } 
+"constant.character.escape" = "blue" +"constructor" = "blue" +"function" = "blue" +"function.builtin" = { fg = "blue", modifiers = ["bold"] } +"tag" = { fg = "magenta", modifiers = ["dim"] } +"type" = "blue" +"type.builtin" = { fg = "blue", modifiers = ["bold"] } +"type.enum.variant" = { fg = "magenta", modifiers = ["dim"] } +"string" = "green" +"special" = "red" +"variable" = { fg = "black", modifiers = ["dim"] } +"variable.parameter" = { fg = "red", modifiers = ["italic", "dim"] } +"variable.other.member" = "green" +"keyword" = "magenta" +"keyword.control.exception" = "red" +"keyword.directive" = { fg = "yellow", modifiers = ["bold"] } +"keyword.operator" = { fg = "blue", modifiers = ["bold"] } +"label" = "red" +"namespace" = { fg = "blue", modifiers = ["dim"] } + +"markup.heading" = { fg = "blue", modifiers = ["bold"] } +"markup.list" = "red" +"markup.bold" = { fg = "cyan", modifiers = ["bold"] } +"markup.italic" = { fg = "blue", modifiers = ["italic"] } +"markup.strikethrough" = { modifiers = ["crossed_out"] } +"markup.link.url" = { fg = "magenta", modifiers = ["dim"] } +"markup.link.text" = { fg = "magenta", modifiers = ["bold"] } +"markup.quote" = "cyan" +"markup.raw" = "blue" + +"diff.plus" = "green" +"diff.delta" = "yellow" +"diff.minus" = "red" + +"diagnostic.hint" = { underline = { color = "cyan", style = "curl" } } +"diagnostic.info" = { underline = { color = "blue", style = "curl" } } +"diagnostic.warning" = { underline = { color = "yellow", style = "curl" } } +"diagnostic.error" = { underline = { color = "red", style = "curl" } } + +"hint" = "cyan" +"info" = "blue" +"debug" = "light-yellow" +"warning" = "yellow" +"error" = "red" diff --git a/runtime/themes/tokyonight.toml b/runtime/themes/tokyonight.toml index cc99689fce7b..95ebd40872cd 100644 --- a/runtime/themes/tokyonight.toml +++ b/runtime/themes/tokyonight.toml @@ -1,75 +1,96 @@ -# Author: Paul Graydon +# Author: Paul Graydon -"comment" = { fg = "comment", modifiers = ["italic"] } -"constant" = { fg = "orange" } +attribute = { fg = "cyan" } +comment = { fg = "comment", modifiers = ["italic"] } +"comment.block.documentation" = { fg = "yellow" } +constant = { fg = "orange" } +"constant.builtin" = { fg = "aqua" } +"constant.character" = { fg = "light-green" } "constant.character.escape" = { fg = "magenta" } -"function" = { fg = "blue", modifiers = ["italic"] } +constructor = { fg = "aqua" } +function = { fg = "blue", modifiers = ["italic"] } +"function.builtin" = { fg = "aqua" } "function.macro" = { fg = "cyan" } -"keyword" = { fg = "cyan", modifiers = ["italic"] } +"function.special" = { fg = "cyan" } +keyword = { fg = "purple", modifiers = ["italic"] } "keyword.control" = { fg = "magenta" } "keyword.control.import" = { fg = "cyan" } -"keyword.operator" = { fg = "turquoise" } -"keyword.function" = { fg = "magenta", modifiers = ["italic"] } -"operator" = { fg = "turquoise" } -"punctuation" = { fg = "turquoise" } -"string" = { fg = "light-green" } -"string.regexp" = { fg = "light-blue" } -"tag" = { fg = "red" } -"type" = { fg = "teal" } -"namespace" = { fg = "blue" } -"variable" = { fg = "white" } +"keyword.control.return" = { fg = "purple", modifiers = ["italic"] } +"keyword.directive" = { fg = "cyan" } +"keyword.function" = { fg = "magenta" } +"keyword.operator" = { fg = "magenta" } +label = { fg = "blue" } +namespace = { fg = "cyan" } +operator = { fg = "turquoise" } +punctuation = { fg = "turquoise" } +special = { fg = "aqua" } +string = { fg = "light-green" } +"string.regexp" = { fg = "light-cyan" } +"string.special" = 
{ fg = "aqua" } +tag = { fg = "magenta" } +type = { fg = "aqua" } +"type.builtin" = { fg = "aqua" } +"type.enum.variant" = { fg = "orange" } +variable = { fg = "fg" } "variable.builtin" = { fg = "red" } "variable.other.member" = { fg = "green" } "variable.parameter" = { fg = "yellow", modifiers = ["italic"] } -"diff.plus" = { fg = "green" } -"diff.delta" = { fg = "orange" } -"diff.minus" = { fg = "red" } +"markup.bold" = { modifiers = ["bold"] } +"markup.heading" = { fg = "blue", modifiers = ["bold"] } +"markup.heading.completion" = { bg = "bg-menu", fg = "fg" } +"markup.heading.hover" = { bg = "fg-selected" } +"markup.italic" = { modifiers = ["italic"] } +"markup.link" = { fg = "blue", underline = { style = "line" } } +"markup.link.label" = { fg = "teal" } +"markup.link.text" = { fg = "teal" } +"markup.link.url" = { underline = { style = "line" } } +"markup.list" = { fg = "orange", modifiers = ["bold"] } +"markup.normal.completion" = { fg = "comment" } +"markup.normal.hover" = { fg = "fg-dark" } +"markup.raw" = { fg = "teal" } +"markup.raw.inline" = { bg = "black", fg = "blue" } +"markup.strikethrough" = { modifiers = ["crossed_out"] } -"ui.background" = { fg = "foreground", bg = "background" } -"ui.cursor" = { modifiers = ["reversed"] } -"ui.cursor.match" = { fg = "orange", modifiers = ["bold"] } -"ui.cursor.primary" = { modifiers = ["reversed"] } -"ui.cursorline.primary" = { bg = "background_menu" } -"ui.help" = { fg = "foreground", bg = "background_menu" } -"ui.linenr" = { fg = "foreground_gutter" } -"ui.linenr.selected" = { fg = "foreground" } -"ui.menu" = { fg = "foreground", bg = "background_menu" } -"ui.menu.selected" = { bg = "background_highlight" } -"ui.popup" = { fg = "foreground", bg = "background_menu" } -"ui.selection" = { bg = "background_highlight" } -"ui.selection.primary" = { bg = "background_highlight" } -"ui.statusline" = { fg = "foreground", bg = "background_menu" } -"ui.statusline.inactive" = { fg = "foreground_gutter", bg = "background_menu" } -"ui.statusline.normal" = { fg = "black", bg = "blue" } -"ui.statusline.insert" = { fg = "black", bg = "green" } -"ui.statusline.select" = { fg = "black", bg = "magenta" } -"ui.text" = { fg = "foreground" } -"ui.text.focus" = { fg = "cyan" } -"ui.virtual.ruler" = { bg = "foreground_gutter" } -"ui.virtual.whitespace" = { fg = "foreground_gutter" } -"ui.virtual.inlay-hint" = { fg = "comment" } -"ui.window" = { fg = "black" } +"diff.delta" = { fg = "change" } +"diff.delta.moved" = { fg = "blue" } +"diff.minus" = { fg = "delete" } +"diff.plus" = { fg = "add" } -"error" = { fg = "red" } -"warning" = { fg = "yellow" } -"info" = { fg = "blue" } -"hint" = { fg = "teal" } -"diagnostic.error" = { underline = { style = "curl", color = "red" } } -"diagnostic.warning" = { underline = { style = "curl", color = "yellow" } } -"diagnostic.info" = { underline = { style = "curl", color = "blue" } } -"diagnostic.hint" = { underline = { style = "curl", color = "teal" } } -"special" = { fg = "orange" } +error = { fg = "error" } +hint = { fg = "hint" } +info = { fg = "info" } +warning = { fg = "yellow" } +"diagnostic.error" = { underline = { style = "curl" } } +"diagnostic.warning" = { underline = { style = "curl" } } +"diagnostic.info" = { underline = { style = "curl" } } +"diagnostic.hint" = { underline = { style = "curl" } } -"markup.heading" = { fg = "cyan", modifiers = ["bold"] } -"markup.list" = { fg = "cyan" } -"markup.bold" = { fg = "orange", modifiers = ["bold"] } -"markup.italic" = { fg = "yellow", modifiers = ["italic"] } 
-"markup.strikethrough" = { modifiers = ["crossed_out"] } -"markup.link.url" = { fg = "green" } -"markup.link.text" = { fg = "light-gray" } -"markup.quote" = { fg = "yellow", modifiers = ["italic"] } -"markup.raw" = { fg = "cyan" } +"ui.background" = { bg = "bg", fg = "fg" } +"ui.cursor" = { modifiers = ["reversed"] } +"ui.cursor.match" = { fg = "orange", modifiers = ["bold"] } +"ui.cursorline.primary" = { bg = "bg-menu" } +"ui.help" = { bg = "bg-menu", fg = "fg" } +"ui.linenr" = { fg = "fg-gutter" } +"ui.linenr.selected" = { fg = "fg-linenr" } +"ui.menu" = { bg = "bg-menu", fg = "fg" } +"ui.menu.selected" = { bg = "fg-selected" } +"ui.popup" = { bg = "bg-menu", fg = "border-highlight" } +"ui.selection" = { bg = "bg-highlight" } +"ui.selection.primary" = { bg = "bg-highlight" } +"ui.statusline" = { bg = "bg-menu", fg = "fg-dark" } +"ui.statusline.inactive" = { bg = "bg-menu", fg = "fg-gutter" } +"ui.statusline.normal" = { bg = "blue", fg = "bg", modifiers = ["bold"] } +"ui.statusline.insert" = { bg = "light-green", fg = "bg", modifiers = ["bold"] } +"ui.statusline.select" = { bg = "magenta", fg = "bg", modifiers = ["bold"] } +"ui.text" = { bg = "bg", fg = "fg" } +"ui.text.focus" = { bg = "bg-visual" } +"ui.text.inactive" = { fg = "comment", modifiers = ["italic"] } +"ui.text.info" = { bg = "bg-menu", fg = "fg" } +"ui.virtual.ruler" = { bg = "fg-gutter" } +"ui.virtual.whitespace" = { fg = "fg-gutter" } +"ui.virtual.inlay-hint" = { bg = "bg-inlay", fg = "teal" } +"ui.window" = { fg = "border", modifiers = ["bold"] } [palette] red = "#f7768e" @@ -77,20 +98,34 @@ orange = "#ff9e64" yellow = "#e0af68" light-green = "#9ece6a" green = "#73daca" +aqua = "#2ac3de" +teal = "#1abc9c" turquoise = "#89ddff" light-cyan = "#b4f9f8" -teal = "#2ac3de" cyan = "#7dcfff" blue = "#7aa2f7" +purple = "#9d7cd8" magenta = "#bb9af7" -white = "#c0caf5" -light-gray = "#9aa5ce" -parameters = "#cfc9c2" comment = "#565f89" black = "#414868" -foreground = "#a9b1d6" -foreground_highlight = "#c0caf5" -foreground_gutter = "#363b54" -background = "#1a1b26" -background_highlight = "#30374b" -background_menu = "#16161e" + +add = "#449dab" +change = "#6183bb" +delete = "#914c54" + +error = "#db4b4b" +hint = "#1abc9c" +info = "#0db9d7" + +fg = "#c0caf5" +fg-dark = "#a9b1d6" +fg-gutter = "#3b4261" +fg-linenr = "#737aa2" +fg-selected = "#343a55" +border = "#15161e" +border-highlight = "#27a1b9" +bg = "#1a1b26" +bg-inlay = "#1a2b32" +bg-highlight = "#292e42" +bg-menu = "#16161e" +bg-visual = "#283457" diff --git a/runtime/themes/tokyonight_day.toml b/runtime/themes/tokyonight_day.toml new file mode 100644 index 000000000000..54caf8d6fb73 --- /dev/null +++ b/runtime/themes/tokyonight_day.toml @@ -0,0 +1,41 @@ +# Author: Paul Graydon + +inherits = "tokyonight" + +[palette] +red = "#f52a65" +orange = "#b15c00" +yellow = "#8c6c3e" +light-green = "#587539" +green = "#387068" +aqua = "#188092" +teal = "#118c74" +turquoise = "#006a83" +light-cyan = "#2e5857" +cyan = "#007197" +blue = "#2e7de9" +purple = "#7847bd" +magenta = "#9854f1" +comment = "#848cb5" +black = "#a1a6c5" + +add = "#aecde6" +change = "#d6d8e3" +delete = "#dfccd4" + +error = "#c64343" +hint = "#118c74" +info = "#07879d" + +fg = "#3760bf" +fg-dark = "#6172b0" +fg-gutter = "#a8aecb" +fg-linenr = "#68709a" +fg-selected = "#b3b8d1" +border = "#e9e9ed" +border-highlight = "#2496ac" +bg = "#e1e2e7" +bg-inlay = "#acd7eb" +bg-highlight = "#c4c8da" +bg-menu = "#e9e9ec" +bg-visual = "#b6bfe2" diff --git a/runtime/themes/tokyonight_moon.toml b/runtime/themes/tokyonight_moon.toml new 
file mode 100644 index 000000000000..8468051e6714 --- /dev/null +++ b/runtime/themes/tokyonight_moon.toml @@ -0,0 +1,41 @@ +# Author: Paul Graydon + +inherits = "tokyonight" + +[palette] +red = "#ff757f" +orange = "#ff966c" +yellow = "#ffc777" +light-green = "#c3e88d" +green = "#4fd6be" +aqua = "#65bcff" +teal = "#4fd6be" +turquoise = "#89ddff" +light-cyan = "#b4f9f8" +cyan = "#86e1fc" +blue = "#82aaff" +purple = "#fca7ea" +magenta = "#c099ff" +comment = "#636da6" +black = "#444a73" + +add = "#b8db87" +change = "#7ca1f2" +delete = "#e26a75" + +error = "#c53b53" +hint = "#4fd6be" +info = "#0db9d7" + +fg = "#c8d3f5" +fg-dark = "#828bb8" +fg-gutter = "#3b4261" +fg-linenr = "#737aa2" +fg-selected = "#363c58" +border = "#1b1d2b" +border-highlight = "#589ed7" +bg = "#222436" +bg-inlay = "#273644" +bg-highlight = "#2f334d" +bg-menu = "#1e2030" +bg-visual = "#2d3f76" diff --git a/runtime/themes/tokyonight_storm.toml b/runtime/themes/tokyonight_storm.toml index e82c43409fe6..5ec4a16a008f 100644 --- a/runtime/themes/tokyonight_storm.toml +++ b/runtime/themes/tokyonight_storm.toml @@ -1,8 +1,12 @@ -# Author: Paul Graydon +# Author: Paul Graydon inherits = "tokyonight" [palette] -background = "#24283b" -background_highlight = "#373d5a" -background_menu = "#1f2335" +border = "#1d202f" +bg = "#24283b" +bg-inlay = "#233745" +bg-highlight = "#373d5a" +bg-menu = "#1f2335" +bg-visual = "#2e3c64" +border-highlight = "#29a4bd" diff --git a/runtime/themes/ttox.toml b/runtime/themes/ttox.toml new file mode 100644 index 000000000000..16909cc7013e --- /dev/null +++ b/runtime/themes/ttox.toml @@ -0,0 +1,31 @@ +# Author : Tomas Ruud + +"ui.selection" = { fg = "white", bg = "gray" } +"ui.cursor" = { fg = "black", bg = "light-gray" } +"ui.cursor.primary" = { fg = "black", bg = "light-gray" } +"ui.cursor.match" = { modifiers = ["underlined"] } +"ui.background.separator" = "gray" +"ui.linenr" = "gray" +"ui.linenr.selected" = { fg = "white", bg = "gray" } +"ui.statusline" = { bg = "black", fg = "white" } +"ui.menu" = { fg = "white", bg = "black" } +"ui.menu.selected" = { bg = "light-gray", fg = "black" } +"ui.popup" = { fg = "white", bg = "black" } +"ui.help" = { fg = "white", bg = "black" } +"ui.virtual.ruler" = { underline = { style = "line"} } + +"string" = { bg = "light-green", fg = "black" } +"constant" = { bg = "light-cyan", fg = "black" } +"comment" = { bg = "light-magenta", fg = "black" } + +"diff.plus" = "green" +"diff.minus" = "red" +"diff.delta" = "gray" + +"warning" = { fg = "black", bg = "light-yellow" } +"error" = { fg = "black", bg = "light-red" } +"hint" = { fg = "black", bg = "light-blue" } + +"diagnostic.warning" = { fg = "black", bg = "light-yellow" } +"diagnostic.error" = { fg = "black", bg = "light-red" } +"diagnostic.hint" = { fg = "black", bg = "light-blue" } diff --git a/runtime/themes/voxed.toml b/runtime/themes/voxed.toml new file mode 100644 index 000000000000..e55b46e5d329 --- /dev/null +++ b/runtime/themes/voxed.toml @@ -0,0 +1,102 @@ +attribute = "buff" +keyword = "sglow" +"keyword.directive" = "defineish" +namespace = "blue" +punctuation = "white" +"punctuation.delimiter" = "functionish" +operator = "greenish" +special = "maize" +"variable.other.member" = "bsienna" +variable = "tan" +"variable.parameter" = { fg = "parameters" } +"variable.builtin" = "white" +type = "light-blue" +"type.builtin" = "functionish" +constructor = "typeish" +function = "functionish" +"function.macro" = "blue" +"function.builtin" = "typeish" +tag = "functionish" +comment = "bgrey" +constant = "tan" 
+"constant.builtin" = "#D38588" +string = "redish" +"constant.numeric" = "functionish" +"constant.character.escape" = "cyan" +label = "yellow" + +"markup.heading" = "functionish" +"markup.list" = "status-two" +"markup.quote" = "tan" +"markup.bold" = { fg = "sglow", modifiers = ["bold"] } +"markup.italic" = { fg = "sglow", modifiers = ["italic"] } +"markup.strikethrough" = { modifiers = ["crossed_out"] } +"markup.link.url" = { fg = "sglow", modifiers = ["underlined"] } +"markup.link.text" = "greenish" +"markup.raw" = "light-grey" + +"diff.plus" = "#7DDF64" +"diff.minus" = "#F22B29" +"diff.delta" = "#6f44f0" + +"ui.background" = { fg = "#25262B", bg="#1f1f21" } +"ui.background.separator" = { fg = "sglow" } +"ui.linenr" = { fg = "light-grey", modifiers = ["italic"] } +"ui.linenr.selected" = { fg = "bpink", modifiers = ["bold"] } +"ui.statusline" = { fg = "black", bg = "light-grey", modifiers = ["bold"] } +"ui.statusline.inactive" = { fg = "black", bg = "bgrey-two" } +"ui.popup" = { fg = "bgrey", bg = "#25262B" } +"ui.window" = { fg = "white" } +"ui.help" = { bg = "#3f4047", fg = "light-grey" } + +"ui.text" = { fg = "white" } +"ui.text.focus" = { fg = "maize", bg = "bgrey" } +"ui.text.inactive" = "bgrey" +"ui.virtual" = { fg = "blue" } +"ui.virtual.ruler" = { bg = "bgrey-two" } +"ui.virtual.indent-guide" = { fg = "bpink" } + +"ui.selection" = { bg = "maize" } +"ui.selection.primary" = { fg = "white", bg = "bgrey" } +"ui.cursor.select" = { bg = "white" } +"ui.cursor.insert" = { bg = "white" } +"ui.cursor.match" = { fg = "#212121", bg = "#6C6999" } +"ui.cursor" = { bg = "bgrey-two", modifiers = ["reversed"] } +"ui.cursorline.primary" = { bg = "white" } +"ui.highlight" = { bg = "white" } +"ui.highlight.frameline" = { bg = "#634450" } +"ui.debug" = { fg = "#634450" } +"ui.debug.breakpoint" = { fg = "bpink" } +"ui.menu" = { fg = "white", bg = "#23232d" } +"ui.menu.selected" = { fg = "white", bg = "bgrey" } +"ui.menu.scroll" = { fg = "white", bg = "white" } + +"diagnostic.hint" = { underline = { color = "maize", style = "curl" } } +"diagnostic.info" = { underline = { color = "sglow", style = "curl" } } +"diagnostic.warning" = { underline = { color = "redish", style = "curl" } } +"diagnostic.error" = { underline = { color = "bpink", style = "curl" } } + +warning = "bpink" +error = "bsienna" +info = "maize" +hint = "tan" + +[palette] +parameters = "#d89182" +defineish = "#71c45c" +buff = "#f0dc82" +tan = "#DAB785" +typeish = "#AAAAA5" +greenish = "#458588" +functionish = "#b784a3" +bsienna = "#D5896F" +bpink = "#FF5964" +maize = "#FFE74C" +bgrey = "#8c8681" +sglow = "#FFCF56" +status = "#15616D" +status-two = "#3879A1" +redish = "#E76B74" +light-grey = "#b7afa8" +bgrey-two = "#706b68" +gruvgreen = "#B8BB26" diff --git a/runtime/themes/zed_onedark.toml b/runtime/themes/zed_onedark.toml index 5fda576f3a59..7ac1e73cc586 100644 --- a/runtime/themes/zed_onedark.toml +++ b/runtime/themes/zed_onedark.toml @@ -4,22 +4,20 @@ "comment" = { fg = "light-gray", modifiers = ["italic"] } "constant" = { fg = "yellow" } "constant.numeric" = { fg = "orange" } -"constant.builtin" = { fg = "orange" } +"constant.builtin" = { fg = "yellow" } "constant.builtin.boolean" = { fg = "yellow" } -"constant.character.escape" = { fg = "orange" } +"constant.character.escape" = { fg = "yellow" } "constructor" = { fg = "blue" } "function" = { fg = "blue" } "function.builtin" = { fg = "blue" } -"function.macro" = { fg = "purple" } +"function.method" = { fg = "blue" } +"function.macro" = { fg = "blue" } "keyword" = { fg = "purple" } 
-"keyword.control" = { fg = "purple" } -"keyword.control.import" = { fg = "purple" } -"keyword.directive" = { fg = "purple" } "label" = { fg = "ui-text" } "namespace" = { fg = "ui-text" } "operator" = { fg = "ui-text" } -"keyword.operator" = { fg = "purple" } -"special" = { fg = "blue" } +"puncuation" = { fg = "ui-text" } +"special" = { fg = "ui-text" } "string" = { fg = "green" } "type" = { fg = "cyan" } "variable.builtin" = { fg = "orange" } @@ -28,41 +26,43 @@ "markup.heading" = { fg = "red" } "markup.raw.inline" = { fg = "green" } -"markup.bold" = { fg = "orange", modifiers = ["bold"] } +"markup.bold" = { fg = "yellow", modifiers = ["bold"] } "markup.italic" = { fg = "purple", modifiers = ["italic"] } "markup.strikethrough" = { modifiers = ["crossed_out"] } "markup.list" = { fg = "red" } "markup.quote" = { fg = "yellow" } -"markup.link.url" = { fg = "cyan", modifiers = ["underlined"]} +"markup.link.url" = { fg = "cyan", modifiers = ["underlined"] } "markup.link.text" = { fg = "purple" } "diff.plus" = "green" -"diff.delta" = "orange" +"diff.delta" = "yellow" "diff.minus" = "red" -"diagnostic.info".underline = { color = "blue", style = "curl" } -"diagnostic.hint".underline = { color = "green", style = "curl" } -"diagnostic.warning".underline = { color = "yellow", style = "curl" } -"diagnostic.error".underline = { color = "red", style = "curl" } +"diagnostic.info".underline = { color = "blue", style = "curl" } +"diagnostic.hint".underline = { color = "green", style = "curl" } +"diagnostic.warning".underline = { color = "yellow", style = "curl" } +"diagnostic.error".underline = { color = "red", style = "curl" } "info" = { fg = "blue", modifiers = ["bold"] } "hint" = { fg = "green", modifiers = ["bold"] } "warning" = { fg = "yellow", modifiers = ["bold"] } "error" = { fg = "red", modifiers = ["bold"] } "ui.background" = { bg = "ui-text-reversed" } +"ui.gutter" = { bg = "gray" } "ui.virtual" = { fg = "faint-gray" } "ui.virtual.indent-guide" = { fg = "faint-gray" } "ui.virtual.whitespace" = { fg = "light-gray" } "ui.virtual.ruler" = { bg = "gray" } -"ui.virtual.inlay-hint" = { fg = "light-gray" } +"ui.virtual.inlay-hint" = { fg = "blue-gray", modifiers = ["bold"] } "ui.cursor" = { fg = "white", modifiers = ["reversed"] } "ui.cursor.primary" = { fg = "white", modifiers = ["reversed"] } -"ui.cursor.match" = { fg = "blue", modifiers = ["underlined"]} +"ui.cursor.match" = { fg = "blue", modifiers = ["underlined"] } +"ui.cursor.insert" = { fg = "dark-blue" } "ui.selection" = { bg = "faint-gray" } -"ui.selection.primary" = { bg = "gray" } -"ui.cursorline.primary" = { bg = "light-black" } +"ui.selection.primary" = { bg = "#293b5bff" } +"ui.cursorline.primary" = { bg = "gray" } "ui.highlight" = { bg = "gray" } "ui.highlight.frameline" = { bg = "#97202a" } @@ -70,14 +70,14 @@ "ui.linenr" = { fg = "linenr" } "ui.linenr.selected" = { fg = "ui-text" } -"ui.statusline" = { fg = "white", bg = "light-black" } -"ui.statusline.inactive" = { fg = "light-gray", bg = "light-black" } -"ui.statusline.normal" = { fg = "light-black", bg = "blue" } -"ui.statusline.insert" = { fg = "light-black", bg = "green" } -"ui.statusline.select" = { fg = "light-black", bg = "purple" } +"ui.statusline" = { fg = "white", bg = "gray" } +"ui.statusline.inactive" = { fg = "light-gray", bg = "black" } +"ui.statusline.normal" = { fg = "black", bg = "blue" } +"ui.statusline.insert" = { fg = "black", bg = "green" } +"ui.statusline.select" = { fg = "black", bg = "purple" } "ui.text" = { fg = "ui-text" } -"ui.text.focus" = { fg = "white", 
bg = "light-black", modifiers = ["bold"] } +"ui.text.focus" = { fg = "white", bg = "gray", modifiers = ["bold"] } "ui.help" = { fg = "white", bg = "gray" } "ui.popup" = { bg = "gray" } @@ -89,22 +89,21 @@ "ui.debug" = { fg = "red" } [palette] - -yellow = "#dac18c" -blue = "#7ca8dd" -red = "#bd7476" -purple = "#9d74b9" -green = "#a0b783" -orange = "#b4926e" -cyan = "#7eb2be" -light-black = "#2e323a" -gray = "#363f4c" -light-gray = "#5c606b" +yellow = "#dfc184ff" +orange = "#bf956aff" +blue = "#73ade9ff" +blue-gray = "#5a6f89ff" +red = "#d07277ff" +purple = "#b477cfff" +green = "#a1c181ff" +cyan = "#6eb4bfff" +gray = "#2f343ebf" +light-gray = "#5d636fff" faint-gray = "#3B4048" -linenr = "#4B5263" +linenr = "#5d636fff" -white = "#a8adb7" -black = "#292c33" +white = "#c8ccd4ff" +black = "#282c33ff" # black and white are used for a lot of the UI text -ui-text = "#a8adb7" #white -ui-text-reversed = "#292c33" #black +ui-text = "#c8ccd4ff" #white +ui-text-reversed = "#282c33ff" #black diff --git a/runtime/themes/zed_onelight.toml b/runtime/themes/zed_onelight.toml index 2b54cd5bec38..086fce34be47 100644 --- a/runtime/themes/zed_onelight.toml +++ b/runtime/themes/zed_onelight.toml @@ -16,9 +16,13 @@ inherits = "zed_onedark" "ui.cursor" = { fg = "dark-blue", modifiers = ["reversed"] } "ui.cursor.primary" = { fg = "dark-blue", modifiers = ["reversed"] } +"ui.cursor.insert" = { fg = "dark-blue" } +"ui.selection.primary" = { bg = "blue-gray" } "ui.cursorline.primary" = { bg = "faint-gray" } +"ui.virtual.inlay-hint" = { fg = "violet", modifiers = ["bold"] } + "ui.statusline" = { fg = "black", bg = "gray" } "ui.statusline.inactive" = { fg = "white", bg = "light-black" } "ui.statusline.normal" = { fg = "white", bg = "blue" } @@ -32,24 +36,26 @@ inherits = "zed_onedark" "ui.window" = { fg = "dark-gray" } [palette] - -yellow = "#dac18c" -blue = "#5185b5" -red = "#bd7476" -dark-blue = "#607bdb" -orange = "#ca7667" -purple = "#a160ac" -green = "#739d60" -gold = "#a8763c" -cyan = "#4b80b2" +yellow = "#dabb7e" +red = "#d36151ff" +orange = "#d3604fff" +blue = "#5b79e3ff" +dark-blue = "#4a62db" +purple = "#a449abff" +violet = "#9294beff" +green = "#649f57ff" +gold = "#ad6e25ff" +cyan = "#3882b7ff" light-black = "#2e323a" -gray = "#dcdcdd" +# gray = "#dcdcdd" +gray = "#eaeaed" dark-gray = "#ebebec" -light-gray = "#a6a6aa" +light-gray = "#a2a3a7ff" +blue-gray = "#d9dcea" faint-gray = "#efefef" -linenr = "#4B5263" +linenr = "#b0b1b3" -black = "#404248" -white = "#fafafa" -ui-text = "#404248" -ui-text-reversed = "#fafafa" +black = "#383a41ff" +white = "#fafafaff" +ui-text = "#383a41ff" +ui-text-reversed = "#fafafaff"