diff --git a/.cargo/config.toml b/.cargo/config.toml index 5d6155669a3a..b016eca31aec 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,3 +1,3 @@ [alias] xtask = "run --package xtask --" -integration-test = "test --features integration --workspace --test integration" +integration-test = "test --features integration --profile integration --workspace --test integration" diff --git a/.github/ISSUE_TEMPLATE/bug_report.yaml b/.github/ISSUE_TEMPLATE/bug_report.yaml index c67deb69046f..47fd3fe8a51c 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yaml +++ b/.github/ISSUE_TEMPLATE/bug_report.yaml @@ -32,7 +32,7 @@ body: id: helix-log attributes: label: Helix log - description: See `hx -h` for log file path + description: See `hx -h` for log file path. If you can reproduce the issue run `RUST_BACKTRACE=1 hx -vv` to generate a more detailed log file. value: |
~/.cache/helix/helix.log @@ -61,7 +61,8 @@ body: label: Helix Version description: > Helix version (`hx -V` if using a release, `git describe` if building - from master) - placeholder: "helix 0.6.0 (c0dbd6dc)" + from master). + **Make sure that you are using the [latest helix release](https://github.com/helix-editor/helix/releases) or a newer master build** + placeholder: "helix 22.12 (5eaa6d97)" validations: required: true diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0d6fcb3e8426..d7d7d47e02ca 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,24 +4,17 @@ on: push: branches: - master + merge_group: schedule: - cron: '00 01 * * *' jobs: check: - name: Check + name: Check (msrv) runs-on: ubuntu-latest - strategy: - matrix: - rust: [stable, msrv] steps: - name: Checkout sources uses: actions/checkout@v3 - - - name: Use MSRV rust toolchain - if: matrix.rust == 'msrv' - run: cp .github/workflows/msrv-rust-toolchain.toml rust-toolchain.toml - - name: Install stable toolchain uses: helix-editor/rust-toolchain@v1 with: @@ -44,7 +37,7 @@ jobs: uses: actions/checkout@v3 - name: Install stable toolchain - uses: dtolnay/rust-toolchain@1.61 + uses: dtolnay/rust-toolchain@1.65 - uses: Swatinem/rust-cache@v2 @@ -73,7 +66,7 @@ jobs: uses: actions/checkout@v3 - name: Install stable toolchain - uses: dtolnay/rust-toolchain@1.61 + uses: dtolnay/rust-toolchain@1.65 with: components: rustfmt, clippy @@ -98,7 +91,7 @@ jobs: uses: actions/checkout@v3 - name: Install stable toolchain - uses: dtolnay/rust-toolchain@1.61 + uses: dtolnay/rust-toolchain@1.65 - uses: Swatinem/rust-cache@v2 diff --git a/.github/workflows/cachix.yml b/.github/workflows/cachix.yml index 20035678707a..7d2f734aa20c 100644 --- a/.github/workflows/cachix.yml +++ b/.github/workflows/cachix.yml @@ -14,7 +14,7 @@ jobs: uses: actions/checkout@v3 - name: Install nix - uses: cachix/install-nix-action@v18 + uses: cachix/install-nix-action@v20 - name: Authenticate with Cachix uses: cachix/cachix-action@v12 diff --git a/.github/workflows/msrv-rust-toolchain.toml b/.github/workflows/msrv-rust-toolchain.toml deleted file mode 100644 index b169d31e6f55..000000000000 --- a/.github/workflows/msrv-rust-toolchain.toml +++ /dev/null @@ -1,3 +0,0 @@ -[toolchain] -channel = "1.61.0" -components = ["rustfmt", "rust-src"] diff --git a/Cargo.lock b/Cargo.lock index 066af52830ad..de985bca11eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -51,9 +51,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.68" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" [[package]] name = "arc-swap" @@ -61,15 +61,6 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" -[[package]] -name = "atoi" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" -dependencies = [ - "num-traits", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -84,20 +75,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bstr" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" -dependencies = [ - "lazy_static", - "memchr", - "regex-automata", -] - -[[package]] -name = "bstr" -version = "1.0.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fca0852af221f458706eb0725c03e4ed6c46af9ac98e6a689d5e634215d594dd" +checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1" dependencies = [ "memchr", "once_cell", @@ -107,9 +87,9 @@ dependencies = [ [[package]] name = "btoi" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97c0869a9faa81f8bbf8102371105d6d0a7b79167a04c340b04ab16892246a11" +checksum = "9dd6407f73a9b8b6162d8a2ef999fe6afd7cc15902ebf42c5cd296addf17e0ad" dependencies = [ "num-traits", ] @@ -132,32 +112,17 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" -[[package]] -name = "bytesize" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c58ec36aac5066d5ca17df51b3e70279f5670a72102f5752cb7e7c856adfc70" - [[package]] name = "cassowary" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" -[[package]] -name = "castaway" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a17ed5635fc8536268e5d4de1e22e81ac34419e5f052d4d51f4e01dcc263fcc" -dependencies = [ - "rustversion", -] - [[package]] name = "cc" -version = "1.0.78" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" [[package]] name = "cfg-if" @@ -215,17 +180,6 @@ dependencies = [ "unicode-width", ] -[[package]] -name = "compact_str" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5138945395949e7dfba09646dc9e766b548ff48e23deb5246890e6b64ae9e1b9" -dependencies = [ - "castaway", - "itoa", - "ryu", -] - [[package]] name = "content_inspector" version = "0.2.4" @@ -250,27 +204,18 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "crossbeam-utils" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" -dependencies = [ - "cfg-if", -] - [[package]] name = "crossterm" -version = "0.25.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64e6c0fbe2c17357405f7c758c1ef960fce08bdfb2c03d88d2a18d7e09c4b67" +checksum = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13" dependencies = [ "bitflags", "crossterm_winapi", "futures-core", "libc", "mio", - "parking_lot 0.12.1", + "parking_lot", "signal-hook", "signal-hook-mio", "winapi", @@ -329,19 +274,6 @@ dependencies = [ "syn", ] -[[package]] -name = "dashmap" -version = "5.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" -dependencies = [ - "cfg-if", - "hashbrown 0.12.3", - "lock_api", - "once_cell", - "parking_lot_core 0.9.4", -] - [[package]] name = "dirs" version = "4.0.0" @@ -383,6 +315,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "dunce" +version 
= "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bd4b30a6560bbd9b4620f4de34c3f14f60848e58a9b7216801afcb4c7b31c3c" + [[package]] name = "either" version = "1.8.0" @@ -391,9 +329,9 @@ checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "encoding_rs" -version = "0.8.31" +version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ "cfg-if", ] @@ -407,6 +345,27 @@ dependencies = [ "encoding_rs", ] +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "error-code" version = "2.3.1" @@ -455,7 +414,7 @@ dependencies = [ "cfg-if", "libc", "redox_syscall", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -540,75 +499,116 @@ dependencies = [ ] [[package]] -name = "git-actor" -version = "0.17.0" +name = "gix" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dabfac58aecb4a38cdd2568de66eb1f0d968fd6726f5a80cb8bea7944ef10cc0" +dependencies = [ + "gix-actor", + "gix-attributes", + "gix-config", + "gix-credentials", + "gix-date", + "gix-diff", + "gix-discover", + "gix-features", + "gix-glob", + "gix-hash", + "gix-hashtable", + "gix-index", + "gix-lock", + "gix-mailmap", + "gix-object", + "gix-odb", + "gix-pack", + "gix-path", + "gix-prompt", + "gix-ref", + "gix-refspec", + "gix-revision", + "gix-sec", + "gix-tempfile", + "gix-traverse", + "gix-url", + "gix-validate", + "gix-worktree", + "log", + "once_cell", + "signal-hook", + "smallvec", + "thiserror", + "unicode-normalization", +] + +[[package]] +name = "gix-actor" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9e5fd7bc63ad527d64584f8d01f99b89c051f5fbb8144b58ae5f812775065cf" +checksum = "dc22b0cdc52237667c301dd7cdc6ead8f8f73c9f824e9942c8ebd6b764f6c0bf" dependencies = [ - "bstr 1.0.1", + "bstr", "btoi", - "git-date", + "gix-date", "itoa", "nom", - "quick-error", + "thiserror", ] [[package]] -name = "git-attributes" -version = "0.8.0" +name = "gix-attributes" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8013dfce47c1e29236d732308933e2c77af5355ec5105755d26faf7764d3f7b" +checksum = "2231a25934a240d0a4b6f4478401c73ee81d8be52de0293eedbc172334abf3e1" dependencies = [ - "bstr 1.0.1", - "compact_str", - "git-features", - "git-glob", - "git-path", - "git-quote", + "bstr", + "gix-features", + "gix-glob", + "gix-path", + "gix-quote", "thiserror", "unicode-bom", ] [[package]] -name = "git-bitmap" -version = "0.2.0" +name = "gix-bitmap" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44304093ac66a0ada1b243c15c3a503a165a1d0f50bec748f4e5a9b84a0d0722" +checksum = "024bca0c7187517bda5ea24ab148c9ca8208dd0c3e2bea88cdb2008f91791a6d" dependencies = [ - "quick-error", + "thiserror", ] [[package]] -name = "git-chunk" -version = 
"0.4.0" +name = "gix-chunk" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3090baa2f4a3fe488a9b3e31090b83259aaf930bf0634af34c18117274f8f1a8" +checksum = "b0d39583cab06464b8bf73b3f1707458270f0e7383cb24c3c9c1a16e6f792978" dependencies = [ "thiserror", ] [[package]] -name = "git-command" -version = "0.2.1" +name = "gix-command" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "215145cc1686a45bc6f9872b153a0d3f3c40a1b94173a928325e1b53dfa5e2af" +checksum = "b2c6f75c1e0f924de39e750880a6e21307194bb1ab773efe3c7d2d787277f8ab" dependencies = [ - "bstr 1.0.1", + "bstr", ] [[package]] -name = "git-config" -version = "0.15.0" +name = "gix-config" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9da662fd64ac69772158dcf04777da6266f0f36bc9a310b3eb2d805bb696315" +checksum = "52c62e26ce11f607712e4f49a0a192ed87675d30187fd61be070abbd607d12f1" dependencies = [ - "bstr 1.0.1", - "git-config-value", - "git-features", - "git-glob", - "git-path", - "git-ref", - "git-sec", + "bstr", + "gix-config-value", + "gix-features", + "gix-glob", + "gix-path", + "gix-ref", + "gix-sec", "memchr", "nom", "once_cell", @@ -618,135 +618,137 @@ dependencies = [ ] [[package]] -name = "git-config-value" -version = "0.10.0" +name = "gix-config-value" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "989a90c1c630513a153c685b4249b96fdf938afc75bf7ef2ae1ccbd3d799f5db" +checksum = "693d4a4ba0531e46fe558459557a5b29fb86c3e4b2666c1c0861d93c7c678331" dependencies = [ "bitflags", - "bstr 1.0.1", - "git-path", + "bstr", + "gix-path", "libc", "thiserror", ] [[package]] -name = "git-credentials" -version = "0.9.0" +name = "gix-credentials" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97cd6bbe001afd6356b35ef13f2a6b0f0abc0133d1b2ecaec1033bdd769616d6" +checksum = "5be32b5fe339a31b8e53fa854081dc914c45020dcb64637f3c21baf69c96fc1b" dependencies = [ - "bstr 1.0.1", - "git-command", - "git-config-value", - "git-path", - "git-prompt", - "git-sec", - "git-url", + "bstr", + "gix-command", + "gix-config-value", + "gix-path", + "gix-prompt", + "gix-sec", + "gix-url", "thiserror", ] [[package]] -name = "git-date" -version = "0.4.0" +name = "gix-date" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "412c9b89026505bd24d5f8acafa578de6eea3b271ece307a73b8e646e671302a" +checksum = "b96271912ce39822501616f177dea7218784e6c63be90d5f36322ff3a722aae2" dependencies = [ - "bstr 1.0.1", + "bstr", "itoa", "thiserror", "time", ] [[package]] -name = "git-diff" -version = "0.26.0" +name = "gix-diff" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca87474422d26d606d04cec6bedfabcd92a0a74102cd7936785358ced6a4a25a" +checksum = "585b0834d4b6791a848637c4e109545fda9b0f29b591ba55edb33ceda6e7856b" dependencies = [ - "git-hash", - "git-object", + "gix-hash", + "gix-object", "imara-diff", "thiserror", ] [[package]] -name = "git-discover" -version = "0.12.0" +name = "gix-discover" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9e26e0bc434643228cd418185bd28ca5c7cf831bde1da434807391c27ac40e" +checksum = "91c204adba5ebd211c74735cbb65817d277e154486bac0dffa3701f163b80350" dependencies = [ - "bstr 1.0.1", - "git-hash", - "git-path", - "git-ref", - "git-sec", + "bstr", + "dunce", + "gix-hash", + "gix-path", 
+ "gix-ref", + "gix-sec", "thiserror", ] [[package]] -name = "git-features" -version = "0.26.0" +name = "gix-features" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ff74064fa007c5beefa89a64bb72834f32b3c497750a56c79c6802bbdb311f9" +checksum = "5e6a9dfa7b3c1a99315203e8b97f8f99f3bd95731590607abeaa5ca31bc41fe3" dependencies = [ "crc32fast", "flate2", - "git-hash", + "gix-hash", "libc", "once_cell", "prodash", - "quick-error", "sha1_smol", + "thiserror", "walkdir", ] [[package]] -name = "git-glob" -version = "0.5.1" +name = "gix-glob" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3908404c9b76ac7b3f636a104142378d3eaa78623cbc6eb7c7f0651979d48e8a" +checksum = "93e43efd776bc543f46f0fd0ca3d920c37af71a764a16f2aebd89765e9ff2993" dependencies = [ "bitflags", - "bstr 1.0.1", + "bstr", ] [[package]] -name = "git-hash" -version = "0.10.1" +name = "gix-hash" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1532d82bf830532f8d545c5b7b568e311e3593f16cf7ee9dd0ce03c74b12b99d" +checksum = "0c0c5a9f4d621d4f4ea046bb331df5c746ca735b8cae5b234cc2be70ee4dbef0" dependencies = [ "hex", "thiserror", ] [[package]] -name = "git-hashtable" -version = "0.1.0" +name = "gix-hashtable" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c52b625ad8cc360a0b7f426266f21fb07bd49b8f4ccf1b3ca7bc89424db1dec4" +checksum = "9609c1b8f36f12968e6a6098f7cdb52004f7d42d570f47a2d6d7c16612f19acb" dependencies = [ - "git-hash", + "gix-hash", "hashbrown 0.13.2", + "parking_lot", ] [[package]] -name = "git-index" -version = "0.12.1" +name = "gix-index" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "485da97dd4f69c7d9a8dc238cd6f4a726387ffc34573489e8e0d2bee266e3454" +checksum = "c12caf7886c7ba06f2b28835cdc2be1dca86bd047d00299d2d49e707ce1c2616" dependencies = [ - "atoi", "bitflags", - "bstr 1.0.1", + "bstr", + "btoi", "filetime", - "git-bitmap", - "git-features", - "git-hash", - "git-lock", - "git-object", - "git-traverse", + "gix-bitmap", + "gix-features", + "gix-hash", + "gix-lock", + "gix-object", + "gix-traverse", "itoa", "memmap2", "smallvec", @@ -754,39 +756,39 @@ dependencies = [ ] [[package]] -name = "git-lock" -version = "3.0.0" +name = "gix-lock" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e4f05b8a68c3a5dd83a6651c76be384e910fe283072184fdab9d77f87ccec2" +checksum = "66119ff8a4a395d0ea033fef718bc85f8b4f0855874f4ce1e005fc16cfe1f66e" dependencies = [ "fastrand", - "git-tempfile", - "quick-error", + "gix-tempfile", + "thiserror", ] [[package]] -name = "git-mailmap" -version = "0.9.0" +name = "gix-mailmap" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0316b4346f3e162ade368209efb8a609b587793c74aa3b8de0ec01a4f3580120" +checksum = "2b66aea5e52875cd4915f4957a6f4b75831a36981e2ec3f5fad9e370e444fe1a" dependencies = [ - "bstr 1.0.1", - "git-actor", - "quick-error", + "bstr", + "gix-actor", + "thiserror", ] [[package]] -name = "git-object" -version = "0.26.0" +name = "gix-object" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f8563e2d6f524d7053f3106714f99ecdc3adbba2cb7108c09d71a02579f2e19" +checksum = "8df068db9180ee935fbb70504848369e270bdcb576b05c0faa8b9fd3b86fc017" dependencies = [ - "bstr 1.0.1", + "bstr", "btoi", - "git-actor", - "git-features", - 
"git-hash", - "git-validate", + "gix-actor", + "gix-features", + "gix-hash", + "gix-validate", "hex", "itoa", "nom", @@ -795,260 +797,215 @@ dependencies = [ ] [[package]] -name = "git-odb" -version = "0.40.0" +name = "gix-odb" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616115a0e3daff6e08842758d24547b37a6eb6d0e2eedd95a740c3aaa2750333" +checksum = "e9a5f9e1afbd509761977a2ea02869cedaaba500b4e783deb2e4de5179a55a80" dependencies = [ "arc-swap", - "git-features", - "git-hash", - "git-object", - "git-pack", - "git-path", - "git-quote", - "parking_lot 0.12.1", + "gix-features", + "gix-hash", + "gix-object", + "gix-pack", + "gix-path", + "gix-quote", + "parking_lot", "tempfile", "thiserror", ] [[package]] -name = "git-pack" -version = "0.30.0" +name = "gix-pack" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd16b88f4b66041f41ca510c28bd81c4ee7363c5a544b3d62b4170432965871" +checksum = "e51db84e1459a8022e518d40a8778028d793dbb28e4d35c9a5eaf92658fb0775" dependencies = [ - "bytesize", "clru", - "dashmap", - "git-chunk", - "git-diff", - "git-features", - "git-hash", - "git-hashtable", - "git-object", - "git-path", - "git-tempfile", - "git-traverse", + "gix-chunk", + "gix-diff", + "gix-features", + "gix-hash", + "gix-hashtable", + "gix-object", + "gix-path", + "gix-tempfile", + "gix-traverse", "memmap2", - "parking_lot 0.12.1", + "parking_lot", "smallvec", "thiserror", ] [[package]] -name = "git-path" -version = "0.7.0" +name = "gix-path" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e40e68481a06da243d3f4dfd86a4be39c24eefb535017a862e845140dcdb878a" +checksum = "f6c104a66dec149cb8f7aaafc6ab797654cf82d67f050fd0cb7e7294e328354b" dependencies = [ - "bstr 1.0.1", + "bstr", "thiserror", ] [[package]] -name = "git-prompt" -version = "0.3.0" +name = "gix-prompt" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3612a486e507dd431ef0f7108eeaafc8fd1ed7bd0f205a88554f6f91fe5dccbf" +checksum = "a20cebf73229debaa82574c4fd20dcaf00fa8d4bfce823a862c4e990d7a0b5b4" dependencies = [ - "git-command", - "git-config-value", + "gix-command", + "gix-config-value", "nix", - "parking_lot 0.12.1", + "parking_lot", "thiserror", ] [[package]] -name = "git-quote" -version = "0.4.0" +name = "gix-quote" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd11f4e7f251ab297545faa4c5a4517f4985a43b9c16bf96fa49107f58e837f" +checksum = "a282f5a8d9ee0b09ec47390ac727350c48f2f5c76d803cd8da6b3e7ad56e0bcb" dependencies = [ - "bstr 1.0.1", + "bstr", "btoi", - "quick-error", + "thiserror", ] [[package]] -name = "git-ref" -version = "0.23.0" +name = "gix-ref" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6767925a6fc4af5c5a81e348d1d851c1b3ab2b512bd7f562ac11be37c14468" +checksum = "90a0ed29e581f04b904ecd0c32b11f33b8209b5a0af9c43f415249a4f2fba632" dependencies = [ - "git-actor", - "git-features", - "git-hash", - "git-lock", - "git-object", - "git-path", - "git-tempfile", - "git-validate", + "gix-actor", + "gix-features", + "gix-hash", + "gix-lock", + "gix-object", + "gix-path", + "gix-tempfile", + "gix-validate", "memmap2", "nom", "thiserror", ] [[package]] -name = "git-refspec" -version = "0.7.0" +name = "gix-refspec" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ddf310ed5f2829ac0af96e7d4aebd4ae4b89f0718a7ae3666d09b02b2c5a1dfd" +checksum = "aba332462bda2e8efeae4302b39a6ed01ad56ef772fd5b7ef197cf2798294d65" dependencies = [ - "bstr 1.0.1", - "git-hash", - "git-revision", - "git-validate", + "bstr", + "gix-hash", + "gix-revision", + "gix-validate", "smallvec", "thiserror", ] [[package]] -name = "git-repository" -version = "0.32.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "993277960cb7e2d3991a11c1ec6951c1d142de052c26a18d2db64304e52d3741" -dependencies = [ - "git-actor", - "git-attributes", - "git-config", - "git-credentials", - "git-date", - "git-diff", - "git-discover", - "git-features", - "git-glob", - "git-hash", - "git-hashtable", - "git-index", - "git-lock", - "git-mailmap", - "git-object", - "git-odb", - "git-pack", - "git-path", - "git-prompt", - "git-ref", - "git-refspec", - "git-revision", - "git-sec", - "git-tempfile", - "git-traverse", - "git-url", - "git-validate", - "git-worktree", - "log", - "once_cell", - "prodash", - "signal-hook", - "smallvec", - "thiserror", - "unicode-normalization", -] - -[[package]] -name = "git-revision" -version = "0.10.0" +name = "gix-revision" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f9a6bd28c9d1676bb96f428cd09614ae18a0087d7cea1cebfd177e25f99b2af" +checksum = "ed98e4a0254953c64bc913bd23146a1de662067d5cf974cbdde396958b39e5b0" dependencies = [ - "bstr 1.0.1", - "git-date", - "git-hash", - "git-hashtable", - "git-object", + "bstr", + "gix-date", + "gix-hash", + "gix-hashtable", + "gix-object", "thiserror", ] [[package]] -name = "git-sec" -version = "0.6.0" +name = "gix-sec" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1802e8252fa223b0ad89a393aed461132174ced1e6842a41f56dc92a3fc14f" +checksum = "e8ffa5bf0772f9b01de501c035b6b084cf9b8bb07dec41e3afc6a17336a65f47" dependencies = [ "bitflags", "dirs", - "git-path", + "gix-path", "libc", "windows", ] [[package]] -name = "git-tempfile" -version = "3.0.0" +name = "gix-tempfile" +version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6bb4dee86c8cae5a078cfaac3b004ef99c31548ed86218f23a7ff9b4b74f3be" +checksum = "a8e0227bd284cd16105e8479602bb8af6bddcb800427e881c1feee4806310a31" dependencies = [ - "dashmap", "libc", "once_cell", + "parking_lot", "signal-hook", "signal-hook-registry", "tempfile", ] [[package]] -name = "git-traverse" -version = "0.22.0" +name = "gix-traverse" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd356da21ec00f69b9d4f105df4cb85543c746b18f4b7fc81529ce77713cdb29" +checksum = "dd9a4a07bb22168dc79c60e1a6a41919d198187ca83d8a5940ad8d7122a45df3" dependencies = [ - "git-hash", - "git-hashtable", - "git-object", + "gix-hash", + "gix-hashtable", + "gix-object", "thiserror", ] [[package]] -name = "git-url" -version = "0.13.0" +name = "gix-url" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85af407ed0dbb8d8da2a7241827d2fd5681186d9dab3570fc8dd8d6152ec48f" +checksum = "044072b7ce8601b62dcec841b92129f5cc677072823324121b395d766ac5f528" dependencies = [ - "bstr 1.0.1", - "git-features", - "git-path", + "bstr", + "gix-features", + "gix-path", "home", "thiserror", "url", ] [[package]] -name = "git-validate" -version = "0.7.1" +name = "gix-validate" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0431cf9352c596dc7c8ec9066ee551ce54e63c86c3c767e5baf763f6019ff3c2" +checksum = "b69ddb780ea1465255e66818d75b7098371c58dbc9560da4488a44b9f5c7e443" dependencies = [ - "bstr 1.0.1", + "bstr", "thiserror", ] [[package]] -name = "git-worktree" -version = "0.12.0" +name = "gix-worktree" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3bc63878f134e08ed52dba5d82422798c01a3f2e48c38ae9a2f7ff9194f362" +checksum = "b7cb9af6e56152953d8fe113c4f9d7cf60cf7a982362711e9200a255579b49cb" dependencies = [ - "bstr 1.0.1", - "git-attributes", - "git-features", - "git-glob", - "git-hash", - "git-index", - "git-object", - "git-path", + "bstr", + "gix-attributes", + "gix-features", + "gix-glob", + "gix-hash", + "gix-index", + "gix-object", + "gix-path", "io-close", "thiserror", ] [[package]] name = "globset" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a1e17342619edbc21a964c2afbeb6c820c6a2560032872f397bb97ea127bd0a" +checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" dependencies = [ "aho-corasick", - "bstr 0.2.17", + "bstr", "fnv", "log", "regex", @@ -1056,21 +1013,21 @@ dependencies = [ [[package]] name = "grep-matcher" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d27563c33062cd33003b166ade2bb4fd82db1fd6a86db764dfdad132d46c1cc" +checksum = "3902ca28f26945fe35cad349d776f163981d777fee382ccd6ef451126f51b319" dependencies = [ "memchr", ] [[package]] name = "grep-regex" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1345f8d33c89f2d5b081f2f2a41175adef9fd0bed2fea6a26c96c2deb027e58e" +checksum = "997598b41d53a37a2e3fc5300d5c11d825368c054420a9c65125b8fe1078463f" dependencies = [ "aho-corasick", - "bstr 0.2.17", + "bstr", "grep-matcher", "log", "regex", @@ -1080,11 +1037,11 @@ dependencies = [ [[package]] name = "grep-searcher" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48852bd08f9b4eb3040ecb6d2f4ade224afe880a9a0909c5563cc59fa67932cc" +checksum = "5601c4b9f480f0c9ebb40b1f6cbf447b8a50c5369223937a6c5214368c58779f" dependencies = [ - "bstr 0.2.17", + "bstr", "bytecount", "encoding_rs", "encoding_rs_io", @@ -1182,6 +1139,7 @@ dependencies = [ "futures-util", "helix-core", "helix-loader", + "helix-parsec", "log", "lsp-types", "serde", @@ -1192,6 +1150,10 @@ dependencies = [ "which", ] +[[package]] +name = "helix-parsec" +version = "0.6.0" + [[package]] name = "helix-term" version = "0.6.0" @@ -1239,6 +1201,8 @@ dependencies = [ "crossterm", "helix-core", "helix-view", + "log", + "once_cell", "serde", "termini", "unicode-segmentation", @@ -1248,11 +1212,12 @@ dependencies = [ name = "helix-vcs" version = "0.6.0" dependencies = [ - "git-repository", + "arc-swap", + "gix", "helix-core", "imara-diff", "log", - "parking_lot 0.12.1", + "parking_lot", "tempfile", "tokio", ] @@ -1277,6 +1242,7 @@ dependencies = [ "libc", "log", "once_cell", + "parking_lot", "serde", "serde_json", "slotmap", @@ -1311,12 +1277,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "human_format" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86cce260d758a9aa3d7c4b99d55c815a540f8a37514ba6046ab6be402a157cb0" - [[package]] name = "iana-time-zone" version = "0.1.53" @@ -1353,11 +1313,10 @@ dependencies = [ [[package]] name = "ignore" -version = 
"0.4.18" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d" +checksum = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492" dependencies = [ - "crossbeam-utils", "globset", "lazy_static", "log", @@ -1414,6 +1373,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "io-lifetimes" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7d6c6f8c91b4b9ed43484ad1a938e393caf35960fce7f82a040497207bd8e9e" +dependencies = [ + "libc", + "windows-sys 0.42.0", +] + [[package]] name = "itoa" version = "1.0.4" @@ -1460,6 +1429,12 @@ dependencies = [ "cc", ] +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + [[package]] name = "lock_api" version = "0.4.9" @@ -1481,9 +1456,9 @@ dependencies = [ [[package]] name = "lsp-types" -version = "0.93.2" +version = "0.94.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51" +checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" dependencies = [ "bitflags", "serde", @@ -1531,7 +1506,7 @@ dependencies = [ "libc", "log", "wasi", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1605,20 +1580,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" - -[[package]] -name = "parking_lot" -version = "0.11.2" +version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "parking_lot" @@ -1627,21 +1591,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.4", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -1654,7 +1604,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1686,15 +1636,9 @@ dependencies = [ [[package]] name = "prodash" -version = "23.0.0" +version = "23.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8c414345b4a98cbcd0e8d8829c8f54b47a7ed4fb771c45b7c5c6c0ae23dc4c" -dependencies = [ - "bytesize", - "dashmap", - "human_format", - "parking_lot 0.11.2", -] +checksum = "d73c6b64cb5b99eb63ca97d378685712617ec0172ff5c04cd47a489d3e2c51f8" [[package]] name = "pulldown-cmark" @@ -1707,12 +1651,6 @@ dependencies = [ "unicase", ] -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - [[package]] name = "quickcheck" 
version = "1.0.3" @@ -1792,30 +1730,29 @@ version = "0.6.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - [[package]] name = "ropey" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4f832915525613e83f275694cb8c184f5df13ca26a9ef0ea6ce736921964c8e" +checksum = "53ce7a2c43a32e50d666e33c5a80251b31147bb4b49024bcab11fb6f20c671ed" dependencies = [ "smallvec", "str_indices", ] [[package]] -name = "rustversion" -version = "1.0.9" +name = "rustix" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" +checksum = "d4fdebc4b395b7fbb9ab11e462e20ed9051e7b16e42d24042c776eca0ac81b03" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.42.0", +] [[package]] name = "ryu" @@ -1866,9 +1803,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.91" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" dependencies = [ "itoa", "ryu", @@ -1903,9 +1840,9 @@ checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" [[package]] name = "signal-hook" -version = "0.3.14" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" +checksum = "732768f1176d21d09e076c23a93123d40bba92d50c4058da34d45c8de8e682b9" dependencies = [ "libc", "signal-hook-registry", @@ -2025,16 +1962,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" dependencies = [ "cfg-if", "fastrand", - "libc", "redox_syscall", - "remove_dir_all", - "winapi", + "rustix", + "windows-sys 0.42.0", ] [[package]] @@ -2068,18 +2004,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.38" +version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e" dependencies = [ "proc-macro2", "quote", @@ -2150,9 +2086,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.24.2" +version = "1.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a12a59981d9e3c38d216785b0c37399f6e415e8d0712047620f189371b0bb" +checksum 
= "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64" dependencies = [ "autocfg", "bytes", @@ -2160,12 +2096,12 @@ dependencies = [ "memchr", "mio", "num_cpus", - "parking_lot 0.12.1", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -2181,9 +2117,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +checksum = "8fb52b74f05dbf495a8fba459fdc331812b96aa086d9eb78101fa0d4569c3313" dependencies = [ "futures-core", "pin-project-lite", @@ -2192,9 +2128,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "772c1426ab886e7362aedf4abc9c0d1348a979517efedfc25862944d10137af0" +checksum = "f7afcae9e3f0fe2c370fd4657108972cbb2fa9db1b9f84849cefd80741b01cb6" dependencies = [ "serde", "serde_spanned", @@ -2213,9 +2149,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.19.1" +version = "0.19.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90a238ee2e6ede22fb95350acc78e21dc40da00bb66c0334bde83de4ed89424e" +checksum = "5e6a7712b49e1775fb9a7b998de6635b299237f48b404dde71704f2e0e7f37e5" dependencies = [ "indexmap", "nom8", @@ -2227,8 +2163,7 @@ dependencies = [ [[package]] name = "tree-sitter" version = "0.20.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4423c784fe11398ca91e505cdc71356b07b1a924fc8735cfab5333afe3e18bc" +source = "git+https://github.com/tree-sitter/tree-sitter?rev=c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14#c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" dependencies = [ "cc", "regex", @@ -2288,9 +2223,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" @@ -2431,17 +2366,17 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.40.0" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e30acc718a52fb130fec72b1cb5f55ffeeec9253e1b785e94db222178a6acaa1" +checksum = "04662ed0e3e5630dfa9b26e4cb823b817f1a9addda855d973a9458c236556244" dependencies = [ - "windows_aarch64_gnullvm 0.40.0", - "windows_aarch64_msvc 0.40.0", - "windows_i686_gnu 0.40.0", - "windows_i686_msvc 0.40.0", - "windows_x86_64_gnu 0.40.0", - "windows_x86_64_gnullvm 0.40.0", - "windows_x86_64_msvc 0.40.0", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] [[package]] @@ -2450,98 +2385,80 @@ version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" dependencies = [ - "windows_aarch64_gnullvm 0.42.0", - "windows_aarch64_msvc 0.42.0", - "windows_i686_gnu 0.42.0", - "windows_i686_msvc 0.42.0", - "windows_x86_64_gnu 0.42.0", - "windows_x86_64_gnullvm 0.42.0", - "windows_x86_64_msvc 0.42.0", + "windows_aarch64_gnullvm", + 
"windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.40.0" +name = "windows-sys" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3caa4a1a16561b714323ca6b0817403738583033a6a92e04c5d10d4ba37ca10" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.0" +name = "windows-targets" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] [[package]] -name = "windows_aarch64_msvc" -version = "0.40.0" +name = "windows_aarch64_gnullvm" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "328973c62dfcc50fb1aaa8e7100676e0b642fe56bac6bafff3327902db843ab4" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" [[package]] name = "windows_aarch64_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" [[package]] name = "windows_i686_gnu" -version = "0.40.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa5b09fad70f0df85dea2ac2a525537e415e2bf63ee31cf9b8e263645ee9f3c1" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" [[package]] name = "windows_i686_msvc" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a1ad4031c1a98491fa195d8d43d7489cb749f135f2e5c4eed58da094bd0d876" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" [[package]] name = "windows_x86_64_gnu" -version = "0.40.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520ff37edd72da8064b49d2281182898e17f0688ae9f4070bca27e4b5c162ac7" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" [[package]] name = "windows_x86_64_gnullvm" -version = "0.40.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046e5b82215102c44fd75f488f1b9158973d02aa34d06ed85c23d6f5520a2853" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" [[package]] name = "windows_x86_64_msvc" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0c9c6df55dd1bfa76e131cef44bdd8ec9c819ef3611f04dfe453fd5bfeda28" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" +checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" [[package]] name = "xtask" diff --git a/Cargo.toml b/Cargo.toml index ecf6848e04a1..aaa21659ada3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,6 +8,7 @@ members = [ "helix-dap", "helix-loader", "helix-vcs", + "helix-parsec", "xtask", ] @@ -25,3 +26,12 @@ lto = "fat" codegen-units = 1 # strip = "debuginfo" # TODO: or strip = true opt-level = 3 + +[profile.integration] +inherits = "test" +package.helix-core.opt-level = 2 +package.helix-tui.opt-level = 2 +package.helix-term.opt-level = 2 + +[patch.crates-io] +tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" } diff --git a/README.md b/README.md index 60125e11800b..cba52a7a1e86 100644 --- a/README.md +++ b/README.md @@ -45,91 +45,10 @@ Note: Only certain languages have indentation definitions at the moment. Check # Installation -Packages are available for various distributions (see [Installation docs](https://docs.helix-editor.com/install.html)). - -If you would like to build from source: - -```shell -git clone https://github.com/helix-editor/helix -cd helix -cargo install --locked --path helix-term -``` - -This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars in `./runtime/grammars`. - -Helix needs its runtime files so make sure to copy/symlink the `runtime/` directory into the -config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows). - -| OS | Command | -| -------------------- | ------------------------------------------------ | -| Windows (Cmd) | `xcopy /e /i runtime %AppData%\helix\runtime` | -| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` | -| Linux / macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` | - -Starting with Windows Vista you can also create symbolic links on Windows. Note that this requires -elevated privileges - i.e. PowerShell or Cmd must be run as administrator. - -**PowerShell:** - -```powershell -New-Item -ItemType SymbolicLink -Target "runtime" -Path "$Env:AppData\helix\runtime" -``` - -**Cmd:** - -```cmd -cd %appdata%\helix -mklink /D runtime "\runtime" -``` - -The runtime location can be overridden via the `HELIX_RUNTIME` environment variable. - -> NOTE: if `HELIX_RUNTIME` is set prior to calling `cargo install --locked --path helix-term`, -> tree-sitter grammars will be built in `$HELIX_RUNTIME/grammars`. - -If you plan on keeping the repo locally, an alternative to copying/symlinking -runtime files is to set `HELIX_RUNTIME=/path/to/helix/runtime` -(`HELIX_RUNTIME=$PWD/runtime` if you're in the helix repo directory). - -Packages already solve this for you by wrapping the `hx` binary with a wrapper -that sets the variable to the install dir. 
- -> NOTE: running via cargo also doesn't require setting explicit `HELIX_RUNTIME` path, it will automatically -> detect the `runtime` directory in the project root. - -If you want to customize your `languages.toml` config, -tree-sitter grammars may be manually fetched and built with `hx --grammar fetch` and `hx --grammar build`. - -In order to use LSP features like auto-complete, you will need to -[install the appropriate Language Server](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) -for a language. +[Installation documentation](https://docs.helix-editor.com/install.html). [![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions) -## Adding Helix to your desktop environment - -If installing from source, to use Helix in desktop environments that supports [XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html), including Gnome and KDE, copy the provided `.desktop` file to the correct folder: - -```bash -cp contrib/Helix.desktop ~/.local/share/applications -cp contrib/helix.png ~/.local/share/icons -``` - -To use another terminal than the default, you will need to modify the `.desktop` file. For example, to use `kitty`: - -```bash -sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop -sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop -``` - -## macOS - -Helix can be installed on macOS through homebrew: - -``` -brew install helix -``` - # Contributing Contributing guidelines can be found [here](./docs/CONTRIBUTING.md). diff --git a/book/src/SUMMARY.md b/book/src/SUMMARY.md index eaf0c4f483f1..6e780b87fc19 100644 --- a/book/src/SUMMARY.md +++ b/book/src/SUMMARY.md @@ -6,13 +6,13 @@ - [Usage](./usage.md) - [Keymap](./keymap.md) - [Commands](./commands.md) - - [Language Support](./lang-support.md) + - [Language support](./lang-support.md) - [Migrating from Vim](./from-vim.md) - [Configuration](./configuration.md) - [Themes](./themes.md) - - [Key Remapping](./remapping.md) + - [Key remapping](./remapping.md) - [Languages](./languages.md) - [Guides](./guides/README.md) - - [Adding Languages](./guides/adding_languages.md) - - [Adding Textobject Queries](./guides/textobject.md) - - [Adding Indent Queries](./guides/indent.md) + - [Adding languages](./guides/adding_languages.md) + - [Adding textobject queries](./guides/textobject.md) + - [Adding indent queries](./guides/indent.md) diff --git a/book/src/commands.md b/book/src/commands.md index d9a113866b2f..047a30a91c83 100644 --- a/book/src/commands.md +++ b/book/src/commands.md @@ -1,5 +1,5 @@ # Commands -Command mode can be activated by pressing `:`, similar to Vim. Built-in commands: +Command mode can be activated by pressing `:`. The built-in commands are: {{#include ./generated/typable-cmd.md}} diff --git a/book/src/configuration.md b/book/src/configuration.md index 528fafd048fa..ec692cab1225 100644 --- a/book/src/configuration.md +++ b/book/src/configuration.md @@ -2,10 +2,10 @@ To override global configuration parameters, create a `config.toml` file located in your config directory: -* Linux and Mac: `~/.config/helix/config.toml` -* Windows: `%AppData%\helix\config.toml` +- Linux and Mac: `~/.config/helix/config.toml` +- Windows: `%AppData%\helix\config.toml` -> Hint: You can easily open the config file by typing `:config-open` within Helix normal mode. 
+> 💡 You can easily open the config file by typing `:config-open` within Helix normal mode. Example config: @@ -25,12 +25,10 @@ select = "underline" hidden = false ``` -You may also specify a file to use for configuration with the `-c` or -`--config` CLI argument: `hx -c path/to/custom-config.toml`. - -It is also possible to trigger configuration file reloading by sending the `USR1` -signal to the helix process, e.g. via `pkill -USR1 hx`. This is only supported -on unix operating systems. +You can use a custom configuration file by specifying it with the `-c` or +`--config` command line argument, for example `hx -c path/to/custom-config.toml`. +Additionally, you can reload the configuration file by sending the USR1 +signal to the Helix process on Unix operating systems, such as by using the command `pkill -USR1 hx`. ## Editor @@ -38,25 +36,27 @@ on unix operating systems. | Key | Description | Default | |--|--|---------| -| `scrolloff` | Number of lines of padding around the edge of the screen when scrolling. | `5` | -| `mouse` | Enable mouse mode. | `true` | -| `middle-click-paste` | Middle click paste support. | `true` | -| `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` | -| `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`
Windows: `["cmd", "/C"]` | -| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` | -| `cursorline` | Highlight all lines with a cursor. | `false` | -| `cursorcolumn` | Highlight all columns with a cursor. | `false` | +| `scrolloff` | Number of lines of padding around the edge of the screen when scrolling | `5` | +| `mouse` | Enable mouse mode | `true` | +| `middle-click-paste` | Middle click paste support | `true` | +| `scroll-lines` | Number of lines to scroll per scroll wheel step | `3` | +| `shell` | Shell to use when running external commands | Unix: `["sh", "-c"]`
Windows: `["cmd", "/C"]` | +| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers | `absolute` | +| `cursorline` | Highlight all lines with a cursor | `false` | +| `cursorcolumn` | Highlight all columns with a cursor | `false` | | `gutters` | Gutters to display: Available are `diagnostics` and `diff` and `line-numbers` and `spacer`, note that `diagnostics` also includes other features like breakpoints, 1-width padding will be inserted if gutters is non-empty | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` | -| `auto-completion` | Enable automatic pop up of auto-completion. | `true` | -| `auto-format` | Enable automatic formatting on save. | `true` | -| `auto-save` | Enable automatic saving on focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal. | `false` | -| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` | +| `auto-completion` | Enable automatic pop up of auto-completion | `true` | +| `auto-format` | Enable automatic formatting on save | `true` | +| `auto-save` | Enable automatic saving on the focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal | `false` | +| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant | `400` | | `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` | -| `auto-info` | Whether to display infoboxes | `true` | -| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` | -| `rulers` | List of column positions at which to display the rulers. Can be overridden by language specific `rulers` in `languages.toml` file. | `[]` | +| `completion-replace` | Set to `true` to make completions always replace the entire word and not just the part before the cursor | `false` | +| `auto-info` | Whether to display info boxes | `true` | +| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative | `false` | +| `rulers` | List of column positions at which to display the rulers. Can be overridden by language specific `rulers` in `languages.toml` file | `[]` | | `bufferline` | Renders a line at the top of the editor displaying open buffers. Can be `always`, `never` or `multiple` (only shown if more than one buffer is in use) | `never` | | `color-modes` | Whether to color the mode indicator with different colors depending on the mode itself | `false` | +| `text-width` | Maximum line length. 
Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap_at_text_width` is set | `80` | ### `[editor.statusline]` Section @@ -98,6 +98,7 @@ The following statusline elements can be configured: | `spinner` | A progress spinner indicating LSP activity | | `file-name` | The path/name of the opened file | | `file-base-name` | The basename of the opened file | +| `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) | | `file-encoding` | The encoding of the opened file if it differs from UTF-8 | | `file-line-ending` | The file line endings (CRLF or LF) | | `total-line-numbers` | The total line numbers of the opened file | @@ -110,23 +111,30 @@ The following statusline elements can be configured: | `position-percentage` | The cursor position as a percentage of the total number of lines | | `separator` | The string defined in `editor.statusline.separator` (defaults to `"│"`) | | `spacer` | Inserts a space between elements (multiple/contiguous spacers may be specified) | +| `version-control` | The current branch name or detached commit hash of the opened workspace | ### `[editor.lsp]` Section | Key | Description | Default | | --- | ----------- | ------- | +| `enable` | Enables LSP integration. Setting to false will completely disable language servers regardless of language settings.| `true` | | `display-messages` | Display LSP progress messages below statusline[^1] | `false` | | `auto-signature-help` | Enable automatic popup of signature help (parameter hints) | `true` | +| `display-inlay-hints` | Display inlay hints[^2] | `false` | | `display-signature-help-docs` | Display docs under signature help popup | `true` | [^1]: By default, a progress spinner is shown in the statusline beside the file path. +[^2]: You may also have to activate them in the LSP config for them to appear, not just in Helix. + Inlay hints in Helix are still being improved on and may be a little bit laggy/janky under some circumstances, please report any bugs you see so we can fix them! ### `[editor.cursor-shape]` Section -Defines the shape of cursor in each mode. Note that due to limitations -of the terminal environment, only the primary cursor can change shape. +Defines the shape of cursor in each mode. Valid values for these options are `block`, `bar`, `underline`, or `hidden`. +> 💡 Due to limitations of the terminal environment, only the primary cursor can +> change shape. + | Key | Description | Default | | --- | ----------- | ------- | | `normal` | Cursor shape in [normal mode][normal mode] | `block` | @@ -139,23 +147,22 @@ Valid values for these options are `block`, `bar`, `underline`, or `hidden`. ### `[editor.file-picker]` Section -Sets options for file picker and global search. All but the last key listed in -the default file-picker configuration below are IgnoreOptions: whether hidden -files and files listed within ignore files are ignored by (not visible in) the -helix file picker and global search. There is also one other key, `max-depth` -available, which is not defined by default. +Set options for file picker and global search. Ignoring a file means it is +not visible in the Helix file picker and global search. All git related options are only enabled in a git repository. | Key | Description | Default | |--|--|---------| -|`hidden` | Enables ignoring hidden files. | true -|`parents` | Enables reading ignore files from parent directories. | true -|`ignore` | Enables reading `.ignore` files. 
| true -|`git-ignore` | Enables reading `.gitignore` files. | true -|`git-global` | Enables reading global .gitignore, whose path is specified in git's config: `core.excludefile` option. | true -|`git-exclude` | Enables reading `.git/info/exclude` files. | true -|`max-depth` | Set with an integer value for maximum depth to recurse. | Defaults to `None`. +|`hidden` | Enables ignoring hidden files | true +|`follow-links` | Follow symlinks instead of ignoring them | true +|`deduplicate-links` | Ignore symlinks that point at files already shown in the picker | true +|`parents` | Enables reading ignore files from parent directories | true +|`ignore` | Enables reading `.ignore` files | true +|`git-ignore` | Enables reading `.gitignore` files | true +|`git-global` | Enables reading global `.gitignore`, whose path is specified in git's config: `core.excludefile` option | true +|`git-exclude` | Enables reading `.git/info/exclude` files | true +|`max-depth` | Set with an integer value for maximum depth to recurse | Defaults to `None`. ### `[editor.auto-pairs]` Section @@ -207,7 +214,7 @@ Search specific options. | Key | Description | Default | |--|--|---------| -| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` | +| `smart-case` | Enable smart case regex searching (case-insensitive unless pattern contains upper case characters) | `true` | | `wrap-around`| Whether the search should wrap after depleting the matches | `true` | ### `[editor.whitespace]` Section @@ -216,7 +223,7 @@ Options for rendering whitespace with visible characters. Use `:set whitespace.r | Key | Description | Default | |-----|-------------|---------| -| `render` | Whether to render whitespace. May either be `"all"` or `"none"`, or a table with sub-keys `space`, `nbsp`, `tab`, and `newline`. | `"none"` | +| `render` | Whether to render whitespace. May either be `"all"` or `"none"`, or a table with sub-keys `space`, `nbsp`, `tab`, and `newline` | `"none"` | | `characters` | Literal characters to use when rendering whitespace. Sub-keys may be any of `tab`, `space`, `nbsp`, `newline` or `tabpad` | See example below | Example @@ -244,7 +251,7 @@ Options for rendering vertical indent guides. | Key | Description | Default | | --- | --- | --- | -| `render` | Whether to render indent guides. | `false` | +| `render` | Whether to render indent guides | `false` | | `character` | Literal character to use for rendering the indent guide | `│` | | `skip-levels` | Number of indent levels to skip | `0` | @@ -269,7 +276,7 @@ gutters = ["diff", "diagnostics", "line-numbers", "spacer"] To customize the behavior of gutters, the `[editor.gutters]` section must be used. This section contains top level settings, as well as settings for -specific gutter components as sub-sections. +specific gutter components as subsections. | Key | Description | Default | | --- | --- | --- | @@ -311,14 +318,15 @@ Currently unused ### `[editor.soft-wrap]` Section -Options for soft wrapping lines that exceed the view width +Options for soft wrapping lines that exceed the view width: -| Key | Description | Default | -| --- | --- | --- | -| `enable` | Whether soft wrapping is enabled. | `false` | -| `max-wrap` | Maximum free space left at the end of the line. | `20` | -| `max-indent-retain` | Maximum indentation to carry over when soft wrapping a line. 
| `40` | -| `wrap-indicator` | Text inserted before soft wrapped lines, highlighted with `ui.virtual.wrap` | `↪ ` | +| Key | Description | Default | +| --- | --- | --- | +| `enable` | Whether soft wrapping is enabled. | `false` | +| `max-wrap` | Maximum free space left at the end of the line. | `20` | +| `max-indent-retain` | Maximum indentation to carry over when soft wrapping a line. | `40` | +| `wrap-indicator` | Text inserted before soft wrapped lines, highlighted with `ui.virtual.wrap` | `↪ ` | +| `wrap-at-text-width` | Soft wrap at `text-width` instead of using the full viewport size. | `false` | Example: diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 1711ec36d5bf..48cb66f1f515 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -30,7 +30,7 @@ | eex | ✓ | | | | | ejs | ✓ | | | | | elixir | ✓ | ✓ | ✓ | `elixir-ls` | -| elm | ✓ | | | `elm-language-server` | +| elm | ✓ | ✓ | | `elm-language-server` | | elvish | ✓ | | | `elvish` | | env | ✓ | | | | | erb | ✓ | | | | @@ -38,7 +38,7 @@ | esdl | ✓ | | | | | fish | ✓ | ✓ | ✓ | | | fortran | ✓ | | ✓ | `fortls` | -| gdscript | ✓ | ✓ | | | +| gdscript | ✓ | ✓ | ✓ | | | git-attributes | ✓ | | | | | git-commit | ✓ | | | | | git-config | ✓ | | | | @@ -56,6 +56,7 @@ | haskell | ✓ | ✓ | | `haskell-language-server-wrapper` | | hcl | ✓ | | ✓ | `terraform-ls` | | heex | ✓ | ✓ | | `elixir-ls` | +| hosts | ✓ | | | | | html | ✓ | | | `vscode-html-language-server` | | idris | | | | `idris2-lsp` | | iex | ✓ | | | | @@ -83,6 +84,8 @@ | mermaid | ✓ | | | | | meson | ✓ | | ✓ | | | mint | | | | `mint` | +| msbuild | ✓ | | ✓ | | +| nasm | ✓ | ✓ | | | | nickel | ✓ | | ✓ | `nls` | | nix | ✓ | | | `nil` | | nu | ✓ | | | | @@ -92,12 +95,16 @@ | openscad | ✓ | | | `openscad-lsp` | | org | ✓ | | | | | pascal | ✓ | ✓ | | `pasls` | +| passwd | ✓ | | | | +| pem | ✓ | | | | | perl | ✓ | ✓ | ✓ | | | php | ✓ | ✓ | ✓ | `intelephense` | +| po | ✓ | ✓ | | | | ponylang | ✓ | ✓ | ✓ | | | prisma | ✓ | | | `prisma-language-server` | | prolog | | | | `swipl` | | protobuf | ✓ | | ✓ | | +| prql | ✓ | | | | | purescript | ✓ | | | `purescript-language-server` | | python | ✓ | ✓ | ✓ | `pylsp` | | qml | ✓ | | ✓ | `qmlls` | @@ -107,6 +114,7 @@ | rescript | ✓ | ✓ | | `rescript-language-server` | | rmarkdown | ✓ | | ✓ | `R` | | ron | ✓ | | ✓ | | +| rst | ✓ | | | | | ruby | ✓ | ✓ | ✓ | `solargraph` | | rust | ✓ | ✓ | ✓ | `rust-analyzer` | | sage | ✓ | ✓ | | | @@ -120,6 +128,7 @@ | sshclientconfig | ✓ | | | | | starlark | ✓ | ✓ | | | | svelte | ✓ | | | `svelteserver` | +| sway | ✓ | ✓ | ✓ | `forc` | | swift | ✓ | | | `sourcekit-lsp` | | tablegen | ✓ | ✓ | ✓ | | | task | ✓ | | | | @@ -130,6 +139,7 @@ | twig | ✓ | | | | | typescript | ✓ | ✓ | ✓ | `typescript-language-server` | | ungrammar | ✓ | | | | +| uxntal | ✓ | | | | | v | ✓ | | | `v` | | vala | ✓ | | | `vala-language-server` | | verilog | ✓ | ✓ | | `svlangserver` | @@ -142,4 +152,5 @@ | xit | ✓ | | | | | xml | ✓ | | ✓ | | | yaml | ✓ | | ✓ | `yaml-language-server` | +| yuck | ✓ | | | | | zig | ✓ | ✓ | ✓ | `zls` | diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index 66e6ac039c26..8b367aad8bc4 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -43,11 +43,13 @@ | `:change-current-directory`, `:cd` | Change the current working directory. | | `:show-directory`, `:pwd` | Show the current working directory. | | `:encoding` | Set encoding. Based on `https://encoding.spec.whatwg.org`. 
| +| `:character-info`, `:char` | Get info about the character under the primary cursor. | | `:reload` | Discard changes and reload from the source file. | | `:reload-all` | Discard changes and reload all documents from the source files. | | `:update` | Write changes only if the file has been modified. | | `:lsp-workspace-command` | Open workspace command picker | | `:lsp-restart` | Restarts the Language Server that is in use by the current doc | +| `:lsp-stop` | Stops the Language Server that is in use by the current doc | | `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. | | `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. | | `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. | @@ -58,8 +60,9 @@ | `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. | | `:tutor` | Open the tutorial. | | `:goto`, `:g` | Goto line number. | -| `:set-language`, `:lang` | Set the language of current buffer. | +| `:set-language`, `:lang` | Set the language of current buffer (show current language if no value specified). | | `:set-option`, `:set` | Set a config option at runtime.
For example, to disable smart case search, use `:set search.smart-case false`. | +| `:toggle-option`, `:toggle` | Toggle a boolean config option at runtime.
For example to toggle smart case search, use `:toggle search.smart-case`. | | `:get-option`, `:get` | Get the current value of a config option. | | `:sort` | Sort ranges in selection. | | `:rsort` | Sort ranges in selection in reverse order. | @@ -73,3 +76,4 @@ | `:pipe` | Pipe each selection to the shell command. | | `:pipe-to` | Pipe each selection to the shell command, ignoring output. | | `:run-shell-command`, `:sh` | Run a shell command | +| `:reset-diff-change`, `:diffget`, `:diffg` | Reset the diff change at the cursor position. | diff --git a/book/src/guides/README.md b/book/src/guides/README.md index e0c44ce7d1f1..c25768e68961 100644 --- a/book/src/guides/README.md +++ b/book/src/guides/README.md @@ -1,4 +1,4 @@ # Guides This section contains guides for adding new language server configurations, -tree-sitter grammars, textobject queries, etc. +tree-sitter grammars, textobject queries, and other similar items. diff --git a/book/src/guides/adding_languages.md b/book/src/guides/adding_languages.md index 6598b9bf7488..b92af4028e9a 100644 --- a/book/src/guides/adding_languages.md +++ b/book/src/guides/adding_languages.md @@ -1,45 +1,52 @@ -# Adding languages +# Adding new languages to Helix + +In order to add a new language to Helix, you will need to follow the steps +below. ## Language configuration -To add a new language, you need to add a `[[language]]` entry to the -`languages.toml` (see the [language configuration section]). +1. Add a new `[[language]]` entry in the `languages.toml` file and provide the + necessary configuration for the new language. For more information on + language configuration, refer to the + [language configuration section](../languages.md) of the documentation. +2. If you are adding a new language or updating an existing language server + configuration, run the command `cargo xtask docgen` to update the + [Language Support](../lang-support.md) documentation. -When adding a new language or Language Server configuration for an existing -language, run `cargo xtask docgen` to add the new configuration to the -[Language Support][lang-support] docs before creating a pull request. -When adding a Language Server configuration, be sure to update the -[Language Server Wiki][install-lsp-wiki] with installation notes. +> 💡 If you are adding a new Language Server configuration, make sure to update +> the +> [Language Server Wiki](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) +> with the installation instructions. ## Grammar configuration -If a tree-sitter grammar is available for the language, add a new `[[grammar]]` -entry to `languages.toml`. - -You may use the `source.path` key rather than `source.git` with an absolute path -to a locally available grammar for testing, but switch to `source.git` before -submitting a pull request. +1. If a tree-sitter grammar is available for the new language, add a new + `[[grammar]]` entry to the `languages.toml` file. +2. If you are testing the grammar locally, you can use the `source.path` key + with an absolute path to the grammar. However, before submitting a pull + request, make sure to switch to using `source.git`. ## Queries -For a language to have syntax-highlighting and indentation among -other things, you have to add queries. Add a directory for your -language with the path `runtime/queries//`. The tree-sitter -[website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries) -gives more info on how to write queries. 
- -> NOTE: When evaluating queries, the first matching query takes -precedence, which is different from other editors like Neovim where -the last matching query supersedes the ones before it. See -[this issue][neovim-query-precedence] for an example. - -## Common Issues - -- If you get errors when running after switching branches, you may have to update the tree-sitter grammars. Run `hx --grammar fetch` to fetch the grammars and `hx --grammar build` to build any out-of-date grammars. - -- If a parser is segfaulting or you want to remove the parser, make sure to remove the compiled parser in `runtime/grammar/.so` - -[language configuration section]: ../languages.md -[neovim-query-precedence]: https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090 -[install-lsp-wiki]: https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers -[lang-support]: ../lang-support.md +1. In order to provide syntax highlighting and indentation for the new language, + you will need to add queries. +2. Create a new directory for the language with the path + `runtime/queries//`. +3. Refer to the + [tree-sitter website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries) + for more information on writing queries. + +> 💡 In Helix, the first matching query takes precedence when evaluating +> queries, which is different from other editors such as Neovim where the last +> matching query supersedes the ones before it. See +> [this issue](https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090) +> for an example. + +## Common issues + +- If you encounter errors when running Helix after switching branches, you may + need to update the tree-sitter grammars. Run the command `hx --grammar fetch` + to fetch the grammars and `hx --grammar build` to build any out-of-date + grammars. +- If a parser is causing a segfault, or you want to remove it, make sure to + remove the compiled parser located at `runtime/grammars/.so`. diff --git a/book/src/guides/indent.md b/book/src/guides/indent.md index 0e259289781e..b660d785734e 100644 --- a/book/src/guides/indent.md +++ b/book/src/guides/indent.md @@ -1,4 +1,4 @@ -# Adding Indent Queries +# Adding indent queries Helix uses tree-sitter to correctly indent new lines. This requires a tree-sitter grammar and an `indent.scm` query file placed in @@ -36,7 +36,7 @@ changed by using a `#set!` declaration anywhere in the pattern: (#set! "scope" "all")) ``` -## Capture Types +## Capture types - `@indent` (default scope `tail`): Increase the indent level by 1. Multiple occurrences in the same line diff --git a/book/src/guides/textobject.md b/book/src/guides/textobject.md index 8a2173547541..405f11c1b00a 100644 --- a/book/src/guides/textobject.md +++ b/book/src/guides/textobject.md @@ -1,14 +1,14 @@ -# Adding Textobject Queries +# Adding textobject queries -Textobjects that are language specific ([like functions, classes, etc][textobjects]) -require an accompanying tree-sitter grammar and a `textobjects.scm` query file +Helix supports textobjects that are language specific, such as functions, classes, etc. +These textobjects require an accompanying tree-sitter grammar and a `textobjects.scm` query file to work properly. Tree-sitter allows us to query the source code syntax tree and capture specific parts of it. The queries are written in a lisp dialect. More information on how to write queries can be found in the [official tree-sitter documentation][tree-sitter-queries]. 
Query files should be placed in `runtime/queries/{language}/textobjects.scm` -when contributing. Note that to test the query files locally you should put +when contributing to Helix. Note that to test the query files locally you should put them under your local runtime directory (`~/.config/helix/runtime` on Linux for example). @@ -28,9 +28,9 @@ The following [captures][tree-sitter-captures] are recognized: [Example query files][textobject-examples] can be found in the helix GitHub repository. -## Queries for Textobject Based Navigation +## Queries for textobject based navigation -[Tree-sitter based navigation][textobjects-nav] is done using captures in the +Tree-sitter based navigation in Helix is done using captures in the following order: - `object.movement` @@ -38,12 +38,10 @@ following order: - `object.inside` For example if a `function.around` capture has been already defined for a language -in it's `textobjects.scm` file, function navigation should also work automatically. +in its `textobjects.scm` file, function navigation should also work automatically. `function.movement` should be defined only if the node captured by `function.around` doesn't make sense in a navigation context. -[textobjects]: ../usage.md#textobjects -[textobjects-nav]: ../usage.md#tree-sitter-textobject-based-navigation [tree-sitter-queries]: https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax [tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers#capturing-nodes [textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l= diff --git a/book/src/install.md b/book/src/install.md index 792799d7a7f9..bd3f502b6e95 100644 --- a/book/src/install.md +++ b/book/src/install.md @@ -1,179 +1,239 @@ -# Installation - -We provide pre-built binaries on the [GitHub Releases page](https://github.com/helix-editor/helix/releases). +# Installing Helix + + +- [Pre-built binaries](#pre-built-binaries) +- [Linux, macOS, Windows and OpenBSD packaging status](#linux-macos-windows-and-openbsd-packaging-status) +- [Linux](#linux) + - [Ubuntu](#ubuntu) + - [Fedora/RHEL](#fedorarhel) + - [Arch Linux community](#arch-linux-community) + - [NixOS](#nixos) +- [macOS](#macos) + - [Homebrew Core](#homebrew-core) +- [Windows](#windows) + - [Scoop](#scoop) + - [Chocolatey](#chocolatey) + - [MSYS2](#msys2) +- [Building from source](#building-from-source) + - [Configuring Helix's runtime files](#configuring-helixs-runtime-files) + - [Validating the installation](#validating-the-installation) + - [Configure the desktop shortcut](#configure-the-desktop-shortcut) + + +To install Helix, follow the instructions specific to your operating system. +Note that: + +- To get the latest nightly version of Helix, you need to + [build from source](#building-from-source). + +- To take full advantage of Helix, install the language servers for your + preferred programming languages. See the + [wiki](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) + for instructions. + +## Pre-built binaries + +Download pre-built binaries from the +[GitHub Releases page](https://github.com/helix-editor/helix/releases). Add the binary to your system's `$PATH` to use it from the command +line. + +## Linux, macOS, Windows and OpenBSD packaging status + +Helix is available for Linux, macOS and Windows via the official repositories listed below. 
[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions) -## OSX +## Linux -Helix is available in homebrew-core: +The following third party repositories are available: -``` -brew install helix -``` +### Ubuntu -## Linux +Helix is available via [Maveonair's PPA](https://launchpad.net/~maveonair/+archive/ubuntu/helix-editor): -### NixOS +```sh +sudo add-apt-repository ppa:maveonair/helix-editor +sudo apt update +sudo apt install helix +``` -A [flake](https://nixos.wiki/wiki/Flakes) containing the package is available in -the project root. The flake can also be used to spin up a reproducible development -shell for working on Helix with `nix develop`. +### Fedora/RHEL -Flake outputs are cached for each push to master using -[Cachix](https://www.cachix.org/). The flake is configured to -automatically make use of this cache assuming the user accepts -the new settings on first use. +Helix is available via `copr`: -If you are using a version of Nix without flakes enabled you can -[install Cachix cli](https://docs.cachix.org/installation); `cachix use helix` will -configure Nix to use cached outputs when possible. +```sh +sudo dnf copr enable varlad/helix +sudo dnf install helix +``` -### Arch Linux +### Arch Linux community -Releases are available in the `community` repository. +Releases are available in the `community` repository: -A [helix-git](https://aur.archlinux.org/packages/helix-git/) package is also available on the AUR, which builds the master branch. +```sh +sudo pacman -S helix +``` +Additionally, a [helix-git](https://aur.archlinux.org/packages/helix-git/) package is available +in the AUR, which builds the master branch. -### Fedora Linux +### NixOS -You can install the COPR package for Helix via +Helix is available as a [flake](https://nixos.wiki/wiki/Flakes) in the project +root. Use `nix develop` to spin up a reproducible development shell. Outputs are +cached for each push to master using [Cachix](https://www.cachix.org/). The +flake is configured to automatically make use of this cache assuming the user +accepts the new settings on first use. -``` -sudo dnf copr enable varlad/helix -sudo dnf install helix -``` +If you are using a version of Nix without flakes enabled, +[install Cachix CLI](https://docs.cachix.org/installation) and use +`cachix use helix` to configure Nix to use cached outputs when possible. + +## macOS -### Void Linux +### Homebrew Core -``` -sudo xbps-install helix +```sh +brew install helix ``` ## Windows -Helix can be installed using [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/) +Install on Windows using [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/) or [MSYS2](https://msys2.org/). 
-**Scoop:** +### Scoop -``` +```sh scoop install helix ``` -**Chocolatey:** +### Chocolatey -``` +```sh choco install helix ``` -**MSYS2:** - -Choose the [proper command](https://www.msys2.org/docs/package-naming/) for your system from below: +### MSYS2 - - For 32 bit Windows 7 or above: +For 64-bit Windows 8.1 or above: +```sh +pacman -S mingw-w64-ucrt-x86_64-helix ``` -pacman -S mingw-w64-i686-helix -``` - - - For 64 bit Windows 7 or above: -``` -pacman -S mingw-w64-x86_64-helix -``` +## Building from source - - For 64 bit Windows 8.1 or above: +Clone the repository: -``` -pacman -S mingw-w64-ucrt-x86_64-helix +```sh +git clone https://github.com/helix-editor/helix +cd helix ``` -## Build from source +Compile from source: -``` -git clone https://github.com/helix-editor/helix -cd helix +```sh cargo install --path helix-term --locked ``` -This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars in `./runtime/grammars`. +This command will create the `hx` executable and construct the tree-sitter +grammars in the local `runtime` folder. To build the tree-sitter grammars requires +a c++ compiler to be installed, for example `gcc-c++`. -If you are using the musl-libc instead of glibc the following environment variable must be set during the build -to ensure tree sitter grammars can be loaded correctly: +> 💡 If you are using the musl-libc instead of glibc the following environment variable must be set during the build +> to ensure tree-sitter grammars can be loaded correctly: +> +> ```sh +> RUSTFLAGS="-C target-feature=-crt-static" +> ``` -``` -RUSTFLAGS="-C target-feature=-crt-static" -``` +> 💡 Tree-sitter grammars can be fetched and compiled if not pre-packaged. Fetch +> grammars with `hx --grammar fetch` (requires `git`) and compile them with +> `hx --grammar build` (requires a C++ compiler). This will install them in +> the `runtime` directory within the user's helix config directory (more +> [details below](#multiple-runtime-directories)). +### Configuring Helix's runtime files -Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the -config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overridden -via the `HELIX_RUNTIME` environment variable. +#### Linux and macOS -| OS | Command | -| -------------------- | ------------------------------------------------ | -| Windows (Cmd) | `xcopy /e /i runtime %AppData%\helix\runtime` | -| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` | -| Linux / macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` | +Either set the `HELIX_RUNTIME` environment variable to point to the runtime files and add it to your `~/.bashrc` or equivalent: -Starting with Windows Vista you can also create symbolic links on Windows. Note that this requires -elevated privileges - i.e. PowerShell or Cmd must be run as administrator. 
+```sh +HELIX_RUNTIME=/home/user-name/src/helix/runtime +``` -**PowerShell:** +Or, create a symlink in `~/.config/helix` that links to the source code directory: -```powershell -New-Item -ItemType SymbolicLink -Target "runtime" -Path "$Env:AppData\helix\runtime" +```sh +ln -s $PWD/runtime ~/.config/helix/runtime ``` -**Cmd:** +#### Windows + +Either set the `HELIX_RUNTIME` environment variable to point to the runtime files using the Windows setting (search for +`Edit environment variables for your account`) or use the `setx` command in +Cmd: -```cmd -cd %appdata%\helix -mklink /D runtime "\runtime" +```sh +setx HELIX_RUNTIME "%userprofile%\source\repos\helix\runtime" ``` -The runtime location can be overridden via the `HELIX_RUNTIME` environment variable. +> 💡 `%userprofile%` resolves to your user directory like +> `C:\Users\Your-Name\` for example. -> NOTE: if `HELIX_RUNTIME` is set prior to calling `cargo install --path helix-term --locked`, -> tree-sitter grammars will be built in `$HELIX_RUNTIME/grammars`. +Or, create a symlink in `%appdata%\helix\` that links to the source code directory: -If you plan on keeping the repo locally, an alternative to copying/symlinking -runtime files is to set `HELIX_RUNTIME=/path/to/helix/runtime` -(`HELIX_RUNTIME=$PWD/runtime` if you're in the helix repo directory). +| Method | Command | +| ---------- | -------------------------------------------------------------------------------------- | +| PowerShell | `New-Item -ItemType Junction -Target "runtime" -Path "$Env:AppData\helix\runtime"` | +| Cmd | `cd %appdata%\helix`
`mklink /D runtime "%userprofile%\src\helix\runtime"` | -To use Helix in desktop environments that supports [XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html), including Gnome and KDE, copy the provided `.desktop` file to the correct folder: +> 💡 On Windows, creating a symbolic link may require running PowerShell or +> Cmd as an administrator. -```bash -cp contrib/Helix.desktop ~/.local/share/applications -``` +#### Multiple runtime directories -To use another terminal than the default, you will need to modify the `.desktop` file. For example, to use `kitty`: +When Helix finds multiple runtime directories it will search through them for files in the +following order: -```bash -sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop -sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop -``` +1. `runtime/` sibling directory to `$CARGO_MANIFEST_DIR` directory (this is intended for + developing and testing helix only). +2. `runtime/` subdirectory of OS-dependent helix user config directory. +3. `$HELIX_RUNTIME`. +4. `runtime/` subdirectory of path to Helix executable. -Please note: there is no icon for Helix yet, so the system default will be used. +This order also sets the priority for selecting which file will be used if multiple runtime +directories have files with the same name. -## Finishing up the installation +### Validating the installation -To make sure everything is set up as expected you should finally run the helix healthcheck via +To make sure everything is set up as expected you should run the Helix health +check: -``` +```sh hx --health ``` -For more information on the information displayed in the health check results refer to [Healthcheck](https://github.com/helix-editor/helix/wiki/Healthcheck). +For more information on the health check results refer to +[Health check](https://github.com/helix-editor/helix/wiki/Healthcheck). -### Building tree-sitter grammars +### Configure the desktop shortcut -Tree-sitter grammars must be fetched and compiled if not pre-packaged. -Fetch grammars with `hx --grammar fetch` (requires `git`) and compile them -with `hx --grammar build` (requires a C++ compiler). +If your desktop environment supports the +[XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html) +you can configure Helix to show up in the application menu by copying the +provided `.desktop` and icon files to their correct folders: -### Installing language servers +```sh +cp contrib/Helix.desktop ~/.local/share/applications +cp contrib/helix.png ~/.icons # or ~/.local/share/icons +``` + +To use another terminal than the system default, you can modify the `.desktop` +file. For example, to use `kitty`: -Language servers can optionally be installed if you want their features (auto-complete, diagnostics etc.). -Follow the [instructions on the wiki page](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) to add your language servers of choice. 
+```sh +sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop +sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop +``` diff --git a/book/src/keymap.md b/book/src/keymap.md index 0550e57f3f3b..173728f278db 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -14,14 +14,14 @@ - [Space mode](#space-mode) - [Popup](#popup) - [Unimpaired](#unimpaired) -- [Insert Mode](#insert-mode) -- [Select / extend mode](#select--extend-mode) +- [Insert mode](#insert-mode) +- [Select / extend mode](#select-extend-mode) - [Picker](#picker) - [Prompt](#prompt) > 💡 Mappings marked (**LSP**) require an active language server for the file. -> 💡 Mappings marked (**TS**) require a tree-sitter grammar for the filetype. +> 💡 Mappings marked (**TS**) require a tree-sitter grammar for the file type. ## Normal mode @@ -109,7 +109,7 @@ | Key | Description | Command | | ----- | ----------- | ------- | | `s` | Select all regex matches inside selections | `select_regex` | -| `S` | Split selection into subselections on regex matches | `split_selection` | +| `S` | Split selection into sub selections on regex matches | `split_selection` | | `Alt-s` | Split selection on newlines | `split_selection_on_newline` | | `Alt-_ ` | Merge consecutive selections | `merge_consecutive_selections` | | `&` | Align selection in columns | `align_selections` | @@ -130,7 +130,7 @@ | `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` | | `Alt-x` | Shrink selection to line bounds (line-wise selection) | `shrink_to_line_bounds` | | `J` | Join lines inside selection | `join_selections` | -| `Alt-J` | Join lines inside selection and select space | `join_selections_space` | +| `Alt-J` | Join lines inside selection and select the inserted space | `join_selections_space` | | `K` | Keep selections matching the regex | `keep_selections` | | `Alt-K` | Remove selections matching the regex | `remove_selections` | | `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` | @@ -141,7 +141,7 @@ ### Search -Search commands all operate on the `/` register by default. Use `"` to operate on a different one. +Search commands all operate on the `/` register by default. To use a different register, use `"`. | Key | Description | Command | | ----- | ----------- | ------- | @@ -175,9 +175,8 @@ Accessed by typing `z` in [normal mode](#normal-mode). View mode is intended for scrolling and manipulating the view without changing the selection. The "sticky" variant of this mode (accessed by typing `Z` in -normal mode) is persistent; use the Escape key to return to normal mode after -usage (useful when you're simply looking over text and not actively editing -it). +normal mode) is persistent and can be exited using the escape key. This is +useful when you're simply looking over text and not actively editing it. | Key | Description | Command | @@ -225,7 +224,7 @@ Jumps to various locations. Accessed by typing `m` in [normal mode](#normal-mode). See the relevant section in [Usage](./usage.md) for an explanation about -[surround](./usage.md#surround) and [textobject](./usage.md#textobjects) usage. +[surround](./usage.md#surround) and [textobject](./usage.md#navigating-using-tree-sitter-textobjects) usage. | Key | Description | Command | | ----- | ----------- | ------- | @@ -242,7 +241,7 @@ TODO: Mappings for selecting syntax nodes (a superset of `[`). Accessed by typing `Ctrl-w` in [normal mode](#normal-mode). 
-This layer is similar to Vim keybindings as Kakoune does not support window. +This layer is similar to Vim keybindings as Kakoune does not support windows. | Key | Description | Command | | ----- | ------------- | ------- | @@ -268,30 +267,32 @@ Accessed by typing `Space` in [normal mode](#normal-mode). This layer is a kludge of mappings, mostly pickers. -| Key | Description | Command | -| ----- | ----------- | ------- | -| `f` | Open file picker | `file_picker` | -| `F` | Open file picker at current working directory | `file_picker_in_current_directory` | -| `b` | Open buffer picker | `buffer_picker` | -| `j` | Open jumplist picker | `jumplist_picker` | -| `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` | -| `s` | Open document symbol picker (**LSP**) | `symbol_picker` | -| `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` | -| `d` | Open document diagnostics picker (**LSP**) | `diagnostics_picker` | -| `D` | Open workspace diagnostics picker (**LSP**) | `workspace_diagnostics_picker` | -| `r` | Rename symbol (**LSP**) | `rename_symbol` | -| `a` | Apply code action (**LSP**) | `code_action` | -| `'` | Open last fuzzy picker | `last_picker` | -| `w` | Enter [window mode](#window-mode) | N/A | -| `p` | Paste system clipboard after selections | `paste_clipboard_after` | -| `P` | Paste system clipboard before selections | `paste_clipboard_before` | -| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` | -| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` | -| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` | -| `/` | Global search in workspace folder | `global_search` | -| `?` | Open command palette | `command_palette` | - -> TIP: Global search displays results in a fuzzy picker, use `Space + '` to bring it back up after opening a file. 
+| Key | Description | Command | +| ----- | ----------- | ------- | +| `f` | Open file picker | `file_picker` | +| `F` | Open file picker at current working directory | `file_picker_in_current_directory` | +| `b` | Open buffer picker | `buffer_picker` | +| `j` | Open jumplist picker | `jumplist_picker` | +| `g` | Debug (experimental) | N/A | +| `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` | +| `s` | Open document symbol picker (**LSP**) | `symbol_picker` | +| `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` | +| `d` | Open document diagnostics picker (**LSP**) | `diagnostics_picker` | +| `D` | Open workspace diagnostics picker (**LSP**) | `workspace_diagnostics_picker` | +| `r` | Rename symbol (**LSP**) | `rename_symbol` | +| `a` | Apply code action (**LSP**) | `code_action` | +| `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` | +| `'` | Open last fuzzy picker | `last_picker` | +| `w` | Enter [window mode](#window-mode) | N/A | +| `p` | Paste system clipboard after selections | `paste_clipboard_after` | +| `P` | Paste system clipboard before selections | `paste_clipboard_before` | +| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` | +| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` | +| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` | +| `/` | Global search in workspace folder | `global_search` | +| `?` | Open command palette | `command_palette` | + +> 💡 Global search displays results in a fuzzy picker, use `Space + '` to bring it back up after opening a file. ##### Popup @@ -304,7 +305,7 @@ Displays documentation for item under cursor. #### Unimpaired -Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired). +These mappings are in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired). | Key | Description | Command | | ----- | ----------- | ------- | @@ -333,12 +334,13 @@ Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaire ## Insert mode -Insert mode bindings are somewhat minimal by default. Helix is designed to +Insert mode bindings are minimal by default. Helix is designed to be a modal editor, and this is reflected in the user experience and internal -mechanics. For example, changes to the text are only saved for undos when -escaping from insert mode to normal mode. For this reason, new users are -strongly encouraged to learn the modal editing paradigm to get the smoothest -experience. +mechanics. Changes to the text are only saved for undos when +escaping from insert mode to normal mode. + +> 💡 New users are strongly encouraged to learn the modal editing paradigm +> to get the smoothest experience. | Key | Description | Command | | ----- | ----------- | ------- | @@ -350,7 +352,7 @@ experience. | `Alt-d`, `Alt-Delete` | Delete next word | `delete_word_forward` | | `Ctrl-u` | Delete to start of line | `kill_to_line_start` | | `Ctrl-k` | Delete to end of line | `kill_to_line_end` | -| `Ctrl-h`, `Backspace` | Delete previous char | `delete_char_backward` | +| `Ctrl-h`, `Backspace`, `Shift-Backspace` | Delete previous char | `delete_char_backward` | | `Ctrl-d`, `Delete` | Delete next char | `delete_char_forward` | | `Ctrl-j`, `Enter` | Insert new line | `insert_newline` | @@ -368,8 +370,8 @@ with modal editors. 
| `Home` | Move to line start | `goto_line_start` | | `End` | Move to line end | `goto_line_end_newline` | -If you want to disable them in insert mode as you become more comfortable with modal editing, you can use -the following in your `config.toml`: +As you become more comfortable with modal editing, you may want to disable some +insert mode bindings. You can do this by editing your `config.toml` file. ```toml [keys.insert] @@ -385,7 +387,7 @@ end = "no_op" ## Select / extend mode -This mode echoes Normal mode, but changes any movements to extend +Select mode echoes Normal mode, but changes any movements to extend selections rather than replace them. Goto motions are also changed to extend, so that `vgl` for example extends the selection to the end of the line. @@ -431,7 +433,7 @@ Keys to use within prompt, Remapping currently not supported. | `Alt-d`, `Alt-Delete`, `Ctrl-Delete` | Delete next word | | `Ctrl-u` | Delete to start of line | | `Ctrl-k` | Delete to end of line | -| `Backspace`, `Ctrl-h` | Delete previous char | +| `Backspace`, `Ctrl-h`, `Shift-Backspace` | Delete previous char | | `Delete`, `Ctrl-d` | Delete next char | | `Ctrl-s` | Insert a word under doc cursor, may be changed to Ctrl-r Ctrl-w later | | `Ctrl-p`, `Up` | Select previous history | diff --git a/book/src/lang-support.md b/book/src/lang-support.md index 6a08cd699813..3f96673bc953 100644 --- a/book/src/lang-support.md +++ b/book/src/lang-support.md @@ -1,10 +1,10 @@ # Language Support -The following languages and Language Servers are supported. In order to use +The following languages and Language Servers are supported. To use Language Server features, you must first [install][lsp-install-wiki] the appropriate Language Server. -Check the language support in your installed helix version with `hx --health`. +You can check the language support in your installed helix version with `hx --health`. Also see the [Language Configuration][lang-config] docs and the [Adding Languages][adding-languages] guide for more language configuration information. diff --git a/book/src/languages.md b/book/src/languages.md index 0646b9af9580..5ed69505d130 100644 --- a/book/src/languages.md +++ b/book/src/languages.md @@ -5,13 +5,15 @@ in `languages.toml` files. ## `languages.toml` files -There are three possible `languages.toml` files. The first is compiled into -Helix and lives in the [Helix repository](https://github.com/helix-editor/helix/blob/master/languages.toml). -This provides the default configurations for languages and language servers. +There are three possible locations for a `languages.toml` file: -You may define a `languages.toml` in your [configuration directory](./configuration.md) -which overrides values from the built-in language configuration. For example -to disable auto-LSP-formatting in Rust: +1. In the Helix source code, this lives in the + [Helix repository](https://github.com/helix-editor/helix/blob/master/languages.toml). + It provides the default configurations for languages and language servers. + +2. In your [configuration directory](./configuration.md). This overrides values + from the built-in language configuration. For example to disable + auto-LSP-formatting in Rust: ```toml # in /helix/languages.toml @@ -21,10 +23,10 @@ name = "rust" auto-format = false ``` -Language configuration may also be overridden local to a project by creating -a `languages.toml` file under a `.helix` directory. 
Its settings will be merged -with the language configuration in the configuration directory and the built-in -configuration. +3. In a `.helix` folder in your project. Language configuration may also be + overridden local to a project by creating a `languages.toml` file in a + `.helix` folder. Its settings will be merged with the language configuration + in the configuration directory and the built-in configuration. ## Language configuration @@ -56,16 +58,16 @@ These configuration keys are available: | `auto-format` | Whether to autoformat this language when saving | | `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) | | `comment-token` | The token to use as a comment-token | -| `indent` | The indent to use. Has sub keys `tab-width` and `unit` | +| `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) | | `language-server` | The Language Server to run. See the Language Server configuration section below. | | `config` | Language Server configuration | | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) | | `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout | -| `max-line-length` | Maximum line length. Used for the `:reflow` command and soft-wrapping | +| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap_at_text_width` is set, defaults to `editor.text-width` | ### File-type detection and the `file-types` key -Helix determines which language configuration to use with the `file-types` key +Helix determines which language configuration to use based on the `file-types` key from the above section. `file-types` is a list of strings or tables, for example: diff --git a/book/src/remapping.md b/book/src/remapping.md index 8339e05fce8c..d762c6add4fc 100644 --- a/book/src/remapping.md +++ b/book/src/remapping.md @@ -1,18 +1,18 @@ -# Key Remapping +# Key remapping -One-way key remapping is temporarily supported via a simple TOML configuration +Helix currently supports one-way key remapping through a simple TOML configuration file. (More powerful solutions such as rebinding via commands will be available in the future). 
-To remap keys, write a `config.toml` file in your `helix` configuration -directory (default `~/.config/helix` in Linux systems) with a structure like +To remap keys, create a `config.toml` file in your `helix` configuration +directory (default `~/.config/helix` on Linux systems) with a structure like this: ```toml # At most one section each of 'keys.normal', 'keys.insert' and 'keys.select' [keys.normal] -C-s = ":w" # Maps the Ctrl-s to the typable command :w which is an alias for :write (save file) -C-o = ":open ~/.config/helix/config.toml" # Maps the Ctrl-o to opening of the helix config file +C-s = ":w" # Maps Ctrl-s to the typable command :w which is an alias for :write (save file) +C-o = ":open ~/.config/helix/config.toml" # Maps Ctrl-o to opening of the helix config file a = "move_char_left" # Maps the 'a' key to the move_char_left command w = "move_line_up" # Maps the 'w' key move_line_up "C-S-esc" = "extend_line" # Maps Ctrl-Shift-Escape to extend_line @@ -20,10 +20,9 @@ g = { a = "code_action" } # Maps `ga` to show possible code actions "ret" = ["open_below", "normal_mode"] # Maps the enter key to open_below then re-enter normal mode [keys.insert] -"A-x" = "normal_mode" # Maps Alt-X to enter normal mode +"A-x" = "normal_mode" # Maps Alt-X to enter normal mode j = { k = "normal_mode" } # Maps `jk` to exit insert mode ``` -> NOTE: Typable commands can also be remapped, remember to keep the `:` prefix to indicate it's a typable command. ## Minor modes @@ -76,5 +75,5 @@ Ctrl, Shift and Alt modifiers are encoded respectively with the prefixes Keys can be disabled by binding them to the `no_op` command. -Commands can be found at [Keymap](https://docs.helix-editor.com/keymap.html) Commands. -> Commands can also be found in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of `static_commands!` macro and the `TypableCommandList`. +A list of commands is available in the [Keymap](https://docs.helix-editor.com/keymap.html) documentation + and in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of `static_commands!` macro and the `TypableCommandList`. diff --git a/book/src/themes.md b/book/src/themes.md index 0d0827fd18e3..929f821e64cf 100644 --- a/book/src/themes.md +++ b/book/src/themes.md @@ -1,14 +1,15 @@ # Themes -To use a theme add `theme = ""` to your [`config.toml`](./configuration.md) at the very top of the file before the first section or select it during runtime using `:theme `. +To use a theme add `theme = ""` to the top of your [`config.toml`](./configuration.md) file, or select it during runtime using `:theme `. ## Creating a theme -Create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`). The directory might have to be created beforehand. +Create a file with the name of your theme as the file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes` or `%AppData%\helix\themes` on Windows). The directory might have to be created beforehand. -The names "default" and "base16_default" are reserved for the builtin themes and cannot be overridden by user defined themes. +> 💡 The names "default" and "base16_default" are reserved for built-in themes +> and cannot be overridden by user-defined themes. 
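+
+For instance, a minimal theme file might start out like the sketch below. The
+scope keys and palette colors are only illustrative picks from the scopes
+documented later on this page:
+
+```toml
+# ~/.config/helix/themes/mytheme.toml (illustrative example only)
+"ui.background" = { bg = "my_black" }
+"ui.text" = "my_white"
+"comment" = { fg = "my_gray", modifiers = ["italic"] }
+"keyword" = { fg = "my_purple", modifiers = ["bold"] }
+
+# The palette must come after the normal theme options (see below).
+[palette]
+my_black = "#0a0a0a"
+my_white = "#f0f0f0"
+my_gray = "#808080"
+my_purple = "#a077c9"
+```
+
+Once the file is saved, you can try it out with `:theme mytheme`, or make it
+permanent by setting `theme = "mytheme"` at the top of your `config.toml`.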
-The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes). +### Overview Each line in the theme file is specified as below: @@ -16,7 +17,7 @@ Each line in the theme file is specified as below: key = { fg = "#ffffff", bg = "#000000", underline = { color = "#ff0000", style = "curl"}, modifiers = ["bold", "italic"] } ``` -where `key` represents what you want to style, `fg` specifies the foreground color, `bg` the background color, `underline` the underline `style`/`color`, and `modifiers` is a list of style modifiers. `bg`, `underline` and `modifiers` can be omitted to defer to the defaults. +Where `key` represents what you want to style, `fg` specifies the foreground color, `bg` the background color, `underline` the underline `style`/`color`, and `modifiers` is a list of style modifiers. `bg`, `underline` and `modifiers` can be omitted to defer to the defaults. To specify only the foreground color: @@ -24,15 +25,30 @@ To specify only the foreground color: key = "#ffffff" ``` -if the key contains a dot `'.'`, it must be quoted to prevent it being parsed as a [dotted key](https://toml.io/en/v1.0.0#keys). +If the key contains a dot `'.'`, it must be quoted to prevent it being parsed as a [dotted key](https://toml.io/en/v1.0.0#keys). ```toml "key.key" = "#ffffff" ``` +For inspiration, you can find the default `theme.toml` +[here](https://github.com/helix-editor/helix/blob/master/theme.toml) and +user-submitted themes +[here](https://github.com/helix-editor/helix/blob/master/runtime/themes). + +### Using the linter + +Use the supplied linting tool to check for errors and missing scopes: + +```sh +cargo xtask themelint onedark # replace onedark with +``` + +## The details of theme creation + ### Color palettes -It's recommended define a palette of named colors, and refer to them from the +It's recommended to define a palette of named colors, and refer to them in the configuration values in your theme. To do this, add a table called `palette` to your theme file: @@ -45,8 +61,8 @@ white = "#ffffff" black = "#000000" ``` -Remember that the `[palette]` table includes all keys after its header, -so you should define the palette after normal theme options. +Keep in mind that the `[palette]` table includes all keys after its header, +so it should be defined after the normal theme options. The default palette uses the terminal's default 16 colors, and the colors names are listed below. The `[palette]` section in the config file takes precedence @@ -73,9 +89,8 @@ over it and is merged into the default palette. ### Modifiers -The following values may be used as modifiers. - -Less common modifiers might not be supported by your terminal emulator. +The following values may be used as modifier, provided they are supported by +your terminal emulator. | Modifier | | --- | @@ -89,14 +104,13 @@ Less common modifiers might not be supported by your terminal emulator. | `hidden` | | `crossed_out` | -> Note: The `underlined` modifier is deprecated and only available for backwards compatibility. +> 💡 The `underlined` modifier is deprecated and only available for backwards compatibility. > Its behavior is equivalent to setting `underline.style="line"`. -### Underline Style - -One of the following values may be used as a value for `underline.style`. +### Underline style -Some styles might not be supported by your terminal emulator. 
+One of the following values may be used as a value for `underline.style`, providing it is +supported by your terminal emulator. | Modifier | | --- | @@ -109,7 +123,7 @@ Some styles might not be supported by your terminal emulator. ### Inheritance -Extend upon other themes by setting the `inherits` property to an existing theme. +Extend other themes by setting the `inherits` property to an existing theme. ```toml inherits = "boo_berry" @@ -124,19 +138,19 @@ berry = "#2A2A4D" ### Scopes -The following is a list of scopes available to use for styling. +The following is a list of scopes available to use for styling: #### Syntax highlighting These keys match [tree-sitter scopes](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#theme). -For a given highlight produced, styling will be determined based on the longest matching theme key. For example, the highlight `function.builtin.static` would match the key `function.builtin` rather than `function`. +When determining styling for a highlight, the longest matching theme key will be used. For example, if the highlight is `function.builtin.static`, the key `function.builtin` will be used instead of `function`. We use a similar set of scopes as -[SublimeText](https://www.sublimetext.com/docs/scope_naming.html). See also +[Sublime Text](https://www.sublimetext.com/docs/scope_naming.html). See also [TextMate](https://macromates.com/manual/en/language_grammars) scopes. -- `attribute` - Class attributes, html tag attributes +- `attribute` - Class attributes, HTML tag attributes - `type` - Types - `builtin` - Primitive types provided by the language (`int`, `usize`) @@ -144,7 +158,7 @@ We use a similar set of scopes as - `variant` - `constructor` -- `constant` (TODO: constant.other.placeholder for %v) +- `constant` (TODO: constant.other.placeholder for `%v`) - `builtin` Special constants provided by the language (`true`, `false`, `nil` etc) - `boolean` - `character` @@ -162,11 +176,11 @@ We use a similar set of scopes as - `comment` - Code comments - `line` - Single line comments (`//`) - - `block` - Block comments (e.g. (`/* */`) + - `block` - Block comments (e.g. (`/* */`) - `documentation` - Documentation comments (e.g. `///` in Rust) - `variable` - Variables - - `builtin` - Reserved language variables (`self`, `this`, `super`, etc) + - `builtin` - Reserved language variables (`self`, `this`, `super`, etc.) - `parameter` - Function parameters - `other` - `member` - Fields of composite data types (e.g. structs, unions) @@ -186,10 +200,10 @@ We use a similar set of scopes as - `return` - `exception` - `operator` - `or`, `in` - - `directive` - Preprocessor directives (`#if` in C) + - `directive` - Preprocessor directives (`#if` in C) - `function` - `fn`, `func` - `storage` - Keywords describing how things are stored - - `type` - The type of something, `class`, `function`, `var`, `let`, etc. + - `type` - The type of something, `class`, `function`, `var`, `let`, etc. - `modifier` - Storage modifiers like `static`, `mut`, `const`, `ref`, etc. - `operator` - `||`, `+=`, `>` @@ -201,6 +215,7 @@ We use a similar set of scopes as - `special` (preprocessor in C) - `tag` - Tags (e.g. 
`` in HTML) + - `builtin` - `namespace` @@ -216,9 +231,9 @@ We use a similar set of scopes as - `bold` - `italic` - `link` - - `url` - urls pointed to by links - - `label` - non-url link references - - `text` - url and image descriptions in links + - `url` - URLs pointed to by links + - `label` - non-URL link references + - `text` - URL and image descriptions in links - `quote` - `raw` - `inline` @@ -232,78 +247,76 @@ We use a similar set of scopes as #### Interface -These scopes are used for theming the editor interface. +These scopes are used for theming the editor interface: - `markup` - `normal` - - `completion` - for completion doc popup ui - - `hover` - for hover popup ui + - `completion` - for completion doc popup UI + - `hover` - for hover popup UI - `heading` - - `completion` - for completion doc popup ui - - `hover` - for hover popup ui + - `completion` - for completion doc popup UI + - `hover` - for hover popup UI - `raw` - `inline` - - `completion` - for completion doc popup ui - - `hover` - for hover popup ui - - -| Key | Notes | -| --- | --- | -| `ui.background` | | -| `ui.background.separator` | Picker separator below input line | -| `ui.cursor` | | -| `ui.cursor.normal` | | -| `ui.cursor.insert` | | -| `ui.cursor.select` | | -| `ui.cursor.match` | Matching bracket etc. | -| `ui.cursor.primary` | Cursor with primary selection | -| `ui.cursor.primary.normal` | | -| `ui.cursor.primary.insert` | | -| `ui.cursor.primary.select` | | -| `ui.gutter` | Gutter | -| `ui.gutter.selected` | Gutter for the line the cursor is on | -| `ui.linenr` | Line numbers | -| `ui.linenr.selected` | Line number for the line the cursor is on | -| `ui.statusline` | Statusline | -| `ui.statusline.inactive` | Statusline (unfocused document) | -| `ui.statusline.normal` | Statusline mode during normal mode ([only if `editor.color-modes` is enabled][editor-section]) | -| `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) | -| `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) | -| `ui.statusline.separator` | Separator character in statusline | -| `ui.popup` | Documentation popups (e.g Space + k) | -| `ui.popup.info` | Prompt for multiple key options | -| `ui.window` | Border lines separating splits | -| `ui.help` | Description box for commands | -| `ui.text` | Command prompts, popup text, etc. | -| `ui.text.focus` | | -| `ui.text.inactive` | Same as `ui.text` but when the text is inactive (e.g. 
suggestions) | -| `ui.text.info` | The key: command text in `ui.popup.info` boxes | -| `ui.virtual.ruler` | Ruler columns (see the [`editor.rulers` config][editor-section]) | -| `ui.virtual.whitespace` | Visible whitespace characters | -| `ui.virtual.indent-guide` | Vertical indent width guides | -| `ui.menu` | Code and command completion menus | -| `ui.menu.selected` | Selected autocomplete item | -| `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar | -| `ui.selection` | For selections in the editing area | -| `ui.selection.primary` | | -| `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) | -| `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) | -| `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) | -| `ui.cursorcolumn.secondary` | The columns of any other cursors ([if cursorcolumn is enabled][editor-section]) | -| `warning` | Diagnostics warning (gutter) | -| `error` | Diagnostics error (gutter) | -| `info` | Diagnostics info (gutter) | -| `hint` | Diagnostics hint (gutter) | -| `diagnostic` | Diagnostics fallback style (editing area) | -| `diagnostic.hint` | Diagnostics hint (editing area) | -| `diagnostic.info` | Diagnostics info (editing area) | -| `diagnostic.warning` | Diagnostics warning (editing area) | -| `diagnostic.error` | Diagnostics error (editing area) | - -You can check compliance to spec with - -```shell -cargo xtask themelint onedark # replace onedark with -``` + - `completion` - for completion doc popup UI + - `hover` - for hover popup UI + + +| Key | Notes | +| --- | --- | +| `ui.background` | | +| `ui.background.separator` | Picker separator below input line | +| `ui.cursor` | | +| `ui.cursor.normal` | | +| `ui.cursor.insert` | | +| `ui.cursor.select` | | +| `ui.cursor.match` | Matching bracket etc. | +| `ui.cursor.primary` | Cursor with primary selection | +| `ui.cursor.primary.normal` | | +| `ui.cursor.primary.insert` | | +| `ui.cursor.primary.select` | | +| `ui.gutter` | Gutter | +| `ui.gutter.selected` | Gutter for the line the cursor is on | +| `ui.linenr` | Line numbers | +| `ui.linenr.selected` | Line number for the line the cursor is on | +| `ui.statusline` | Statusline | +| `ui.statusline.inactive` | Statusline (unfocused document) | +| `ui.statusline.normal` | Statusline mode during normal mode ([only if `editor.color-modes` is enabled][editor-section]) | +| `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) | +| `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) | +| `ui.statusline.separator` | Separator character in statusline | +| `ui.popup` | Documentation popups (e.g. Space + k) | +| `ui.popup.info` | Prompt for multiple key options | +| `ui.window` | Borderlines separating splits | +| `ui.help` | Description box for commands | +| `ui.text` | Command prompts, popup text, etc. | +| `ui.text.focus` | | +| `ui.text.inactive` | Same as `ui.text` but when the text is inactive (e.g. 
suggestions) | +| `ui.text.info` | The key: command text in `ui.popup.info` boxes | +| `ui.virtual.ruler` | Ruler columns (see the [`editor.rulers` config][editor-section]) | +| `ui.virtual.whitespace` | Visible whitespace characters | +| `ui.virtual.indent-guide` | Vertical indent width guides | +| `ui.virtual.inlay-hint` | Default style for inlay hints of all kinds | +| `ui.virtual.inlay-hint.parameter` | Style for inlay hints of kind `parameter` (LSPs are not required to set a kind) | +| `ui.virtual.inlay-hint.type` | Style for inlay hints of kind `type` (LSPs are not required to set a kind) | +| `ui.virtual.wrap` | Soft-wrap indicator (see the [`editor.soft-wrap` config][editor-section]) | +| `ui.menu` | Code and command completion menus | +| `ui.menu.selected` | Selected autocomplete item | +| `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar | +| `ui.selection` | For selections in the editing area | +| `ui.selection.primary` | | +| `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) | +| `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) | +| `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) | +| `ui.cursorcolumn.secondary` | The columns of any other cursors ([if cursorcolumn is enabled][editor-section]) | +| `warning` | Diagnostics warning (gutter) | +| `error` | Diagnostics error (gutter) | +| `info` | Diagnostics info (gutter) | +| `hint` | Diagnostics hint (gutter) | +| `diagnostic` | Diagnostics fallback style (editing area) | +| `diagnostic.hint` | Diagnostics hint (editing area) | +| `diagnostic.info` | Diagnostics info (editing area) | +| `diagnostic.warning` | Diagnostics warning (editing area) | +| `diagnostic.error` | Diagnostics error (editing area) | [editor-section]: ./configuration.md#editor-section diff --git a/book/src/usage.md b/book/src/usage.md index a6eb9ec1d4f1..81cf83725609 100644 --- a/book/src/usage.md +++ b/book/src/usage.md @@ -1,22 +1,43 @@ -# Usage +# Using Helix -(Currently not fully documented, see the [keymappings](./keymap.md) list for more.) + +- [Registers](#registers) + - [User-defined registers](#user-defined-registers) + - [Special registers](#special-registers) +- [Surround](#surround) +- [Selecting and manipulating text with textobjects](#selecting-and-manipulating-text-with-textobjects) +- [Navigating using tree-sitter textobjects](#navigating-using-tree-sitter-textobjects) +- [Moving the selection with syntax-aware motions](#moving-the-selection-with-syntax-aware-motions) + -See [tutor](https://github.com/helix-editor/helix/blob/master/runtime/tutor) (accessible via `hx --tutor` or `:tutor`) for a vimtutor-like introduction. +For a full interactive introduction to Helix, refer to the +[tutor](https://github.com/helix-editor/helix/blob/master/runtime/tutor) which +can be accessed via the command `hx --tutor` or `:tutor`. + +> 💡 Currently, not all functionality is fully documented, please refer to the +> [key mappings](./keymap.md) list. ## Registers -Vim-like registers can be used to yank and store text to be pasted later. Usage is similar, with `"` being used to select a register: +In Helix, registers are storage locations for text and other data, such as the +result of a search. Registers can be used to cut, copy, and paste text, similar +to the clipboard in other text editors. Usage is similar to Vim, with `"` being +used to select a register. 
+ +### User-defined registers + +Helix allows you to create your own named registers for storing text, for +example: - `"ay` - Yank the current selection to register `a`. - `"op` - Paste the text in register `o` after the selection. -If there is a selected register before invoking a change or delete command, the selection will be stored in the register and the action will be carried out: +If a register is selected before invoking a change or delete command, the selection will be stored in the register and the action will be carried out: - `"hc` - Store the selection in register `h` and then change it (delete and enter insert mode). - `"md` - Store the selection in register `m` and delete it. -### Special Registers +### Special registers | Register character | Contains | | --- | --- | @@ -25,41 +46,90 @@ If there is a selected register before invoking a change or delete command, the | `"` | Last yanked text | | `_` | Black hole | -> There is no special register for copying to system clipboard, instead special commands and keybindings are provided. See the [keymap](keymap.md#space-mode) for the specifics. -> The black hole register works as a no-op register, meaning no data will be written to / read from it. +The system clipboard is not directly supported by a special register. Instead, special commands and keybindings are provided. Refer to the +[key map](keymap.md#space-mode) for more details. + +The black hole register is a no-op register, meaning that no data will be read from or written to it. ## Surround -Functionality similar to [vim-surround](https://github.com/tpope/vim-surround) is built into -helix. The keymappings have been inspired from [vim-sandwich](https://github.com/machakann/vim-sandwich): +Helix includes built-in functionality similar to [vim-surround](https://github.com/tpope/vim-surround). +The keymappings have been inspired by [vim-sandwich](https://github.com/machakann/vim-sandwich): -![surround demo](https://user-images.githubusercontent.com/23398472/122865801-97073180-d344-11eb-8142-8f43809982c6.gif) +![Surround demo](https://user-images.githubusercontent.com/23398472/122865801-97073180-d344-11eb-8142-8f43809982c6.gif) -- `ms` - Add surround characters -- `mr` - Replace surround characters -- `md` - Delete surround characters +| Key Sequence | Action | | --------------------------------- | --------------------------------------- | +| `ms` (after selecting text) | Add surround characters to selection | +| `mr` | Replace the closest surround characters | +| `md` | Delete the closest surround characters | -`ms` acts on a selection, so select the text first and use `ms`. `mr` and `md` work -on the closest pairs found and selections are not required; use counts to act in outer pairs. +You can use counts to act on outer pairs. -It can also act on multiple selections (yay!). For example, to change every occurrence of `(use)` to `[use]`: +Surround can also act on multiple selections. For example, to change every occurrence of `(use)` to `[use]`: -- `%` to select the whole file -- `s` to split the selections on a search term -- Input `use` and hit Enter -- `mr([` to replace the parens with square brackets +1. `%` to select the whole file 2. `s` to split the selections on a search term 3. Input `use` and hit Enter 4. `mr([` to replace the parentheses with square brackets -Multiple characters are currently not supported, but planned. +Multiple characters are currently not supported, but planned for a future release.
-## Syntax-tree Motions +## Selecting and manipulating text with textobjects -`Alt-p`, `Alt-o`, `Alt-i`, and `Alt-n` (or `Alt` and arrow keys) move the primary -selection according to the selection's place in the syntax tree. Let's walk -through an example to get familiar with them. Many languages have a syntax like -so for function calls: +In Helix, textobjects are a way to select, manipulate and operate on a piece of +text in a structured way. They allow you to refer to blocks of text based on +their structure or purpose, such as a word, sentence, paragraph, or even a +function or block of code. -``` -func(arg1, arg2, arg3) +![Textobject demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif) +![Textobject tree-sitter demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif) + +- `ma` - Select around the object (`va` in Vim, `` in Kakoune) +- `mi` - Select inside the object (`vi` in Vim, `` in Kakoune) + +| Key after `mi` or `ma` | Textobject selected | +| --- | --- | +| `w` | Word | +| `W` | WORD | +| `p` | Paragraph | +| `(`, `[`, `'`, etc. | Specified surround pairs | +| `m` | The closest surround pair | +| `f` | Function | +| `c` | Class | +| `a` | Argument/parameter | +| `o` | Comment | +| `t` | Test | +| `g` | Change | + +> 💡 `f`, `c`, etc. need a tree-sitter grammar active for the current +document and a special tree-sitter query file to work properly. [Only +some grammars][lang-support] currently have the query file implemented. +Contributions are welcome! + +## Navigating using tree-sitter textobjects + +Navigating between functions, classes, parameters, and other elements is +possible using tree-sitter and textobject queries. For +example to move to the next function use `]f`, to move to previous +class use `[c`, and so on. + +![Tree-sitter-nav-demo][tree-sitter-nav-demo] + +For the full reference see the [unimpaired][unimpaired-keybinds] section of the key bind +documentation. + +> 💡 This feature relies on tree-sitter textobjects +> and requires the corresponding query file to work properly. + +## Moving the selection with syntax-aware motions + +`Alt-p`, `Alt-o`, `Alt-i`, and `Alt-n` (or `Alt` and arrow keys) allow you to move the +selection according to its location in the syntax tree. For example, many languages have the +following syntax for function calls: + +```js +func(arg1, arg2, arg3); ``` A function call might be parsed by tree-sitter into a tree like the following. @@ -93,77 +163,29 @@ a more intuitive tree format: └──────────┘ └──────────┘ └──────────┘ ``` -Say we have a selection that wraps `arg1`. The selection is on the `arg1` leaf -in the tree above. +If you have a selection that wraps `arg1` (see the tree above), and you use +`Alt-n`, it will select the next sibling in the syntax tree: `arg2`. -``` +```js +// before func([arg1], arg2, arg3) +// after +func(arg1, [arg2], arg3); ``` -Using `Alt-n` would select the next sibling in the syntax tree: `arg2`. +Similarly, `Alt-o` will expand the selection to the parent node, in this case, the +arguments node. -``` -func(arg1, [arg2], arg3) -``` - -While `Alt-o` would expand the selection to the parent node. In the tree above we -can see that we would select the `arguments` node. - -``` -func[(arg1, arg2, arg3)] +```js +func[(arg1, arg2, arg3)]; ``` There is also some nuanced behavior that prevents you from getting stuck on a -node with no sibling. 
If we have a selection on `arg1`, `Alt-p` would bring us -to the previous child node. Since `arg1` doesn't have a sibling to its left, -though, we climb the syntax tree and then take the previous selection. So -`Alt-p` will move the selection over to the "func" `identifier`. - -``` -[func](arg1, arg2, arg3) -``` - -## Textobjects - -![textobject-demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif) -![textobject-treesitter-demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif) - -- `ma` - Select around the object (`va` in Vim, `` in Kakoune) -- `mi` - Select inside the object (`vi` in Vim, `` in Kakoune) - -| Key after `mi` or `ma` | Textobject selected | -| --- | --- | -| `w` | Word | -| `W` | WORD | -| `p` | Paragraph | -| `(`, `[`, `'`, etc | Specified surround pairs | -| `m` | Closest surround pair | -| `f` | Function | -| `c` | Class | -| `a` | Argument/parameter | -| `o` | Comment | -| `t` | Test | -| `g` | Change | - -> NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current -document and a special tree-sitter query file to work properly. [Only -some grammars][lang-support] currently have the query file implemented. -Contributions are welcome! - -## Tree-sitter Textobject Based Navigation - -Navigating between functions, classes, parameters, etc is made -possible by leveraging tree-sitter and textobjects queries. For -example to move to the next function use `]f`, to move to previous -class use `[c`, and so on. - -![tree-sitter-nav-demo][tree-sitter-nav-demo] - -See the [unimpaired][unimpaired-keybinds] section of the keybind -documentation for the full reference. - -> NOTE: This feature is dependent on tree-sitter based textobjects -and therefore requires the corresponding query file to work properly. +node with no sibling. When using `Alt-p` with a selection on `arg1`, the previous +child node will be selected. In the event that `arg1` does not have a previous +sibling, the selection will move up the syntax tree and select the previous +element. As a result, using `Alt-p` with a selection on `arg1` will move the +selection to the "func" `identifier`. [lang-support]: ./lang-support.md [unimpaired-keybinds]: ./keymap.md#unimpaired diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 491cd4249b14..982b2237e960 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -8,7 +8,10 @@ Some suggestions to get started: - Help with packaging on various distributions needed! - To use print debugging to the [Helix log file][log-file], you must: * Print using `log::info!`, `warn!`, or `error!`. (`log::info!("helix!")`) - * Pass the appropriate verbosity level option for the desired log level. (`hx -v ` for info, more `v`s for higher severity inclusive) + * Pass the appropriate verbosity level option for the desired log level. (`hx -v ` for info, more `v`s for higher verbosity) + * Want to display the logs in a separate file instead of using the `:log-open` command in your compiled Helix editor? Start your debug version with `cargo run -- --log foo.log` and in a new terminal use `tail -f foo.log` +- Instead of running a release version of Helix, while developing you may want to run in debug mode with `cargo run` which is way faster to compile +- Looking for even faster compile times? 
Give a try to [mold](https://github.com/rui314/mold) - If your preferred language is missing, integrating a tree-sitter grammar for it and defining syntax highlight queries for it is straight forward and doesn't require much knowledge of the internals. @@ -30,7 +33,13 @@ inside the project. We use [xtask][xtask] as an ad-hoc task runner and thus do not require any dependencies other than `cargo` (You don't have to `cargo install` anything either). -# Integration tests +# Testing + +## Unit tests/Documentation tests + +Run `cargo test --workspace` to run unit tests and documentation tests in all packages. + +## Integration tests Integration tests for helix-term can be run with `cargo integration-test`. Code contributors are strongly encouraged to write integration tests for their code. diff --git a/flake.lock b/flake.lock index 4cf1018c4999..fa292273af6e 100644 --- a/flake.lock +++ b/flake.lock @@ -16,22 +16,6 @@ "type": "github" } }, - "devshell": { - "flake": false, - "locked": { - "lastModified": 1667210711, - "narHash": "sha256-IoErjXZAkzYWHEpQqwu/DeRNJGFdR7X2OGbkhMqMrpw=", - "owner": "numtide", - "repo": "devshell", - "rev": "96a9dd12b8a447840cc246e17a47b81a4268bba7", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "devshell", - "type": "github" - } - }, "dream2nix": { "inputs": { "alejandra": [ @@ -42,10 +26,12 @@ ], "crane": "crane", "devshell": [ + "nci" + ], + "flake-parts": [ "nci", - "devshell" + "parts" ], - "flake-parts": "flake-parts", "flake-utils-pre-commit": [ "nci" ], @@ -70,14 +56,17 @@ ], "pre-commit-hooks": [ "nci" + ], + "pruned-racket-catalog": [ + "nci" ] }, "locked": { - "lastModified": 1671323629, - "narHash": "sha256-9KHTPjIDjfnzZ4NjpE3gGIVHVHopy6weRDYO/7Y3hF8=", + "lastModified": 1677289985, + "narHash": "sha256-lUp06cTTlWubeBGMZqPl9jODM99LpWMcwxRiscFAUJg=", "owner": "nix-community", "repo": "dream2nix", - "rev": "2d7d68505c8619410df2c6b6463985f97cbcba6e", + "rev": "28b973a8d4c30cc1cbb3377ea2023a76bc3fb889", "type": "github" }, "original": { @@ -86,24 +75,6 @@ "type": "github" } }, - "flake-parts": { - "inputs": { - "nixpkgs-lib": "nixpkgs-lib" - }, - "locked": { - "lastModified": 1668450977, - "narHash": "sha256-cfLhMhnvXn6x1vPm+Jow3RiFAUSCw/l1utktCw5rVA4=", - "owner": "hercules-ci", - "repo": "flake-parts", - "rev": "d591857e9d7dd9ddbfba0ea02b43b927c3c0f1fa", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "flake-parts", - "type": "github" - } - }, "flake-utils": { "locked": { "lastModified": 1659877975, @@ -119,23 +90,40 @@ "type": "github" } }, + "mk-naked-shell": { + "flake": false, + "locked": { + "lastModified": 1676572903, + "narHash": "sha256-oQoDHHUTxNVSURfkFcYLuAK+btjs30T4rbEUtCUyKy8=", + "owner": "yusdacra", + "repo": "mk-naked-shell", + "rev": "aeca9f8aa592f5e8f71f407d081cb26fd30c5a57", + "type": "github" + }, + "original": { + "owner": "yusdacra", + "repo": "mk-naked-shell", + "type": "github" + } + }, "nci": { "inputs": { - "devshell": "devshell", "dream2nix": "dream2nix", + "mk-naked-shell": "mk-naked-shell", "nixpkgs": [ "nixpkgs" ], + "parts": "parts", "rust-overlay": [ "rust-overlay" ] }, "locked": { - "lastModified": 1671430291, - "narHash": "sha256-UIc7H8F3N8rK72J/Vj5YJdV72tvDvYjH+UPsOFvlcsE=", + "lastModified": 1677297103, + "narHash": "sha256-ArlJIbp9NGV9yvhZdV0SOUFfRlI/kHeKoCk30NbSiLc=", "owner": "yusdacra", "repo": "nix-cargo-integration", - "rev": "b1b0d38b8c3b0d0e6a38638d5bbe10b0bc67522c", + "rev": "a79272a2cb0942392bb3a5bf9a3ec6bc568795b2", "type": "github" }, "original": { @@ -146,11 
+134,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1671359686, - "narHash": "sha256-3MpC6yZo+Xn9cPordGz2/ii6IJpP2n8LE8e/ebUXLrs=", + "lastModified": 1677063315, + "narHash": "sha256-qiB4ajTeAOVnVSAwCNEEkoybrAlA+cpeiBxLobHndE8=", "owner": "nixos", "repo": "nixpkgs", - "rev": "04f574a1c0fde90b51bf68198e2297ca4e7cccf4", + "rev": "988cc958c57ce4350ec248d2d53087777f9e1949", "type": "github" }, "original": { @@ -163,11 +151,11 @@ "nixpkgs-lib": { "locked": { "dir": "lib", - "lastModified": 1665349835, - "narHash": "sha256-UK4urM3iN80UXQ7EaOappDzcisYIuEURFRoGQ/yPkug=", + "lastModified": 1675183161, + "narHash": "sha256-Zq8sNgAxDckpn7tJo7V1afRSk2eoVbu3OjI1QklGLNg=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "34c5293a71ffdb2fe054eb5288adc1882c1eb0b1", + "rev": "e1e1b192c1a5aab2960bf0a0bd53a2e8124fa18e", "type": "github" }, "original": { @@ -178,10 +166,50 @@ "type": "github" } }, + "parts": { + "inputs": { + "nixpkgs-lib": [ + "nci", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1675933616, + "narHash": "sha256-/rczJkJHtx16IFxMmAWu5nNYcSXNg1YYXTHoGjLrLUA=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "47478a4a003e745402acf63be7f9a092d51b83d7", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, + "parts_2": { + "inputs": { + "nixpkgs-lib": "nixpkgs-lib" + }, + "locked": { + "lastModified": 1675933616, + "narHash": "sha256-/rczJkJHtx16IFxMmAWu5nNYcSXNg1YYXTHoGjLrLUA=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "47478a4a003e745402acf63be7f9a092d51b83d7", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, "root": { "inputs": { "nci": "nci", "nixpkgs": "nixpkgs", + "parts": "parts_2", "rust-overlay": "rust-overlay" } }, @@ -193,11 +221,11 @@ ] }, "locked": { - "lastModified": 1671416426, - "narHash": "sha256-kpSH1Jrxfk2qd0pRPJn1eQdIOseGv5JuE+YaOrqU9s4=", + "lastModified": 1677292251, + "narHash": "sha256-D+6q5Z2MQn3UFJtqsM5/AvVHi3NXKZTIMZt1JGq/spA=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "fbaaff24f375ac25ec64268b0a0d63f91e474b7d", + "rev": "34cdbf6ad480ce13a6a526f57d8b9e609f3d65dc", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 673f3cf603d2..2ac764888daa 100644 --- a/flake.nix +++ b/flake.nix @@ -12,16 +12,10 @@ inputs.nixpkgs.follows = "nixpkgs"; inputs.rust-overlay.follows = "rust-overlay"; }; + parts.url = "github:hercules-ci/flake-parts"; }; - outputs = { - self, - nixpkgs, - nci, - ... 
- }: let - lib = nixpkgs.lib; - ncl = nci.lib.nci-lib; + outputs = inp: let mkRootPath = rel: builtins.path { path = "${toString ./.}/${rel}"; @@ -32,6 +26,12 @@ ".envrc" ".ignore" ".github" + ".gitignore" + "logo.svg" + "logo_dark.svg" + "logo_light.svg" + "rust-toolchain.toml" + "rustfmt.toml" "runtime" "screenshot.png" "book" @@ -46,6 +46,7 @@ "flake.lock" ]; ignorePaths = path: type: let + inherit (inp.nixpkgs) lib; # split the nix store path into its components components = lib.splitString "/" path; # drop off the `/nix/hash-source` section from the path @@ -61,118 +62,107 @@ # filter out unnecessary paths filter = ignorePaths; }; - outputs = nci.lib.makeOutputs { - root = ./.; - config = common: { - outputs = { - # rename helix-term to helix since it's our main package - rename = {"helix-term" = "helix";}; - # Set default app to hx (binary is from helix-term release build) - # Set default package to helix-term release build - defaults = { - app = "hx"; - package = "helix"; - }; - }; - cCompiler.package = with common.pkgs; - if stdenv.isLinux - then gcc - else clang; - shell = { - packages = with common.pkgs; - [lld_13 cargo-flamegraph rust-analyzer] - ++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin) - ++ (lib.optional stdenv.isLinux lldb) - ++ (lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.CoreFoundation); - env = [ - { - name = "HELIX_RUNTIME"; - eval = "$PWD/runtime"; - } - { - name = "RUST_BACKTRACE"; - value = "1"; - } + in + inp.parts.lib.mkFlake {inputs = inp;} { + imports = [inp.nci.flakeModule]; + systems = [ + "x86_64-linux" + "x86_64-darwin" + "aarch64-linux" + "aarch64-darwin" + "i686-linux" + ]; + perSystem = { + config, + pkgs, + lib, + ... + }: let + makeOverridableHelix = old: config: let + grammars = pkgs.callPackage ./grammars.nix config; + runtimeDir = pkgs.runCommand "helix-runtime" {} '' + mkdir -p $out + ln -s ${mkRootPath "runtime"}/* $out + rm -r $out/grammars + ln -s ${grammars} $out/grammars + ''; + helix-wrapped = + pkgs.runCommand + old.name { - name = "RUSTFLAGS"; - eval = - if common.pkgs.stdenv.isLinux - then "$RUSTFLAGS\" -C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment\"" - else "$RUSTFLAGS"; + inherit (old) pname version; + meta = old.meta or {}; + passthru = + (old.passthru or {}) + // { + unwrapped = old; + }; + nativeBuildInputs = [pkgs.makeWrapper]; + makeWrapperArgs = config.makeWrapperArgs or []; } - ]; - }; - }; - pkgConfig = common: { - helix-term = { - # Wrap helix with runtime - wrapper = _: old: let - inherit (common) pkgs; - makeOverridableHelix = old: config: let - grammars = pkgs.callPackage ./grammars.nix config; - runtimeDir = pkgs.runCommand "helix-runtime" {} '' - mkdir -p $out - ln -s ${mkRootPath "runtime"}/* $out - rm -r $out/grammars - ln -s ${grammars} $out/grammars - ''; - helix-wrapped = - common.internal.pkgsSet.utils.wrapDerivation old - { - nativeBuildInputs = [pkgs.makeWrapper]; - makeWrapperArgs = config.makeWrapperArgs or []; - } - '' - rm -rf $out/bin - mkdir -p $out/bin - ln -sf ${old}/bin/* $out/bin/ - wrapProgram "$out/bin/hx" ''${makeWrapperArgs[@]} --set HELIX_RUNTIME "${runtimeDir}" - ''; - in - helix-wrapped - // {override = makeOverridableHelix old;}; - in - makeOverridableHelix old {}; - overrides.fix-build.overrideAttrs = prev: { - src = filteredSource; - - # disable fetching and building of tree-sitter grammars in the helix-term build.rs - HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1"; - - buildInputs = ncl.addBuildInputs prev 
[common.config.cCompiler.package.cc.lib]; - - # link languages and theme toml files since helix-term expects them (for tests) - preConfigure = '' - ${prev.preConfigure or ""} - ${ - lib.concatMapStringsSep - "\n" - (path: "ln -sf ${mkRootPath path} ..") - ["languages.toml" "theme.toml" "base16_theme.toml"] - } + '' + cp -rs --no-preserve=mode,ownership ${old} $out + wrapProgram "$out/bin/hx" ''${makeWrapperArgs[@]} --set HELIX_RUNTIME "${runtimeDir}" ''; - checkPhase = ":"; - - meta.mainProgram = "hx"; + in + helix-wrapped + // { + override = makeOverridableHelix old; + passthru = + helix-wrapped.passthru + // { + wrapper = old: makeOverridableHelix old config; + }; + }; + stdenv = + if pkgs.stdenv.isLinux + then pkgs.stdenv + else pkgs.clangStdenv; + rustFlagsEnv = + if stdenv.isLinux + then ''$RUSTFLAGS -C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment'' + else "$RUSTFLAGS"; + in { + # by default NCI adds rust-analyzer component, but helix toolchain doesn't have rust-analyzer + nci.toolchains.shell.components = ["rust-src" "rustfmt" "clippy"]; + nci.projects."helix-project".relPath = ""; + nci.crates."helix-term" = { + overrides = { + add-meta.override = _: {meta.mainProgram = "hx";}; + add-inputs.overrideAttrs = prev: { + buildInputs = (prev.buildInputs or []) ++ [stdenv.cc.cc.lib]; + }; + disable-grammar-builds = { + # disable fetching and building of tree-sitter grammars in the helix-term build.rs + HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1"; + }; + disable-tests = {checkPhase = ":";}; + set-stdenv.override = _: {inherit stdenv;}; + set-filtered-src.override = _: {src = filteredSource;}; }; }; + + packages.helix-unwrapped = config.nci.outputs."helix-term".packages.release; + packages.helix-unwrapped-dev = config.nci.outputs."helix-term".packages.dev; + packages.helix = makeOverridableHelix config.packages.helix-unwrapped {}; + packages.helix-dev = makeOverridableHelix config.packages.helix-unwrapped-dev {}; + packages.default = config.packages.helix; + + devShells.default = config.nci.outputs."helix-project".devShell.overrideAttrs (old: { + nativeBuildInputs = + (old.nativeBuildInputs or []) + ++ (with pkgs; [lld_13 cargo-flamegraph rust-analyzer]) + ++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) pkgs.cargo-tarpaulin) + ++ (lib.optional stdenv.isLinux pkgs.lldb) + ++ (lib.optional stdenv.isDarwin pkgs.darwin.apple_sdk.frameworks.CoreFoundation); + shellHook = '' + export HELIX_RUNTIME="$PWD/runtime" + export RUST_BACKTRACE="1" + export RUSTFLAGS="${rustFlagsEnv}" + ''; + }); }; }; - in - outputs - // { - packages = - lib.mapAttrs - ( - system: packages: - packages - // { - helix-unwrapped = packages.helix.passthru.unwrapped; - helix-unwrapped-dev = packages.helix-dev.passthru.unwrapped; - } - ) - outputs.packages; - }; nixConfig = { extra-substituters = ["https://helix.cachix.org"]; diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 10de738f80b8..62ec87b485ca 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -17,7 +17,7 @@ integration = [] [dependencies] helix-loader = { version = "0.6", path = "../helix-loader" } -ropey = { version = "1.5.1", default-features = false, features = ["simd"] } +ropey = { version = "1.6.0", default-features = false, features = ["simd"] } smallvec = "1.10" smartstring = "1.0.1" unicode-segmentation = "1.10" diff --git a/helix-core/src/comment.rs b/helix-core/src/comment.rs index ec5d7a45aba2..9c7e50f335b1 100644 --- a/helix-core/src/comment.rs +++ b/helix-core/src/comment.rs @@ -45,7 +45,7 @@ fn 
find_line_comment( // determine margin of 0 or 1 for uncommenting; if any comment token is not followed by a space, // a margin of 0 is used for all lines. - if matches!(line_slice.get_char(pos + token_len), Some(c) if c != ' ') { + if !matches!(line_slice.get_char(pos + token_len), Some(c) if c == ' ') { margin = 0; } @@ -68,7 +68,7 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st let mut min_next_line = 0; for selection in selection { let (start, end) = selection.line_range(text); - let start = start.max(min_next_line).min(text.len_lines()); + let start = start.clamp(min_next_line, text.len_lines()); let end = (end + 1).min(text.len_lines()); lines.extend(start..end); @@ -108,8 +108,8 @@ mod test { let text = doc.slice(..); let res = find_line_comment("//", text, 0..3); - // (commented = true, to_change = [line 0, line 2], min = col 2, margin = 1) - assert_eq!(res, (false, vec![0, 2], 2, 1)); + // (commented = true, to_change = [line 0, line 2], min = col 2, margin = 0) + assert_eq!(res, (false, vec![0, 2], 2, 0)); // comment let transaction = toggle_line_comments(&doc, &selection, None); @@ -136,6 +136,17 @@ mod test { assert_eq!(doc, " 1\n\n 2\n 3"); assert!(selection.len() == 1); // to ignore the selection unused warning + // 0 margin comments, with no space + doc = Rope::from("//"); + // reset the selection. + selection = Selection::single(0, doc.len_chars() - 1); + + let transaction = toggle_line_comments(&doc, &selection, None); + transaction.apply(&mut doc); + selection = selection.map(transaction.changes()); + assert_eq!(doc, ""); + assert!(selection.len() == 1); // to ignore the selection unused warning + // TODO: account for uncommenting with uneven comment indentation } } diff --git a/helix-core/src/diagnostic.rs b/helix-core/src/diagnostic.rs index 6b5da17ef56f..58ddb0383a0a 100644 --- a/helix-core/src/diagnostic.rs +++ b/helix-core/src/diagnostic.rs @@ -35,7 +35,7 @@ pub enum DiagnosticTag { Deprecated, } -/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.91.0/lsp_types/struct.Diagnostic.html) +/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.94.0/lsp_types/struct.Diagnostic.html) #[derive(Debug, Clone)] pub struct Diagnostic { pub range: Range, diff --git a/helix-core/src/doc_formatter/test.rs b/helix-core/src/doc_formatter/test.rs index e68b31fd570f..ac8918bb71ea 100644 --- a/helix-core/src/doc_formatter/test.rs +++ b/helix-core/src/doc_formatter/test.rs @@ -119,16 +119,7 @@ fn overlay() { "foobar", 0, false, - &[ - Overlay { - char_idx: 0, - grapheme: "X".into(), - }, - Overlay { - char_idx: 2, - grapheme: "\t".into(), - }, - ] + &[Overlay::new(0, "X"), Overlay::new(2, "\t")], ), "Xo bar " ); @@ -138,18 +129,9 @@ fn overlay() { 0, true, &[ - Overlay { - char_idx: 2, - grapheme: "\t".into(), - }, - Overlay { - char_idx: 5, - grapheme: "\t".into(), - }, - Overlay { - char_idx: 16, - grapheme: "X".into(), - }, + Overlay::new(2, "\t"), + Overlay::new(5, "\t"), + Overlay::new(16, "X"), ] ), "fo f o foo \n.foo Xoo foo foo \n.foo foo foo " @@ -170,24 +152,14 @@ fn annotate_text(text: &str, softwrap: bool, annotations: &[InlineAnnotation]) - #[test] fn annotation() { assert_eq!( - annotate_text( - "bar", - false, - &[InlineAnnotation { - char_idx: 0, - text: "foo".into(), - }] - ), + annotate_text("bar", false, &[InlineAnnotation::new(0, "foo")]), "foobar " ); assert_eq!( annotate_text( &"foo ".repeat(10), true, - &[InlineAnnotation { - char_idx: 0, - text: "foo ".into(), - }] + 
&[InlineAnnotation::new(0, "foo ")] ), "foo foo foo foo \n.foo foo foo foo \n.foo foo foo " ); @@ -199,20 +171,8 @@ fn annotation_and_overlay() { "bbar".into(), &TextFormat::new_test(false), TextAnnotations::default() - .add_inline_annotations( - Rc::new([InlineAnnotation { - char_idx: 0, - text: "fooo".into(), - }]), - None - ) - .add_overlay( - Rc::new([Overlay { - char_idx: 0, - grapheme: "\t".into(), - }]), - None - ), + .add_inline_annotations(Rc::new([InlineAnnotation::new(0, "fooo")]), None) + .add_overlay(Rc::new([Overlay::new(0, "\t")]), None), 0, ) .0 diff --git a/helix-core/src/increment/integer.rs b/helix-core/src/increment/integer.rs index 30803e175b8c..0dfabc0d38af 100644 --- a/helix-core/src/increment/integer.rs +++ b/helix-core/src/increment/integer.rs @@ -69,8 +69,8 @@ pub fn increment(selected_text: &str, amount: i64) -> Option { let (lower_count, upper_count): (usize, usize) = number.chars().fold((0, 0), |(lower, upper), c| { ( - lower + c.is_ascii_lowercase().then(|| 1).unwrap_or(0), - upper + c.is_ascii_uppercase().then(|| 1).unwrap_or(0), + lower + c.is_ascii_lowercase() as usize, + upper + c.is_ascii_uppercase() as usize, ) }); if upper_count > lower_count { diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs index d6aa5edb8b71..950a7f652c99 100644 --- a/helix-core/src/indent.rs +++ b/helix-core/src/indent.rs @@ -56,6 +56,14 @@ impl IndentStyle { } } } + + #[inline] + pub fn indent_width(&self, tab_width: usize) -> usize { + match *self { + IndentStyle::Tabs => tab_width, + IndentStyle::Spaces(width) => width as usize, + } + } } /// Attempts to detect the indentation style used in a document. @@ -177,7 +185,7 @@ pub fn auto_detect_indent_style(document_text: &Rope) -> Option { /// To determine indentation of a newly inserted line, figure out the indentation at the last col /// of the previous line. -pub fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize { +pub fn indent_level_for_line(line: RopeSlice, tab_width: usize, indent_width: usize) -> usize { let mut len = 0; for ch in line.chars() { match ch { @@ -187,7 +195,7 @@ pub fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize { } } - len / tab_width + len / indent_width } /// Computes for node and all ancestors whether they are the first node on their line. 
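The hunks above split the old `tab_width` parameter into `tab_width` (how many columns a tab occupies) and `indent_width` (how many columns make up one indent level). Below is a minimal standalone sketch of that computation; it operates on a plain `&str` rather than the `RopeSlice` used in helix-core, so the function and the `main` harness here are illustrative only.

```rust
// Standalone sketch of the indent-level computation described above; not the
// helix-core function itself (which operates on a RopeSlice).
fn indent_level_for_line(line: &str, tab_width: usize, indent_width: usize) -> usize {
    let mut len = 0;
    for ch in line.chars() {
        match ch {
            '\t' => len += tab_width, // a tab advances by the tab width...
            ' ' => len += 1,
            _ => break,
        }
    }
    len / indent_width // ...but levels are counted in units of the indent width
}

fn main() {
    // With 8-column tabs and 4-column indents, one tab counts as two levels.
    assert_eq!(indent_level_for_line("\tfn new()", 8, 4), 2);
    // Mixed indentation at tab_width = indent_width = 4: tab (4 cols) + 4 spaces = 2 levels.
    assert_eq!(indent_level_for_line("\t    fn new()", 4, 4), 2);
    println!("indent level checks passed");
}
```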
@@ -466,6 +474,7 @@ fn extend_nodes<'a>( text: RopeSlice, line: usize, tab_width: usize, + indent_width: usize, ) { let mut stop_extend = false; @@ -490,10 +499,12 @@ fn extend_nodes<'a>( if deepest_preceding.end_position().row == line { extend_node = true; } else { - let cursor_indent = indent_level_for_line(text.line(line), tab_width); + let cursor_indent = + indent_level_for_line(text.line(line), tab_width, indent_width); let node_indent = indent_level_for_line( text.line(deepest_preceding.start_position().row), tab_width, + indent_width, ); if cursor_indent > node_indent { extend_node = true; @@ -562,6 +573,7 @@ pub fn treesitter_indent_for_pos( syntax: &Syntax, indent_style: &IndentStyle, tab_width: usize, + indent_width: usize, text: RopeSlice, line: usize, pos: usize, @@ -604,7 +616,7 @@ pub fn treesitter_indent_for_pos( &mut cursor, text, query_range, - new_line.then(|| (line, byte_pos)), + new_line.then_some((line, byte_pos)), ); ts_parser.cursors.push(cursor); (query_result, deepest_preceding) @@ -622,9 +634,10 @@ pub fn treesitter_indent_for_pos( text, line, tab_width, + indent_width, ); } - let mut first_in_line = get_first_in_line(node, new_line.then(|| byte_pos)); + let mut first_in_line = get_first_in_line(node, new_line.then_some(byte_pos)); let mut result = Indentation::default(); // We always keep track of all the indent changes on one line, in order to only indent once @@ -709,6 +722,7 @@ pub fn indent_for_newline( line_before_end_pos: usize, current_line: usize, ) -> String { + let indent_width = indent_style.indent_width(tab_width); if let (Some(query), Some(syntax)) = ( language_config.and_then(|config| config.indent_query()), syntax, @@ -718,6 +732,7 @@ pub fn indent_for_newline( syntax, indent_style, tab_width, + indent_width, text, line_before, line_before_end_pos, @@ -726,7 +741,7 @@ pub fn indent_for_newline( return indent; }; } - let indent_level = indent_level_for_line(text.line(current_line), tab_width); + let indent_level = indent_level_for_line(text.line(current_line), tab_width, indent_width); indent_style.as_str().repeat(indent_level) } @@ -763,12 +778,22 @@ mod test { #[test] fn test_indent_level() { let tab_width = 4; + let indent_width = 4; let line = Rope::from(" fn new"); // 8 spaces - assert_eq!(indent_level_for_line(line.slice(..), tab_width), 2); + assert_eq!( + indent_level_for_line(line.slice(..), tab_width, indent_width), + 2 + ); let line = Rope::from("\t\t\tfn new"); // 3 tabs - assert_eq!(indent_level_for_line(line.slice(..), tab_width), 3); + assert_eq!( + indent_level_for_line(line.slice(..), tab_width, indent_width), + 3 + ); // mixed indentation let line = Rope::from("\t \tfn new"); // 1 tab, 4 spaces, tab - assert_eq!(indent_level_for_line(line.slice(..), tab_width), 3); + assert_eq!( + indent_level_for_line(line.slice(..), tab_width, indent_width), + 3 + ); } } diff --git a/helix-core/src/line_ending.rs b/helix-core/src/line_ending.rs index 09e9252306e7..953d567d5f10 100644 --- a/helix-core/src/line_ending.rs +++ b/helix-core/src/line_ending.rs @@ -203,6 +203,13 @@ pub fn line_end_char_index(slice: &RopeSlice, line: usize) -> usize { .unwrap_or(0) } +pub fn line_end_byte_index(slice: &RopeSlice, line: usize) -> usize { + slice.line_to_byte(line + 1) + - get_line_ending(&slice.line(line)) + .map(|le| le.as_str().len()) + .unwrap_or(0) +} + /// Fetches line `line_idx` from the passed rope slice, sans any line ending. 
pub fn line_without_line_ending<'a>(slice: &'a RopeSlice, line_idx: usize) -> RopeSlice<'a> { let start = slice.line_to_char(line_idx); diff --git a/helix-core/src/movement.rs b/helix-core/src/movement.rs index 11c12a6f0b25..8e6b63066201 100644 --- a/helix-core/src/movement.rs +++ b/helix-core/src/movement.rs @@ -227,9 +227,15 @@ fn word_move(slice: RopeSlice, range: Range, count: usize, target: WordMotionTar }; // Do the main work. - (0..count).fold(start_range, |r, _| { - slice.chars_at(r.head).range_to_target(target, r) - }) + let mut range = start_range; + for _ in 0..count { + let next_range = slice.chars_at(range.head).range_to_target(target, range); + if range == next_range { + break; + } + range = next_range; + } + range } pub fn move_prev_paragraph( @@ -251,6 +257,7 @@ pub fn move_prev_paragraph( let mut lines = slice.lines_at(line); lines.reverse(); let mut lines = lines.map(rope_is_line_ending).peekable(); + let mut last_line = line; for _ in 0..count { while lines.next_if(|&e| e).is_some() { line -= 1; @@ -258,6 +265,10 @@ pub fn move_prev_paragraph( while lines.next_if(|&e| !e).is_some() { line -= 1; } + if line == last_line { + break; + } + last_line = line; } let head = slice.line_to_char(line); @@ -293,6 +304,7 @@ pub fn move_next_paragraph( line += 1; } let mut lines = slice.lines_at(line).map(rope_is_line_ending).peekable(); + let mut last_line = line; for _ in 0..count { while lines.next_if(|&e| !e).is_some() { line += 1; @@ -300,6 +312,10 @@ pub fn move_next_paragraph( while lines.next_if(|&e| e).is_some() { line += 1; } + if line == last_line { + break; + } + last_line = line; } let head = slice.line_to_char(line); let anchor = if behavior == Movement::Move { @@ -523,7 +539,14 @@ pub fn goto_treesitter_object( // head of range should be at beginning Some(Range::new(start_char, end_char)) }; - (0..count).fold(range, |range, _| get_range(range).unwrap_or(range)) + let mut last_range = range; + for _ in 0..count { + match get_range(last_range) { + Some(r) if r != last_range => last_range = r, + _ => break, + } + } + last_range } #[cfg(test)] diff --git a/helix-core/src/selection.rs b/helix-core/src/selection.rs index 7817618fb488..8e93c633e4ad 100644 --- a/helix-core/src/selection.rs +++ b/helix-core/src/selection.rs @@ -21,14 +21,14 @@ use std::borrow::Cow; /// can be in any order, or even share the same position. /// /// The anchor and head positions use gap indexing, meaning -/// that their indices represent the the gaps *between* `char`s +/// that their indices represent the gaps *between* `char`s /// rather than the `char`s themselves. For example, 1 /// represents the position between the first and second `char`. /// -/// Below are some example `Range` configurations to better -/// illustrate. The anchor and head indices are show as -/// "(anchor, head)", followed by example text with "[" and "]" -/// inserted to represent the anchor and head positions: +/// Below are some examples of `Range` configurations. +/// The anchor and head indices are shown as "(anchor, head)" +/// tuples, followed by example text with "[" and "]" symbols +/// representing the anchor and head positions: /// /// - (0, 3): `[Som]e text`. /// - (3, 0): `]Som[e text`. @@ -578,6 +578,16 @@ impl Selection { self.normalize() } + /// Takes a closure and maps each `Range` over the closure to multiple `Range`s. 
+ pub fn transform_iter(mut self, f: F) -> Self + where + F: FnMut(Range) -> I, + I: Iterator, + { + self.ranges = self.ranges.into_iter().flat_map(f).collect(); + self.normalize() + } + // Ensures the selection adheres to the following invariants: // 1. All ranges are grapheme aligned. // 2. All ranges are at least 1 character wide, unless at the @@ -615,11 +625,6 @@ impl Selection { // returns true if self ⊇ other pub fn contains(&self, other: &Selection) -> bool { - // can't contain other if it is larger - if other.len() > self.len() { - return false; - } - let (mut iter_self, mut iter_other) = (self.iter(), other.iter()); let (mut ele_self, mut ele_other) = (iter_self.next(), iter_other.next()); @@ -656,6 +661,15 @@ impl<'a> IntoIterator for &'a Selection { } } +impl IntoIterator for Selection { + type Item = Range; + type IntoIter = smallvec::IntoIter<[Range; 1]>; + + fn into_iter(self) -> smallvec::IntoIter<[Range; 1]> { + self.ranges.into_iter() + } +} + // TODO: checkSelection -> check if valid for doc length && sorted pub fn keep_or_remove_matches( @@ -1230,5 +1244,11 @@ mod test { vec!((3, 4), (7, 9)) )); assert!(!contains(vec!((1, 1), (5, 6)), vec!((1, 6)))); + + // multiple ranges of other are all contained in some ranges of self, + assert!(contains( + vec!((1, 4), (7, 10)), + vec!((1, 2), (3, 4), (7, 9)) + )); } } diff --git a/helix-core/src/shellwords.rs b/helix-core/src/shellwords.rs index 9475f5e50cea..0883eb9172ae 100644 --- a/helix-core/src/shellwords.rs +++ b/helix-core/src/shellwords.rs @@ -129,8 +129,9 @@ impl<'a> From<&'a str> for Shellwords<'a> { DquoteEscaped => Dquoted, }; - if i >= input.len() - 1 && end == 0 { - end = i + 1; + let c_len = c.len_utf8(); + if i == input.len() - c_len && end == 0 { + end = i + c_len; } if end > 0 { @@ -333,4 +334,17 @@ mod test { assert_eq!(Shellwords::from(":o a").parts(), &[":o", "a"]); assert_eq!(Shellwords::from(":o a\\ ").parts(), &[":o", "a\\"]); } + + #[test] + fn test_multibyte_at_end() { + assert_eq!(Shellwords::from("𒀀").parts(), &["𒀀"]); + assert_eq!( + Shellwords::from(":sh echo 𒀀").parts(), + &[":sh", "echo", "𒀀"] + ); + assert_eq!( + Shellwords::from(":sh echo 𒀀 hello world𒀀").parts(), + &[":sh", "echo", "𒀀", "hello", "world𒀀"] + ); + } } diff --git a/helix-core/src/surround.rs b/helix-core/src/surround.rs index a3de3cd17509..f430aee8a152 100644 --- a/helix-core/src/surround.rs +++ b/helix-core/src/surround.rs @@ -1,6 +1,6 @@ use std::fmt::Display; -use crate::{search, Range, Selection}; +use crate::{movement::Direction, search, Range, Selection}; use ropey::RopeSlice; pub const PAIRS: &[(char, char)] = &[ @@ -55,15 +55,18 @@ pub fn get_pair(ch: char) -> (char, char) { pub fn find_nth_closest_pairs_pos( text: RopeSlice, range: Range, - n: usize, + mut skip: usize, ) -> Result<(usize, usize)> { let is_open_pair = |ch| PAIRS.iter().any(|(open, _)| *open == ch); let is_close_pair = |ch| PAIRS.iter().any(|(_, close)| *close == ch); let mut stack = Vec::with_capacity(2); - let pos = range.cursor(text); + let pos = range.from(); + let mut close_pos = pos.saturating_sub(1); for ch in text.chars_at(pos) { + close_pos += 1; + if is_open_pair(ch) { // Track open pairs encountered so that we can step over // the corresponding close pairs that will come up further @@ -71,20 +74,46 @@ pub fn find_nth_closest_pairs_pos( // open pair is before the cursor position. 
stack.push(ch); continue; - } else if is_close_pair(ch) { - let (open, _) = get_pair(ch); - if stack.last() == Some(&open) { - stack.pop(); - continue; - } else { - // In the ideal case the stack would be empty here and the - // current character would be the close pair that we are - // looking for. It could also be the case that the pairs - // are unbalanced and we encounter a close pair that doesn't - // close the last seen open pair. In either case use this - // char as the auto-detected closest pair. - return find_nth_pairs_pos(text, ch, range, n); + } + + if !is_close_pair(ch) { + // We don't care if this character isn't a brace pair item, + // so short circuit here. + continue; + } + + let (open, close) = get_pair(ch); + + if stack.last() == Some(&open) { + // If we are encountering the closing pair for an opener + // we just found while traversing, then its inside the + // selection and should be skipped over. + stack.pop(); + continue; + } + + match find_nth_open_pair(text, open, close, close_pos, 1) { + // Before we accept this pair, we want to ensure that the + // pair encloses the range rather than just the cursor. + Some(open_pos) + if open_pos <= pos.saturating_add(1) + && close_pos >= range.to().saturating_sub(1) => + { + // Since we have special conditions for when to + // accept, we can't just pass the skip parameter on + // through to the find_nth_*_pair methods, so we + // track skips manually here. + if skip > 1 { + skip -= 1; + continue; + } + + return match range.direction() { + Direction::Forward => Ok((open_pos, close_pos)), + Direction::Backward => Ok((close_pos, open_pos)), + }; } + _ => continue, } } @@ -244,141 +273,140 @@ mod test { use ropey::Rope; use smallvec::SmallVec; - #[allow(clippy::type_complexity)] - fn check_find_nth_pair_pos( - text: &str, - cases: Vec<(usize, char, usize, Result<(usize, usize)>)>, - ) { - let doc = Rope::from(text); - let slice = doc.slice(..); - - for (cursor_pos, ch, n, expected_range) in cases { - let range = find_nth_pairs_pos(slice, ch, (cursor_pos, cursor_pos + 1).into(), n); - assert_eq!( - range, expected_range, - "Expected {:?}, got {:?}", - expected_range, range + #[test] + fn test_get_surround_pos() { + #[rustfmt::skip] + let (doc, selection, expectations) = + rope_with_selections_and_expectations( + "(some) (chars)\n(newline)", + "_ ^ _ _ ^ _\n_ ^ _" ); - } + + assert_eq!( + get_surround_pos(doc.slice(..), &selection, Some('('), 1).unwrap(), + expectations + ); } #[test] - fn test_find_nth_pairs_pos() { - check_find_nth_pair_pos( - "some (text) here", - vec![ - // cursor on [t]ext - (6, '(', 1, Ok((5, 10))), - (6, ')', 1, Ok((5, 10))), - // cursor on so[m]e - (2, '(', 1, Err(Error::PairNotFound)), - // cursor on bracket itself - (5, '(', 1, Ok((5, 10))), - (10, '(', 1, Ok((5, 10))), - ], + fn test_get_surround_pos_bail_different_surround_chars() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "[some]\n(chars)xx\n(newline)", + " ^ \n ^ \n " + ); + + assert_eq!( + get_surround_pos(doc.slice(..), &selection, Some('('), 1), + Err(Error::PairNotFound) ); } #[test] - fn test_find_nth_pairs_pos_skip() { - check_find_nth_pair_pos( - "(so (many (good) text) here)", - vec![ - // cursor on go[o]d - (13, '(', 1, Ok((10, 15))), - (13, '(', 2, Ok((4, 21))), - (13, '(', 3, Ok((0, 27))), - ], + fn test_get_surround_pos_bail_overlapping_surround_chars() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "[some]\n(chars)xx\n(newline)", + " \n ^ \n ^ " + 
); + + assert_eq!( + get_surround_pos(doc.slice(..), &selection, Some('('), 1), + Err(Error::PairNotFound) // overlapping surround chars ); } #[test] - fn test_find_nth_pairs_pos_same() { - check_find_nth_pair_pos( - "'so 'many 'good' text' here'", - vec![ - // cursor on go[o]d - (13, '\'', 1, Ok((10, 15))), - (13, '\'', 2, Ok((4, 21))), - (13, '\'', 3, Ok((0, 27))), - // cursor on the quotes - (10, '\'', 1, Err(Error::CursorOnAmbiguousPair)), - ], - ) + fn test_get_surround_pos_bail_cursor_overlap() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "[some]\n(chars)xx\n(newline)", + " ^^ \n \n " + ); + + assert_eq!( + get_surround_pos(doc.slice(..), &selection, Some('['), 1), + Err(Error::CursorOverlap) + ); } #[test] - fn test_find_nth_pairs_pos_step() { - check_find_nth_pair_pos( - "((so)((many) good (text))(here))", - vec![ - // cursor on go[o]d - (15, '(', 1, Ok((5, 24))), - (15, '(', 2, Ok((0, 31))), - ], + fn test_find_nth_pairs_pos_quote_success() { + #[rustfmt::skip] + let (doc, selection, expectations) = + rope_with_selections_and_expectations( + "some 'quoted text' on this 'line'\n'and this one'", + " _ ^ _ \n " + ); + + assert_eq!(2, expectations.len()); + assert_eq!( + find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1) + .expect("find should succeed"), + (expectations[0], expectations[1]) ) } #[test] - fn test_find_nth_pairs_pos_mixed() { - check_find_nth_pair_pos( - "(so [many {good} text] here)", - vec![ - // cursor on go[o]d - (13, '{', 1, Ok((10, 15))), - (13, '[', 1, Ok((4, 21))), - (13, '(', 1, Ok((0, 27))), - ], + fn test_find_nth_pairs_pos_nested_quote_success() { + #[rustfmt::skip] + let (doc, selection, expectations) = + rope_with_selections_and_expectations( + "some 'nested 'quoted' text' on this 'line'\n'and this one'", + " _ ^ _ \n " + ); + + assert_eq!(2, expectations.len()); + assert_eq!( + find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 2) + .expect("find should succeed"), + (expectations[0], expectations[1]) ) } #[test] - fn test_get_surround_pos() { - let doc = Rope::from("(some) (chars)\n(newline)"); - let slice = doc.slice(..); - let selection = Selection::new( - SmallVec::from_slice(&[Range::point(2), Range::point(9), Range::point(20)]), - 0, - ); + fn test_find_nth_pairs_pos_inside_quote_ambiguous() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "some 'nested 'quoted' text' on this 'line'\n'and this one'", + " ^ \n " + ); - // cursor on s[o]me, c[h]ars, newl[i]ne assert_eq!( - get_surround_pos(slice, &selection, Some('('), 1) - .unwrap() - .as_slice(), - &[0, 5, 7, 13, 15, 23] - ); + find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1), + Err(Error::CursorOnAmbiguousPair) + ) } - #[test] - fn test_get_surround_pos_bail() { - let doc = Rope::from("[some]\n(chars)xx\n(newline)"); - let slice = doc.slice(..); + // Create a Rope and a matching Selection using a specification language. + // ^ is a single-point selection. + // _ is an expected index. These are returned as a Vec for use in assertions. 
+ fn rope_with_selections_and_expectations( + text: &str, + spec: &str, + ) -> (Rope, Selection, Vec<usize>) { + if text.len() != spec.len() { + panic!("specification must match text length -- are newlines aligned?"); + } - let selection = - Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0); - // cursor on s[o]me, c[h]ars - assert_eq!( - get_surround_pos(slice, &selection, Some('('), 1), - Err(Error::PairNotFound) // different surround chars - ); + let rope = Rope::from(text); - let selection = Selection::new( - SmallVec::from_slice(&[Range::point(14), Range::point(24)]), - 0, - ); - // cursor on [x]x, newli[n]e - assert_eq!( - get_surround_pos(slice, &selection, Some('('), 1), - Err(Error::PairNotFound) // overlapping surround chars - ); + let selections: SmallVec<[Range; 1]> = spec + .match_indices('^') + .into_iter() + .map(|(i, _)| Range::point(i)) + .collect(); - let selection = - Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0); - // cursor on s[o][m]e - assert_eq!( - get_surround_pos(slice, &selection, Some('['), 1), - Err(Error::CursorOverlap) - ); + let expectations: Vec<usize> = spec + .match_indices('_') + .into_iter() + .map(|(i, _)| i) + .collect(); + + (rope, Selection::new(selections, 0), expectations) } } diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index 41ab23e1343a..941e3ba7bd3b 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -82,7 +82,8 @@ pub struct LanguageConfiguration { pub shebangs: Vec<String>, // interpreter(s) associated with language pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml> pub comment_token: Option<String>, - pub max_line_length: Option<usize>, + pub text_width: Option<usize>, + pub soft_wrap: Option<SoftWrap>, #[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")] pub config: Option<serde_json::Value>, @@ -427,7 +428,7 @@ impl TextObjectQuery { let nodes: Vec<_> = mat .captures .iter() - .filter_map(|cap| (cap.index == capture_idx).then(|| cap.node)) + .filter_map(|cap| (cap.index == capture_idx).then_some(cap.node)) .collect(); if nodes.len() > 1 { @@ -546,6 +547,33 @@ impl LanguageConfiguration { .ok() } } +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(default, rename_all = "kebab-case", deny_unknown_fields)] +pub struct SoftWrap { + /// Soft wrap lines that exceed viewport width. Defaults to off + pub enable: Option<bool>, + /// Maximum space left free at the end of the line. + /// This space is used to wrap text at word boundaries. If that is not possible within this limit + /// the word is simply split at the end of the line. + /// + /// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views. + /// + /// Defaults to 20 + pub max_wrap: Option<u16>, + /// Maximum amount of indentation that can be carried over from the previous line when softwrapping. + /// If a line is indented further than this limit it is rendered at the start of the viewport instead. + /// + /// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views. + /// + /// Defaults to 40 + pub max_indent_retain: Option<u16>, + /// Indicator placed at the beginning of softwrapped lines + /// + /// Defaults to ↪ + pub wrap_indicator: Option<String>, + /// Softwrap at `text_width` instead of viewport width if it is shorter + pub wrap_at_text_width: Option<bool>, +} // Expose loader as Lazy<> global since it's always static?
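Since `SoftWrap` derives `Deserialize` with `rename_all = "kebab-case"` and `deny_unknown_fields`, a per-language soft-wrap table maps onto it directly. The following is a hedged sketch of that round trip, assuming the `toml` crate and `helix_core::syntax::SoftWrap` are available (for example from a helix-core test); the concrete values are made up for illustration.

```rust
// Hedged sketch: deserializing a per-language soft-wrap table into the SoftWrap
// struct added above. Assumes the `toml` crate and helix-core are available.
use helix_core::syntax::SoftWrap;

fn main() {
    let cfg: SoftWrap = toml::from_str(
        "enable = true\nmax-wrap = 25\nwrap-at-text-width = true",
    )
    .expect("valid soft-wrap table");

    // Keys are kebab-case thanks to `rename_all`; unset options stay `None`,
    // so the editor-wide defaults still apply for them.
    assert_eq!(cfg.enable, Some(true));
    assert_eq!(cfg.max_wrap, Some(25));
    assert_eq!(cfg.max_indent_retain, None);
}
```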
@@ -1092,21 +1120,14 @@ impl Syntax { }], cursor, _tree: None, - captures, + captures: RefCell::new(captures), config: layer.config.as_ref(), // TODO: just reuse `layer` depth: layer.depth, // TODO: just reuse `layer` - ranges: &layer.ranges, // TODO: temp }) }) .collect::>(); - // HAXX: arrange layers by byte range, with deeper layers positioned first - layers.sort_by_key(|layer| { - ( - layer.ranges.first().cloned(), - std::cmp::Reverse(layer.depth), - ) - }); + layers.sort_unstable_by_key(|layer| layer.sort_key()); let mut result = HighlightIter { source, @@ -1424,12 +1445,11 @@ impl<'a> TextProvider<'a> for RopeProvider<'a> { struct HighlightIterLayer<'a> { _tree: Option, cursor: QueryCursor, - captures: iter::Peekable>>, + captures: RefCell>>>, config: &'a HighlightConfiguration, highlight_end_stack: Vec, scope_stack: Vec>, depth: u32, - ranges: &'a [Range], } impl<'a> fmt::Debug for HighlightIterLayer<'a> { @@ -1610,10 +1630,11 @@ impl<'a> HighlightIterLayer<'a> { // First, sort scope boundaries by their byte offset in the document. At a // given position, emit scope endings before scope beginnings. Finally, emit // scope boundaries from deeper layers first. - fn sort_key(&mut self) -> Option<(usize, bool, isize)> { + fn sort_key(&self) -> Option<(usize, bool, isize)> { let depth = -(self.depth as isize); let next_start = self .captures + .borrow_mut() .peek() .map(|(m, i)| m.captures[*i].node.start_byte()); let next_end = self.highlight_end_stack.last().cloned(); @@ -1838,7 +1859,8 @@ impl<'a> Iterator for HighlightIter<'a> { // Get the next capture from whichever layer has the earliest highlight boundary. let range; let layer = &mut self.layers[0]; - if let Some((next_match, capture_index)) = layer.captures.peek() { + let captures = layer.captures.get_mut(); + if let Some((next_match, capture_index)) = captures.peek() { let next_capture = next_match.captures[*capture_index]; range = next_capture.node.byte_range(); @@ -1861,7 +1883,7 @@ impl<'a> Iterator for HighlightIter<'a> { return self.emit_event(self.source.len_bytes(), None); }; - let (mut match_, capture_index) = layer.captures.next().unwrap(); + let (mut match_, capture_index) = captures.next().unwrap(); let mut capture = match_.captures[capture_index]; // Remove from the local scope stack any local scopes that have already ended. @@ -1937,11 +1959,11 @@ impl<'a> Iterator for HighlightIter<'a> { } // Continue processing any additional matches for the same node. - if let Some((next_match, next_capture_index)) = layer.captures.peek() { + if let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { capture = next_capture; - match_ = layer.captures.next().unwrap().0; + match_ = captures.next().unwrap().0; continue; } } @@ -1964,11 +1986,11 @@ impl<'a> Iterator for HighlightIter<'a> { // highlighting patterns that are disabled for local variables. 
if definition_highlight.is_some() || reference_highlight.is_some() { while layer.config.non_local_variable_patterns[match_.pattern_index] { - if let Some((next_match, next_capture_index)) = layer.captures.peek() { + if let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { capture = next_capture; - match_ = layer.captures.next().unwrap().0; + match_ = captures.next().unwrap().0; continue; } } @@ -1983,10 +2005,10 @@ impl<'a> Iterator for HighlightIter<'a> { // for a given node are ordered by pattern index, so these subsequent // captures are guaranteed to be for highlighting, not injections or // local variables. - while let Some((next_match, next_capture_index)) = layer.captures.peek() { + while let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { - layer.captures.next(); + captures.next(); } else { break; } diff --git a/helix-core/src/text_annotations.rs b/helix-core/src/text_annotations.rs index 1956f6b5bf11..3e48de4d87ce 100644 --- a/helix-core/src/text_annotations.rs +++ b/helix-core/src/text_annotations.rs @@ -15,6 +15,15 @@ pub struct InlineAnnotation { pub char_idx: usize, } +impl InlineAnnotation { + pub fn new(char_idx: usize, text: impl Into) -> Self { + Self { + char_idx, + text: text.into(), + } + } +} + /// Represents a **single Grapheme** that is part of the document /// that start at `char_idx` that will be replaced with /// a different `grapheme`. @@ -33,22 +42,13 @@ pub struct InlineAnnotation { /// use helix_core::text_annotations::Overlay; /// /// // replaces a -/// Overlay { -/// char_idx: 0, -/// grapheme: "X".into(), -/// }; +/// Overlay::new(0, "X"); /// /// // replaces X͎̊͢͜͝͡ -/// Overlay{ -/// char_idx: 1, -/// grapheme: "\t".into(), -/// }; +/// Overlay::new(1, "\t"); /// /// // replaces b -/// Overlay{ -/// char_idx: 6, -/// grapheme: "X̢̢̟͖̲͌̋̇͑͝".into(), -/// }; +/// Overlay::new(6, "X̢̢̟͖̲͌̋̇͑͝"); /// ``` /// /// The following examples are invalid uses @@ -57,16 +57,10 @@ pub struct InlineAnnotation { /// use helix_core::text_annotations::Overlay; /// /// // overlay is not aligned at grapheme boundary -/// Overlay{ -/// char_idx: 3, -/// grapheme: "x".into(), -/// }; +/// Overlay::new(3, "x"); /// /// // overlay contains multiple graphemes -/// Overlay{ -/// char_idx: 0, -/// grapheme: "xy".into(), -/// }; +/// Overlay::new(0, "xy"); /// ``` #[derive(Debug, Clone)] pub struct Overlay { @@ -74,6 +68,15 @@ pub struct Overlay { pub grapheme: Tendril, } +impl Overlay { + pub fn new(char_idx: usize, grapheme: impl Into) -> Self { + Self { + char_idx, + grapheme: grapheme.into(), + } + } +} + /// Line annotations allow for virtual text between normal /// text lines. They cause `height` empty lines to be inserted /// below the document line that contains `anchor_char_idx`. 
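The transaction.rs hunk further below introduces `Transaction::change_ignore_overlapping`, which silently drops any edit whose start falls before the end of the previously accepted edit. Here is a hedged usage sketch, assuming helix-core as a dependency; the document text and edit ranges are invented for illustration.

```rust
// Hedged usage sketch for Transaction::change_ignore_overlapping (added in the
// transaction.rs hunk further below); assumes helix-core as a dependency.
use helix_core::{Rope, Tendril, Transaction};

fn main() {
    let mut doc = Rope::from("one two three");

    // The second edit (4..7) starts before the first accepted edit (0..5) ends,
    // so it is ignored; the other two are applied in order.
    let edits = vec![(0usize, 5usize, "ONE |"), (4, 7, "XXX"), (8, 13, "3")];
    let transaction = Transaction::change_ignore_overlapping(
        &doc,
        edits.into_iter(),
        |_from, _to, replacement| Some(Tendril::from(replacement)),
    );

    transaction.apply(&mut doc);
    assert_eq!(doc, "ONE |wo 3");
}
```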
diff --git a/helix-core/src/textobject.rs b/helix-core/src/textobject.rs index 76c6d103e6c7..972a80e78a60 100644 --- a/helix-core/src/textobject.rs +++ b/helix-core/src/textobject.rs @@ -231,8 +231,20 @@ fn textobject_pair_surround_impl( }; pair_pos .map(|(anchor, head)| match textobject { - TextObject::Inside => Range::new(next_grapheme_boundary(slice, anchor), head), - TextObject::Around => Range::new(anchor, next_grapheme_boundary(slice, head)), + TextObject::Inside => { + if anchor < head { + Range::new(next_grapheme_boundary(slice, anchor), head) + } else { + Range::new(anchor, next_grapheme_boundary(slice, head)) + } + } + TextObject::Around => { + if anchor < head { + Range::new(anchor, next_grapheme_boundary(slice, head)) + } else { + Range::new(next_grapheme_boundary(slice, anchor), head) + } + } TextObject::Movement => unreachable!(), }) .unwrap_or(range) diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs index 482fd6d97e5e..d8e581aae12f 100644 --- a/helix-core/src/transaction.rs +++ b/helix-core/src/transaction.rs @@ -1,3 +1,5 @@ +use smallvec::SmallVec; + use crate::{Range, Rope, Selection, Tendril}; use std::borrow::Cow; @@ -466,6 +468,33 @@ impl Transaction { self } + /// Generate a transaction from a set of potentially overlapping changes. The `change_ranges` + /// iterator yields the range (of removed text) in the old document for each edit. If any change + /// overlaps with a previously accepted range, that change is ignored. + /// + /// The `process_change` callback is called for each edit that is not ignored (in the order + /// yielded by `change_ranges`) and should return the new text that the associated range will be + /// replaced with. + /// + /// To make this function more flexible, the iterator can yield additional data for each change + /// that is then passed on to `process_change`. + pub fn change_ignore_overlapping<T>( + doc: &Rope, + change_ranges: impl Iterator<Item = (usize, usize, T)>, + mut process_change: impl FnMut(usize, usize, T) -> Option<Tendril>, + ) -> Self { + let mut last = 0; + let changes = change_ranges.filter_map(|(from, to, data)| { + if from < last { + return None; + } + let tendril = process_change(from, to, data); + last = to; + Some((from, to, tendril)) + }); + Self::change(doc, changes) + } + /// Generate a transaction from a set of changes. 
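// Editor's note, not part of the patch: a worked example for
// `change_ignore_overlapping` above. Given `change_ranges` yielding
// (0, 5), (3, 8) and (6, 10): the first range is accepted and sets `last = 5`;
// (3, 8) starts before 5 and is dropped (dropped ranges do not advance `last`);
// (6, 10) starts at or after 5 and is accepted. `process_change` therefore runs
// only for the first and third ranges, and the transaction edits 0..5 and 6..10.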
pub fn change<I>(doc: &Rope, changes: I) -> Self where @@ -481,6 +510,11 @@ impl Transaction { for (from, to, tendril) in changes { // Verify ranges are ordered and not overlapping debug_assert!(last <= from); + // Verify ranges are correct + debug_assert!( + from <= to, + "Edit end must not come before its start (expected {from} <= {to})" + ); // Retain from last "to" to current "from" changeset.retain(from - last); @@ -508,6 +542,44 @@ impl Transaction { Self::change(doc, selection.iter().map(f)) } + pub fn change_by_selection_ignore_overlapping( + doc: &Rope, + selection: &Selection, + mut change_range: impl FnMut(&Range) -> (usize, usize), + mut create_tendril: impl FnMut(usize, usize) -> Option<Tendril>, + ) -> (Transaction, Selection) { + let mut last_selection_idx = None; + let mut new_primary_idx = None; + let mut ranges: SmallVec<[Range; 1]> = SmallVec::new(); + let process_change = |change_start, change_end, (idx, range): (usize, &Range)| { + // update the primary idx + if idx == selection.primary_index() { + new_primary_idx = Some(idx); + } else if new_primary_idx.is_none() { + if idx > selection.primary_index() { + new_primary_idx = last_selection_idx; + } else { + last_selection_idx = Some(idx); + } + } + ranges.push(*range); + create_tendril(change_start, change_end) + }; + let transaction = Self::change_ignore_overlapping( + doc, + selection.iter().enumerate().map(|range| { + let (change_start, change_end) = change_range(range.1); + (change_start, change_end, range) + }), + process_change, + ); + + ( + transaction, + Selection::new(ranges, new_primary_idx.unwrap_or(0)), + ) + } + /// Insert text at each selection head. pub fn insert(doc: &Rope, selection: &Selection, text: Tendril) -> Self { Self::change_by_selection(doc, selection, |range| { diff --git a/helix-core/src/wrap.rs b/helix-core/src/wrap.rs index eabc47d471dd..2ba8d173ee1d 100644 --- a/helix-core/src/wrap.rs +++ b/helix-core/src/wrap.rs @@ -2,6 +2,6 @@ use smartstring::{LazyCompact, SmartString}; /// Given a slice of text, return the text re-wrapped to fit it /// within the given width. 
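// Editor's sketch, not part of the patch: one way a caller might drive the
// `change_by_selection_ignore_overlapping` helper added above. Both closures
// are hypothetical: the first picks the span each selection range wants to
// replace, the second builds the replacement for every span that survives the
// overlap filter; the returned selection contains only the surviving ranges.
use helix_core::{smallvec, Range, Rope, Selection, Tendril, Transaction};

fn replace_three_chars_before_each_cursor(text: &Rope, selection: &Selection) -> Transaction {
    let (transaction, new_selection) = Transaction::change_by_selection_ignore_overlapping(
        text,
        selection,
        |range| {
            // replace the three characters before the cursor (clamped to 0)
            let cursor = range.cursor(text.slice(..));
            (cursor.saturating_sub(3), cursor)
        },
        |_start, _end| Some(Tendril::from("new")),
    );
    // map the surviving ranges through the edits before attaching them
    let new_selection = new_selection.map(transaction.changes());
    transaction.with_selection(new_selection)
}

fn demo() {
    let text = Rope::from("one two three");
    // cursors at chars 3 and 5: the spans 0..3 and 2..5 overlap, so the
    // second edit is dropped by the overlap filter
    let selection = Selection::new(smallvec![Range::point(3), Range::point(5)], 0);
    let _tx = replace_three_chars_before_each_cursor(&text, &selection);
}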
-pub fn reflow_hard_wrap(text: &str, max_line_len: usize) -> SmartString { - textwrap::refill(text, max_line_len).into() +pub fn reflow_hard_wrap(text: &str, text_width: usize) -> SmartString { + textwrap::refill(text, text_width).into() } diff --git a/helix-core/tests/indent.rs b/helix-core/tests/indent.rs index f74b576ac996..b3c543d6c1ff 100644 --- a/helix-core/tests/indent.rs +++ b/helix-core/tests/indent.rs @@ -46,11 +46,13 @@ fn test_treesitter_indent(file_name: &str, lang_scope: &str) { for i in 0..doc.len_lines() { let line = text.line(i); if let Some(pos) = helix_core::find_first_non_whitespace_char(line) { + let tab_and_indent_width: usize = 4; let suggested_indent = treesitter_indent_for_pos( indent_query, &syntax, - &IndentStyle::Spaces(4), - 4, + &IndentStyle::Spaces(tab_and_indent_width as u8), + tab_and_indent_width, + tab_and_indent_width, text, i, text.line_to_char(i) + pos, diff --git a/helix-dap/src/client.rs b/helix-dap/src/client.rs index e72d290e3fd5..ff727d00abe2 100644 --- a/helix-dap/src/client.rs +++ b/helix-dap/src/client.rs @@ -1,4 +1,5 @@ use crate::{ + requests::DisconnectArguments, transport::{Payload, Request, Response, Transport}, types::*, Error, Result, ThreadId, @@ -31,6 +32,8 @@ pub struct Client { _process: Option, server_tx: UnboundedSender, request_counter: AtomicU64, + connection_type: Option, + starting_request_args: Option, pub caps: Option, // thread_id -> frames pub stack_frames: HashMap>, @@ -41,6 +44,12 @@ pub struct Client { pub quirks: DebuggerQuirks, } +#[derive(Clone, Copy, Debug)] +pub enum ConnectionType { + Launch, + Attach, +} + impl Client { // Spawn a process and communicate with it by either TCP or stdio pub async fn process( @@ -78,7 +87,8 @@ impl Client { server_tx, request_counter: AtomicU64::new(0), caps: None, - // + connection_type: None, + starting_request_args: None, stack_frames: HashMap::new(), thread_states: HashMap::new(), thread_id: None, @@ -150,6 +160,10 @@ impl Client { ) } + pub fn starting_request_args(&self) -> &Option { + &self.starting_request_args + } + pub async fn tcp_process( cmd: &str, args: Vec<&str>, @@ -207,6 +221,10 @@ impl Client { self.id } + pub fn connection_type(&self) -> Option { + self.connection_type + } + fn next_request_id(&self) -> u64 { self.request_counter.fetch_add(1, Ordering::Relaxed) } @@ -254,7 +272,7 @@ impl Client { // TODO: specifiable timeout, delay other calls until initialize success timeout(Duration::from_secs(20), callback_rx.recv()) .await - .map_err(|_| Error::Timeout)? // return Timeout + .map_err(|_| Error::Timeout(id))? // return Timeout .ok_or(Error::StreamClosed)? 
.map(|response| response.body.unwrap_or_default()) // TODO: check response.success @@ -334,18 +352,35 @@ impl Client { Ok(()) } - pub fn disconnect(&self) -> impl Future> { - self.call::(()) + pub fn disconnect( + &mut self, + args: Option, + ) -> impl Future> { + self.connection_type = None; + self.call::(args) } - pub fn launch(&self, args: serde_json::Value) -> impl Future> { + pub fn launch(&mut self, args: serde_json::Value) -> impl Future> { + self.connection_type = Some(ConnectionType::Launch); + self.starting_request_args = Some(args.clone()); self.call::(args) } - pub fn attach(&self, args: serde_json::Value) -> impl Future> { + pub fn attach(&mut self, args: serde_json::Value) -> impl Future> { + self.connection_type = Some(ConnectionType::Attach); + self.starting_request_args = Some(args.clone()); self.call::(args) } + pub fn restart(&self) -> impl Future> { + let args = if let Some(args) = &self.starting_request_args { + args.clone() + } else { + Value::Null + }; + self.call::(args) + } + pub async fn set_breakpoints( &self, file: PathBuf, diff --git a/helix-dap/src/lib.rs b/helix-dap/src/lib.rs index f60b102c0ccd..21162cb86e72 100644 --- a/helix-dap/src/lib.rs +++ b/helix-dap/src/lib.rs @@ -2,7 +2,7 @@ mod client; mod transport; mod types; -pub use client::Client; +pub use client::{Client, ConnectionType}; pub use events::Event; pub use transport::{Payload, Response, Transport}; pub use types::*; @@ -14,8 +14,8 @@ pub enum Error { Parse(#[from] serde_json::Error), #[error("IO Error: {0}")] IO(#[from] std::io::Error), - #[error("request timed out")] - Timeout, + #[error("request {0} timed out")] + Timeout(u64), #[error("server closed the stream")] StreamClosed, #[error(transparent)] diff --git a/helix-dap/src/types.rs b/helix-dap/src/types.rs index 0a9ebe5e9540..bbaf53a602c1 100644 --- a/helix-dap/src/types.rs +++ b/helix-dap/src/types.rs @@ -378,7 +378,7 @@ pub mod requests { impl Request for Launch { type Arguments = Value; - type Result = Value; + type Result = (); const COMMAND: &'static str = "launch"; } @@ -387,15 +387,35 @@ pub mod requests { impl Request for Attach { type Arguments = Value; - type Result = Value; + type Result = (); const COMMAND: &'static str = "attach"; } + #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct DisconnectArguments { + #[serde(skip_serializing_if = "Option::is_none")] + pub restart: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub terminate_debuggee: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub suspend_debuggee: Option, + } + + #[derive(Debug)] + pub enum Restart {} + + impl Request for Restart { + type Arguments = Value; + type Result = (); + const COMMAND: &'static str = "restart"; + } + #[derive(Debug)] pub enum Disconnect {} impl Request for Disconnect { - type Arguments = (); + type Arguments = Option; type Result = (); const COMMAND: &'static str = "disconnect"; } diff --git a/helix-loader/src/grammar.rs b/helix-loader/src/grammar.rs index 01c966c8c4bd..a85cb274cfb8 100644 --- a/helix-loader/src/grammar.rs +++ b/helix-loader/src/grammar.rs @@ -67,8 +67,9 @@ pub fn get_language(name: &str) -> Result { #[cfg(not(target_arch = "wasm32"))] pub fn get_language(name: &str) -> Result { use libloading::{Library, Symbol}; - let mut library_path = crate::runtime_dir().join("grammars").join(name); - library_path.set_extension(DYLIB_EXTENSION); + let mut rel_library_path = PathBuf::new().join("grammars").join(name); + 
rel_library_path.set_extension(DYLIB_EXTENSION); + let library_path = crate::runtime_file(&rel_library_path); let library = unsafe { Library::new(&library_path) } .with_context(|| format!("Error opening dynamic library {:?}", library_path))?; @@ -252,7 +253,9 @@ fn fetch_grammar(grammar: GrammarConfiguration) -> Result { remote, revision, .. } = grammar.source { - let grammar_dir = crate::runtime_dir() + let grammar_dir = crate::runtime_dirs() + .first() + .expect("No runtime directories provided") // guaranteed by post-condition .join("grammars") .join("sources") .join(&grammar.grammar_id); @@ -350,7 +353,9 @@ fn build_grammar(grammar: GrammarConfiguration, target: Option<&str>) -> Result< let grammar_dir = if let GrammarSource::Local { path } = &grammar.source { PathBuf::from(&path) } else { - crate::runtime_dir() + crate::runtime_dirs() + .first() + .expect("No runtime directories provided") // guaranteed by post-condition .join("grammars") .join("sources") .join(&grammar.grammar_id) @@ -401,7 +406,10 @@ fn build_tree_sitter_library( None } }; - let parser_lib_path = crate::runtime_dir().join("grammars"); + let parser_lib_path = crate::runtime_dirs() + .first() + .expect("No runtime directories provided") // guaranteed by post-condition + .join("grammars"); let mut library_path = parser_lib_path.join(&grammar.grammar_id); library_path.set_extension(DYLIB_EXTENSION); @@ -511,9 +519,6 @@ fn mtime(path: &Path) -> Result { /// Gives the contents of a file from a language's `runtime/queries/` /// directory pub fn load_runtime_file(language: &str, filename: &str) -> Result { - let path = crate::RUNTIME_DIR - .join("queries") - .join(language) - .join(filename); + let path = crate::runtime_file(&PathBuf::new().join("queries").join(language).join(filename)); std::fs::read_to_string(path) } diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs index 8dc2928adc9f..04b44b5aa4ee 100644 --- a/helix-loader/src/lib.rs +++ b/helix-loader/src/lib.rs @@ -2,11 +2,12 @@ pub mod config; pub mod grammar; use etcetera::base_strategy::{choose_base_strategy, BaseStrategy}; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH"); -pub static RUNTIME_DIR: once_cell::sync::Lazy = once_cell::sync::Lazy::new(runtime_dir); +static RUNTIME_DIRS: once_cell::sync::Lazy> = + once_cell::sync::Lazy::new(prioritize_runtime_dirs); static CONFIG_FILE: once_cell::sync::OnceCell = once_cell::sync::OnceCell::new(); @@ -25,31 +26,83 @@ pub fn initialize_config_file(specified_file: Option) { CONFIG_FILE.set(config_file).ok(); } -pub fn runtime_dir() -> PathBuf { - if let Ok(dir) = std::env::var("HELIX_RUNTIME") { - return dir.into(); - } - +/// A list of runtime directories from highest to lowest priority +/// +/// The priority is: +/// +/// 1. sibling directory to `CARGO_MANIFEST_DIR` (if environment variable is set) +/// 2. subdirectory of user config directory (always included) +/// 3. `HELIX_RUNTIME` (if environment variable is set) +/// 4. subdirectory of path to helix executable (always included) +/// +/// Postcondition: returns at least two paths (they might not exist). 
+fn prioritize_runtime_dirs() -> Vec { + const RT_DIR: &str = "runtime"; + // Adding higher priority first + let mut rt_dirs = Vec::new(); if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") { // this is the directory of the crate being run by cargo, we need the workspace path so we take the parent let path = std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR); log::debug!("runtime dir: {}", path.to_string_lossy()); - return path; + rt_dirs.push(path); } - const RT_DIR: &str = "runtime"; - let conf_dir = config_dir().join(RT_DIR); - if conf_dir.exists() { - return conf_dir; + let conf_rt_dir = config_dir().join(RT_DIR); + rt_dirs.push(conf_rt_dir); + + if let Ok(dir) = std::env::var("HELIX_RUNTIME") { + rt_dirs.push(dir.into()); } // fallback to location of the executable being run // canonicalize the path in case the executable is symlinked - std::env::current_exe() + let exe_rt_dir = std::env::current_exe() .ok() .and_then(|path| std::fs::canonicalize(path).ok()) .and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR))) - .unwrap() + .unwrap(); + rt_dirs.push(exe_rt_dir); + rt_dirs +} + +/// Runtime directories ordered from highest to lowest priority +/// +/// All directories should be checked when looking for files. +/// +/// Postcondition: returns at least one path (it might not exist). +pub fn runtime_dirs() -> &'static [PathBuf] { + &RUNTIME_DIRS +} + +/// Find file with path relative to runtime directory +/// +/// `rel_path` should be the relative path from within the `runtime/` directory. +/// The valid runtime directories are searched in priority order and the first +/// file found to exist is returned, otherwise None. +fn find_runtime_file(rel_path: &Path) -> Option { + RUNTIME_DIRS.iter().find_map(|rt_dir| { + let path = rt_dir.join(rel_path); + if path.exists() { + Some(path) + } else { + None + } + }) +} + +/// Find file with path relative to runtime directory +/// +/// `rel_path` should be the relative path from within the `runtime/` directory. +/// The valid runtime directories are searched in priority order and the first +/// file found to exist is returned, otherwise the path to the final attempt +/// that failed. 
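// Editor's sketch, not part of the patch: how lookups behave with the new
// priority list. The query path below is only an example; `runtime_file`
// returns the first existing match and otherwise falls back to the
// lowest-priority candidate, so the caller still has a path to report.
use std::path::Path;

fn load_rust_highlights() -> std::io::Result<String> {
    // searched in order: $CARGO_MANIFEST_DIR/../runtime, <config dir>/runtime,
    // $HELIX_RUNTIME, <directory of the hx executable>/runtime
    let path = helix_loader::runtime_file(Path::new("queries/rust/highlights.scm"));
    std::fs::read_to_string(path)
}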
+pub fn runtime_file(rel_path: &Path) -> PathBuf { + find_runtime_file(rel_path).unwrap_or_else(|| { + RUNTIME_DIRS + .last() + .map(|dir| dir.join(rel_path)) + .unwrap_or_default() + }) } pub fn config_dir() -> PathBuf { diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml index 0db61ad4a5ab..9d76822dc96b 100644 --- a/helix-lsp/Cargo.toml +++ b/helix-lsp/Cargo.toml @@ -14,15 +14,16 @@ homepage = "https://helix-editor.com" [dependencies] helix-core = { version = "0.6", path = "../helix-core" } helix-loader = { version = "0.6", path = "../helix-loader" } +helix-parsec = { version = "0.6", path = "../helix-parsec" } anyhow = "1.0" futures-executor = "0.3" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } log = "0.4" -lsp-types = { version = "0.93", features = ["proposed"] } +lsp-types = { version = "0.94" } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" thiserror = "1.0" -tokio = { version = "1.24", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } -tokio-stream = "0.1.11" +tokio = { version = "1.26", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } +tokio-stream = "0.1.12" which = "4.4" diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index 6827f568d986..9cb7c1470049 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -6,6 +6,7 @@ use crate::{ use helix_core::{find_root, ChangeSet, Rope}; use helix_loader::{self, VERSION_AND_GIT_HASH}; +use lsp::PositionEncodingKind; use lsp_types as lsp; use serde::Deserialize; use serde_json::Value; @@ -32,7 +33,6 @@ pub struct Client { server_tx: UnboundedSender, request_counter: AtomicU64, pub(crate) capabilities: OnceCell, - offset_encoding: OffsetEncoding, config: Option, root_path: std::path::PathBuf, root_uri: Option, @@ -104,7 +104,6 @@ impl Client { server_tx, request_counter: AtomicU64::new(0), capabilities: OnceCell::new(), - offset_encoding: OffsetEncoding::Utf8, config, req_timeout, @@ -147,7 +146,19 @@ impl Client { } pub fn offset_encoding(&self) -> OffsetEncoding { - self.offset_encoding + self.capabilities() + .position_encoding + .as_ref() + .and_then(|encoding| match encoding.as_str() { + "utf-8" => Some(OffsetEncoding::Utf8), + "utf-16" => Some(OffsetEncoding::Utf16), + "utf-32" => Some(OffsetEncoding::Utf32), + encoding => { + log::error!("Server provided invalid position encording {encoding}, defaulting to utf-16"); + None + }, + }) + .unwrap_or_default() } pub fn config(&self) -> Option<&Value> { @@ -190,7 +201,7 @@ impl Client { let request = jsonrpc::MethodCall { jsonrpc: Some(jsonrpc::Version::V2), - id, + id: id.clone(), method: R::METHOD.to_string(), params: Self::value_into_params(params), }; @@ -207,7 +218,7 @@ impl Client { // TODO: delay other calls until initialize success timeout(Duration::from_secs(timeout_secs), rx.recv()) .await - .map_err(|_| Error::Timeout)? // return Timeout + .map_err(|_| Error::Timeout(id))? // return Timeout .ok_or(Error::StreamClosed)? 
} } @@ -304,12 +315,15 @@ impl Client { execute_command: Some(lsp::DynamicRegistrationClientCapabilities { dynamic_registration: Some(false), }), + inlay_hint: Some(lsp::InlayHintWorkspaceClientCapabilities { + refresh_support: Some(false), + }), ..Default::default() }), text_document: Some(lsp::TextDocumentClientCapabilities { completion: Some(lsp::CompletionClientCapabilities { completion_item: Some(lsp::CompletionItemCapability { - snippet_support: Some(false), + snippet_support: Some(true), resolve_support: Some(lsp::CompletionItemCapabilityResolveSupport { properties: vec![ String::from("documentation"), @@ -318,6 +332,10 @@ impl Client { ], }), insert_replace_support: Some(true), + deprecated_support: Some(true), + tag_support: Some(lsp::TagSupport { + value_set: vec![lsp::CompletionItemTag::DEPRECATED], + }), ..Default::default() }), completion_item_kind: Some(lsp::CompletionItemKindCapability { @@ -344,7 +362,7 @@ impl Client { }), rename: Some(lsp::RenameClientCapabilities { dynamic_registration: Some(false), - prepare_support: Some(false), + prepare_support: Some(true), prepare_support_default_behavior: None, honors_change_annotations: Some(false), }), @@ -371,12 +389,24 @@ impl Client { publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities { ..Default::default() }), + inlay_hint: Some(lsp::InlayHintClientCapabilities { + dynamic_registration: Some(false), + resolve_support: None, + }), ..Default::default() }), window: Some(lsp::WindowClientCapabilities { work_done_progress: Some(true), ..Default::default() }), + general: Some(lsp::GeneralClientCapabilities { + position_encodings: Some(vec![ + PositionEncodingKind::UTF32, + PositionEncodingKind::UTF8, + PositionEncodingKind::UTF16, + ]), + ..Default::default() + }), ..Default::default() }, trace: None, @@ -577,7 +607,7 @@ impl Client { }] } lsp::TextDocumentSyncKind::INCREMENTAL => { - Self::changeset_to_changes(old_text, new_text, changes, self.offset_encoding) + Self::changeset_to_changes(old_text, new_text, changes, self.offset_encoding()) } lsp::TextDocumentSyncKind::NONE => return None, kind => unimplemented!("{:?}", kind), @@ -628,7 +658,7 @@ impl Client { Some(self.notify::( lsp::DidSaveTextDocumentParams { text_document, - text: include_text.then(|| text.into()), + text: include_text.then_some(text.into()), }, )) } @@ -703,6 +733,31 @@ impl Client { Some(self.call::(params)) } + pub fn text_document_range_inlay_hints( + &self, + text_document: lsp::TextDocumentIdentifier, + range: lsp::Range, + work_done_token: Option, + ) -> Option>> { + let capabilities = self.capabilities.get().unwrap(); + + match capabilities.inlay_hint_provider { + Some( + lsp::OneOf::Left(true) + | lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)), + ) => (), + _ => return None, + } + + let params = lsp::InlayHintParams { + text_document, + range, + work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, + }; + + Some(self.call::(params)) + } + pub fn text_document_hover( &self, text_document: lsp::TextDocumentIdentifier, @@ -1011,6 +1066,29 @@ impl Client { Some(self.call::(params)) } + pub fn prepare_rename( + &self, + text_document: lsp::TextDocumentIdentifier, + position: lsp::Position, + ) -> Option>> { + let capabilities = self.capabilities.get().unwrap(); + + match capabilities.rename_provider { + Some(lsp::OneOf::Right(lsp::RenameOptions { + prepare_provider: Some(true), + .. 
+ })) => (), + _ => return None, + } + + let params = lsp::TextDocumentPositionParams { + text_document, + position, + }; + + Some(self.call::(params)) + } + // empty string to get all symbols pub fn workspace_symbols(&self, query: String) -> Option>> { let capabilities = self.capabilities.get().unwrap(); @@ -1027,7 +1105,7 @@ impl Client { partial_result_params: lsp::PartialResultParams::default(), }; - Some(self.call::(params)) + Some(self.call::(params)) } pub fn code_actions( diff --git a/helix-lsp/src/jsonrpc.rs b/helix-lsp/src/jsonrpc.rs index 69d02707e17a..f415dde0be4b 100644 --- a/helix-lsp/src/jsonrpc.rs +++ b/helix-lsp/src/jsonrpc.rs @@ -108,6 +108,16 @@ pub enum Id { Str(String), } +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Id::Null => f.write_str("null"), + Id::Num(num) => write!(f, "{}", num), + Id::Str(string) => f.write_str(string), + } + } +} + /// Protocol Version #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum Version { diff --git a/helix-lsp/src/lib.rs b/helix-lsp/src/lib.rs index 8418896cbb73..e31df59f4e2d 100644 --- a/helix-lsp/src/lib.rs +++ b/helix-lsp/src/lib.rs @@ -1,5 +1,6 @@ mod client; pub mod jsonrpc; +pub mod snippet; mod transport; pub use client::Client; @@ -20,7 +21,6 @@ use std::{ }, }; -use serde::{Deserialize, Serialize}; use thiserror::Error; use tokio_stream::wrappers::UnboundedReceiverStream; @@ -35,8 +35,8 @@ pub enum Error { Parse(#[from] serde_json::Error), #[error("IO Error: {0}")] IO(#[from] std::io::Error), - #[error("request timed out")] - Timeout, + #[error("request {0} timed out")] + Timeout(jsonrpc::Id), #[error("server closed the stream")] StreamClosed, #[error("Unhandled")] @@ -45,18 +45,21 @@ pub enum Error { Other(#[from] anyhow::Error), } -#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, Default)] pub enum OffsetEncoding { /// UTF-8 code units aka bytes - #[serde(rename = "utf-8")] Utf8, + /// UTF-32 code units aka chars + Utf32, /// UTF-16 code units - #[serde(rename = "utf-16")] + #[default] Utf16, } pub mod util { use super::*; + use helix_core::line_ending::{line_end_byte_index, line_end_char_index}; + use helix_core::{chars, RopeSlice, SmallVec}; use helix_core::{diagnostic::NumberOrString, Range, Rope, Selection, Tendril, Transaction}; /// Converts a diagnostic in the document to [`lsp::Diagnostic`]. @@ -117,7 +120,7 @@ pub mod util { /// Converts [`lsp::Position`] to a position in the document. /// - /// Returns `None` if position exceeds document length or an operation overflows. + /// Returns `None` if position.line is out of bounds or an overflow occurs pub fn lsp_pos_to_pos( doc: &Rope, pos: lsp::Position, @@ -128,22 +131,63 @@ pub mod util { return None; } - match offset_encoding { + // We need to be careful here to fully comply ith the LSP spec. + // Two relevant quotes from the spec: + // + // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#position + // > If the character value is greater than the line length it defaults back + // > to the line length. + // + // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocuments + // > To ensure that both client and server split the string into the same + // > line representation the protocol specifies the following end-of-line sequences: + // > ‘\n’, ‘\r\n’ and ‘\r’. Positions are line end character agnostic. 
+ // > So you can not specify a position that denotes \r|\n or \n| where | represents the character offset. + // + // This means that while the line must be in bounds the `charater` + // must be capped to the end of the line. + // Note that the end of the line here is **before** the line terminator + // so we must use `line_end_char_index` istead of `doc.line_to_char(pos_line + 1)` + // + // FIXME: Helix does not fully comply with the LSP spec for line terminators. + // The LSP standard requires that line terminators are ['\n', '\r\n', '\r']. + // Without the unicode-linebreak feature disabled, the `\r` terminator is not handled by helix. + // With the unicode-linebreak feature, helix recognizes multiple extra line break chars + // which means that positions will be decoded/encoded incorrectly in their presence + + let line = match offset_encoding { OffsetEncoding::Utf8 => { - let line = doc.line_to_char(pos_line); - let pos = line.checked_add(pos.character as usize)?; - if pos <= doc.len_chars() { - Some(pos) - } else { - None - } + let line_start = doc.line_to_byte(pos_line); + let line_end = line_end_byte_index(&doc.slice(..), pos_line); + line_start..line_end } OffsetEncoding::Utf16 => { - let line = doc.line_to_char(pos_line); - let line_start = doc.char_to_utf16_cu(line); - let pos = line_start.checked_add(pos.character as usize)?; - doc.try_utf16_cu_to_char(pos).ok() + // TODO directly translate line index to char-idx + // ropey can do this just as easily as utf-8 byte translation + // but the functions are just missing. + // Translate to char first and then utf-16 as a workaround + let line_start = doc.line_to_char(pos_line); + let line_end = line_end_char_index(&doc.slice(..), pos_line); + doc.char_to_utf16_cu(line_start)..doc.char_to_utf16_cu(line_end) + } + OffsetEncoding::Utf32 => { + let line_start = doc.line_to_char(pos_line); + let line_end = line_end_char_index(&doc.slice(..), pos_line); + line_start..line_end } + }; + + // The LSP spec demands that the offset is capped to the end of the line + let pos = line + .start + .checked_add(pos.character as usize) + .unwrap_or(line.end) + .min(line.end); + + match offset_encoding { + OffsetEncoding::Utf8 => doc.try_byte_to_char(pos).ok(), + OffsetEncoding::Utf16 => doc.try_utf16_cu_to_char(pos).ok(), + OffsetEncoding::Utf32 => Some(pos), } } @@ -158,8 +202,8 @@ pub mod util { match offset_encoding { OffsetEncoding::Utf8 => { let line = doc.char_to_line(pos); - let line_start = doc.line_to_char(line); - let col = pos - line_start; + let line_start = doc.line_to_byte(line); + let col = doc.char_to_byte(pos) - line_start; lsp::Position::new(line as u32, col as u32) } @@ -168,6 +212,13 @@ pub mod util { let line_start = doc.char_to_utf16_cu(doc.line_to_char(line)); let col = doc.char_to_utf16_cu(pos) - line_start; + lsp::Position::new(line as u32, col as u32) + } + OffsetEncoding::Utf32 => { + let line = doc.char_to_line(pos); + let line_start = doc.line_to_char(line); + let col = pos - line_start; + lsp::Position::new(line as u32, col as u32) } } @@ -196,40 +247,227 @@ pub mod util { Some(Range::new(start, end)) } + /// If the LS did not provide a range for the completion or the range of the + /// primary cursor can not be used for the secondary cursor, this function + /// can be used to find the completion range for a cursor + fn find_completion_range(text: RopeSlice, replace_mode: bool, cursor: usize) -> (usize, usize) { + let start = cursor + - text + .chars_at(cursor) + .reversed() + .take_while(|ch| chars::char_is_word(*ch)) + 
.count(); + let mut end = cursor; + if replace_mode { + end += text + .chars_at(cursor) + .skip(1) + .take_while(|ch| chars::char_is_word(*ch)) + .count(); + } + (start, end) + } + fn completion_range( + text: RopeSlice, + edit_offset: Option<(i128, i128)>, + replace_mode: bool, + cursor: usize, + ) -> Option<(usize, usize)> { + let res = match edit_offset { + Some((start_offset, end_offset)) => { + let start_offset = cursor as i128 + start_offset; + if start_offset < 0 { + return None; + } + let end_offset = cursor as i128 + end_offset; + if end_offset > text.len_chars() as i128 { + return None; + } + (start_offset as usize, end_offset as usize) + } + None => find_completion_range(text, replace_mode, cursor), + }; + Some(res) + } + /// Creates a [Transaction] from the [lsp::TextEdit] in a completion response. /// The transaction applies the edit to all cursors. pub fn generate_transaction_from_completion_edit( doc: &Rope, selection: &Selection, - edit: lsp::TextEdit, - offset_encoding: OffsetEncoding, + edit_offset: Option<(i128, i128)>, + replace_mode: bool, + new_text: String, ) -> Transaction { - let replacement: Option = if edit.new_text.is_empty() { + let replacement: Option = if new_text.is_empty() { None } else { - Some(edit.new_text.into()) + Some(new_text.into()) }; let text = doc.slice(..); - let primary_cursor = selection.primary().cursor(text); + let (removed_start, removed_end) = completion_range( + text, + edit_offset, + replace_mode, + selection.primary().cursor(text), + ) + .expect("transaction must be valid for primary selection"); + let removed_text = text.slice(removed_start..removed_end); - let start_offset = match lsp_pos_to_pos(doc, edit.range.start, offset_encoding) { - Some(start) => start as i128 - primary_cursor as i128, - None => return Transaction::new(doc), - }; - let end_offset = match lsp_pos_to_pos(doc, edit.range.end, offset_encoding) { - Some(end) => end as i128 - primary_cursor as i128, - None => return Transaction::new(doc), - }; + let (transaction, mut selection) = Transaction::change_by_selection_ignore_overlapping( + doc, + selection, + |range| { + let cursor = range.cursor(text); + completion_range(text, edit_offset, replace_mode, cursor) + .filter(|(start, end)| text.slice(start..end) == removed_text) + .unwrap_or_else(|| find_completion_range(text, replace_mode, cursor)) + }, + |_, _| replacement.clone(), + ); + if transaction.changes().is_empty() { + return transaction; + } + selection = selection.map(transaction.changes()); + transaction.with_selection(selection) + } - Transaction::change_by_selection(doc, selection, |range| { - let cursor = range.cursor(text); - ( - (cursor as i128 + start_offset) as usize, - (cursor as i128 + end_offset) as usize, - replacement.clone(), - ) - }) + /// Creates a [Transaction] from the [snippet::Snippet] in a completion response. + /// The transaction applies the edit to all cursors. 
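// Editor's note, not part of the patch: a worked example for `completion_range`
// above. The offsets in `edit_offset` are relative to each cursor, which is how
// the edit computed for the primary cursor can be reapplied at the other
// cursors. With the text "let print", the cursor on the `n` (char index 7) and
// `edit_offset = Some((-3, 2))`, the replaced span is chars 4..9, i.e. "print".
// With `edit_offset = None` the word-based fallback `find_completion_range` is
// used instead: it extends from the start of the current word up to the cursor,
// and further to the right when `replace_mode` is set.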
+ #[allow(clippy::too_many_arguments)] + pub fn generate_transaction_from_snippet( + doc: &Rope, + selection: &Selection, + edit_offset: Option<(i128, i128)>, + replace_mode: bool, + snippet: snippet::Snippet, + line_ending: &str, + include_placeholder: bool, + tab_width: usize, + indent_width: usize, + ) -> Transaction { + let text = doc.slice(..); + + let mut off = 0i128; + let mut mapped_doc = doc.clone(); + let mut selection_tabstops: SmallVec<[_; 1]> = SmallVec::new(); + let (removed_start, removed_end) = completion_range( + text, + edit_offset, + replace_mode, + selection.primary().cursor(text), + ) + .expect("transaction must be valid for primary selection"); + let removed_text = text.slice(removed_start..removed_end); + + let (transaction, selection) = Transaction::change_by_selection_ignore_overlapping( + doc, + selection, + |range| { + let cursor = range.cursor(text); + completion_range(text, edit_offset, replace_mode, cursor) + .filter(|(start, end)| text.slice(start..end) == removed_text) + .unwrap_or_else(|| find_completion_range(text, replace_mode, cursor)) + }, + |replacement_start, replacement_end| { + let mapped_replacement_start = (replacement_start as i128 + off) as usize; + let mapped_replacement_end = (replacement_end as i128 + off) as usize; + + let line_idx = mapped_doc.char_to_line(mapped_replacement_start); + let indent_level = helix_core::indent::indent_level_for_line( + mapped_doc.line(line_idx), + tab_width, + indent_width, + ) * indent_width; + + let newline_with_offset = format!( + "{line_ending}{blank:indent_level$}", + line_ending = line_ending, + blank = "" + ); + + let (replacement, tabstops) = + snippet::render(&snippet, &newline_with_offset, include_placeholder); + selection_tabstops.push((mapped_replacement_start, tabstops)); + mapped_doc.remove(mapped_replacement_start..mapped_replacement_end); + mapped_doc.insert(mapped_replacement_start, &replacement); + off += + replacement_start as i128 - replacement_end as i128 + replacement.len() as i128; + + Some(replacement) + }, + ); + + let changes = transaction.changes(); + if changes.is_empty() { + return transaction; + } + + let mut mapped_selection = SmallVec::with_capacity(selection.len()); + let mut mapped_primary_idx = 0; + let primary_range = selection.primary(); + for (range, (tabstop_anchor, tabstops)) in selection.into_iter().zip(selection_tabstops) { + if range == primary_range { + mapped_primary_idx = mapped_selection.len() + } + + let range = range.map(changes); + let tabstops = tabstops.first().filter(|tabstops| !tabstops.is_empty()); + let Some(tabstops) = tabstops else{ + // no tabstop normal mapping + mapped_selection.push(range); + continue; + }; + + // expand the selection to cover the tabstop to retain the helix selection semantic + // the tabstop closest to the range simply replaces `head` while anchor remains in place + // the remaining tabstops receive their own single-width cursor + if range.head < range.anchor { + let first_tabstop = tabstop_anchor + tabstops[0].1; + + // if selection is forward but was moved to the right it is + // contained entirely in the replacement text, just do a point + // selection (fallback below) + if range.anchor >= first_tabstop { + let range = Range::new(range.anchor, first_tabstop); + mapped_selection.push(range); + let rem_tabstops = tabstops[1..] 
+ .iter() + .map(|tabstop| Range::point(tabstop_anchor + tabstop.1)); + mapped_selection.extend(rem_tabstops); + continue; + } + } else { + let last_idx = tabstops.len() - 1; + let last_tabstop = tabstop_anchor + tabstops[last_idx].1; + + // if selection is forward but was moved to the right it is + // contained entirely in the replacement text, just do a point + // selection (fallback below) + if range.anchor <= last_tabstop { + // we can't properly compute the the next grapheme + // here because the transaction hasn't been applied yet + // that is not a problem because the range gets grapheme aligned anyway + // tough so just adding one will always cause head to be grapheme + // aligned correctly when applied to the document + let range = Range::new(range.anchor, last_tabstop + 1); + mapped_selection.push(range); + let rem_tabstops = tabstops[..last_idx] + .iter() + .map(|tabstop| Range::point(tabstop_anchor + tabstop.0)); + mapped_selection.extend(rem_tabstops); + continue; + } + }; + + let tabstops = tabstops + .iter() + .map(|tabstop| Range::point(tabstop_anchor + tabstop.0)); + mapped_selection.extend(tabstops); + } + + transaction.with_selection(Selection::new(mapped_selection, mapped_primary_idx)) } pub fn generate_transaction_from_edits( @@ -427,6 +665,16 @@ impl Registry { } } + pub fn stop(&mut self, language_config: &LanguageConfiguration) { + let scope = language_config.scope.clone(); + + if let Some((_, client)) = self.inner.remove(&scope) { + tokio::spawn(async move { + let _ = client.force_shutdown().await; + }); + } + } + pub fn get( &mut self, language_config: &LanguageConfiguration, @@ -606,16 +854,55 @@ mod tests { } test_case!("", (0, 0) => Some(0)); - test_case!("", (0, 1) => None); + test_case!("", (0, 1) => Some(0)); test_case!("", (1, 0) => None); test_case!("\n\n", (0, 0) => Some(0)); test_case!("\n\n", (1, 0) => Some(1)); - test_case!("\n\n", (1, 1) => Some(2)); + test_case!("\n\n", (1, 1) => Some(1)); test_case!("\n\n", (2, 0) => Some(2)); test_case!("\n\n", (3, 0) => None); test_case!("test\n\n\n\ncase", (4, 3) => Some(11)); test_case!("test\n\n\n\ncase", (4, 4) => Some(12)); - test_case!("test\n\n\n\ncase", (4, 5) => None); + test_case!("test\n\n\n\ncase", (4, 5) => Some(12)); test_case!("", (u32::MAX, u32::MAX) => None); } + + #[test] + fn emoji_format_gh_4791() { + use lsp_types::{Position, Range, TextEdit}; + + let edits = vec![ + TextEdit { + range: Range { + start: Position { + line: 0, + character: 1, + }, + end: Position { + line: 1, + character: 0, + }, + }, + new_text: "\n ".to_string(), + }, + TextEdit { + range: Range { + start: Position { + line: 1, + character: 7, + }, + end: Position { + line: 2, + character: 0, + }, + }, + new_text: "\n ".to_string(), + }, + ]; + + let mut source = Rope::from_str("[\n\"🇺🇸\",\n\"🎄\",\n]"); + + let transaction = generate_transaction_from_edits(&source, edits, OffsetEncoding::Utf8); + assert!(transaction.apply(&mut source)); + } } diff --git a/helix-lsp/src/snippet.rs b/helix-lsp/src/snippet.rs new file mode 100644 index 000000000000..4713ad8bbe80 --- /dev/null +++ b/helix-lsp/src/snippet.rs @@ -0,0 +1,525 @@ +use std::borrow::Cow; + +use anyhow::{anyhow, Result}; +use helix_core::{smallvec, SmallVec, Tendril}; + +#[derive(Debug, PartialEq, Eq)] +pub enum CaseChange { + Upcase, + Downcase, + Capitalize, +} + +#[derive(Debug, PartialEq, Eq)] +pub enum FormatItem<'a> { + Text(&'a str), + Capture(usize), + CaseChange(usize, CaseChange), + Conditional(usize, Option<&'a str>, Option<&'a str>), +} + +#[derive(Debug, 
PartialEq, Eq)] +pub struct Regex<'a> { + value: &'a str, + replacement: Vec>, + options: Option<&'a str>, +} + +#[derive(Debug, PartialEq, Eq)] +pub enum SnippetElement<'a> { + Tabstop { + tabstop: usize, + }, + Placeholder { + tabstop: usize, + value: Vec>, + }, + Choice { + tabstop: usize, + choices: Vec<&'a str>, + }, + Variable { + name: &'a str, + default: Option<&'a str>, + regex: Option>, + }, + Text(&'a str), +} + +#[derive(Debug, PartialEq, Eq)] +pub struct Snippet<'a> { + elements: Vec>, +} + +pub fn parse(s: &str) -> Result> { + parser::parse(s).map_err(|rest| anyhow!("Failed to parse snippet. Remaining input: {}", rest)) +} + +fn render_elements( + snippet_elements: &[SnippetElement<'_>], + insert: &mut Tendril, + offset: &mut usize, + tabstops: &mut Vec<(usize, (usize, usize))>, + newline_with_offset: &str, + include_placeholer: bool, +) { + use SnippetElement::*; + + for element in snippet_elements { + match element { + &Text(text) => { + // small optimization to avoid calling replace when it's unnecessary + let text = if text.contains('\n') { + Cow::Owned(text.replace('\n', newline_with_offset)) + } else { + Cow::Borrowed(text) + }; + *offset += text.chars().count(); + insert.push_str(&text); + } + &Variable { + name: _, + regex: _, + r#default, + } => { + // TODO: variables. For now, fall back to the default, which defaults to "". + let text = r#default.unwrap_or_default(); + *offset += text.chars().count(); + insert.push_str(text); + } + &Tabstop { tabstop } => { + tabstops.push((tabstop, (*offset, *offset))); + } + Placeholder { + tabstop, + value: inner_snippet_elements, + } => { + let start_offset = *offset; + if include_placeholer { + render_elements( + inner_snippet_elements, + insert, + offset, + tabstops, + newline_with_offset, + include_placeholer, + ); + } + tabstops.push((*tabstop, (start_offset, *offset))); + } + &Choice { + tabstop, + choices: _, + } => { + // TODO: choices + tabstops.push((tabstop, (*offset, *offset))); + } + } + } +} + +#[allow(clippy::type_complexity)] // only used one time +pub fn render( + snippet: &Snippet<'_>, + newline_with_offset: &str, + include_placeholer: bool, +) -> (Tendril, Vec>) { + let mut insert = Tendril::new(); + let mut tabstops = Vec::new(); + let mut offset = 0; + + render_elements( + &snippet.elements, + &mut insert, + &mut offset, + &mut tabstops, + newline_with_offset, + include_placeholer, + ); + + // sort in ascending order (except for 0, which should always be the last one (per lsp doc)) + tabstops.sort_unstable_by_key(|(n, _)| if *n == 0 { usize::MAX } else { *n }); + + // merge tabstops with the same index (we take advantage of the fact that we just sorted them + // above to simply look backwards) + let mut ntabstops = Vec::>::new(); + { + let mut prev = None; + for (tabstop, r) in tabstops { + if prev == Some(tabstop) { + let len_1 = ntabstops.len() - 1; + ntabstops[len_1].push(r); + } else { + prev = Some(tabstop); + ntabstops.push(smallvec![r]); + } + } + } + + (insert, ntabstops) +} + +mod parser { + use helix_parsec::*; + + use super::{CaseChange, FormatItem, Regex, Snippet, SnippetElement}; + + /* + https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#snippet_syntax + + any ::= tabstop | placeholder | choice | variable | text + tabstop ::= '$' int | '${' int '}' + placeholder ::= '${' int ':' any '}' + choice ::= '${' int '|' text (',' text)* '|}' + variable ::= '$' var | '${' var }' + | '${' var ':' any '}' + | '${' var '/' regex '/' (format | text)+ '/' options '}' + 
format ::= '$' int | '${' int '}' + | '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}' + | '${' int ':+' if '}' + | '${' int ':?' if ':' else '}' + | '${' int ':-' else '}' | '${' int ':' else '}' + regex ::= Regular Expression value (ctor-string) + options ::= Regular Expression option (ctor-options) + var ::= [_a-zA-Z] [_a-zA-Z0-9]* + int ::= [0-9]+ + text ::= .* + if ::= text + else ::= text + */ + + fn var<'a>() -> impl Parser<'a, Output = &'a str> { + // var = [_a-zA-Z][_a-zA-Z0-9]* + move |input: &'a str| match input + .char_indices() + .take_while(|(p, c)| { + *c == '_' + || if *p == 0 { + c.is_ascii_alphabetic() + } else { + c.is_ascii_alphanumeric() + } + }) + .last() + { + Some((index, c)) if index >= 1 => { + let index = index + c.len_utf8(); + Ok((&input[index..], &input[0..index])) + } + _ => Err(input), + } + } + + fn text<'a, const SIZE: usize>(cs: [char; SIZE]) -> impl Parser<'a, Output = &'a str> { + take_while(move |c| cs.into_iter().all(|c1| c != c1)) + } + + fn digit<'a>() -> impl Parser<'a, Output = usize> { + filter_map(take_while(|c| c.is_ascii_digit()), |s| s.parse().ok()) + } + + fn case_change<'a>() -> impl Parser<'a, Output = CaseChange> { + use CaseChange::*; + + choice!( + map("upcase", |_| Upcase), + map("downcase", |_| Downcase), + map("capitalize", |_| Capitalize), + ) + } + + fn format<'a>() -> impl Parser<'a, Output = FormatItem<'a>> { + use FormatItem::*; + + choice!( + // '$' int + map(right("$", digit()), Capture), + // '${' int '}' + map(seq!("${", digit(), "}"), |seq| Capture(seq.1)), + // '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}' + map(seq!("${", digit(), ":/", case_change(), "}"), |seq| { + CaseChange(seq.1, seq.3) + }), + // '${' int ':+' if '}' + map( + seq!("${", digit(), ":+", take_until(|c| c == '}'), "}"), + |seq| { Conditional(seq.1, Some(seq.3), None) } + ), + // '${' int ':?' 
if ':' else '}' + map( + seq!( + "${", + digit(), + ":?", + take_until(|c| c == ':'), + ":", + take_until(|c| c == '}'), + "}" + ), + |seq| { Conditional(seq.1, Some(seq.3), Some(seq.5)) } + ), + // '${' int ':-' else '}' | '${' int ':' else '}' + map( + seq!( + "${", + digit(), + ":", + optional("-"), + take_until(|c| c == '}'), + "}" + ), + |seq| { Conditional(seq.1, None, Some(seq.4)) } + ), + ) + } + + fn regex<'a>() -> impl Parser<'a, Output = Regex<'a>> { + let text = map(text(['$', '/']), FormatItem::Text); + let replacement = reparse_as( + take_until(|c| c == '/'), + one_or_more(choice!(format(), text)), + ); + + map( + seq!( + "/", + take_until(|c| c == '/'), + "/", + replacement, + "/", + optional(take_until(|c| c == '}')), + ), + |(_, value, _, replacement, _, options)| Regex { + value, + replacement, + options, + }, + ) + } + + fn tabstop<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + map( + or( + right("$", digit()), + map(seq!("${", digit(), "}"), |values| values.1), + ), + |digit| SnippetElement::Tabstop { tabstop: digit }, + ) + } + + fn placeholder<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + let text = map(text(['$', '}']), SnippetElement::Text); + map( + seq!( + "${", + digit(), + ":", + one_or_more(choice!(anything(), text)), + "}" + ), + |seq| SnippetElement::Placeholder { + tabstop: seq.1, + value: seq.3, + }, + ) + } + + fn choice<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + map( + seq!( + "${", + digit(), + "|", + sep(take_until(|c| c == ',' || c == '|'), ","), + "|}", + ), + |seq| SnippetElement::Choice { + tabstop: seq.1, + choices: seq.3, + }, + ) + } + + fn variable<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + choice!( + // $var + map(right("$", var()), |name| SnippetElement::Variable { + name, + default: None, + regex: None, + }), + // ${var:default} + map( + seq!("${", var(), ":", take_until(|c| c == '}'), "}",), + |values| SnippetElement::Variable { + name: values.1, + default: Some(values.3), + regex: None, + } + ), + // ${var/value/format/options} + map(seq!("${", var(), regex(), "}"), |values| { + SnippetElement::Variable { + name: values.1, + default: None, + regex: Some(values.2), + } + }), + ) + } + + fn anything<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + // The parser has to be constructed lazily to avoid infinite opaque type recursion + |input: &'a str| { + let parser = choice!(tabstop(), placeholder(), choice(), variable()); + parser.parse(input) + } + } + + fn snippet<'a>() -> impl Parser<'a, Output = Snippet<'a>> { + let text = map(text(['$']), SnippetElement::Text); + map(one_or_more(choice!(anything(), text)), |parts| Snippet { + elements: parts, + }) + } + + pub fn parse(s: &str) -> Result { + snippet().parse(s).map(|(_input, elements)| elements) + } + + #[cfg(test)] + mod test { + use super::SnippetElement::*; + use super::*; + + #[test] + fn empty_string_is_error() { + assert_eq!(Err(""), parse("")); + } + + #[test] + fn parse_placeholders_in_function_call() { + assert_eq!( + Ok(Snippet { + elements: vec![ + Text("match("), + Placeholder { + tabstop: 1, + value: vec!(Text("Arg1")), + }, + Text(")") + ] + }), + parse("match(${1:Arg1})") + ) + } + + #[test] + fn parse_placeholders_in_statement() { + assert_eq!( + Ok(Snippet { + elements: vec![ + Text("local "), + Placeholder { + tabstop: 1, + value: vec!(Text("var")), + }, + Text(" = "), + Placeholder { + tabstop: 1, + value: vec!(Text("value")), + }, + ] + }), + parse("local ${1:var} = ${1:value}") + ) + } + + #[test] + fn 
parse_tabstop_nested_in_placeholder() { + assert_eq!( + Ok(Snippet { + elements: vec![Placeholder { + tabstop: 1, + value: vec!(Text("var, "), Tabstop { tabstop: 2 },), + },] + }), + parse("${1:var, $2}") + ) + } + + #[test] + fn parse_placeholder_nested_in_placeholder() { + assert_eq!( + Ok(Snippet { + elements: vec![Placeholder { + tabstop: 1, + value: vec!( + Text("foo "), + Placeholder { + tabstop: 2, + value: vec!(Text("bar")), + }, + ), + },] + }), + parse("${1:foo ${2:bar}}") + ) + } + + #[test] + fn parse_all() { + assert_eq!( + Ok(Snippet { + elements: vec![ + Text("hello "), + Tabstop { tabstop: 1 }, + Tabstop { tabstop: 2 }, + Text(" "), + Choice { + tabstop: 1, + choices: vec!["one", "two", "three"] + }, + Text(" "), + Variable { + name: "name", + default: Some("foo"), + regex: None + }, + Text(" "), + Variable { + name: "var", + default: None, + regex: None + }, + Text(" "), + Variable { + name: "TM", + default: None, + regex: None + }, + ] + }), + parse("hello $1${2} ${1|one,two,three|} ${name:foo} $var $TM") + ); + } + + #[test] + fn regex_capture_replace() { + assert_eq!( + Ok(Snippet { + elements: vec![Variable { + name: "TM_FILENAME", + default: None, + regex: Some(Regex { + value: "(.*).+$", + replacement: vec![FormatItem::Capture(1)], + options: None, + }), + }] + }), + parse("${TM_FILENAME/(.*).+$/$1/}") + ); + } + } +} diff --git a/helix-parsec/Cargo.toml b/helix-parsec/Cargo.toml new file mode 100644 index 000000000000..505a4247e3ef --- /dev/null +++ b/helix-parsec/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "helix-parsec" +version = "0.6.0" +authors = ["Blaž Hrastnik "] +edition = "2021" +license = "MPL-2.0" +description = "Parser combinators for Helix" +categories = ["editor"] +repository = "https://github.com/helix-editor/helix" +homepage = "https://helix-editor.com" +include = ["src/**/*", "README.md"] + +[dependencies] diff --git a/helix-parsec/src/lib.rs b/helix-parsec/src/lib.rs new file mode 100644 index 000000000000..e09814b81f5b --- /dev/null +++ b/helix-parsec/src/lib.rs @@ -0,0 +1,561 @@ +//! Parser-combinator functions +//! +//! This module provides parsers and parser combinators which can be used +//! together to build parsers by functional composition. + +// This module implements parser combinators following https://bodil.lol/parser-combinators/. +// `sym` (trait implementation for `&'static str`), `map`, `pred` (filter), `one_or_more`, +// `zero_or_more`, as well as the `Parser` trait originate mostly from that post. +// The remaining parsers and parser combinators are either based on +// https://github.com/archseer/snippets.nvim/blob/a583da6ef130d2a4888510afd8c4e5ffd62d0dce/lua/snippet/parser.lua#L5-L138 +// or are novel. + +// When a parser matches the input successfully, it returns `Ok((next_input, some_value))` +// where the type of the returned value depends on the parser. If the parser fails to match, +// it returns `Err(input)`. +type ParseResult<'a, Output> = Result<(&'a str, Output), &'a str>; + +/// A parser or parser-combinator. +/// +/// Parser-combinators compose multiple parsers together to parse input. +/// For example, two basic parsers (`&'static str`s) may be combined with +/// a parser-combinator like [or] to produce a new parser. 
+/// +/// ``` +/// use helix_parsec::{or, Parser}; +/// let foo = "foo"; // matches "foo" literally +/// let bar = "bar"; // matches "bar" literally +/// let foo_or_bar = or(foo, bar); // matches either "foo" or "bar" +/// assert_eq!(Ok(("", "foo")), foo_or_bar.parse("foo")); +/// assert_eq!(Ok(("", "bar")), foo_or_bar.parse("bar")); +/// assert_eq!(Err("baz"), foo_or_bar.parse("baz")); +/// ``` +pub trait Parser<'a> { + type Output; + + fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output>; +} + +// Most parser-combinators are written as higher-order functions which take some +// parser(s) as input and return a new parser: a function that takes input and returns +// a parse result. The underlying implementation of [Parser::parse] for these functions +// is simply application. +#[doc(hidden)] +impl<'a, F, T> Parser<'a> for F +where + F: Fn(&'a str) -> ParseResult, +{ + type Output = T; + + fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { + self(input) + } +} + +/// A parser which matches the string literal exactly. +/// +/// This parser succeeds if the next characters in the input are equal to the given +/// string literal. +/// +/// Note that [str::parse] interferes with calling [Parser::parse] on string literals +/// directly; this trait implementation works when used within any parser combinator +/// but does not work on its own. To call [Parser::parse] on a parser for a string +/// literal, use the [token] parser. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{or, Parser}; +/// let parser = or("foo", "bar"); +/// assert_eq!(Ok(("", "foo")), parser.parse("foo")); +/// assert_eq!(Ok(("", "bar")), parser.parse("bar")); +/// assert_eq!(Err("baz"), parser.parse("baz")); +/// ``` +impl<'a> Parser<'a> for &'static str { + type Output = &'a str; + + fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { + match input.get(0..self.len()) { + Some(actual) if actual == *self => Ok((&input[self.len()..], &input[0..self.len()])), + _ => Err(input), + } + } +} + +// Parsers + +/// A parser which matches the given string literally. +/// +/// This function is a convenience for interpreting string literals as parsers +/// and is only necessary to avoid conflict with [str::parse]. See the documentation +/// for the `&'static str` implementation of [Parser]. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{token, Parser}; +/// let parser = token("foo"); +/// assert_eq!(Ok(("", "foo")), parser.parse("foo")); +/// assert_eq!(Err("bar"), parser.parse("bar")); +/// ``` +pub fn token<'a>(literal: &'static str) -> impl Parser<'a, Output = &'a str> { + literal +} + +/// A parser which matches all values until the specified pattern is found. +/// +/// If the pattern is not found, this parser does not match. The input up to the +/// character which returns `true` is returned but not that character itself. +/// +/// If the pattern function returns true on the first input character, this +/// parser fails. 
+/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{take_until, Parser}; +/// let parser = take_until(|c| c == '.'); +/// assert_eq!(Ok((".bar", "foo")), parser.parse("foo.bar")); +/// assert_eq!(Err(".foo"), parser.parse(".foo")); +/// assert_eq!(Err("foo"), parser.parse("foo")); +/// ``` +pub fn take_until<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str> +where + F: Fn(char) -> bool, +{ + move |input: &'a str| match input.find(&pattern) { + Some(index) if index != 0 => Ok((&input[index..], &input[0..index])), + _ => Err(input), + } +} + +/// A parser which matches all values until the specified pattern no longer match. +/// +/// This parser only ever fails if the input has a length of zero. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{take_while, Parser}; +/// let parser = take_while(|c| c == '1'); +/// assert_eq!(Ok(("2", "11")), parser.parse("112")); +/// assert_eq!(Err("22"), parser.parse("22")); +/// ``` +pub fn take_while<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str> +where + F: Fn(char) -> bool, +{ + move |input: &'a str| match input + .char_indices() + .take_while(|(_p, c)| pattern(*c)) + .last() + { + Some((index, c)) => { + let index = index + c.len_utf8(); + Ok((&input[index..], &input[0..index])) + } + _ => Err(input), + } +} + +// Variadic parser combinators + +/// A parser combinator which matches a sequence of parsers in an all-or-nothing fashion. +/// +/// The returned value is a tuple containing the outputs of all parsers in order. Each +/// parser in the sequence may be typed differently. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{seq, Parser}; +/// let parser = seq!("<", "a", ">"); +/// assert_eq!(Ok(("", ("<", "a", ">"))), parser.parse("")); +/// assert_eq!(Err(""), parser.parse("")); +/// ``` +#[macro_export] +macro_rules! seq { + ($($parsers: expr),+ $(,)?) => { + ($($parsers),+) + } +} + +// Seq is implemented using trait-implementations of Parser for various size tuples. +// This allows sequences to be typed heterogeneously. +macro_rules! seq_impl { + ($($parser:ident),+) => { + #[allow(non_snake_case)] + impl<'a, $($parser),+> Parser<'a> for ($($parser),+) + where + $($parser: Parser<'a>),+ + { + type Output = ($($parser::Output),+); + + fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { + let ($($parser),+) = self; + seq_body_impl!(input, input, $($parser),+ ; ) + } + } + } +} + +macro_rules! seq_body_impl { + ($input:expr, $next_input:expr, $head:ident, $($tail:ident),+ ; $(,)? $($acc:ident),*) => { + match $head.parse($next_input) { + Ok((next_input, $head)) => seq_body_impl!($input, next_input, $($tail),+ ; $($acc),*, $head), + Err(_) => Err($input), + } + }; + ($input:expr, $next_input:expr, $last:ident ; $(,)? $($acc:ident),*) => { + match $last.parse($next_input) { + Ok((next_input, last)) => Ok((next_input, ($($acc),+, last))), + Err(_) => Err($input), + } + } +} + +seq_impl!(A, B); +seq_impl!(A, B, C); +seq_impl!(A, B, C, D); +seq_impl!(A, B, C, D, E); +seq_impl!(A, B, C, D, E, F); +seq_impl!(A, B, C, D, E, F, G); +seq_impl!(A, B, C, D, E, F, G, H); +seq_impl!(A, B, C, D, E, F, G, H, I); +seq_impl!(A, B, C, D, E, F, G, H, I, J); + +/// A parser combinator which chooses the first of the input parsers which matches +/// successfully. +/// +/// All input parsers must have the same output type. This is a variadic form for [or]. 
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{choice, or, Parser};
+/// let parser = choice!("foo", "bar", "baz");
+/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
+/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
+/// assert_eq!(Err("quiz"), parser.parse("quiz"));
+/// ```
+#[macro_export]
+macro_rules! choice {
+    ($parser: expr $(,)?) => {
+        $parser
+    };
+    ($parser: expr, $($rest: expr),+ $(,)?) => {
+        or($parser, choice!($($rest),+))
+    }
+}
+
+// Ordinary parser combinators
+
+/// A parser combinator which takes a parser as input and maps the output using the
+/// given transformation function.
+///
+/// This corresponds to [Result::map]. The value is only mapped if the input parser
+/// matches against input.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{map, Parser};
+/// let parser = map("123", |s| s.parse::<i32>().unwrap());
+/// assert_eq!(Ok(("", 123)), parser.parse("123"));
+/// assert_eq!(Err("abc"), parser.parse("abc"));
+/// ```
+pub fn map<'a, P, F, T>(parser: P, map_fn: F) -> impl Parser<'a, Output = T>
+where
+    P: Parser<'a>,
+    F: Fn(P::Output) -> T,
+{
+    move |input| {
+        parser
+            .parse(input)
+            .map(|(next_input, result)| (next_input, map_fn(result)))
+    }
+}
+
+/// A parser combinator which succeeds if the given parser matches the input and
+/// the given `filter_map_fn` returns `Some`.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{filter_map, take_until, Parser};
+/// let parser = filter_map(take_until(|c| c == '.'), |s| s.parse::<i32>().ok());
+/// assert_eq!(Ok((".456", 123)), parser.parse("123.456"));
+/// assert_eq!(Err("abc.def"), parser.parse("abc.def"));
+/// ```
+pub fn filter_map<'a, P, F, T>(parser: P, filter_map_fn: F) -> impl Parser<'a, Output = T>
+where
+    P: Parser<'a>,
+    F: Fn(P::Output) -> Option<T>,
+{
+    move |input| match parser.parse(input) {
+        Ok((next_input, value)) => match filter_map_fn(value) {
+            Some(value) => Ok((next_input, value)),
+            None => Err(input),
+        },
+        Err(_) => Err(input),
+    }
+}
+
+/// A parser combinator which succeeds if the first given parser matches the input and
+/// the second given parser also matches the output of the first.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{reparse_as, take_until, one_or_more, Parser};
+/// let parser = reparse_as(take_until(|c| c == '/'), one_or_more("a"));
+/// assert_eq!(Ok(("/bb", vec!["a", "a"])), parser.parse("aa/bb"));
+/// ```
+pub fn reparse_as<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
+where
+    P1: Parser<'a, Output = &'a str>,
+    P2: Parser<'a, Output = T>,
+{
+    filter_map(parser1, move |str| {
+        parser2.parse(str).map(|(_, value)| value).ok()
+    })
+}
+
+/// A parser combinator which only matches the input when the predicate function
+/// returns true.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{filter, take_until, Parser};
+/// let parser = filter(take_until(|c| c == '.'), |s| s == &"123");
+/// assert_eq!(Ok((".456", "123")), parser.parse("123.456"));
+/// assert_eq!(Err("456.123"), parser.parse("456.123"));
+/// ```
+pub fn filter<'a, P, F, T>(parser: P, pred_fn: F) -> impl Parser<'a, Output = T>
+where
+    P: Parser<'a, Output = T>,
+    F: Fn(&P::Output) -> bool,
+{
+    move |input| {
+        if let Ok((next_input, value)) = parser.parse(input) {
+            if pred_fn(&value) {
+                return Ok((next_input, value));
+            }
+        }
+        Err(input)
+    }
+}
+
+/// A parser combinator which matches either of the input parsers.
+///
+/// Both parsers must have the same output type. For a variadic form which
+/// can take any number of parsers, use `choice!`.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{or, Parser};
+/// let parser = or("foo", "bar");
+/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
+/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
+/// assert_eq!(Err("baz"), parser.parse("baz"));
+/// ```
+pub fn or<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
+where
+    P1: Parser<'a, Output = T>,
+    P2: Parser<'a, Output = T>,
+{
+    move |input| match parser1.parse(input) {
+        ok @ Ok(_) => ok,
+        Err(_) => parser2.parse(input),
+    }
+}
+
+/// A parser combinator which attempts to match the given parser, returning a
+/// `None` output value if the parser does not match.
+///
+/// The parser produced with this combinator always succeeds. If the given parser
+/// succeeds, `Some(value)` is returned where `value` is the output of the given
+/// parser. Otherwise, `None`.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{optional, Parser};
+/// let parser = optional("foo");
+/// assert_eq!(Ok(("bar", Some("foo"))), parser.parse("foobar"));
+/// assert_eq!(Ok(("bar", None)), parser.parse("bar"));
+/// ```
+pub fn optional<'a, P, T>(parser: P) -> impl Parser<'a, Output = Option<T>>
+where
+    P: Parser<'a, Output = T>,
+{
+    move |input| match parser.parse(input) {
+        Ok((next_input, value)) => Ok((next_input, Some(value))),
+        Err(_) => Ok((input, None)),
+    }
+}
+
+/// A parser combinator which runs the given parsers in sequence and returns the
+/// value of `left` if both are matched.
+///
+/// This is useful for two-element sequences in which you only want the output
+/// value of the `left` parser.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{left, Parser};
+/// let parser = left("foo", "bar");
+/// assert_eq!(Ok(("", "foo")), parser.parse("foobar"));
+/// ```
+pub fn left<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
+where
+    L: Parser<'a, Output = T>,
+    R: Parser<'a>,
+{
+    map(seq!(left, right), |(left_value, _)| left_value)
+}
+
+/// A parser combinator which runs the given parsers in sequence and returns the
+/// value of `right` if both are matched.
+///
+/// This is useful for two-element sequences in which you only want the output
+/// value of the `right` parser.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{right, Parser};
+/// let parser = right("foo", "bar");
+/// assert_eq!(Ok(("", "bar")), parser.parse("foobar"));
+/// ```
+pub fn right<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
+where
+    L: Parser<'a>,
+    R: Parser<'a, Output = T>,
+{
+    map(seq!(left, right), |(_, right_value)| right_value)
+}
+
+/// A parser combinator which matches the given parser against the input zero or
+/// more times.
+///
+/// This parser always succeeds and returns the empty Vec when it matches zero
+/// times.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{zero_or_more, Parser};
+/// let parser = zero_or_more("a");
+/// assert_eq!(Ok(("", vec![])), parser.parse(""));
+/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
+/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
+/// assert_eq!(Ok(("bb", vec![])), parser.parse("bb"));
+/// ```
+pub fn zero_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
+where
+    P: Parser<'a, Output = T>,
+{
+    move |mut input| {
+        let mut values = Vec::new();
+
+        while let Ok((next_input, value)) = parser.parse(input) {
+            input = next_input;
+            values.push(value);
+        }
+
+        Ok((input, values))
+    }
+}
+
+/// A parser combinator which matches the given parser against the input one or
+/// more times.
+///
+/// This parser combinator acts the same as [zero_or_more] but must match at
+/// least once.
+///
+/// # Examples
+///
+/// ```
+/// use helix_parsec::{one_or_more, Parser};
+/// let parser = one_or_more("a");
+/// assert_eq!(Err(""), parser.parse(""));
+/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
+/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
+/// assert_eq!(Err("bb"), parser.parse("bb"));
+/// ```
+pub fn one_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
+where
+    P: Parser<'a, Output = T>,
+{
+    move |mut input| {
+        let mut values = Vec::new();
+
+        match parser.parse(input) {
+            Ok((next_input, value)) => {
+                input = next_input;
+                values.push(value);
+            }
+            Err(err) => return Err(err),
+        }
+
+        while let Ok((next_input, value)) = parser.parse(input) {
+            input = next_input;
+            values.push(value);
+        }
+
+        Ok((input, values))
+    }
+}
+
+/// A parser combinator which matches one or more instances of the given parser
+/// interspersed with the separator parser.
+///
+/// Output values of the separator parser are discarded.
+///
+/// This is typically used to parse function arguments or list items.
+///
+/// # Examples
+///
+/// ```rust
+/// use helix_parsec::{sep, Parser};
+/// let parser = sep("a", ",");
+/// assert_eq!(Ok(("", vec!["a", "a", "a"])), parser.parse("a,a,a"));
+/// ```
+pub fn sep<'a, P, S, T>(parser: P, separator: S) -> impl Parser<'a, Output = Vec<T>>
+where
+    P: Parser<'a, Output = T>,
+    S: Parser<'a>,
+{
+    move |mut input| {
+        let mut values = Vec::new();
+
+        match parser.parse(input) {
+            Ok((next_input, value)) => {
+                input = next_input;
+                values.push(value);
+            }
+            Err(err) => return Err(err),
+        }
+
+        loop {
+            match separator.parse(input) {
+                Ok((next_input, _)) => input = next_input,
+                Err(_) => break,
+            }
+
+            match parser.parse(input) {
+                Ok((next_input, value)) => {
+                    input = next_input;
+                    values.push(value);
+                }
+                Err(_) => break,
+            }
+        }
+
+        Ok((input, values))
+    }
+}
diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml
index 603f37d39ab8..bca567c28688 100644
--- a/helix-term/Cargo.toml
+++ b/helix-term/Cargo.toml
@@ -10,11 +10,7 @@ repository = "https://github.com/helix-editor/helix"
 homepage = "https://helix-editor.com"
 include = ["src/**/*", "README.md"]
 default-run = "hx"
-rust-version = "1.57"
-
-[package.metadata.nix]
-build = true
-app = true
+rust-version = "1.65"
 
 [features]
 default = ["git"]
@@ -41,7 +37,7 @@ which = "4.4"
 tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
 tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
 
-crossterm = { version = "0.25", features = ["event-stream"] }
+crossterm = { version = "0.26", features = ["event-stream"] }
 signal-hook = "0.3"
 tokio-stream = "0.1"
 futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
@@ -67,8 +63,8 @@ serde_json = "1.0"
 serde = { version = "1.0", features = ["derive"] }
 
 # ripgrep for global search
-grep-regex = "0.1.10"
-grep-searcher = "0.1.10"
+grep-regex = "0.1.11"
+grep-searcher = "0.1.11"
 
 [target.'cfg(not(windows))'.dependencies]  # https://github.com/vorner/signal-hook/issues/100
 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
@@ -79,4 +75,4 @@ helix-loader = { version = "0.6", path = "../helix-loader" }
 [dev-dependencies]
 smallvec = "1.10"
 indoc = "2.0.0"
-tempfile = "3.3.0"
+tempfile = "3.4.0"
diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs
index 05ceb874e84d..c7e939959ca4 100644
--- a/helix-term/src/application.rs
+++ b/helix-term/src/application.rs
@@ -30,21 +30,15 @@ use crate::{
 use log::{debug, error, warn};
 
 use std::{
-    io::{stdin, stdout, Write},
+    io::{stdin, stdout},
+    path::Path,
     sync::Arc,
     time::{Duration, Instant},
 };
 
 use anyhow::{Context, Error};
 
-use crossterm::{
-    event::{
-        DisableBracketedPaste, DisableFocusChange, DisableMouseCapture, EnableBracketedPaste,
-        EnableFocusChange, EnableMouseCapture, Event as CrosstermEvent,
-    },
-    execute, terminal,
-    tty::IsTty,
-};
+use crossterm::{event::Event as CrosstermEvent, tty::IsTty};
 #[cfg(not(windows))]
 use {
     signal_hook::{consts::signal, low_level},
@@ -62,10 +56,12 @@ use tui::backend::CrosstermBackend;
 #[cfg(feature = "integration")]
 use tui::backend::TestBackend;
 
 #[cfg(not(feature = "integration"))]
-type Terminal = tui::terminal::Terminal<CrosstermBackend<std::io::Stdout>>;
+type TerminalBackend = CrosstermBackend<std::io::Stdout>;
 
 #[cfg(feature = "integration")]
-type Terminal = tui::terminal::Terminal<TestBackend>;
+type TerminalBackend = TestBackend;
+
+type Terminal = tui::terminal::Terminal<TerminalBackend>;
 
 pub struct Application {
     compositor: Compositor,
@@ -107,23 +103,6 @@ fn
setup_integration_logging() { .apply(); } -fn restore_term() -> Result<(), Error> { - let mut stdout = stdout(); - // reset cursor shape - write!(stdout, "\x1B[0 q")?; - // Ignore errors on disabling, this might trigger on windows if we call - // disable without calling enable previously - let _ = execute!(stdout, DisableMouseCapture); - execute!( - stdout, - DisableBracketedPaste, - DisableFocusChange, - terminal::LeaveAlternateScreen - )?; - terminal::disable_raw_mode()?; - Ok(()) -} - impl Application { pub fn new( args: Args, @@ -135,10 +114,9 @@ impl Application { use helix_view::editor::Action; - let theme_loader = std::sync::Arc::new(theme::Loader::new( - &helix_loader::config_dir(), - &helix_loader::runtime_dir(), - )); + let mut theme_parent_dirs = vec![helix_loader::config_dir()]; + theme_parent_dirs.extend(helix_loader::runtime_dirs().iter().cloned()); + let theme_loader = std::sync::Arc::new(theme::Loader::new(&theme_parent_dirs)); let true_color = config.editor.true_color || crate::true_color(); let theme = config @@ -184,7 +162,7 @@ impl Application { compositor.push(editor_view); if args.load_tutor { - let path = helix_loader::runtime_dir().join("tutor"); + let path = helix_loader::runtime_file(Path::new("tutor")); editor.open(&path, Action::VerticalSplit)?; // Unset path to prevent accidentally saving to the original tutor file. doc_mut!(editor).set_path(None)?; @@ -277,10 +255,6 @@ impl Application { Ok(app) } - #[cfg(feature = "integration")] - async fn render(&mut self) {} - - #[cfg(not(feature = "integration"))] async fn render(&mut self) { let mut cx = crate::compositor::Context { editor: &mut self.editor, @@ -345,12 +319,12 @@ impl Application { tokio::select! { biased; - Some(event) = input_stream.next() => { - self.handle_terminal_events(event).await; - } Some(signal) = self.signals.next() => { self.handle_signals(signal).await; } + Some(event) = input_stream.next() => { + self.handle_terminal_events(event).await; + } Some(callback) = self.jobs.futures.next() => { self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback); self.render().await; @@ -472,13 +446,7 @@ impl Application { pub async fn handle_signals(&mut self, signal: i32) { match signal { signal::SIGTSTP => { - // restore cursor - use helix_view::graphics::CursorKind; - self.terminal - .backend_mut() - .show_cursor(CursorKind::Block) - .ok(); - restore_term().unwrap(); + self.restore_term().unwrap(); low_level::emulate_default_handler(signal::SIGTSTP).unwrap(); } signal::SIGCONT => { @@ -638,6 +606,11 @@ impl Application { self.compositor .handle_event(&Event::Resize(width, height), &mut cx) } + // Ignore keyboard release events. + CrosstermEvent::Key(crossterm::event::KeyEvent { + kind: crossterm::event::KeyEventKind::Release, + .. + }) => false, event => self.compositor.handle_event(&event.into(), &mut cx), }; @@ -707,7 +680,13 @@ impl Application { } } Notification::PublishDiagnostics(mut params) => { - let path = params.uri.to_file_path().unwrap(); + let path = match params.uri.to_file_path() { + Ok(path) => path, + Err(_) => { + log::error!("Unsupported file URI: {}", params.uri); + return; + } + }; let doc = self.editor.document_by_path_mut(&path); if let Some(doc) = doc { @@ -949,24 +928,32 @@ impl Application { Call::MethodCall(helix_lsp::jsonrpc::MethodCall { method, params, id, .. 
}) => { - let call = match MethodCall::parse(&method, params) { - Ok(call) => call, + let reply = match MethodCall::parse(&method, params) { Err(helix_lsp::Error::Unhandled) => { - error!("Language Server: Method not found {}", method); - return; + error!( + "Language Server: Method {} not found in request {}", + method, id + ); + Err(helix_lsp::jsonrpc::Error { + code: helix_lsp::jsonrpc::ErrorCode::MethodNotFound, + message: format!("Method not found: {}", method), + data: None, + }) } Err(err) => { log::error!( - "received malformed method call from Language Server: {}: {}", + "Language Server: Received malformed method call {} in request {}: {}", method, + id, err ); - return; + Err(helix_lsp::jsonrpc::Error { + code: helix_lsp::jsonrpc::ErrorCode::ParseError, + message: format!("Malformed method call: {}", method), + data: None, + }) } - }; - - let reply = match call { - MethodCall::WorkDoneProgressCreate(params) => { + Ok(MethodCall::WorkDoneProgressCreate(params)) => { self.lsp_progress.create(server_id, params.token); let editor_view = self @@ -980,7 +967,7 @@ impl Application { Ok(serde_json::Value::Null) } - MethodCall::ApplyWorkspaceEdit(params) => { + Ok(MethodCall::ApplyWorkspaceEdit(params)) => { apply_workspace_edit( &mut self.editor, helix_lsp::OffsetEncoding::Utf8, @@ -993,13 +980,13 @@ impl Application { failed_change: None, })) } - MethodCall::WorkspaceFolders => { + Ok(MethodCall::WorkspaceFolders) => { let language_server = self.editor.language_servers.get_by_id(server_id).unwrap(); Ok(json!(language_server.workspace_folders())) } - MethodCall::WorkspaceConfiguration(params) => { + Ok(MethodCall::WorkspaceConfiguration(params)) => { let result: Vec<_> = params .items .iter() @@ -1043,24 +1030,19 @@ impl Application { } } - async fn claim_term(&mut self) -> Result<(), Error> { + async fn claim_term(&mut self) -> std::io::Result<()> { + let terminal_config = self.config.load().editor.clone().into(); + self.terminal.claim(terminal_config) + } + + fn restore_term(&mut self) -> std::io::Result<()> { + let terminal_config = self.config.load().editor.clone().into(); use helix_view::graphics::CursorKind; - terminal::enable_raw_mode()?; - if self.terminal.cursor_kind() == CursorKind::Hidden { - self.terminal.backend_mut().hide_cursor().ok(); - } - let mut stdout = stdout(); - execute!( - stdout, - terminal::EnterAlternateScreen, - EnableBracketedPaste, - EnableFocusChange - )?; - execute!(stdout, terminal::Clear(terminal::ClearType::All))?; - if self.config.load().editor.mouse { - execute!(stdout, EnableMouseCapture)?; - } - Ok(()) + self.terminal + .backend_mut() + .show_cursor(CursorKind::Block) + .ok(); + self.terminal.restore(terminal_config) } pub async fn run(&mut self, input_stream: &mut S) -> Result @@ -1075,7 +1057,7 @@ impl Application { // We can't handle errors properly inside this closure. And it's // probably not a good idea to `unwrap()` inside a panic handler. // So we just ignore the `Result`. 
- let _ = restore_term(); + let _ = TerminalBackend::force_restore(); hook(info); })); @@ -1083,13 +1065,7 @@ impl Application { let close_errs = self.close().await; - // restore cursor - use helix_view::graphics::CursorKind; - self.terminal - .backend_mut() - .show_cursor(CursorKind::Block) - .ok(); - restore_term()?; + self.restore_term()?; for err in close_errs { self.editor.exit_code = 1; diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index ae98d40f2f0f..297a294c9c1d 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -5,6 +5,7 @@ pub(crate) mod typed; pub use dap::*; use helix_vcs::Hunk; pub use lsp::*; +use tokio::sync::oneshot; use tui::widgets::Row; pub use typed::*; @@ -49,9 +50,13 @@ use movement::Movement; use crate::{ args, compositor::{self, Component, Compositor}, + filter_picker_entry, job::Callback, keymap::ReverseKeymap, - ui::{self, overlay::overlayed, FilePicker, Picker, Popup, Prompt, PromptEvent}, + ui::{ + self, editor::InsertEvent, overlay::overlayed, FilePicker, Picker, Popup, Prompt, + PromptEvent, + }, }; use crate::job::{self, Jobs}; @@ -72,13 +77,15 @@ use grep_searcher::{sinks, BinaryDetection, SearcherBuilder}; use ignore::{DirEntry, WalkBuilder, WalkState}; use tokio_stream::wrappers::UnboundedReceiverStream; +pub type OnKeyCallback = Box; + pub struct Context<'a> { pub register: Option, pub count: Option, pub editor: &'a mut Editor, pub callback: Option, - pub on_next_key_callback: Option>, + pub on_next_key_callback: Option, pub jobs: &'a mut Jobs, } @@ -107,17 +114,7 @@ impl<'a> Context<'a> { T: for<'de> serde::Deserialize<'de> + Send + 'static, F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static, { - let callback = Box::pin(async move { - let json = call.await?; - let response = serde_json::from_value(json)?; - let call: job::Callback = Callback::EditorCompositor(Box::new( - move |editor: &mut Editor, compositor: &mut Compositor| { - callback(editor, compositor, response) - }, - )); - Ok(call) - }); - self.jobs.callback(callback); + self.jobs.callback(make_job_callback(call, callback)); } /// Returns 1 if no explicit count was provided @@ -127,6 +124,27 @@ impl<'a> Context<'a> { } } +#[inline] +fn make_job_callback( + call: impl Future> + 'static + Send, + callback: F, +) -> std::pin::Pin>>> +where + T: for<'de> serde::Deserialize<'de> + Send + 'static, + F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static, +{ + Box::pin(async move { + let json = call.await?; + let response = serde_json::from_value(json)?; + let call: job::Callback = Callback::EditorCompositor(Box::new( + move |editor: &mut Editor, compositor: &mut Compositor| { + callback(editor, compositor, response) + }, + )); + Ok(call) + }) +} + use helix_view::{align_view, Align}; /// A MappableCommand is either a static command like "jump_view_up" or a Typable command like @@ -275,6 +293,7 @@ impl MappableCommand { append_mode, "Append after selection", command_mode, "Enter command mode", file_picker, "Open file picker", + file_picker_in_current_buffer_directory, "Open file picker at current buffers's directory", file_picker_in_current_directory, "Open file picker at current working directory", code_action, "Perform code action", buffer_picker, "Open buffer picker", @@ -426,6 +445,7 @@ impl MappableCommand { goto_next_paragraph, "Goto next paragraph", goto_prev_paragraph, "Goto previous paragraph", dap_launch, "Launch debug target", + dap_restart, "Restart debugging session", dap_toggle_breakpoint, "Toggle breakpoint", dap_continue, 
"Continue program execution", dap_pause, "Pause program execution", @@ -955,9 +975,10 @@ fn goto_window(cx: &mut Context, align: Align) { Align::Bottom => { view.offset.vertical_offset + last_visual_line.saturating_sub(scrolloff + count) } - } - .max(view.offset.vertical_offset + scrolloff) - .min(view.offset.vertical_offset + last_visual_line.saturating_sub(scrolloff)); + }; + let visual_line = visual_line + .max(view.offset.vertical_offset + scrolloff) + .min(view.offset.vertical_offset + last_visual_line.saturating_sub(scrolloff)); let pos = view .pos_at_visual_coords(doc, visual_line as u16, 0, false) @@ -1613,6 +1634,10 @@ fn copy_selection_on_line(cx: &mut Context, direction: Direction) { sels += 1; } + if anchor_row == 0 && head_row == 0 { + break; + } + i += 1; } } @@ -2013,6 +2038,11 @@ fn global_search(cx: &mut Context) { let search_root = std::env::current_dir() .expect("Global search error: Failed to get current dir"); + let dedup_symlinks = file_picker_config.deduplicate_links; + let absolute_root = search_root + .canonicalize() + .unwrap_or_else(|_| search_root.clone()); + WalkBuilder::new(search_root) .hidden(file_picker_config.hidden) .parents(file_picker_config.parents) @@ -2022,10 +2052,9 @@ fn global_search(cx: &mut Context) { .git_global(file_picker_config.git_global) .git_exclude(file_picker_config.git_exclude) .max_depth(file_picker_config.max_depth) - // We always want to ignore the .git directory, otherwise if - // `ignore` is turned off above, we end up with a lot of noise - // in our picker. - .filter_entry(|entry| entry.file_name() != ".git") + .filter_entry(move |entry| { + filter_picker_entry(entry, &absolute_root, dedup_symlinks) + }) .build_parallel() .run(|| { let mut searcher = searcher.clone(); @@ -2396,6 +2425,22 @@ fn file_picker(cx: &mut Context) { cx.push_layer(Box::new(overlayed(picker))); } +fn file_picker_in_current_buffer_directory(cx: &mut Context) { + let doc_dir = doc!(cx.editor) + .path() + .and_then(|path| path.parent().map(|path| path.to_path_buf())); + + let path = match doc_dir { + Some(path) => path, + None => { + cx.editor.set_error("current buffer has no path or parent"); + return; + } + }; + + let picker = ui::file_picker(path, &cx.editor.config()); + cx.push_layer(Box::new(overlayed(picker))); +} fn file_picker_in_current_directory(cx: &mut Context) { let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("./")); let picker = ui::file_picker(cwd, &cx.editor.config()); @@ -2426,20 +2471,15 @@ fn buffer_picker(cx: &mut Context) { None => SCRATCH_BUFFER_NAME, }; - let mut flags = Vec::new(); + let mut flags = String::new(); if self.is_modified { - flags.push("+"); + flags.push('+'); } if self.is_current { - flags.push("*"); + flags.push('*'); } - let flag = if flags.is_empty() { - "".into() - } else { - format!(" ({})", flags.join("")) - }; - format!("{} {}{}", self.id, path, flag).into() + Row::new([self.id.to_string(), flags, path.to_string()]) } } @@ -2811,10 +2851,15 @@ fn push_jump(view: &mut View, doc: &Document) { } fn goto_line(cx: &mut Context) { - goto_line_impl(cx.editor, cx.count) + if cx.count.is_some() { + let (view, doc) = current!(cx.editor); + push_jump(view, doc); + + goto_line_without_jumplist(cx.editor, cx.count); + } } -fn goto_line_impl(editor: &mut Editor, count: Option) { +fn goto_line_without_jumplist(editor: &mut Editor, count: Option) { if let Some(count) = count { let (view, doc) = current!(editor); let text = doc.text().slice(..); @@ -2831,7 +2876,6 @@ fn goto_line_impl(editor: &mut Editor, count: 
Option) { .clone() .transform(|range| range.put_cursor(text, pos, editor.mode == Mode::Select)); - push_jump(view, doc); doc.set_selection(view.id, selection); } } @@ -2918,14 +2962,6 @@ fn exit_select_mode(cx: &mut Context) { } } -fn goto_pos(editor: &mut Editor, pos: usize) { - let (view, doc) = current!(editor); - - push_jump(view, doc); - doc.set_selection(view.id, Selection::point(pos)); - align_view(doc, view, Align::Center); -} - fn goto_first_diag(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection = match doc.diagnostics().first() { @@ -2933,7 +2969,6 @@ fn goto_first_diag(cx: &mut Context) { None => return, }; doc.set_selection(view.id, selection); - align_view(doc, view, Align::Center); } fn goto_last_diag(cx: &mut Context) { @@ -2943,7 +2978,6 @@ fn goto_last_diag(cx: &mut Context) { None => return, }; doc.set_selection(view.id, selection); - align_view(doc, view, Align::Center); } fn goto_next_diag(cx: &mut Context) { @@ -2965,7 +2999,6 @@ fn goto_next_diag(cx: &mut Context) { None => return, }; doc.set_selection(view.id, selection); - align_view(doc, view, Align::Center); } fn goto_prev_diag(cx: &mut Context) { @@ -2990,7 +3023,6 @@ fn goto_prev_diag(cx: &mut Context) { None => return, }; doc.set_selection(view.id, selection); - align_view(doc, view, Align::Center); } fn goto_first_change(cx: &mut Context) { @@ -3003,20 +3035,20 @@ fn goto_last_change(cx: &mut Context) { fn goto_first_change_impl(cx: &mut Context, reverse: bool) { let editor = &mut cx.editor; - let (_, doc) = current!(editor); + let (view, doc) = current!(editor); if let Some(handle) = doc.diff_handle() { let hunk = { - let hunks = handle.hunks(); + let diff = handle.load(); let idx = if reverse { - hunks.len().saturating_sub(1) + diff.len().saturating_sub(1) } else { 0 }; - hunks.nth_hunk(idx) + diff.nth_hunk(idx) }; if hunk != Hunk::NONE { - let pos = doc.text().line_to_char(hunk.after.start as usize); - goto_pos(editor, pos) + let range = hunk_range(hunk, doc.text().slice(..)); + doc.set_selection(view.id, Selection::single(range.anchor, range.head)); } } } @@ -3044,30 +3076,20 @@ fn goto_next_change_impl(cx: &mut Context, direction: Direction) { let selection = doc.selection(view.id).clone().transform(|range| { let cursor_line = range.cursor_line(doc_text) as u32; - let hunks = diff_handle.hunks(); + let diff = diff_handle.load(); let hunk_idx = match direction { - Direction::Forward => hunks + Direction::Forward => diff .next_hunk(cursor_line) - .map(|idx| (idx + count).min(hunks.len() - 1)), - Direction::Backward => hunks + .map(|idx| (idx + count).min(diff.len() - 1)), + Direction::Backward => diff .prev_hunk(cursor_line) .map(|idx| idx.saturating_sub(count)), }; - // TODO refactor with let..else once MSRV reaches 1.65 - let hunk_idx = if let Some(hunk_idx) = hunk_idx { - hunk_idx - } else { + let Some(hunk_idx) = hunk_idx else { return range; }; - let hunk = hunks.nth_hunk(hunk_idx); - - let hunk_start = doc_text.line_to_char(hunk.after.start as usize); - let hunk_end = if hunk.after.is_empty() { - hunk_start + 1 - } else { - doc_text.line_to_char(hunk.after.end as usize) - }; - let new_range = Range::new(hunk_start, hunk_end); + let hunk = diff.nth_hunk(hunk_idx); + let new_range = hunk_range(hunk, doc_text); if editor.mode == Mode::Select { let head = if new_range.head < range.anchor { new_range.anchor @@ -3087,6 +3109,20 @@ fn goto_next_change_impl(cx: &mut Context, direction: Direction) { cx.editor.last_motion = Some(Motion(Box::new(motion))); } +/// Returns the [Range] for a 
[Hunk] in the given text. +/// Additions and modifications cover the added and modified ranges. +/// Deletions are represented as the point at the start of the deletion hunk. +fn hunk_range(hunk: Hunk, text: RopeSlice) -> Range { + let anchor = text.line_to_char(hunk.after.start as usize); + let head = if hunk.after.is_empty() { + anchor + 1 + } else { + text.line_to_char(hunk.after.end as usize) + }; + + Range::new(anchor, head) +} + pub mod insert { use super::*; pub type Hook = fn(&Rope, &Selection, char) -> Option; @@ -3349,8 +3385,8 @@ pub mod insert { let count = cx.count(); let (view, doc) = current_ref!(cx.editor); let text = doc.text().slice(..); - let indent_unit = doc.indent_style.as_str(); - let tab_size = doc.tab_width(); + let tab_width = doc.tab_width(); + let indent_width = doc.indent_width(); let auto_pairs = doc.auto_pairs(cx.editor); let transaction = @@ -3371,18 +3407,11 @@ pub mod insert { None, ) } else { - let unit_len = indent_unit.chars().count(); - // NOTE: indent_unit always contains 'only spaces' or 'only tab' according to `IndentStyle` definition. - let unit_size = if indent_unit.starts_with('\t') { - tab_size * unit_len - } else { - unit_len - }; let width: usize = fragment .chars() .map(|ch| { if ch == '\t' { - tab_size + tab_width } else { // it can be none if it still meet control characters other than '\t' // here just set the width to 1 (or some value better?). @@ -3390,9 +3419,9 @@ pub mod insert { } }) .sum(); - let mut drop = width % unit_size; // round down to nearest unit + let mut drop = width % indent_width; // round down to nearest unit if drop == 0 { - drop = unit_size + drop = indent_width }; // if it's already at a unit, consume a whole unit let mut chars = fragment.chars().rev(); let mut start = pos; @@ -3737,6 +3766,7 @@ fn paste_impl( } doc.apply(&transaction, view.id); + doc.append_changes_to_history(view); } pub(crate) fn paste_bracketed_value(cx: &mut Context, contents: String) { @@ -3933,7 +3963,7 @@ fn unindent(cx: &mut Context) { let lines = get_lines(doc, view.id); let mut changes = Vec::with_capacity(lines.len()); let tab_width = doc.tab_width(); - let indent_width = count * tab_width; + let indent_width = count * doc.indent_width(); for line_idx in lines { let line = doc.text().line(line_idx); @@ -4164,6 +4194,24 @@ pub fn completion(cx: &mut Context) { None => return, }; + // setup a chanel that allows the request to be canceled + let (tx, rx) = oneshot::channel(); + // set completion_request so that this request can be canceled + // by setting completion_request, the old channel stored there is dropped + // and the associated request is automatically dropped + cx.editor.completion_request_handle = Some(tx); + let future = async move { + tokio::select! { + biased; + _ = rx => { + Ok(serde_json::Value::Null) + } + res = future => { + res + } + } + }; + let trigger_offset = cursor; // TODO: trigger_offset should be the cursor offset but we also need a starting offset from where we want to apply @@ -4174,12 +4222,35 @@ pub fn completion(cx: &mut Context) { iter.reverse(); let offset = iter.take_while(|ch| chars::char_is_word(*ch)).count(); let start_offset = cursor.saturating_sub(offset); + let savepoint = doc.savepoint(view); + + let trigger_doc = doc.id(); + let trigger_view = view.id; + + // FIXME: The commands Context can only have a single callback + // which means it gets overwritten when executing keybindings + // with multiple commands or macros. 
This would mean that completion + // might be incorrectly applied when repeating the insertmode action + // + // TODO: to solve this either make cx.callback a Vec of callbacks or + // alternatively move `last_insert` to `helix_view::Editor` + cx.callback = Some(Box::new( + move |compositor: &mut Compositor, _cx: &mut compositor::Context| { + let ui = compositor.find::().unwrap(); + ui.last_insert.1.push(InsertEvent::RequestCompletion); + }, + )); cx.callback( future, move |editor, compositor, response: Option| { - if editor.mode != Mode::Insert { - // we're not in insert mode anymore + let (view, doc) = current_ref!(editor); + // check if the completion request is stale. + // + // Completions are completed asynchrounsly and therefore the user could + //switch document/view or leave insert mode. In all of thoise cases the + // completion should be discarded + if editor.mode != Mode::Insert || view.id != trigger_view || doc.id() != trigger_doc { return; } @@ -4201,6 +4272,7 @@ pub fn completion(cx: &mut Context) { let ui = compositor.find::().unwrap(); ui.set_completion( editor, + savepoint, items, offset_encoding, start_offset, @@ -4318,7 +4390,6 @@ fn shrink_selection(cx: &mut Context) { // try to restore previous selection if let Some(prev_selection) = view.object_selections.pop() { if current_selection.contains(&prev_selection) { - // allow shrinking the selection only if current selection contains the previous object selection doc.set_selection(view.id, prev_selection); return; } else { @@ -4718,14 +4789,14 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) { let textobject_change = |range: Range| -> Range { let diff_handle = doc.diff_handle().unwrap(); - let hunks = diff_handle.hunks(); + let diff = diff_handle.load(); let line = range.cursor_line(text); - let hunk_idx = if let Some(hunk_idx) = hunks.hunk_at(line as u32, false) { + let hunk_idx = if let Some(hunk_idx) = diff.hunk_at(line as u32, false) { hunk_idx } else { return range; }; - let hunk = hunks.nth_hunk(hunk_idx).after; + let hunk = diff.nth_hunk(hunk_idx).after; let start = text.line_to_char(hunk.start as usize); let end = text.line_to_char(hunk.end as usize); @@ -4774,7 +4845,7 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) { ("a", "Argument/parameter (tree-sitter)"), ("c", "Comment (tree-sitter)"), ("T", "Test (tree-sitter)"), - ("m", "Closest surrounding pair to cursor"), + ("m", "Closest surrounding pair"), (" ", "... or any character acting as a pair"), ]; @@ -4783,35 +4854,39 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) { fn surround_add(cx: &mut Context) { cx.on_next_key(move |cx, event| { - let ch = match event.char() { - Some(ch) => ch, + let (view, doc) = current!(cx.editor); + // surround_len is the number of new characters being added. 
+ let (open, close, surround_len) = match event.char() { + Some(ch) => { + let (o, c) = surround::get_pair(ch); + let mut open = Tendril::new(); + open.push(o); + let mut close = Tendril::new(); + close.push(c); + (open, close, 2) + } + None if event.code == KeyCode::Enter => ( + doc.line_ending.as_str().into(), + doc.line_ending.as_str().into(), + 2 * doc.line_ending.len_chars(), + ), None => return, }; - let (view, doc) = current!(cx.editor); - let selection = doc.selection(view.id); - let (open, close) = surround::get_pair(ch); - // The number of chars in get_pair - let surround_len = 2; + let selection = doc.selection(view.id); let mut changes = Vec::with_capacity(selection.len() * 2); let mut ranges = SmallVec::with_capacity(selection.len()); let mut offs = 0; for range in selection.iter() { - let mut o = Tendril::new(); - o.push(open); - let mut c = Tendril::new(); - c.push(close); - changes.push((range.from(), range.from(), Some(o))); - changes.push((range.to(), range.to(), Some(c))); - - // Add 2 characters to the range to select them + changes.push((range.from(), range.from(), Some(open.clone()))); + changes.push((range.to(), range.to(), Some(close.clone()))); + ranges.push( Range::new(offs + range.from(), offs + range.to() + surround_len) .with_direction(range.direction()), ); - // Add 2 characters to the offset for the next ranges offs += surround_len; } @@ -5022,7 +5097,10 @@ async fn shell_impl_async( log::error!("Shell error: {}", err); bail!("Shell error: {}", err); } - bail!("Shell command failed"); + match output.status.code() { + Some(exit_code) => bail!("Shell command failed: status {}", exit_code), + None => bail!("Shell command failed"), + } } else if !output.stderr.is_empty() { log::debug!( "Command printed to stderr: {}", diff --git a/helix-term/src/commands/dap.rs b/helix-term/src/commands/dap.rs index b3166e395d90..dac1e9d5258d 100644 --- a/helix-term/src/commands/dap.rs +++ b/helix-term/src/commands/dap.rs @@ -289,6 +289,36 @@ pub fn dap_launch(cx: &mut Context) { )))); } +pub fn dap_restart(cx: &mut Context) { + let debugger = match &cx.editor.debugger { + Some(debugger) => debugger, + None => { + cx.editor.set_error("Debugger is not running"); + return; + } + }; + if !debugger + .capabilities() + .supports_restart_request + .unwrap_or(false) + { + cx.editor + .set_error("Debugger does not support session restarts"); + return; + } + if debugger.starting_request_args().is_none() { + cx.editor + .set_error("No arguments found with which to restart the sessions"); + return; + } + + dap_callback( + cx.jobs, + debugger.restart(), + |editor, _compositor, _resp: ()| editor.set_status("Debugging session restarted"), + ); +} + fn debug_parameter_prompt( completions: Vec, config_name: String, @@ -475,19 +505,36 @@ pub fn dap_variables(cx: &mut Context) { if debugger.thread_id.is_none() { cx.editor - .set_status("Cannot access variables while target is running"); + .set_status("Cannot access variables while target is running."); return; } let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) { (Some(frame), Some(thread_id)) => (frame, thread_id), _ => { cx.editor - .set_status("Cannot find current stack frame to access variables"); + .set_status("Cannot find current stack frame to access variables."); + return; + } + }; + + let thread_frame = match debugger.stack_frames.get(&thread_id) { + Some(thread_frame) => thread_frame, + None => { + cx.editor + .set_error("Failed to get stack frame for thread: {thread_id}"); + return; + } + }; + let stack_frame = 
match thread_frame.get(frame) { + Some(stack_frame) => stack_frame, + None => { + cx.editor + .set_error("Failed to get stack frame for thread {thread_id} and frame {frame}."); return; } }; - let frame_id = debugger.stack_frames[&thread_id][frame].id; + let frame_id = stack_frame.id; let scopes = match block_on(debugger.scopes(frame_id)) { Ok(s) => s, Err(e) => { @@ -539,7 +586,7 @@ pub fn dap_variables(cx: &mut Context) { pub fn dap_terminate(cx: &mut Context) { let debugger = debugger!(cx.editor); - let request = debugger.disconnect(); + let request = debugger.disconnect(None); dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { // editor.set_error(format!("Failed to disconnect: {}", e)); editor.debugger = None; diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index d12aa436dd98..f9d9856f58c7 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -1,8 +1,11 @@ use futures_util::FutureExt; use helix_lsp::{ block_on, - lsp::{self, CodeAction, CodeActionOrCommand, DiagnosticSeverity, NumberOrString}, - util::{diagnostic_to_lsp_diagnostic, lsp_pos_to_pos, lsp_range_to_range, range_to_lsp_range}, + lsp::{ + self, CodeAction, CodeActionOrCommand, CodeActionTriggerKind, DiagnosticSeverity, + NumberOrString, + }, + util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range}, OffsetEncoding, }; use tui::{ @@ -12,8 +15,13 @@ use tui::{ use super::{align_view, push_jump, Align, Context, Editor, Open}; -use helix_core::{path, Selection}; -use helix_view::{document::Mode, editor::Action, theme::Style}; +use helix_core::{path, text_annotations::InlineAnnotation, Selection}; +use helix_view::{ + document::{DocumentInlayHints, DocumentInlayHintsId, Mode}, + editor::Action, + theme::Style, + Document, View, +}; use crate::{ compositor::{self, Compositor}, @@ -24,7 +32,8 @@ use crate::{ }; use std::{ - borrow::Cow, cmp::Ordering, collections::BTreeMap, fmt::Write, path::PathBuf, sync::Arc, + borrow::Cow, cmp::Ordering, collections::BTreeMap, fmt::Write, future::Future, path::PathBuf, + sync::Arc, }; /// Gets the language server that is attached to a document, and @@ -142,7 +151,8 @@ impl ui::menu::Item for PickerDiagnostic { let path = match format { DiagnosticsFormat::HideSourcePath => String::new(), DiagnosticsFormat::ShowSourcePath => { - let path = path::get_truncated_path(self.url.path()); + let file_path = self.url.to_file_path().unwrap(); + let path = path::get_truncated_path(file_path); format!("{}: ", path.to_string_lossy()) } }; @@ -192,15 +202,15 @@ fn jump_to_location( } } let (view, doc) = current!(editor); - let definition_pos = location.range.start; // TODO: convert inside server - let new_pos = if let Some(new_pos) = lsp_pos_to_pos(doc.text(), definition_pos, offset_encoding) - { - new_pos - } else { - return; - }; - doc.set_selection(view.id, Selection::point(new_pos)); + let new_range = + if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) { + new_range + } else { + log::warn!("lsp position out of bounds - {:?}", location.range); + return; + }; + doc.set_selection(view.id, Selection::single(new_range.anchor, new_range.head)); align_view(doc, view, Align::Center); } @@ -561,6 +571,7 @@ pub fn code_action(cx: &mut Context) { .map(|diag| diagnostic_to_lsp_diagnostic(doc.text(), diag, offset_encoding)) .collect(), only: None, + trigger_kind: Some(CodeActionTriggerKind::INVOKED), }, ) { Some(future) => future, @@ -1227,49 +1238,115 @@ pub fn hover(cx: &mut Context) 
{ } pub fn rename_symbol(cx: &mut Context) { - let (view, doc) = current_ref!(cx.editor); - let text = doc.text().slice(..); - let primary_selection = doc.selection(view.id).primary(); - let prefill = if primary_selection.len() > 1 { - primary_selection - } else { - use helix_core::textobject::{textobject_word, TextObject}; - textobject_word(text, primary_selection, TextObject::Inside, 1, false) + fn get_prefill_from_word_boundary(editor: &Editor) -> String { + let (view, doc) = current_ref!(editor); + let text = doc.text().slice(..); + let primary_selection = doc.selection(view.id).primary(); + if primary_selection.len() > 1 { + primary_selection + } else { + use helix_core::textobject::{textobject_word, TextObject}; + textobject_word(text, primary_selection, TextObject::Inside, 1, false) + } + .fragment(text) + .into() } - .fragment(text) - .into(); - ui::prompt_with_input( - cx, - "rename-to:".into(), - prefill, - None, - ui::completers::none, - move |cx: &mut compositor::Context, input: &str, event: PromptEvent| { - if event != PromptEvent::Validate { - return; + + fn get_prefill_from_lsp_response( + editor: &Editor, + offset_encoding: OffsetEncoding, + response: Option, + ) -> Result { + match response { + Some(lsp::PrepareRenameResponse::Range(range)) => { + let text = doc!(editor).text(); + + Ok(lsp_range_to_range(text, range, offset_encoding) + .ok_or("lsp sent invalid selection range for rename")? + .fragment(text.slice(..)) + .into()) } + Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { placeholder, .. }) => { + Ok(placeholder) + } + Some(lsp::PrepareRenameResponse::DefaultBehavior { .. }) => { + Ok(get_prefill_from_word_boundary(editor)) + } + None => Err("lsp did not respond to prepare rename request"), + } + } - let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); + fn create_rename_prompt(editor: &Editor, prefill: String) -> Box { + let prompt = ui::Prompt::new( + "rename-to:".into(), + None, + ui::completers::none, + move |cx: &mut compositor::Context, input: &str, event: PromptEvent| { + if event != PromptEvent::Validate { + return; + } - let pos = doc.position(view.id, offset_encoding); + let (view, doc) = current!(cx.editor); + let language_server = language_server!(cx.editor, doc); + let offset_encoding = language_server.offset_encoding(); + + let pos = doc.position(view.id, offset_encoding); + + let future = + match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) { + Some(future) => future, + None => { + cx.editor + .set_error("Language server does not support symbol renaming"); + return; + } + }; + match block_on(future) { + Ok(edits) => apply_workspace_edit(cx.editor, offset_encoding, &edits), + Err(err) => cx.editor.set_error(err.to_string()), + } + }, + ) + .with_line(prefill, editor); - let future = - match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) { - Some(future) => future, - None => { - cx.editor - .set_error("Language server does not support symbol renaming"); + Box::new(prompt) + } + + let (view, doc) = current!(cx.editor); + let language_server = language_server!(cx.editor, doc); + let offset_encoding = language_server.offset_encoding(); + + let pos = doc.position(view.id, offset_encoding); + + match language_server.prepare_rename(doc.identifier(), pos) { + // Language server supports textDocument/prepareRename, use it. 
+ Some(future) => cx.callback( + future, + move |editor, compositor, response: Option| { + let prefill = match get_prefill_from_lsp_response(editor, offset_encoding, response) + { + Ok(p) => p, + Err(e) => { + editor.set_error(e); return; } }; - match block_on(future) { - Ok(edits) => apply_workspace_edit(cx.editor, offset_encoding, &edits), - Err(err) => cx.editor.set_error(err.to_string()), - } - }, - ); + + let prompt = create_rename_prompt(editor, prefill); + + compositor.push(prompt); + }, + ), + // Language server does not support textDocument/prepareRename, fall back + // to word boundary selection. + None => { + let prefill = get_prefill_from_word_boundary(cx.editor); + + let prompt = create_rename_prompt(cx.editor, prefill); + + cx.push_layer(prompt); + } + }; } pub fn select_references_to_symbol_under_cursor(cx: &mut Context) { @@ -1320,3 +1397,174 @@ pub fn select_references_to_symbol_under_cursor(cx: &mut Context) { }, ); } + +pub fn compute_inlay_hints_for_all_views(editor: &mut Editor, jobs: &mut crate::job::Jobs) { + if !editor.config().lsp.display_inlay_hints { + return; + } + + for (view, _) in editor.tree.views() { + let doc = match editor.documents.get(&view.doc) { + Some(doc) => doc, + None => continue, + }; + if let Some(callback) = compute_inlay_hints_for_view(view, doc) { + jobs.callback(callback); + } + } +} + +fn compute_inlay_hints_for_view( + view: &View, + doc: &Document, +) -> Option>>>> { + let view_id = view.id; + let doc_id = view.doc; + + let language_server = doc.language_server()?; + + let capabilities = language_server.capabilities(); + + let (future, new_doc_inlay_hints_id) = match capabilities.inlay_hint_provider { + Some( + lsp::OneOf::Left(true) + | lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)), + ) => { + let doc_text = doc.text(); + let len_lines = doc_text.len_lines(); + + // Compute ~3 times the current view height of inlay hints, that way some scrolling + // will not show half the view with hints and half without while still being faster + // than computing all the hints for the full file (which could be dozens of time + // longer than the view is). 
+ let view_height = view.inner_height(); + let first_visible_line = doc_text.char_to_line(view.offset.anchor); + let first_line = first_visible_line.saturating_sub(view_height); + let last_line = first_visible_line + .saturating_add(view_height.saturating_mul(2)) + .min(len_lines); + + let new_doc_inlay_hint_id = DocumentInlayHintsId { + first_line, + last_line, + }; + // Don't recompute the annotations in case nothing has changed about the view + if !doc.inlay_hints_oudated + && doc + .inlay_hints(view_id) + .map_or(false, |dih| dih.id == new_doc_inlay_hint_id) + { + return None; + } + + let doc_slice = doc_text.slice(..); + let first_char_in_range = doc_slice.line_to_char(first_line); + let last_char_in_range = doc_slice.line_to_char(last_line); + + let range = helix_lsp::util::range_to_lsp_range( + doc_text, + helix_core::Range::new(first_char_in_range, last_char_in_range), + language_server.offset_encoding(), + ); + + ( + language_server.text_document_range_inlay_hints(doc.identifier(), range, None), + new_doc_inlay_hint_id, + ) + } + _ => return None, + }; + + let callback = super::make_job_callback( + future?, + move |editor, _compositor, response: Option>| { + // The config was modified or the window was closed while the request was in flight + if !editor.config().lsp.display_inlay_hints || editor.tree.try_get(view_id).is_none() { + return; + } + + // Add annotations to relevant document, not the current one (it may have changed in between) + let doc = match editor.documents.get_mut(&doc_id) { + Some(doc) => doc, + None => return, + }; + + // If we have neither hints nor an LSP, empty the inlay hints since they're now oudated + let (mut hints, offset_encoding) = match (response, doc.language_server()) { + (Some(h), Some(ls)) if !h.is_empty() => (h, ls.offset_encoding()), + _ => { + doc.set_inlay_hints( + view_id, + DocumentInlayHints::empty_with_id(new_doc_inlay_hints_id), + ); + doc.inlay_hints_oudated = false; + return; + } + }; + + // Most language servers will already send them sorted but ensure this is the case to + // avoid errors on our end. + hints.sort_unstable_by_key(|inlay_hint| inlay_hint.position); + + let mut padding_before_inlay_hints = Vec::new(); + let mut type_inlay_hints = Vec::new(); + let mut parameter_inlay_hints = Vec::new(); + let mut other_inlay_hints = Vec::new(); + let mut padding_after_inlay_hints = Vec::new(); + + let doc_text = doc.text(); + + for hint in hints { + let char_idx = + match helix_lsp::util::lsp_pos_to_pos(doc_text, hint.position, offset_encoding) + { + Some(pos) => pos, + // Skip inlay hints that have no "real" position + None => continue, + }; + + let label = match hint.label { + lsp::InlayHintLabel::String(s) => s, + lsp::InlayHintLabel::LabelParts(parts) => parts + .into_iter() + .map(|p| p.value) + .collect::>() + .join(""), + }; + + let inlay_hints_vec = match hint.kind { + Some(lsp::InlayHintKind::TYPE) => &mut type_inlay_hints, + Some(lsp::InlayHintKind::PARAMETER) => &mut parameter_inlay_hints, + // We can't warn on unknown kind here since LSPs are free to set it or not, for + // example Rust Analyzer does not: every kind will be `None`. 
+ _ => &mut other_inlay_hints, + }; + + if let Some(true) = hint.padding_left { + padding_before_inlay_hints.push(InlineAnnotation::new(char_idx, " ")); + } + + inlay_hints_vec.push(InlineAnnotation::new(char_idx, label)); + + if let Some(true) = hint.padding_right { + padding_after_inlay_hints.push(InlineAnnotation::new(char_idx, " ")); + } + } + + doc.set_inlay_hints( + view_id, + DocumentInlayHints { + id: new_doc_inlay_hints_id, + type_inlay_hints: type_inlay_hints.into(), + parameter_inlay_hints: parameter_inlay_hints.into(), + other_inlay_hints: other_inlay_hints.into(), + padding_before_inlay_hints: padding_before_inlay_hints.into(), + padding_after_inlay_hints: padding_after_inlay_hints.into(), + }, + ); + doc.inlay_hints_oudated = false; + }, + ); + + Some(callback) +} diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index ec7100a63740..e9a7222586a9 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1,10 +1,14 @@ +use std::fmt::Write; use std::ops::Deref; use crate::job::Job; use super::*; +use helix_core::encoding; +use helix_view::document::DEFAULT_LANGUAGE_NAME; use helix_view::editor::{Action, CloseError, ConfigEvent}; +use serde_json::Value; use ui::completers::{self, Completer}; #[derive(Clone)] @@ -913,6 +917,7 @@ fn replace_selections_with_clipboard_impl( cx: &mut compositor::Context, clipboard_type: ClipboardType, ) -> anyhow::Result<()> { + let scrolloff = cx.editor.config().scrolloff; let (view, doc) = current!(cx.editor); match cx.editor.clipboard_provider.get_contents(clipboard_type) { @@ -924,6 +929,7 @@ fn replace_selections_with_clipboard_impl( doc.apply(&transaction, view.id); doc.append_changes_to_history(view); + view.ensure_cursor_in_view(doc, scrolloff); Ok(()) } Err(e) => Err(e.context("Couldn't get system clipboard contents")), @@ -1031,6 +1037,131 @@ fn set_encoding( } } +/// Shows info about the character under the primary cursor. 
+fn get_character_info( + cx: &mut compositor::Context, + _args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + let (view, doc) = current_ref!(cx.editor); + let text = doc.text().slice(..); + + let grapheme_start = doc.selection(view.id).primary().cursor(text); + let grapheme_end = graphemes::next_grapheme_boundary(text, grapheme_start); + + if grapheme_start == grapheme_end { + return Ok(()); + } + + let grapheme = text.slice(grapheme_start..grapheme_end).to_string(); + let encoding = doc.encoding(); + + let printable = grapheme.chars().fold(String::new(), |mut s, c| { + match c { + '\0' => s.push_str("\\0"), + '\t' => s.push_str("\\t"), + '\n' => s.push_str("\\n"), + '\r' => s.push_str("\\r"), + _ => s.push(c), + } + + s + }); + + // Convert to Unicode codepoints if in UTF-8 + let unicode = if encoding == encoding::UTF_8 { + let mut unicode = " (".to_owned(); + + for (i, char) in grapheme.chars().enumerate() { + if i != 0 { + unicode.push(' '); + } + + unicode.push_str("U+"); + + let codepoint: u32 = if char.is_ascii() { + char.into() + } else { + // Not ascii means it will be multi-byte, so strip out the extra + // bits that encode the length & mark continuation bytes + + let s = String::from(char); + let bytes = s.as_bytes(); + + // First byte starts with 2-4 ones then a zero, so strip those off + let first = bytes[0]; + let codepoint = first & (0xFF >> (first.leading_ones() + 1)); + let mut codepoint = u32::from(codepoint); + + // Following bytes start with 10 + for byte in bytes.iter().skip(1) { + codepoint <<= 6; + codepoint += u32::from(*byte) & 0x3F; + } + + codepoint + }; + + write!(unicode, "{codepoint:0>4x}").unwrap(); + } + + unicode.push(')'); + unicode + } else { + String::new() + }; + + // Give the decimal value for ascii characters + let dec = if encoding.is_ascii_compatible() && grapheme.len() == 1 { + format!(" Dec {}", grapheme.as_bytes()[0]) + } else { + String::new() + }; + + let hex = { + let mut encoder = encoding.new_encoder(); + let max_encoded_len = encoder + .max_buffer_length_from_utf8_without_replacement(grapheme.len()) + .unwrap(); + let mut bytes = Vec::with_capacity(max_encoded_len); + let mut current_byte = 0; + let mut hex = String::new(); + + for (i, char) in grapheme.chars().enumerate() { + if i != 0 { + hex.push_str(" +"); + } + + let (result, _input_bytes_read) = encoder.encode_from_utf8_to_vec_without_replacement( + &char.to_string(), + &mut bytes, + true, + ); + + if let encoding::EncoderResult::Unmappable(char) = result { + bail!("{char:?} cannot be mapped to {}", encoding.name()); + } + + for byte in &bytes[current_byte..] { + write!(hex, " {byte:0>2x}").unwrap(); + } + + current_byte = bytes.len(); + } + + hex + }; + + cx.editor + .set_status(format!("\"{printable}\"{unicode}{dec} Hex{hex}")); + + Ok(()) +} + /// Reload the [`Document`] from its source file. 
fn reload( cx: &mut compositor::Context, @@ -1223,6 +1354,37 @@ fn lsp_restart( Ok(()) } +fn lsp_stop( + cx: &mut compositor::Context, + _args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + let doc = doc!(cx.editor); + + let ls_id = doc + .language_server() + .map(|ls| ls.id()) + .context("LSP not running for the current document")?; + + let config = doc + .language_config() + .context("LSP not defined for the current document")?; + cx.editor.language_servers.stop(config); + + for doc in cx.editor.documents_mut() { + if doc.language_server().map_or(false, |ls| ls.id() == ls_id) { + doc.set_language_server(None); + doc.set_diagnostics(Default::default()); + } + } + + Ok(()) +} + fn tree_sitter_scopes( cx: &mut compositor::Context, _args: &[Cow], @@ -1403,54 +1565,74 @@ fn tutor( return Ok(()); } - let path = helix_loader::runtime_dir().join("tutor"); + let path = helix_loader::runtime_file(Path::new("tutor")); cx.editor.open(&path, Action::Replace)?; // Unset path to prevent accidentally saving to the original tutor file. doc_mut!(cx.editor).set_path(None)?; Ok(()) } +fn abort_goto_line_number_preview(cx: &mut compositor::Context) { + if let Some(last_selection) = cx.editor.last_selection.take() { + let scrolloff = cx.editor.config().scrolloff; + + let (view, doc) = current!(cx.editor); + doc.set_selection(view.id, last_selection); + view.ensure_cursor_in_view(doc, scrolloff); + } +} + +fn update_goto_line_number_preview( + cx: &mut compositor::Context, + args: &[Cow], +) -> anyhow::Result<()> { + cx.editor.last_selection.get_or_insert_with(|| { + let (view, doc) = current!(cx.editor); + doc.selection(view.id).clone() + }); + + let scrolloff = cx.editor.config().scrolloff; + let line = args[0].parse::()?; + goto_line_without_jumplist(cx.editor, NonZeroUsize::new(line)); + + let (view, doc) = current!(cx.editor); + view.ensure_cursor_in_view(doc, scrolloff); + + Ok(()) +} + pub(super) fn goto_line_number( cx: &mut compositor::Context, args: &[Cow], event: PromptEvent, ) -> anyhow::Result<()> { match event { - PromptEvent::Abort => { - if let Some(line_number) = cx.editor.last_line_number { - goto_line_impl(cx.editor, NonZeroUsize::new(line_number)); - let (view, doc) = current!(cx.editor); - view.ensure_cursor_in_view(doc, line_number); - cx.editor.last_line_number = None; - } - return Ok(()); - } + PromptEvent::Abort => abort_goto_line_number_preview(cx), PromptEvent::Validate => { ensure!(!args.is_empty(), "Line number required"); - cx.editor.last_line_number = None; - } - PromptEvent::Update => { - if args.is_empty() { - if let Some(line_number) = cx.editor.last_line_number { - // When a user hits backspace and there are no numbers left, - // we can bring them back to their original line - goto_line_impl(cx.editor, NonZeroUsize::new(line_number)); - let (view, doc) = current!(cx.editor); - view.ensure_cursor_in_view(doc, line_number); - cx.editor.last_line_number = None; - } - return Ok(()); - } + + // If we are invoked directly via a keybinding, Validate is + // sent without any prior Update events. Ensure the cursor + // is moved to the appropriate location. 
+ update_goto_line_number_preview(cx, args)?; + + let last_selection = cx + .editor + .last_selection + .take() + .expect("update_goto_line_number_preview should always set last_selection"); + let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - let line = doc.selection(view.id).primary().cursor_line(text); - cx.editor.last_line_number.get_or_insert(line + 1); + view.jumps.push((doc.id(), last_selection)); } + + // When a user hits backspace and there are no numbers left, + // we can bring them back to their original selection. If they + // begin typing numbers again, we'll start a new preview session. + PromptEvent::Update if args.is_empty() => abort_goto_line_number_preview(cx), + PromptEvent::Update => update_goto_line_number_preview(cx, args)?, } - let line = args[0].parse::()?; - goto_line_impl(cx.editor, NonZeroUsize::new(line)); - let (view, doc) = current!(cx.editor); - view.ensure_cursor_in_view(doc, line); + Ok(()) } @@ -1517,6 +1699,46 @@ fn set_option( Ok(()) } +/// Toggle boolean config option at runtime. Access nested values by dot +/// syntax, for example to toggle smart case search, use `:toggle search.smart- +/// case`. +fn toggle_option( + cx: &mut compositor::Context, + args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + if args.len() != 1 { + anyhow::bail!("Bad arguments. Usage: `:toggle key`"); + } + let key = &args[0].to_lowercase(); + + let key_error = || anyhow::anyhow!("Unknown key `{}`", key); + + let mut config = serde_json::json!(&cx.editor.config().deref()); + let pointer = format!("/{}", key.replace('.', "/")); + let value = config.pointer_mut(&pointer).ok_or_else(key_error)?; + + if let Value::Bool(b) = *value { + *value = Value::Bool(!b); + } else { + anyhow::bail!("Key `{}` is not toggle-able", key) + } + + // This unwrap should never fail because we only replace one boolean value + // with another, maintaining a valid json config + let config = serde_json::from_value(config).unwrap(); + + cx.editor + .config_events + .0 + .send(ConfigEvent::Update(config))?; + Ok(()) +} + /// Change the language of the current buffer at runtime. fn language( cx: &mut compositor::Context, @@ -1527,13 +1749,20 @@ fn language( return Ok(()); } + if args.is_empty() { + let doc = doc!(cx.editor); + let language = &doc.language_name().unwrap_or(DEFAULT_LANGUAGE_NAME); + cx.editor.set_status(language.to_string()); + return Ok(()); + } + if args.len() != 1 { anyhow::bail!("Bad arguments. 
Usage: `:set-language language`"); } let doc = doc_mut!(cx.editor); - if args[0] == "text" { + if args[0] == DEFAULT_LANGUAGE_NAME { doc.set_language(None, None) } else { doc.set_language_by_language_id(&args[0], cx.editor.syn_loader.clone())?; @@ -1570,6 +1799,7 @@ fn sort_impl( _args: &[Cow], reverse: bool, ) -> anyhow::Result<()> { + let scrolloff = cx.editor.config().scrolloff; let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); @@ -1595,6 +1825,7 @@ fn sort_impl( doc.apply(&transaction, view.id); doc.append_changes_to_history(view); + view.ensure_cursor_in_view(doc, scrolloff); Ok(()) } @@ -1609,30 +1840,26 @@ fn reflow( } let scrolloff = cx.editor.config().scrolloff; + let cfg_text_width: usize = cx.editor.config().text_width; let (view, doc) = current!(cx.editor); - const DEFAULT_MAX_LEN: usize = 79; - - // Find the max line length by checking the following sources in order: + // Find the text_width by checking the following sources in order: // - The passed argument in `args` - // - The configured max_line_len for this language in languages.toml - // - The const default we set above - let max_line_len: usize = args + // - The configured text-width for this language in languages.toml + // - The configured text-width in the config.toml + let text_width: usize = args .get(0) .map(|num| num.parse::()) .transpose()? - .or_else(|| { - doc.language_config() - .and_then(|config| config.max_line_length) - }) - .unwrap_or(DEFAULT_MAX_LEN); + .or_else(|| doc.language_config().and_then(|config| config.text_width)) + .unwrap_or(cfg_text_width); let rope = doc.text(); let selection = doc.selection(view.id); let transaction = Transaction::change_by_selection(rope, selection, |range| { let fragment = range.fragment(rope.slice(..)); - let reflowed_text = helix_core::wrap::reflow_hard_wrap(&fragment, max_line_len); + let reflowed_text = helix_core::wrap::reflow_hard_wrap(&fragment, text_width); (range.from(), range.to(), Some(reflowed_text)) }); @@ -1822,6 +2049,64 @@ fn run_shell_command( Ok(()) } +fn reset_diff_change( + cx: &mut compositor::Context, + args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + ensure!(args.is_empty(), ":reset-diff-change takes no arguments"); + + let editor = &mut cx.editor; + let scrolloff = editor.config().scrolloff; + + let (view, doc) = current!(editor); + // TODO refactor to use let..else once MSRV is raised to 1.65 + let handle = match doc.diff_handle() { + Some(handle) => handle, + None => bail!("Diff is not available in the current buffer"), + }; + + let diff = handle.load(); + let doc_text = doc.text().slice(..); + let line = doc.selection(view.id).primary().cursor_line(doc_text); + + // TODO refactor to use let..else once MSRV is raised to 1.65 + let hunk_idx = match diff.hunk_at(line as u32, true) { + Some(hunk_idx) => hunk_idx, + None => bail!("There is no change at the cursor"), + }; + let hunk = diff.nth_hunk(hunk_idx); + let diff_base = diff.diff_base(); + let before_start = diff_base.line_to_char(hunk.before.start as usize); + let before_end = diff_base.line_to_char(hunk.before.end as usize); + let text: Tendril = diff + .diff_base() + .slice(before_start..before_end) + .chunks() + .collect(); + let anchor = doc_text.line_to_char(hunk.after.start as usize); + let transaction = Transaction::change( + doc.text(), + [( + anchor, + doc_text.line_to_char(hunk.after.end as usize), + (!text.is_empty()).then_some(text), + )] + .into_iter(), + ); + drop(diff); // make 
borrow check happy + doc.apply(&transaction, view.id); + // select inserted text + let text_len = before_end - before_start; + doc.set_selection(view.id, Selection::single(anchor, anchor + text_len)); + doc.append_changes_to_history(view); + view.ensure_cursor_in_view(doc, scrolloff); + Ok(()) +} + pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "quit", @@ -2127,6 +2412,13 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ fun: set_encoding, completer: None, }, + TypableCommand { + name: "character-info", + aliases: &["char"], + doc: "Get info about the character under the primary cursor.", + fun: get_character_info, + completer: None, + }, TypableCommand { name: "reload", aliases: &[], @@ -2162,6 +2454,13 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ fun: lsp_restart, completer: None, }, + TypableCommand { + name: "lsp-stop", + aliases: &[], + doc: "Stops the Language Server that is in use by the current doc", + fun: lsp_stop, + completer: None, + }, TypableCommand { name: "tree-sitter-scopes", aliases: &[], @@ -2235,7 +2534,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "set-language", aliases: &["lang"], - doc: "Set the language of current buffer.", + doc: "Set the language of current buffer (show current language if no value specified).", fun: language, completer: Some(completers::language), }, @@ -2246,6 +2545,13 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ fun: set_option, completer: Some(completers::setting), }, + TypableCommand { + name: "toggle-option", + aliases: &["toggle"], + doc: "Toggle a boolean config option at runtime.\nFor example to toggle smart case search, use `:toggle search.smart-case`.", + fun: toggle_option, + completer: Some(completers::setting), + }, TypableCommand { name: "get-option", aliases: &["get"], @@ -2337,6 +2643,13 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ fun: run_shell_command, completer: Some(completers::filename), }, + TypableCommand { + name: "reset-diff-change", + aliases: &["diffget", "diffg"], + doc: "Reset the diff change at the cursor position.", + fun: reset_diff_change, + completer: None, + }, ]; pub static TYPABLE_COMMAND_MAP: Lazy> = diff --git a/helix-term/src/compositor.rs b/helix-term/src/compositor.rs index 2e4a2e20e9f4..bcb3e44904e4 100644 --- a/helix-term/src/compositor.rs +++ b/helix-term/src/compositor.rs @@ -7,6 +7,7 @@ use helix_view::graphics::{CursorKind, Rect}; use tui::buffer::Buffer as Surface; pub type Callback = Box; +pub type SyncCallback = Box; // Cursive-inspired pub enum EventResult { diff --git a/helix-term/src/health.rs b/helix-term/src/health.rs index 6558fe19fb4c..480c2c67579b 100644 --- a/helix-term/src/health.rs +++ b/helix-term/src/health.rs @@ -52,7 +52,7 @@ pub fn general() -> std::io::Result<()> { let config_file = helix_loader::config_file(); let lang_file = helix_loader::lang_config_file(); let log_file = helix_loader::log_file(); - let rt_dir = helix_loader::runtime_dir(); + let rt_dirs = helix_loader::runtime_dirs(); let clipboard_provider = get_clipboard_provider(); if config_file.exists() { @@ -66,17 +66,31 @@ pub fn general() -> std::io::Result<()> { writeln!(stdout, "Language file: default")?; } writeln!(stdout, "Log file: {}", log_file.display())?; - writeln!(stdout, "Runtime directory: {}", rt_dir.display())?; - - if let Ok(path) = std::fs::read_link(&rt_dir) { - let msg = format!("Runtime directory is symlinked to {}", path.display()); - writeln!(stdout, "{}", msg.yellow())?; - } - if 
!rt_dir.exists() { - writeln!(stdout, "{}", "Runtime directory does not exist.".red())?; - } - if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) { - writeln!(stdout, "{}", "Runtime directory is empty.".red())?; + writeln!( + stdout, + "Runtime directories: {}", + rt_dirs + .iter() + .map(|d| d.to_string_lossy()) + .collect::>() + .join(";") + )?; + for rt_dir in rt_dirs.iter() { + if let Ok(path) = std::fs::read_link(rt_dir) { + let msg = format!( + "Runtime directory {} is symlinked to: {}", + rt_dir.display(), + path.display() + ); + writeln!(stdout, "{}", msg.yellow())?; + } + if !rt_dir.exists() { + let msg = format!("Runtime directory does not exist: {}", rt_dir.display()); + writeln!(stdout, "{}", msg.yellow())?; + } else if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) { + let msg = format!("Runtime directory is empty: {}", rt_dir.display()); + writeln!(stdout, "{}", msg.yellow())?; + } } writeln!(stdout, "Clipboard provider: {}", clipboard_provider.name())?; diff --git a/helix-term/src/job.rs b/helix-term/src/job.rs index 2888b6eb1565..19f2521a5231 100644 --- a/helix-term/src/job.rs +++ b/helix-term/src/job.rs @@ -5,9 +5,12 @@ use crate::compositor::Compositor; use futures_util::future::{BoxFuture, Future, FutureExt}; use futures_util::stream::{FuturesUnordered, StreamExt}; +pub type EditorCompositorCallback = Box; +pub type EditorCallback = Box; + pub enum Callback { - EditorCompositor(Box), - Editor(Box), + EditorCompositor(EditorCompositorCallback), + Editor(EditorCallback), } pub type JobFuture = BoxFuture<'static, anyhow::Result>>; diff --git a/helix-term/src/keymap/default.rs b/helix-term/src/keymap/default.rs index 01184f80edcd..9bd002809d61 100644 --- a/helix-term/src/keymap/default.rs +++ b/helix-term/src/keymap/default.rs @@ -223,6 +223,7 @@ pub fn default() -> HashMap { "'" => last_picker, "g" => { "Debug (experimental)" sticky=true "l" => dap_launch, + "r" => dap_restart, "b" => dap_toggle_breakpoint, "c" => dap_continue, "h" => dap_pause, @@ -363,7 +364,7 @@ pub fn default() -> HashMap { "A-d" | "A-del" => delete_word_forward, "C-u" => kill_to_line_start, "C-k" => kill_to_line_end, - "C-h" | "backspace" => delete_char_backward, + "C-h" | "backspace" | "S-backspace" => delete_char_backward, "C-d" | "del" => delete_char_forward, "C-j" | "ret" => insert_newline, "tab" => insert_tab, diff --git a/helix-term/src/lib.rs b/helix-term/src/lib.rs index a945b20dedaf..2f6ec12b13fd 100644 --- a/helix-term/src/lib.rs +++ b/helix-term/src/lib.rs @@ -10,6 +10,9 @@ pub mod health; pub mod job; pub mod keymap; pub mod ui; +use std::path::Path; + +use ignore::DirEntry; pub use keymap::macros::*; #[cfg(not(windows))] @@ -22,3 +25,25 @@ fn true_color() -> bool { fn true_color() -> bool { true } + +/// Function used for filtering dir entries in the various file pickers. +fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> bool { + // We always want to ignore the .git directory, otherwise if + // `ignore` is turned off, we end up with a lot of noise + // in our picker. + if entry.file_name() == ".git" { + return false; + } + + // We also ignore symlinks that point inside the current directory + // if `dedup_links` is enabled. 
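The comment above describes the deduplication rule that the closing lines of this helper implement: a symlinked entry is kept only when its resolved target escapes the picker root, since links back into the tree duplicate files the walk already yields. A minimal standalone version of that check (illustrative sketch, not part of this diff):

use std::path::Path;

// Keep a symlinked entry only when its canonicalized target lies outside the
// root; unresolvable links are dropped as well, matching the map_or(false, ..)
// behaviour in the filter above.
fn keep_symlink(entry_path: &Path, root: &Path) -> bool {
    entry_path
        .canonicalize()
        .ok()
        .map_or(false, |resolved| !resolved.starts_with(root))
}

fn main() {
    let root = Path::new("/home/user/project");
    // A link such as /home/user/project/self-link -> /home/user/project
    // resolves inside the root, so it would be filtered out.
    println!("{}", keep_symlink(Path::new("/home/user/project/self-link"), root));
}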
+ if dedup_symlinks && entry.path_is_symlink() { + return entry + .path() + .canonicalize() + .ok() + .map_or(false, |path| !path.starts_with(root)); + } + + true +} diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 90e2fed04d78..da6b5ddcbc1f 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -1,15 +1,16 @@ use crate::compositor::{Component, Context, Event, EventResult}; -use helix_view::{editor::CompleteAction, ViewId}; -use tui::buffer::Buffer as Surface; +use helix_view::{ + document::SavePoint, + editor::CompleteAction, + theme::{Modifier, Style}, + ViewId, +}; +use tui::{buffer::Buffer as Surface, text::Span}; -use std::borrow::Cow; +use std::{borrow::Cow, sync::Arc}; use helix_core::{Change, Transaction}; -use helix_view::{ - graphics::Rect, - input::{KeyCode, KeyEvent}, - Document, Editor, -}; +use helix_view::{graphics::Rect, Document, Editor}; use crate::commands; use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent}; @@ -33,8 +34,19 @@ impl menu::Item for CompletionItem { } fn format(&self, _data: &Self::Data) -> menu::Row { + let deprecated = self.deprecated.unwrap_or_default() + || self.tags.as_ref().map_or(false, |tags| { + tags.contains(&lsp::CompletionItemTag::DEPRECATED) + }); menu::Row::new(vec![ - menu::Cell::from(self.label.as_str()), + menu::Cell::from(Span::styled( + self.label.as_str(), + if deprecated { + Style::default().add_modifier(Modifier::CROSSED_OUT) + } else { + Style::default() + }, + )), menu::Cell::from(match self.kind { Some(lsp::CompletionItemKind::TEXT) => "text", Some(lsp::CompletionItemKind::METHOD) => "method", @@ -90,11 +102,13 @@ impl Completion { pub fn new( editor: &Editor, + savepoint: Arc, mut items: Vec, offset_encoding: helix_lsp::OffsetEncoding, start_offset: usize, trigger_offset: usize, ) -> Self { + let replace_mode = editor.config().completion_replace; // Sort completion items according to their preselect status (given by the LSP server) items.sort_by_key(|item| !item.preselect.unwrap_or(false)); @@ -105,50 +119,89 @@ impl Completion { view_id: ViewId, item: &CompletionItem, offset_encoding: helix_lsp::OffsetEncoding, - start_offset: usize, trigger_offset: usize, + include_placeholder: bool, + replace_mode: bool, ) -> Transaction { - let transaction = if let Some(edit) = &item.text_edit { + use helix_lsp::snippet; + let selection = doc.selection(view_id); + let text = doc.text().slice(..); + let primary_cursor = selection.primary().cursor(text); + + let (edit_offset, new_text) = if let Some(edit) = &item.text_edit { let edit = match edit { lsp::CompletionTextEdit::Edit(edit) => edit.clone(), lsp::CompletionTextEdit::InsertAndReplace(item) => { - // TODO: support using "insert" instead of "replace" via user config - lsp::TextEdit::new(item.replace, item.new_text.clone()) + let range = if replace_mode { + item.replace + } else { + item.insert + }; + lsp::TextEdit::new(range, item.new_text.clone()) } }; - util::generate_transaction_from_completion_edit( - doc.text(), - doc.selection(view_id), - edit, - offset_encoding, // TODO: should probably transcode in Client - ) + let start_offset = + match util::lsp_pos_to_pos(doc.text(), edit.range.start, offset_encoding) { + Some(start) => start as i128 - primary_cursor as i128, + None => return Transaction::new(doc.text()), + }; + let end_offset = + match util::lsp_pos_to_pos(doc.text(), edit.range.end, offset_encoding) { + Some(end) => end as i128 - primary_cursor as i128, + None => return Transaction::new(doc.text()), + }; 
+ + (Some((start_offset, end_offset)), edit.new_text) } else { - let text = item.insert_text.as_ref().unwrap_or(&item.label); - // Some LSPs just give you an insertText with no offset ¯\_(ツ)_/¯ - // in these cases we need to check for a common prefix and remove it - let prefix = Cow::from(doc.text().slice(start_offset..trigger_offset)); - let text = text.trim_start_matches::<&str>(&prefix); - - // TODO: this needs to be true for the numbers to work out correctly - // in the closure below. It's passed in to a callback as this same - // formula, but can the value change between the LSP request and - // response? If it does, can we recover? - debug_assert!( - doc.selection(view_id) - .primary() - .cursor(doc.text().slice(..)) - == trigger_offset - ); - - Transaction::change_by_selection(doc.text(), doc.selection(view_id), |range| { - let cursor = range.cursor(doc.text().slice(..)); - - (cursor, cursor, Some(text.into())) - }) + let new_text = item + .insert_text + .clone() + .unwrap_or_else(|| item.label.clone()); + // check that we are still at the correct savepoint + // we can still generate a transaction regardless but if the + // document changed (and not just the selection) then we will + // likely delete the wrong text (same if we applied an edit sent by the LS) + debug_assert!(primary_cursor == trigger_offset); + (None, new_text) }; - transaction + if matches!(item.kind, Some(lsp::CompletionItemKind::SNIPPET)) + || matches!( + item.insert_text_format, + Some(lsp::InsertTextFormat::SNIPPET) + ) + { + match snippet::parse(&new_text) { + Ok(snippet) => util::generate_transaction_from_snippet( + doc.text(), + selection, + edit_offset, + replace_mode, + snippet, + doc.line_ending.as_str(), + include_placeholder, + doc.tab_width(), + doc.indent_width(), + ), + Err(err) => { + log::error!( + "Failed to parse snippet: {:?}, remaining output: {}", + &new_text, + err + ); + Transaction::new(doc.text()) + } + } + } else { + util::generate_transaction_from_completion_edit( + doc.text(), + selection, + edit_offset, + replace_mode, + new_text, + ) + } } fn completion_changes(transaction: &Transaction, trigger_offset: usize) -> Vec { @@ -161,11 +214,10 @@ impl Completion { let (view, doc) = current!(editor); // if more text was entered, remove it - doc.restore(view); + doc.restore(view, &savepoint); match event { PromptEvent::Abort => { - doc.restore(view); editor.last_completion = None; } PromptEvent::Update => { @@ -177,12 +229,12 @@ impl Completion { view.id, item, offset_encoding, - start_offset, trigger_offset, + true, + replace_mode, ); // initialize a savepoint - doc.savepoint(); doc.apply(&transaction, view.id); editor.last_completion = Some(CompleteAction { @@ -199,8 +251,9 @@ impl Completion { view.id, item, offset_encoding, - start_offset, trigger_offset, + false, + replace_mode, ); doc.apply(&transaction, view.id); @@ -239,7 +292,9 @@ impl Completion { } }; }); - let popup = Popup::new(Self::ID, menu).with_scrollbar(false); + let popup = Popup::new(Self::ID, menu) + .with_scrollbar(false) + .ignore_escape_key(true); let mut completion = Self { popup, start_offset, @@ -363,13 +418,6 @@ impl Completion { impl Component for Completion { fn handle_event(&mut self, event: &Event, cx: &mut Context) -> EventResult { - // let the Editor handle Esc instead - if let Event::Key(KeyEvent { - code: KeyCode::Esc, .. 
- }) = event - { - return EventResult::Ignored(None); - } self.popup.handle_event(event, cx) } @@ -381,104 +429,102 @@ impl Component for Completion { self.popup.render(area, surface, cx); // if we have a selection, render a markdown popup on top/below with info - if let Some(option) = self.popup.contents().selection() { - // need to render: - // option.detail - // --- - // option.documentation - - let (view, doc) = current!(cx.editor); - let language = doc.language_name().unwrap_or(""); - let text = doc.text().slice(..); - let cursor_pos = doc.selection(view.id).primary().cursor(text); - let coords = view - .screen_coords_at_pos(doc, text, cursor_pos) - .expect("cursor must be in view"); - let cursor_pos = coords.row as u16; - - let mut markdown_doc = match &option.documentation { - Some(lsp::Documentation::String(contents)) - | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { - kind: lsp::MarkupKind::PlainText, - value: contents, - })) => { - // TODO: convert to wrapped text - Markdown::new( - format!( - "```{}\n{}\n```\n{}", - language, - option.detail.as_deref().unwrap_or_default(), - contents - ), - cx.editor.syn_loader.clone(), - ) - } - Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { - kind: lsp::MarkupKind::Markdown, - value: contents, - })) => { - // TODO: set language based on doc scope - if let Some(detail) = &option.detail.as_deref() { - Markdown::new( - format!("```{}\n{}\n```\n{}", language, detail, contents), - cx.editor.syn_loader.clone(), - ) - } else { - Markdown::new(contents.to_string(), cx.editor.syn_loader.clone()) - } - } - None if option.detail.is_some() => { - // TODO: copied from above - - // TODO: set language based on doc scope - Markdown::new( - format!( - "```{}\n{}\n```", - language, - option.detail.as_deref().unwrap_or_default(), - ), - cx.editor.syn_loader.clone(), - ) - } - None => return, + let option = match self.popup.contents().selection() { + Some(option) => option, + None => return, + }; + // need to render: + // option.detail + // --- + // option.documentation + + let (view, doc) = current!(cx.editor); + let language = doc.language_name().unwrap_or(""); + let text = doc.text().slice(..); + let cursor_pos = doc.selection(view.id).primary().cursor(text); + let coords = view + .screen_coords_at_pos(doc, text, cursor_pos) + .expect("cursor must be in view"); + let cursor_pos = coords.row as u16; + + let markdowned = |lang: &str, detail: Option<&str>, doc: Option<&str>| { + let md = match (detail, doc) { + (Some(detail), Some(doc)) => format!("```{lang}\n{detail}\n```\n{doc}"), + (Some(detail), None) => format!("```{lang}\n{detail}\n```"), + (None, Some(doc)) => doc.to_string(), + (None, None) => String::new(), }; + Markdown::new(md, cx.editor.syn_loader.clone()) + }; + + let mut markdown_doc = match &option.documentation { + Some(lsp::Documentation::String(contents)) + | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { + kind: lsp::MarkupKind::PlainText, + value: contents, + })) => { + // TODO: convert to wrapped text + markdowned(language, option.detail.as_deref(), Some(contents)) + } + Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: contents, + })) => { + // TODO: set language based on doc scope + markdowned(language, option.detail.as_deref(), Some(contents)) + } + None if option.detail.is_some() => { + // TODO: set language based on doc scope + markdowned(language, option.detail.as_deref(), None) + } + None => return, + }; + let popup_area = { let (popup_x, 
popup_y) = self.popup.get_rel_position(area, cx); - let (popup_width, _popup_height) = self.popup.get_size(); - let mut width = area - .width - .saturating_sub(popup_x) - .saturating_sub(popup_width); - let area = if width > 30 { - let mut height = area.height.saturating_sub(popup_y); - let x = popup_x + popup_width; - let y = popup_y; - - if let Some((rel_width, rel_height)) = markdown_doc.required_size((width, height)) { - width = rel_width.min(width); - height = rel_height.min(height); - } - Rect::new(x, y, width, height) - } else { - let half = area.height / 2; - let height = 15.min(half); - // we want to make sure the cursor is visible (not hidden behind the documentation) - let y = if cursor_pos + area.y - >= (cx.editor.tree.area().height - height - 2/* statusline + commandline */) - { - 0 - } else { - // -2 to subtract command line + statusline. a bit of a hack, because of splits. - area.height.saturating_sub(height).saturating_sub(2) - }; + let (popup_width, popup_height) = self.popup.get_size(); + Rect::new(popup_x, popup_y, popup_width, popup_height) + }; - Rect::new(0, y, area.width, height) + let doc_width_available = area.width.saturating_sub(popup_area.right()); + let doc_area = if doc_width_available > 30 { + let mut doc_width = doc_width_available; + let mut doc_height = area.height.saturating_sub(popup_area.top()); + let x = popup_area.right(); + let y = popup_area.top(); + + if let Some((rel_width, rel_height)) = + markdown_doc.required_size((doc_width, doc_height)) + { + doc_width = rel_width.min(doc_width); + doc_height = rel_height.min(doc_height); + } + Rect::new(x, y, doc_width, doc_height) + } else { + // Documentation should not cover the cursor or the completion popup + // Completion popup could be above or below the current line + let avail_height_above = cursor_pos.min(popup_area.top()).saturating_sub(1); + let avail_height_below = area + .height + .saturating_sub(cursor_pos.max(popup_area.bottom()) + 1 /* padding */); + let (y, avail_height) = if avail_height_below >= avail_height_above { + ( + area.height.saturating_sub(avail_height_below), + avail_height_below, + ) + } else { + (0, avail_height_above) }; + if avail_height <= 1 { + return; + } - // clear area - let background = cx.editor.theme.get("ui.popup"); - surface.clear_with(area, background); - markdown_doc.render(area, surface, cx); - } + Rect::new(0, y, area.width, avail_height.min(15)) + }; + + // clear area + let background = cx.editor.theme.get("ui.popup"); + surface.clear_with(doc_area, background); + markdown_doc.render(doc_area, surface, cx); } } diff --git a/helix-term/src/ui/document.rs b/helix-term/src/ui/document.rs index 663324100d47..28a52f74d3f5 100644 --- a/helix-term/src/ui/document.rs +++ b/helix-term/src/ui/document.rs @@ -202,28 +202,27 @@ pub fn render_text<'t>( // formattter.line_pos returns to line index of the next grapheme // so it must be called before formatter.next let doc_line = formatter.line_pos(); - // TODO refactor with let .. 
else once MSRV reaches 1.65 - let (grapheme, mut pos) = if let Some(it) = formatter.next() { - it - } else { + let Some((grapheme, mut pos)) = formatter.next() else { let mut last_pos = formatter.visual_pos(); - last_pos.col -= 1; - // check if any positions translated on the fly (like cursor) are at the EOF - translate_positions( - char_pos + 1, - first_visible_char_idx, - translated_positions, - text_fmt, - renderer, - last_pos, - ); + if last_pos.row >= row_off { + last_pos.col -= 1; + last_pos.row -= row_off; + // check if any positions translated on the fly (like cursor) are at the EOF + translate_positions( + char_pos + 1, + first_visible_char_idx, + translated_positions, + text_fmt, + renderer, + last_pos, + ); + } break; }; // skip any graphemes on visual lines before the block start if pos.row < row_off { if char_pos >= style_span.1 { - // TODO refactor using let..else once MSRV reaches 1.65 style_span = if let Some(style_span) = styles.next() { style_span } else { @@ -263,12 +262,7 @@ pub fn render_text<'t>( // aquire the correct grapheme style if char_pos >= style_span.1 { - // TODO refactor using let..else once MSRV reaches 1.65 - style_span = if let Some(style_span) = styles.next() { - style_span - } else { - (Style::default(), usize::MAX) - } + style_span = styles.next().unwrap_or((Style::default(), usize::MAX)); } char_pos += grapheme.doc_chars(); @@ -319,7 +313,7 @@ pub struct TextRenderer<'a> { pub nbsp: String, pub space: String, pub tab: String, - pub tab_width: u16, + pub indent_width: u16, pub starting_indent: usize, pub draw_indent_guides: bool, pub col_offset: usize, @@ -367,16 +361,19 @@ impl<'a> TextRenderer<'a> { let text_style = theme.get("ui.text"); + let indent_width = doc.indent_style.indent_width(tab_width) as u16; + TextRenderer { surface, indent_guide_char: editor_config.indent_guides.character.into(), newline, nbsp, space, - tab_width: tab_width as u16, tab, whitespace_style: theme.get("ui.virtual.whitespace"), - starting_indent: (col_offset / tab_width) + indent_width, + starting_indent: col_offset / indent_width as usize + + (col_offset % indent_width as usize != 0) as usize + editor_config.indent_guides.skip_levels as usize, indent_guide_style: text_style.patch( theme @@ -399,7 +396,7 @@ impl<'a> TextRenderer<'a> { is_in_indent_area: &mut bool, position: Position, ) { - let cut_off_start = self.col_offset.saturating_sub(position.col as usize); + let cut_off_start = self.col_offset.saturating_sub(position.col); let is_whitespace = grapheme.is_whitespace(); // TODO is it correct to apply the whitspace style to all unicode white spaces? @@ -410,18 +407,18 @@ impl<'a> TextRenderer<'a> { let width = grapheme.width(); let grapheme = match grapheme { Grapheme::Tab { width } => { - let grapheme_tab_width = char_to_byte_idx(&self.tab, width as usize); + let grapheme_tab_width = char_to_byte_idx(&self.tab, width); &self.tab[..grapheme_tab_width] } // TODO special rendering for other whitespaces? 
Grapheme::Other { ref g } if g == " " => &self.space, Grapheme::Other { ref g } if g == "\u{00A0}" => &self.nbsp, - Grapheme::Other { ref g } => &*g, + Grapheme::Other { ref g } => g, Grapheme::Newline => &self.newline, }; - let in_bounds = self.col_offset <= (position.col as usize) - && (position.col as usize) < self.viewport.width as usize + self.col_offset; + let in_bounds = self.col_offset <= position.col + && position.col < self.viewport.width as usize + self.col_offset; if in_bounds { self.surface.set_string( @@ -430,10 +427,10 @@ impl<'a> TextRenderer<'a> { grapheme, style, ); - } else if cut_off_start != 0 && cut_off_start < width as usize { + } else if cut_off_start != 0 && cut_off_start < width { // partially on screen let rect = Rect::new( - self.viewport.x as u16, + self.viewport.x, self.viewport.y + position.row as u16, (width - cut_off_start) as u16, 1, @@ -458,14 +455,14 @@ impl<'a> TextRenderer<'a> { // Don't draw indent guides outside of view let end_indent = min( indent_level, - // Add tab_width - 1 to round up, since the first visible + // Add indent_width - 1 to round up, since the first visible // indent might be a bit after offset.col - self.col_offset + self.viewport.width as usize + (self.tab_width - 1) as usize, - ) / self.tab_width as usize; + self.col_offset + self.viewport.width as usize + (self.indent_width as usize - 1), + ) / self.indent_width as usize; for i in self.starting_indent..end_indent { - let x = - (self.viewport.x as usize + (i * self.tab_width as usize) - self.col_offset) as u16; + let x = (self.viewport.x as usize + (i * self.indent_width as usize) - self.col_offset) + as u16; let y = self.viewport.y + row; debug_assert!(self.surface.in_bounds(x, y)); self.surface diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index f297b44eb08b..7c22df747642 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -1,5 +1,5 @@ use crate::{ - commands, + commands::{self, OnKeyCallback}, compositor::{Component, Context, Event, EventResult}, job::{self, Callback}, key, @@ -21,14 +21,14 @@ use helix_core::{ visual_offset_from_block, Position, Range, Selection, Transaction, }; use helix_view::{ - document::{Mode, SCRATCH_BUFFER_NAME}, + document::{Mode, SavePoint, SCRATCH_BUFFER_NAME}, editor::{CompleteAction, CursorShapeConfig}, graphics::{Color, CursorKind, Modifier, Rect, Style}, input::{KeyEvent, MouseButton, MouseEvent, MouseEventKind}, keyboard::{KeyCode, KeyModifiers}, Document, Editor, Theme, View, }; -use std::{num::NonZeroUsize, path::PathBuf, rc::Rc}; +use std::{mem::take, num::NonZeroUsize, path::PathBuf, rc::Rc, sync::Arc}; use tui::buffer::Buffer as Surface; @@ -37,9 +37,9 @@ use super::{document::LineDecoration, lsp::SignatureHelp}; pub struct EditorView { pub keymaps: Keymaps, - on_next_key: Option>, + on_next_key: Option, pseudo_pending: Vec, - last_insert: (commands::MappableCommand, Vec), + pub(crate) last_insert: (commands::MappableCommand, Vec), pub(crate) completion: Option, spinners: ProgressSpinners, } @@ -49,6 +49,7 @@ pub enum InsertEvent { Key(KeyEvent), CompletionApply(CompleteAction), TriggerCompletion, + RequestCompletion, } impl Default for EditorView { @@ -206,7 +207,7 @@ impl EditorView { highlights, theme, &mut line_decorations, - &mut *translated_positions, + &mut translated_positions, ); Self::render_rulers(editor, doc, view, inner, surface, theme); @@ -723,12 +724,7 @@ impl EditorView { let viewport = view.area; let line_decoration = move |renderer: &mut TextRenderer, pos: LinePos| { - 
let area = Rect::new( - viewport.x, - viewport.y + pos.visual_line as u16, - viewport.width, - 1, - ); + let area = Rect::new(viewport.x, viewport.y + pos.visual_line, viewport.width, 1); if primary_line == pos.doc_line { renderer.surface.set_style(area, primary_style); } else if secondary_lines.binary_search(&pos.doc_line).is_ok() { @@ -825,6 +821,7 @@ impl EditorView { (Mode::Insert, Mode::Normal) => { // if exiting insert mode, remove completion self.completion = None; + cxt.editor.completion_request_handle = None; // TODO: Use an on_mode_change hook to remove signature help cxt.jobs.callback(async { @@ -895,6 +892,8 @@ impl EditorView { for _ in 0..cxt.editor.count.map_or(1, NonZeroUsize::into) { // first execute whatever put us into insert mode self.last_insert.0.execute(cxt); + let mut last_savepoint = None; + let mut last_request_savepoint = None; // then replay the inputs for key in self.last_insert.1.clone() { match key { @@ -902,7 +901,9 @@ impl EditorView { InsertEvent::CompletionApply(compl) => { let (view, doc) = current!(cxt.editor); - doc.restore(view); + if let Some(last_savepoint) = last_savepoint.as_deref() { + doc.restore(view, last_savepoint); + } let text = doc.text().slice(..); let cursor = doc.selection(view.id).primary().cursor(text); @@ -919,8 +920,11 @@ impl EditorView { doc.apply(&tx, view.id); } InsertEvent::TriggerCompletion => { - let (_, doc) = current!(cxt.editor); - doc.savepoint(); + last_savepoint = take(&mut last_request_savepoint); + } + InsertEvent::RequestCompletion => { + let (view, doc) = current!(cxt.editor); + last_request_savepoint = Some(doc.savepoint(view)); } } } @@ -945,26 +949,31 @@ impl EditorView { } } + #[allow(clippy::too_many_arguments)] pub fn set_completion( &mut self, editor: &mut Editor, + savepoint: Arc, items: Vec, offset_encoding: helix_lsp::OffsetEncoding, start_offset: usize, trigger_offset: usize, size: Rect, ) { - let mut completion = - Completion::new(editor, items, offset_encoding, start_offset, trigger_offset); + let mut completion = Completion::new( + editor, + savepoint, + items, + offset_encoding, + start_offset, + trigger_offset, + ); if completion.is_empty() { // skip if we got no completion results return; } - // Immediately initialize a savepoint - doc_mut!(editor).savepoint(); - editor.last_completion = None; self.last_insert.1.push(InsertEvent::TriggerCompletion); @@ -977,12 +986,12 @@ impl EditorView { self.completion = None; // Clear any savepoints - let doc = doc_mut!(editor); - doc.savepoint = None; editor.clear_idle_timer(); // don't retrigger } pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult { + commands::compute_inlay_hints_for_all_views(cx.editor, cx.jobs); + if let Some(completion) = &mut self.completion { return if completion.ensure_item_resolved(cx) { EventResult::Consumed(None) @@ -1007,6 +1016,10 @@ impl EditorView { event: &MouseEvent, cxt: &mut commands::Context, ) -> EventResult { + if event.kind != MouseEventKind::Moved { + cxt.editor.reset_idle_timer(); + } + let config = cxt.editor.config(); let MouseEvent { kind, diff --git a/helix-term/src/ui/fuzzy_match.rs b/helix-term/src/ui/fuzzy_match.rs index e25d7328527d..b406702ff95e 100644 --- a/helix-term/src/ui/fuzzy_match.rs +++ b/helix-term/src/ui/fuzzy_match.rs @@ -4,41 +4,209 @@ use fuzzy_matcher::FuzzyMatcher; #[cfg(test)] mod test; +struct QueryAtom { + kind: QueryAtomKind, + atom: String, + ignore_case: bool, + inverse: bool, +} +impl QueryAtom { + fn new(atom: &str) -> Option { + let mut atom = 
atom.to_string(); + let inverse = atom.starts_with('!'); + if inverse { + atom.remove(0); + } + + let mut kind = match atom.chars().next() { + Some('^') => QueryAtomKind::Prefix, + Some('\'') => QueryAtomKind::Substring, + _ if inverse => QueryAtomKind::Substring, + _ => QueryAtomKind::Fuzzy, + }; + + if atom.starts_with(['^', '\'']) { + atom.remove(0); + } + + if atom.is_empty() { + return None; + } + + if atom.ends_with('$') && !atom.ends_with("\\$") { + atom.pop(); + kind = if kind == QueryAtomKind::Prefix { + QueryAtomKind::Exact + } else { + QueryAtomKind::Postfix + } + } + + Some(QueryAtom { + kind, + atom: atom.replace('\\', ""), + // not ideal but fuzzy_matches only knows ascii uppercase so more consistent + // to behave the same + ignore_case: kind != QueryAtomKind::Fuzzy + && atom.chars().all(|c| c.is_ascii_lowercase()), + inverse, + }) + } + + fn indices(&self, matcher: &Matcher, item: &str, indices: &mut Vec) -> bool { + // for inverse there are no indicies to return + // just return whether we matched + if self.inverse { + return self.matches(matcher, item); + } + let buf; + let item = if self.ignore_case { + buf = item.to_ascii_lowercase(); + &buf + } else { + item + }; + let off = match self.kind { + QueryAtomKind::Fuzzy => { + if let Some((_, fuzzy_indices)) = matcher.fuzzy_indices(item, &self.atom) { + indices.extend_from_slice(&fuzzy_indices); + return true; + } else { + return false; + } + } + QueryAtomKind::Substring => { + if let Some(off) = item.find(&self.atom) { + off + } else { + return false; + } + } + QueryAtomKind::Prefix if item.starts_with(&self.atom) => 0, + QueryAtomKind::Postfix if item.ends_with(&self.atom) => item.len() - self.atom.len(), + QueryAtomKind::Exact if item == self.atom => 0, + _ => return false, + }; + + indices.extend(off..(off + self.atom.len())); + true + } + + fn matches(&self, matcher: &Matcher, item: &str) -> bool { + let buf; + let item = if self.ignore_case { + buf = item.to_ascii_lowercase(); + &buf + } else { + item + }; + let mut res = match self.kind { + QueryAtomKind::Fuzzy => matcher.fuzzy_match(item, &self.atom).is_some(), + QueryAtomKind::Substring => item.contains(&self.atom), + QueryAtomKind::Prefix => item.starts_with(&self.atom), + QueryAtomKind::Postfix => item.ends_with(&self.atom), + QueryAtomKind::Exact => item == self.atom, + }; + if self.inverse { + res = !res; + } + res + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +enum QueryAtomKind { + /// Item is a fuzzy match of this behaviour + /// + /// Usage: `foo` + Fuzzy, + /// Item contains query atom as a continous substring + /// + /// Usage `'foo` + Substring, + /// Item starts with query atom + /// + /// Usage: `^foo` + Prefix, + /// Item ends with query atom + /// + /// Usage: `foo$` + Postfix, + /// Item is equal to query atom + /// + /// Usage `^foo$` + Exact, +} + +#[derive(Default)] pub struct FuzzyQuery { - queries: Vec, + first_fuzzy_atom: Option, + query_atoms: Vec, +} + +fn query_atoms(query: &str) -> impl Iterator + '_ { + let mut saw_backslash = false; + query.split(move |c| { + saw_backslash = match c { + ' ' if !saw_backslash => return true, + '\\' => true, + _ => false, + }; + false + }) } impl FuzzyQuery { + pub fn refine(&self, query: &str, old_query: &str) -> (FuzzyQuery, bool) { + // TODO: we could be a lot smarter about this + let new_query = Self::new(query); + let mut is_refinement = query.starts_with(old_query); + + // if the last atom is an inverse atom adding more text to it + // will actually increase the number of matches and we can 
not refine + // the matches. + if is_refinement && !self.query_atoms.is_empty() { + let last_idx = self.query_atoms.len() - 1; + if self.query_atoms[last_idx].inverse + && self.query_atoms[last_idx].atom != new_query.query_atoms[last_idx].atom + { + is_refinement = false; + } + } + + (new_query, is_refinement) + } + pub fn new(query: &str) -> FuzzyQuery { - let mut saw_backslash = false; - let queries = query - .split(|c| { - saw_backslash = match c { - ' ' if !saw_backslash => return true, - '\\' => true, - _ => false, - }; - false - }) - .filter_map(|query| { - if query.is_empty() { + let mut first_fuzzy_query = None; + let query_atoms = query_atoms(query) + .filter_map(|atom| { + let atom = QueryAtom::new(atom)?; + if atom.kind == QueryAtomKind::Fuzzy && first_fuzzy_query.is_none() { + first_fuzzy_query = Some(atom.atom); None } else { - Some(query.replace("\\ ", " ")) + Some(atom) } }) .collect(); - FuzzyQuery { queries } + FuzzyQuery { + first_fuzzy_atom: first_fuzzy_query, + query_atoms, + } } pub fn fuzzy_match(&self, item: &str, matcher: &Matcher) -> Option { - // use the rank of the first query for the rank, because merging ranks is not really possible + // use the rank of the first fuzzzy query for the rank, because merging ranks is not really possible // this behaviour matches fzf and skim - let score = matcher.fuzzy_match(item, self.queries.get(0)?)?; + let score = self + .first_fuzzy_atom + .as_ref() + .map_or(Some(0), |atom| matcher.fuzzy_match(item, atom))?; if self - .queries + .query_atoms .iter() - .any(|query| matcher.fuzzy_match(item, query).is_none()) + .any(|atom| !atom.matches(matcher, item)) { return None; } @@ -46,29 +214,26 @@ impl FuzzyQuery { } pub fn fuzzy_indicies(&self, item: &str, matcher: &Matcher) -> Option<(i64, Vec)> { - if self.queries.len() == 1 { - return matcher.fuzzy_indices(item, &self.queries[0]); - } - - // use the rank of the first query for the rank, because merging ranks is not really possible - // this behaviour matches fzf and skim - let (score, mut indicies) = matcher.fuzzy_indices(item, self.queries.get(0)?)?; + let (score, mut indices) = self.first_fuzzy_atom.as_ref().map_or_else( + || Some((0, Vec::new())), + |atom| matcher.fuzzy_indices(item, atom), + )?; - // fast path for the common case of not using a space - // during matching this branch should be free thanks to branch prediction - if self.queries.len() == 1 { - return Some((score, indicies)); + // fast path for the common case of just a single atom + if self.query_atoms.is_empty() { + return Some((score, indices)); } - for query in &self.queries[1..] 
{ - let (_, matched_indicies) = matcher.fuzzy_indices(item, query)?; - indicies.extend_from_slice(&matched_indicies); + for atom in &self.query_atoms { + if !atom.indices(matcher, item, &mut indices) { + return None; + } } // deadup and remove duplicate matches - indicies.sort_unstable(); - indicies.dedup(); + indices.sort_unstable(); + indices.dedup(); - Some((score, indicies)) + Some((score, indices)) } } diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 923dd73a16ab..87136992c43b 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -342,13 +342,10 @@ impl Component for Markdown { fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> { let padding = 2; - if padding >= viewport.1 || padding >= viewport.0 { - return None; - } let contents = self.parse(None); // TODO: account for tab width - let max_text_width = (viewport.0 - padding).min(120); + let max_text_width = (viewport.0.saturating_sub(padding)).min(120); let (width, height) = crate::ui::text::required_size(&contents, max_text_width); Some((width + padding, height + padding)) diff --git a/helix-term/src/ui/menu.rs b/helix-term/src/ui/menu.rs index da00aa89f9b1..30625acee60c 100644 --- a/helix-term/src/ui/menu.rs +++ b/helix-term/src/ui/menu.rs @@ -43,6 +43,8 @@ impl Item for PathBuf { } } +pub type MenuCallback = Box, MenuEvent)>; + pub struct Menu { options: Vec, editor_data: T::Data, @@ -55,7 +57,7 @@ pub struct Menu { widths: Vec, - callback_fn: Box, MenuEvent)>, + callback_fn: MenuCallback, scroll: usize, size: (u16, u16), @@ -77,7 +79,7 @@ impl Menu { Self { options, editor_data, - matcher: Box::default(), + matcher: Box::new(Matcher::default().ignore_case()), matches, cursor: None, widths: Vec::new(), @@ -252,12 +254,12 @@ impl Component for Menu { return EventResult::Consumed(close_fn); } // arrow up/ctrl-p/shift-tab prev completion choice (including updating the doc) - shift!(Tab) | key!(Up) | ctrl!('p') | ctrl!('k') => { + shift!(Tab) | key!(Up) | ctrl!('p') => { self.move_up(); (self.callback_fn)(cx.editor, self.selection(), MenuEvent::Update); return EventResult::Consumed(None); } - key!(Tab) | key!(Down) | ctrl!('n') | ctrl!('j') => { + key!(Tab) | key!(Down) | ctrl!('n') => { // arrow down/ctrl-n/tab advances completion choice (including updating the doc) self.move_down(); (self.callback_fn)(cx.editor, self.selection(), MenuEvent::Update); diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 5e7f8c36f458..3e9a14b06307 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -15,6 +15,7 @@ mod statusline; mod text; use crate::compositor::{Component, Compositor}; +use crate::filter_picker_entry; use crate::job::{self, Callback}; pub use completion::Completion; pub use editor::EditorView; @@ -163,6 +164,9 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi let now = Instant::now(); + let dedup_symlinks = config.file_picker.deduplicate_links; + let absolute_root = root.canonicalize().unwrap_or_else(|_| root.clone()); + let mut walk_builder = WalkBuilder::new(&root); walk_builder .hidden(config.file_picker.hidden) @@ -173,10 +177,7 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi .git_global(config.file_picker.git_global) .git_exclude(config.file_picker.git_exclude) .max_depth(config.file_picker.max_depth) - // We always want to ignore the .git directory, otherwise if - // `ignore` is turned off above, we end up with a lot of noise - // in our 
picker. - .filter_entry(|entry| entry.file_name() != ".git"); + .filter_entry(move |entry| filter_picker_entry(entry, &absolute_root, dedup_symlinks)); // We want to exclude files that the editor can't handle yet let mut type_builder = TypesBuilder::new(); @@ -195,14 +196,11 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi // We want files along with their modification date for sorting let files = walk_builder.build().filter_map(|entry| { let entry = entry.ok()?; - // This is faster than entry.path().is_dir() since it uses cached fs::Metadata fetched by ignore/walkdir - let is_dir = entry.file_type().map_or(false, |ft| ft.is_dir()); - if is_dir { - // Will give a false positive if metadata cannot be read (eg. permission error) - None - } else { + if entry.file_type()?.is_file() { Some(entry.into_path()) + } else { + None } }); @@ -282,10 +280,10 @@ pub mod completers { } pub fn theme(_editor: &Editor, input: &str) -> Vec { - let mut names = theme::Loader::read_names(&helix_loader::runtime_dir().join("themes")); - names.extend(theme::Loader::read_names( - &helix_loader::config_dir().join("themes"), - )); + let mut names = theme::Loader::read_names(&helix_loader::config_dir().join("themes")); + for rt_dir in helix_loader::runtime_dirs() { + names.extend(theme::Loader::read_names(&rt_dir.join("themes"))); + } names.push("default".into()); names.push("base16_default".into()); names.sort(); diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index 5fa75136f104..bc2f98ee6cac 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -70,6 +70,8 @@ impl From for PathOrId { } } +type FileCallback = Box Option>; + /// File path and range of lines (used to align and highlight lines) pub type FileLocation = (PathOrId, Option<(usize, usize)>); @@ -80,7 +82,7 @@ pub struct FilePicker { preview_cache: HashMap, read_buffer: Vec, /// Given an item in the picker, return the file path and line number to display. - file_fn: Box Option>, + file_fn: FileCallback, } pub enum CachedPreview { @@ -223,6 +225,9 @@ impl FilePicker { let loader = cx.editor.syn_loader.clone(); doc.detect_language(loader); } + + // QUESTION: do we want to compute inlay hints in pickers too ? 
Probably not for now + // but it could be interesting in the future } EventResult::Consumed(None) @@ -337,6 +342,7 @@ impl Component for FilePicker { inner, doc, offset, + // TODO: compute text annotations asynchronously here (like inlay hints) &TextAnnotations::default(), highlights, &cx.editor.theme, @@ -394,6 +400,8 @@ impl Ord for PickerMatch { } } +type PickerCallback = Box; + pub struct Picker { options: Vec, editor_data: T::Data, @@ -407,7 +415,7 @@ pub struct Picker { cursor: usize, // pattern: String, prompt: Prompt, - previous_pattern: String, + previous_pattern: (String, FuzzyQuery), /// Whether to truncate the start (default true) pub truncate_start: bool, /// Whether to show the preview panel (default true) @@ -415,7 +423,7 @@ pub struct Picker { /// Constraints for tabular formatting widths: Vec, - callback_fn: Box, + callback_fn: PickerCallback, } impl Picker { @@ -431,26 +439,6 @@ impl Picker { |_editor: &mut Context, _pattern: &str, _event: PromptEvent| {}, ); - let n = options - .first() - .map(|option| option.format(&editor_data).cells.len()) - .unwrap_or_default(); - let max_lens = options.iter().fold(vec![0; n], |mut acc, option| { - let row = option.format(&editor_data); - // maintain max for each column - for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) { - let width = cell.content.width(); - if width > *acc { - *acc = width; - } - } - acc - }); - let widths = max_lens - .into_iter() - .map(|len| Constraint::Length(len as u16)) - .collect(); - let mut picker = Self { options, editor_data, @@ -458,15 +446,17 @@ impl Picker { matches: Vec::new(), cursor: 0, prompt, - previous_pattern: String::new(), + previous_pattern: (String::new(), FuzzyQuery::default()), truncate_start: true, show_preview: true, callback_fn: Box::new(callback_fn), completion_height: 0, - widths, + widths: Vec::new(), }; - // scoring on empty input: + picker.calculate_column_widths(); + + // scoring on empty input // TODO: just reuse score() picker .matches @@ -482,13 +472,50 @@ impl Picker { picker } + pub fn set_options(&mut self, new_options: Vec) { + self.options = new_options; + self.cursor = 0; + self.force_score(); + self.calculate_column_widths(); + } + + /// Calculate the width constraints using the maximum widths of each column + /// for the current options. + fn calculate_column_widths(&mut self) { + let n = self + .options + .first() + .map(|option| option.format(&self.editor_data).cells.len()) + .unwrap_or_default(); + let max_lens = self.options.iter().fold(vec![0; n], |mut acc, option| { + let row = option.format(&self.editor_data); + // maintain max for each column + for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) { + let width = cell.content.width(); + if width > *acc { + *acc = width; + } + } + acc + }); + self.widths = max_lens + .into_iter() + .map(|len| Constraint::Length(len as u16)) + .collect(); + } + pub fn score(&mut self) { let pattern = self.prompt.line(); - if pattern == &self.previous_pattern { + if pattern == &self.previous_pattern.0 { return; } + let (query, is_refined) = self + .previous_pattern + .1 + .refine(pattern, &self.previous_pattern.0); + if pattern.is_empty() { // Fast path for no pattern. self.matches.clear(); @@ -501,8 +528,7 @@ impl Picker { len: text.chars().count(), } })); - } else if pattern.starts_with(&self.previous_pattern) { - let query = FuzzyQuery::new(pattern); + } else if is_refined { // optimization: if the pattern is a more specific version of the previous one // then we can score the filtered set. 
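The retained-set rescoring that follows relies on the `refine` check introduced earlier in this file: appending to a query can only narrow the fuzzy match set, unless the trailing atom is an inverse (`!foo`) atom, in which case typing more excludes fewer items and new matches can appear. A compact restatement of that condition (illustrative sketch, not part of this diff):

// `old`/`new` are the previous and current picker patterns; the two flags
// describe the final query atom of the new pattern.
fn can_rescore_previous_matches(old: &str, new: &str, last_is_inverse: bool, last_changed: bool) -> bool {
    new.starts_with(old) && !(last_is_inverse && last_changed)
}

fn main() {
    // Plain refinement: the old matches are a superset, so re-score only them.
    assert!(can_rescore_previous_matches("src ma", "src mai", false, true));
    // Extending an inverse atom (`!ma` -> `!mai`) excludes fewer items, so
    // items outside the previous match set may now match; fall back to a full re-score.
    assert!(!can_rescore_previous_matches("src !ma", "src !mai", true, true));
}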
self.matches.retain_mut(|pmatch| { @@ -527,7 +553,8 @@ impl Picker { // reset cursor position self.cursor = 0; let pattern = self.prompt.line(); - self.previous_pattern.clone_from(pattern); + self.previous_pattern.0.clone_from(pattern); + self.previous_pattern.1 = query; } pub fn force_score(&mut self) { @@ -922,9 +949,7 @@ impl Component for DynamicPicker { Some(overlay) => &mut overlay.content.file_picker.picker, None => return, }; - picker.options = new_options; - picker.cursor = 0; - picker.force_score(); + picker.set_options(new_options); editor.reset_idle_timer(); })); anyhow::Ok(callback) diff --git a/helix-term/src/ui/prompt.rs b/helix-term/src/ui/prompt.rs index 5fb6745a90e5..35ae8c2a8e0b 100644 --- a/helix-term/src/ui/prompt.rs +++ b/helix-term/src/ui/prompt.rs @@ -14,8 +14,11 @@ use helix_view::{ Editor, }; -pub type Completion = (RangeFrom, Cow<'static, str>); type PromptCharHandler = Box; +pub type Completion = (RangeFrom, Cow<'static, str>); +type CompletionFn = Box Vec>; +type CallbackFn = Box; +pub type DocFn = Box Option>>; pub struct Prompt { prompt: Cow<'static, str>, @@ -25,9 +28,9 @@ pub struct Prompt { selection: Option, history_register: Option, history_pos: Option, - completion_fn: Box Vec>, - callback_fn: Box, - pub doc_fn: Box Option>>, + completion_fn: CompletionFn, + callback_fn: CallbackFn, + pub doc_fn: DocFn, next_char_handler: Option, } @@ -513,7 +516,7 @@ impl Component for Prompt { alt!('d') | alt!(Delete) | ctrl!(Delete) => self.delete_word_forwards(cx.editor), ctrl!('k') => self.kill_to_end_of_line(cx.editor), ctrl!('u') => self.kill_to_start_of_line(cx.editor), - ctrl!('h') | key!(Backspace) => { + ctrl!('h') | key!(Backspace) | shift!(Backspace) => { self.delete_char_backwards(cx.editor); (self.callback_fn)(cx, &self.line, PromptEvent::Update); } diff --git a/helix-term/src/ui/statusline.rs b/helix-term/src/ui/statusline.rs index a25b4540d1f9..887863519319 100644 --- a/helix-term/src/ui/statusline.rs +++ b/helix-term/src/ui/statusline.rs @@ -1,5 +1,6 @@ use helix_core::{coords_at_pos, encoding, Position}; use helix_lsp::lsp::DiagnosticSeverity; +use helix_view::document::DEFAULT_LANGUAGE_NAME; use helix_view::{ document::{Mode, SCRATCH_BUFFER_NAME}, graphics::Rect, @@ -141,6 +142,9 @@ where helix_view::editor::StatusLineElement::Spinner => render_lsp_spinner, helix_view::editor::StatusLineElement::FileBaseName => render_file_base_name, helix_view::editor::StatusLineElement::FileName => render_file_name, + helix_view::editor::StatusLineElement::FileModificationIndicator => { + render_file_modification_indicator + } helix_view::editor::StatusLineElement::FileEncoding => render_file_encoding, helix_view::editor::StatusLineElement::FileLineEnding => render_file_line_ending, helix_view::editor::StatusLineElement::FileType => render_file_type, @@ -155,6 +159,7 @@ where helix_view::editor::StatusLineElement::TotalLineNumbers => render_total_line_numbers, helix_view::editor::StatusLineElement::Separator => render_separator, helix_view::editor::StatusLineElement::Spacer => render_spacer, + helix_view::editor::StatusLineElement::VersionControl => render_version_control, } } @@ -402,7 +407,7 @@ fn render_file_type(context: &mut RenderContext, write: F) where F: Fn(&mut RenderContext, String, Option