Merge branch 'haskell-support' into add-haskell-grammar

This commit is contained in:
Pseudomata 2024-01-26 10:55:43 -05:00 committed by GitHub
commit 2b9ba46cb6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
146 changed files with 2847 additions and 1749 deletions

View File

@ -1,4 +1,4 @@
[[PR Description]]
Release Notes:

View File

@ -29,7 +29,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"
@ -55,7 +55,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"
@ -88,7 +88,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

View File

@ -26,7 +26,7 @@ jobs:
run: docker system prune
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
clean: false
submodules: 'recursive'

View File

@ -28,7 +28,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

View File

@ -21,7 +21,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"
@ -38,7 +38,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"
@ -66,7 +66,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

View File

@ -7,7 +7,7 @@ jobs:
update_top_ranking_issues:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.10.5"

View File

@ -11,6 +11,8 @@
Antonio Scandurra <me@as-cii.com>
Antonio Scandurra <me@as-cii.com> <antonio@zed.dev>
Conrad Irwin <conrad@zed.dev>
Conrad Irwin <conrad@zed.dev> <conrad.irwin@gmail.com>
Joseph T. Lyons <JosephTLyons@gmail.com>
Joseph T. Lyons <JosephTLyons@gmail.com> <JosephTLyons@users.noreply.github.com>
Julia <floc@unpromptedtirade.com>
@ -37,3 +39,6 @@ Nathan Sobo <nathan@zed.dev> <nathan@warp.dev>
Nathan Sobo <nathan@zed.dev> <nathansobo@gmail.com>
Piotr Osiewicz <piotr@zed.dev>
Piotr Osiewicz <piotr@zed.dev> <24362066+osiewicz@users.noreply.github.com>
Thorsten Ball <thorsten@zed.dev>
Thorsten Ball <thorsten@zed.dev> <me@thorstenball.com>
Thorsten Ball <thorsten@zed.dev> <mrnugget@gmail.com>

View File

@ -37,8 +37,9 @@ We plan to set aside time each week to pair program with contributors on promisi
- Add test coverage and documentation
- Choose tasks that align with our roadmap
- Pair with us and watch us code to learn the codebase
- Low effort PRs, such as those that just re-arrange syntax, won't be merged without a compelling justification
## Bird-eye's view of Zed
## Bird's-eye view of Zed
Zed is made up of several smaller crates - let's go over those you're most likely to interact with:

385
Cargo.lock generated
View File

@ -101,50 +101,25 @@ dependencies = [
"util",
]
[[package]]
name = "alacritty_config"
version = "0.1.2-dev"
source = "git+https://github.com/zed-industries/alacritty?rev=33306142195b354ef3485ca2b1d8a85dfc6605ca#33306142195b354ef3485ca2b1d8a85dfc6605ca"
dependencies = [
"log",
"serde",
"toml 0.7.8",
]
[[package]]
name = "alacritty_config_derive"
version = "0.2.2-dev"
source = "git+https://github.com/zed-industries/alacritty?rev=33306142195b354ef3485ca2b1d8a85dfc6605ca#33306142195b354ef3485ca2b1d8a85dfc6605ca"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.37",
]
[[package]]
name = "alacritty_terminal"
version = "0.20.0-dev"
source = "git+https://github.com/zed-industries/alacritty?rev=33306142195b354ef3485ca2b1d8a85dfc6605ca#33306142195b354ef3485ca2b1d8a85dfc6605ca"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35229555d7cc7e83392dfc27c96bec560b1076d756184893296cd60125f4a264"
dependencies = [
"alacritty_config",
"alacritty_config_derive",
"base64 0.13.1",
"base64 0.21.4",
"bitflags 2.4.1",
"home",
"libc",
"log",
"mio 0.6.23",
"mio-anonymous-pipes",
"mio-extras",
"miow 0.3.7",
"nix 0.26.4",
"miow 0.6.0",
"parking_lot 0.12.1",
"regex-automata 0.1.10",
"piper",
"polling 3.3.2",
"regex-automata 0.4.5",
"rustix-openpty",
"serde",
"serde_yaml",
"signal-hook",
"signal-hook-mio",
"toml 0.7.8",
"unicode-width",
"vte",
"windows-sys 0.48.0",
@ -444,7 +419,7 @@ dependencies = [
"futures-lite",
"log",
"parking",
"polling",
"polling 2.8.0",
"rustix 0.37.23",
"slab",
"socket2 0.4.9",
@ -1155,7 +1130,7 @@ dependencies = [
"serde_json",
"syn 1.0.109",
"tempfile",
"toml 0.5.11",
"toml",
]
[[package]]
@ -1423,36 +1398,37 @@ dependencies = [
[[package]]
name = "cocoa"
version = "0.24.0"
source = "git+https://github.com/servo/core-foundation-rs?rev=079665882507dd5e2ff77db3de5070c1f6c0fb85#079665882507dd5e2ff77db3de5070c1f6c0fb85"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6140449f97a6e97f9511815c5632d84c8aacf8ac271ad77c559218161a1373c"
dependencies = [
"bitflags 1.3.2",
"block",
"cocoa-foundation",
"core-foundation",
"core-graphics",
"foreign-types",
"core-graphics 0.23.1",
"foreign-types 0.5.0",
"libc",
"objc",
]
[[package]]
name = "cocoa-foundation"
version = "0.1.1"
source = "git+https://github.com/servo/core-foundation-rs?rev=079665882507dd5e2ff77db3de5070c1f6c0fb85#079665882507dd5e2ff77db3de5070c1f6c0fb85"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c6234cbb2e4c785b456c0644748b1ac416dd045799740356f8363dfe00c93f7"
dependencies = [
"bitflags 1.3.2",
"block",
"core-foundation",
"core-graphics-types",
"foreign-types",
"libc",
"objc",
]
[[package]]
name = "collab"
version = "0.40.1"
version = "0.41.0"
dependencies = [
"anyhow",
"async-trait",
@ -1514,7 +1490,7 @@ dependencies = [
"time",
"tokio",
"tokio-tungstenite",
"toml 0.5.11",
"toml",
"tonic",
"tower",
"tracing",
@ -1586,22 +1562,9 @@ dependencies = [
name = "color"
version = "0.1.0"
dependencies = [
"anyhow",
"fs",
"indexmap 1.9.3",
"itertools 0.11.0",
"palette",
"parking_lot 0.11.2",
"refineable",
"schemars",
"serde",
"serde_derive",
"serde_json",
"settings",
"story",
"toml 0.5.11",
"util",
"uuid 1.4.1",
]
[[package]]
@ -1750,10 +1713,11 @@ dependencies = [
[[package]]
name = "core-foundation"
version = "0.9.3"
source = "git+https://github.com/servo/core-foundation-rs?rev=079665882507dd5e2ff77db3de5070c1f6c0fb85#079665882507dd5e2ff77db3de5070c1f6c0fb85"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
dependencies = [
"core-foundation-sys 0.8.3",
"core-foundation-sys 0.8.6",
"libc",
"uuid 0.5.1",
]
@ -1766,29 +1730,44 @@ checksum = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b"
[[package]]
name = "core-foundation-sys"
version = "0.8.3"
source = "git+https://github.com/servo/core-foundation-rs?rev=079665882507dd5e2ff77db3de5070c1f6c0fb85#079665882507dd5e2ff77db3de5070c1f6c0fb85"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f"
[[package]]
name = "core-graphics"
version = "0.22.3"
source = "git+https://github.com/servo/core-foundation-rs?rev=079665882507dd5e2ff77db3de5070c1f6c0fb85#079665882507dd5e2ff77db3de5070c1f6c0fb85"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2581bbab3b8ffc6fcbd550bf46c355135d16e9ff2a6ea032ad6b9bf1d7efe4fb"
dependencies = [
"bitflags 1.3.2",
"core-foundation",
"core-graphics-types",
"foreign-types",
"foreign-types 0.3.2",
"libc",
]
[[package]]
name = "core-graphics"
version = "0.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "970a29baf4110c26fedbc7f82107d42c23f7e88e404c4577ed73fe99ff85a212"
dependencies = [
"bitflags 1.3.2",
"core-foundation",
"core-graphics-types",
"foreign-types 0.5.0",
"libc",
]
[[package]]
name = "core-graphics-types"
version = "0.1.1"
source = "git+https://github.com/servo/core-foundation-rs?rev=079665882507dd5e2ff77db3de5070c1f6c0fb85#079665882507dd5e2ff77db3de5070c1f6c0fb85"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45390e6114f68f718cc7a830514a96f903cccd70d02a8f6d9f643ac4ba45afaf"
dependencies = [
"bitflags 1.3.2",
"core-foundation",
"foreign-types",
"libc",
]
@ -1808,8 +1787,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99d74ada66e07c1cefa18f8abfba765b486f250de2e4a999e5727fc0dd4b4a25"
dependencies = [
"core-foundation",
"core-graphics",
"foreign-types",
"core-graphics 0.22.3",
"foreign-types 0.3.2",
"libc",
]
@ -1840,7 +1819,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d959d90e938c5493000514b446987c07aed46c668faaa7d34d6c7a67b1a578c"
dependencies = [
"alsa",
"core-foundation-sys 0.8.3",
"core-foundation-sys 0.8.6",
"coreaudio-rs",
"dasp_sample",
"jni 0.19.0",
@ -2014,6 +1993,12 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "cursor-icon"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96a6ac251f4a2aca6b3f91340350eab87ae57c3f127ffeb585e92bd336717991"
[[package]]
name = "dashmap"
version = "5.5.3"
@ -2639,7 +2624,7 @@ dependencies = [
"bitflags 1.3.2",
"byteorder",
"core-foundation",
"core-graphics",
"core-graphics 0.22.3",
"core-text",
"dirs-next",
"dwrote",
@ -2672,7 +2657,28 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
"foreign-types-shared",
"foreign-types-shared 0.1.1",
]
[[package]]
name = "foreign-types"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965"
dependencies = [
"foreign-types-macros",
"foreign-types-shared 0.3.1",
]
[[package]]
name = "foreign-types-macros"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.37",
]
[[package]]
@ -2681,6 +2687,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "foreign-types-shared"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"
[[package]]
name = "form_urlencoded"
version = "1.2.0"
@ -3063,7 +3075,7 @@ dependencies = [
"cocoa",
"collections",
"core-foundation",
"core-graphics",
"core-graphics 0.22.3",
"core-text",
"ctor",
"derive_more",
@ -3071,7 +3083,7 @@ dependencies = [
"env_logger",
"etagere",
"font-kit",
"foreign-types",
"foreign-types 0.3.2",
"futures 0.3.28",
"gpui_macros",
"image",
@ -3397,7 +3409,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613"
dependencies = [
"android_system_properties",
"core-foundation-sys 0.8.3",
"core-foundation-sys 0.8.6",
"iana-time-zone-haiku",
"js-sys",
"wasm-bindgen",
@ -3580,7 +3592,7 @@ dependencies = [
"log",
"mime",
"once_cell",
"polling",
"polling 2.8.0",
"slab",
"sluice",
"tracing",
@ -3920,12 +3932,6 @@ dependencies = [
"safemem",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "linkme"
version = "0.3.17"
@ -3981,8 +3987,8 @@ dependencies = [
"cocoa",
"collections",
"core-foundation",
"core-graphics",
"foreign-types",
"core-graphics 0.22.3",
"foreign-types 0.3.2",
"futures 0.3.28",
"gpui",
"hmac 0.12.1",
@ -4142,7 +4148,7 @@ dependencies = [
"block",
"bytes 1.5.0",
"core-foundation",
"foreign-types",
"foreign-types 0.3.2",
"metal",
"objc",
]
@ -4197,7 +4203,7 @@ dependencies = [
"bitflags 1.3.2",
"block",
"cocoa-foundation",
"foreign-types",
"foreign-types 0.3.2",
"log",
"objc",
]
@ -4282,19 +4288,6 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "mio-anonymous-pipes"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bc513025fe5005a3aa561b50fdb2cda5a150b84800ae02acd8aa9ed62ca1a6b"
dependencies = [
"mio 0.6.23",
"miow 0.3.7",
"parking_lot 0.11.2",
"spsc-buffer",
"winapi 0.3.9",
]
[[package]]
name = "mio-extras"
version = "2.0.6"
@ -4307,17 +4300,6 @@ dependencies = [
"slab",
]
[[package]]
name = "mio-uds"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afcb699eb26d4332647cc848492bbc15eafb26f08d0304550d5aa1f612e066f0"
dependencies = [
"iovec",
"libc",
"mio 0.6.23",
]
[[package]]
name = "miow"
version = "0.2.2"
@ -4332,11 +4314,11 @@ dependencies = [
[[package]]
name = "miow"
version = "0.3.7"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21"
checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044"
dependencies = [
"winapi 0.3.9",
"windows-sys 0.48.0",
]
[[package]]
@ -4496,17 +4478,6 @@ dependencies = [
"libc",
]
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if 1.0.0",
"libc",
]
[[package]]
name = "nix"
version = "0.27.1"
@ -4865,7 +4836,7 @@ checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c"
dependencies = [
"bitflags 2.4.1",
"cfg-if 1.0.0",
"foreign-types",
"foreign-types 0.3.2",
"libc",
"once_cell",
"openssl-macros",
@ -5277,6 +5248,17 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "piper"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4"
dependencies = [
"atomic-waker",
"fastrand 2.0.0",
"futures-io",
]
[[package]]
name = "pkcs1"
version = "0.7.5"
@ -5368,6 +5350,20 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "polling"
version = "3.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "545c980a3880efd47b2e262f6a4bb6daad6555cf3367aa9c4e52895f69537a41"
dependencies = [
"cfg-if 1.0.0",
"concurrent-queue",
"pin-project-lite 0.2.13",
"rustix 0.38.30",
"tracing",
"windows-sys 0.52.0",
]
[[package]]
name = "pollster"
version = "0.2.5"
@ -5444,7 +5440,7 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785"
dependencies = [
"toml 0.5.11",
"toml",
]
[[package]]
@ -5551,7 +5547,7 @@ dependencies = [
"terminal",
"text",
"thiserror",
"toml 0.5.11",
"toml",
"unindent",
"util",
]
@ -5994,6 +5990,17 @@ dependencies = [
"regex-syntax 0.7.5",
]
[[package]]
name = "regex-automata"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.8.2",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
@ -6006,6 +6013,12 @@ version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
[[package]]
name = "regex-syntax"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "rend"
version = "0.4.0"
@ -6374,11 +6387,23 @@ checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca"
dependencies = [
"bitflags 2.4.1",
"errno",
"itoa",
"libc",
"linux-raw-sys 0.4.12",
"windows-sys 0.52.0",
]
[[package]]
name = "rustix-openpty"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a25c3aad9fc1424eb82c88087789a7d938e1829724f3e4043163baf0d13cfc12"
dependencies = [
"errno",
"libc",
"rustix 0.38.30",
]
[[package]]
name = "rustls"
version = "0.19.1"
@ -6694,7 +6719,7 @@ checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de"
dependencies = [
"bitflags 1.3.2",
"core-foundation",
"core-foundation-sys 0.8.3",
"core-foundation-sys 0.8.6",
"libc",
"security-framework-sys",
]
@ -6705,7 +6730,7 @@ version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a"
dependencies = [
"core-foundation-sys 0.8.3",
"core-foundation-sys 0.8.6",
"libc",
]
@ -6847,15 +6872,6 @@ dependencies = [
"syn 2.0.37",
]
[[package]]
name = "serde_spanned"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186"
dependencies = [
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
@ -6868,18 +6884,6 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_yaml"
version = "0.8.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b"
dependencies = [
"indexmap 1.9.3",
"ryu",
"serde",
"yaml-rust",
]
[[package]]
name = "settings"
version = "0.1.0"
@ -6901,7 +6905,7 @@ dependencies = [
"serde_json",
"serde_json_lenient",
"smallvec",
"toml 0.5.11",
"toml",
"tree-sitter",
"tree-sitter-json 0.19.0",
"unindent",
@ -7007,18 +7011,6 @@ dependencies = [
"signal-hook-registry",
]
[[package]]
name = "signal-hook-mio"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af"
dependencies = [
"libc",
"mio 0.6.23",
"mio-uds",
"signal-hook",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.1"
@ -7209,12 +7201,6 @@ dependencies = [
"der",
]
[[package]]
name = "spsc-buffer"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be6c3f39c37a4283ee4b43d1311c828f2e1fb0541e76ea0cb1a2abd9ef2f5b3b"
[[package]]
name = "sqlez"
version = "0.1.0"
@ -7712,7 +7698,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a18d114d420ada3a891e6bc8e96a2023402203296a47cdd65083377dad18ba5"
dependencies = [
"cfg-if 1.0.0",
"core-foundation-sys 0.8.3",
"core-foundation-sys 0.8.6",
"libc",
"ntapi 0.4.1",
"once_cell",
@ -7878,7 +7864,7 @@ dependencies = [
"serde_json",
"settings",
"story",
"toml 0.5.11",
"toml",
"util",
"uuid 1.4.1",
]
@ -8183,26 +8169,11 @@ dependencies = [
"serde",
]
[[package]]
name = "toml"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
@ -8211,8 +8182,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [
"indexmap 2.0.0",
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
]
@ -8459,6 +8428,15 @@ dependencies = [
"tree-sitter",
]
[[package]]
name = "tree-sitter-gleam"
version = "0.34.0"
source = "git+https://github.com/gleam-lang/tree-sitter-gleam?rev=58b7cac8fc14c92b0677c542610d8738c373fa81#58b7cac8fc14c92b0677c542610d8738c373fa81"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-glsl"
version = "0.1.4"
@ -8672,6 +8650,15 @@ dependencies = [
"tree-sitter",
]
[[package]]
name = "tree-sitter-zig"
version = "0.0.1"
source = "git+https://github.com/maxxnino/tree-sitter-zig?rev=0d08703e4c3f426ec61695d7617415fff97029bd#0d08703e4c3f426ec61695d7617415fff97029bd"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "try-lock"
version = "0.2.4"
@ -9043,10 +9030,12 @@ dependencies = [
[[package]]
name = "vte"
version = "0.11.1"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197"
checksum = "40eb22ae96f050e0c0d6f7ce43feeae26c348fc4dea56928ca81537cfaa6188b"
dependencies = [
"bitflags 2.4.1",
"cursor-icon",
"log",
"serde",
"utf8parse",
@ -9620,15 +9609,6 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "yansi"
version = "0.5.1"
@ -9671,7 +9651,6 @@ dependencies = [
"client",
"collab_ui",
"collections",
"color",
"command_palette",
"copilot",
"copilot_ui",
@ -9738,7 +9717,7 @@ dependencies = [
"theme_selector",
"thiserror",
"tiny_http",
"toml 0.5.11",
"toml",
"tree-sitter",
"tree-sitter-bash",
"tree-sitter-c",
@ -9747,6 +9726,7 @@ dependencies = [
"tree-sitter-elixir",
"tree-sitter-elm",
"tree-sitter-embedded-template",
"tree-sitter-gleam",
"tree-sitter-glsl",
"tree-sitter-go",
"tree-sitter-haskell",
@ -9769,6 +9749,7 @@ dependencies = [
"tree-sitter-uiua",
"tree-sitter-vue",
"tree-sitter-yaml",
"tree-sitter-zig",
"unindent",
"url",
"urlencoding",

View File

@ -140,6 +140,7 @@ tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir"
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"}
tree-sitter-embedded-template = "0.20.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
tree-sitter-gleam = { git = "https://github.com/gleam-lang/tree-sitter-gleam", rev = "58b7cac8fc14c92b0677c542610d8738c373fa81" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
@ -161,18 +162,12 @@ tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", re
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "26bbaecda0039df4067861ab38ea8ea169f7f5aa"}
tree-sitter-vue = {git = "https://github.com/zed-industries/tree-sitter-vue", rev = "6608d9d60c386f19d80af7d8132322fa11199c42"}
tree-sitter-uiua = {git = "https://github.com/shnarazk/tree-sitter-uiua", rev = "9260f11be5900beda4ee6d1a24ab8ddfaf5a19b2"}
tree-sitter-zig = { git = "https://github.com/maxxnino/tree-sitter-zig", rev = "0d08703e4c3f426ec61695d7617415fff97029bd" }
[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "31c40449749c4263a91a43593831b82229049a4c" }
# wasmtime = { git = "https://github.com/bytecodealliance/wasmtime", rev = "v16.0.0" }
# TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457
cocoa = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
cocoa-foundation = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
core-foundation = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
core-foundation-sys = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
core-graphics = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
[profile.dev]
split-debuginfo = "unpacked"
debug = "limited"

View File

@ -447,6 +447,10 @@
//
"lsp": "elixir_ls"
},
// Settings specific to our deno integration
"deno": {
"enable": false
},
// Different settings for specific languages.
"languages": {
"Plain Text": {

View File

@ -2959,6 +2959,7 @@ impl InlineAssistant {
cx.prompt(
PromptLevel::Info,
prompt_text.as_str(),
None,
&["Continue", "Cancel"],
)
})?;

View File

@ -130,7 +130,8 @@ pub fn check(_: &Check, cx: &mut WindowContext) {
} else {
drop(cx.prompt(
gpui::PromptLevel::Info,
"Auto-updates disabled for non-bundled app.",
"Could not check for updates",
Some("Auto-updates disabled for non-bundled app."),
&["Ok"],
));
}

View File

@ -689,12 +689,7 @@ impl Client {
Ok(())
}
Err(error) => {
client.respond_with_error(
receipt,
proto::Error {
message: format!("{:?}", error),
},
)?;
client.respond_with_error(receipt, error.to_proto())?;
Err(error)
}
}

View File

@ -287,7 +287,7 @@ impl UserStore {
load_users.await?;
// Users are fetched in parallel above and cached in call to get_users
// No need to paralellize here
// No need to parallelize here
let mut updated_contacts = Vec::new();
let this = this
.upgrade()

View File

@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
version = "0.40.1"
version = "0.41.0"
publish = false
license = "AGPL-3.0-only"

View File

@ -169,6 +169,30 @@ impl Database {
self.run(body).await
}
pub async fn weak_transaction<F, Fut, T>(&self, f: F) -> Result<T>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let body = async {
let (tx, result) = self.with_weak_transaction(&f).await?;
match result {
Ok(result) => match tx.commit().await.map_err(Into::into) {
Ok(()) => return Ok(result),
Err(error) => {
return Err(error);
}
},
Err(error) => {
tx.rollback().await?;
return Err(error);
}
}
};
self.run(body).await
}
/// The same as room_transaction, but if you need to only optionally return a Room.
async fn optional_room_transaction<F, Fut, T>(&self, f: F) -> Result<Option<RoomGuard<T>>>
where
@ -284,6 +308,30 @@ impl Database {
Ok((tx, result))
}
async fn with_weak_transaction<F, Fut, T>(
&self,
f: &F,
) -> Result<(DatabaseTransaction, Result<T>)>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let tx = self
.pool
.begin_with_config(Some(IsolationLevel::ReadCommitted), None)
.await?;
let mut tx = Arc::new(Some(tx));
let result = f(TransactionHandle(tx.clone())).await;
let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
return Err(anyhow!(
"couldn't complete transaction because it's still in use"
))?;
};
Ok((tx, result))
}
async fn run<F, T>(&self, future: F) -> Result<T>
where
F: Future<Output = Result<T>>,
@ -303,13 +351,14 @@ impl Database {
}
}
async fn retry_on_serialization_error(&self, error: &Error, prev_attempt_count: u32) -> bool {
async fn retry_on_serialization_error(&self, error: &Error, prev_attempt_count: usize) -> bool {
// If the error is due to a failure to serialize concurrent transactions, then retry
// this transaction after a delay. With each subsequent retry, double the delay duration.
// Also vary the delay randomly in order to ensure different database connections retry
// at different times.
if is_serialization_error(error) {
let base_delay = 4_u64 << prev_attempt_count.min(16);
const SLEEPS: [f32; 10] = [10., 20., 40., 80., 160., 320., 640., 1280., 2560., 5120.];
if is_serialization_error(error) && prev_attempt_count < SLEEPS.len() {
let base_delay = SLEEPS[prev_attempt_count];
let randomized_delay = base_delay as f32 * self.rng.lock().await.gen_range(0.5..=2.0);
log::info!(
"retrying transaction after serialization error. delay: {} ms.",
@ -456,9 +505,8 @@ pub struct NewUserResult {
/// The result of moving a channel.
#[derive(Debug)]
pub struct MoveChannelResult {
pub participants_to_update: HashMap<UserId, ChannelsForUser>,
pub participants_to_remove: HashSet<UserId>,
pub moved_channels: HashSet<ChannelId>,
pub previous_participants: Vec<ChannelMember>,
pub descendent_ids: Vec<ChannelId>,
}
/// The result of renaming a channel.

View File

@ -1,5 +1,5 @@
use super::*;
use rpc::proto::channel_member::Kind;
use rpc::{proto::channel_member::Kind, ErrorCode, ErrorCodeExt};
use sea_orm::TryGetableMany;
impl Database {
@ -19,11 +19,7 @@ impl Database {
#[cfg(test)]
pub async fn create_root_channel(&self, name: &str, creator_id: UserId) -> Result<ChannelId> {
Ok(self
.create_channel(name, None, creator_id)
.await?
.channel
.id)
Ok(self.create_channel(name, None, creator_id).await?.id)
}
#[cfg(test)]
@ -36,7 +32,6 @@ impl Database {
Ok(self
.create_channel(name, Some(parent), creator_id)
.await?
.channel
.id)
}
@ -46,7 +41,7 @@ impl Database {
name: &str,
parent_channel_id: Option<ChannelId>,
admin_id: UserId,
) -> Result<CreateChannelResult> {
) -> Result<Channel> {
let name = Self::sanitize_channel_name(name)?;
self.transaction(move |tx| async move {
let mut parent = None;
@ -72,14 +67,7 @@ impl Database {
.insert(&*tx)
.await?;
let participants_to_update;
if let Some(parent) = &parent {
participants_to_update = self
.participants_to_notify_for_channel_change(parent, &*tx)
.await?;
} else {
participants_to_update = vec![];
if parent.is_none() {
channel_member::ActiveModel {
id: ActiveValue::NotSet,
channel_id: ActiveValue::Set(channel.id),
@ -89,12 +77,9 @@ impl Database {
}
.insert(&*tx)
.await?;
};
}
Ok(CreateChannelResult {
channel: Channel::from_model(channel, ChannelRole::Admin),
participants_to_update,
})
Ok(Channel::from_model(channel, ChannelRole::Admin))
})
.await
}
@ -166,7 +151,7 @@ impl Database {
}
if role.is_none() || role == Some(ChannelRole::Banned) {
Err(anyhow!("not allowed"))?
Err(ErrorCode::Forbidden.anyhow())?
}
let role = role.unwrap();
@ -718,6 +703,19 @@ impl Database {
})
}
pub async fn new_participants_to_notify(
&self,
parent_channel_id: ChannelId,
) -> Result<Vec<(UserId, ChannelsForUser)>> {
self.weak_transaction(|tx| async move {
let parent_channel = self.get_channel_internal(parent_channel_id, &*tx).await?;
self.participants_to_notify_for_channel_change(&parent_channel, &*tx)
.await
})
.await
}
// TODO: this is very expensive, and we should rethink
async fn participants_to_notify_for_channel_change(
&self,
new_parent: &channel::Model,
@ -1201,7 +1199,7 @@ impl Database {
Ok(channel::Entity::find_by_id(channel_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such channel"))?)
.ok_or_else(|| proto::ErrorCode::NoSuchChannel.anyhow())?)
}
pub(crate) async fn get_or_create_channel_room(
@ -1219,7 +1217,9 @@ impl Database {
let room_id = if let Some(room) = room {
if let Some(env) = room.environment {
if &env != environment {
Err(anyhow!("must join using the {} release", env))?;
Err(ErrorCode::WrongReleaseChannel
.with_tag("required", &env)
.anyhow())?;
}
}
room.id
@ -1285,7 +1285,7 @@ impl Database {
let mut model = channel.into_active_model();
model.parent_path = ActiveValue::Set(new_parent_path);
let channel = model.update(&*tx).await?;
model.update(&*tx).await?;
if new_parent_channel.is_none() {
channel_member::ActiveModel {
@ -1312,34 +1312,9 @@ impl Database {
.all(&*tx)
.await?;
let participants_to_update: HashMap<_, _> = self
.participants_to_notify_for_channel_change(
new_parent_channel.as_ref().unwrap_or(&channel),
&*tx,
)
.await?
.into_iter()
.collect();
let mut moved_channels: HashSet<ChannelId> = HashSet::default();
for id in descendent_ids {
moved_channels.insert(id);
}
moved_channels.insert(channel_id);
let mut participants_to_remove: HashSet<UserId> = HashSet::default();
for participant in previous_participants {
if participant.kind == proto::channel_member::Kind::AncestorMember {
if !participants_to_update.contains_key(&participant.user_id) {
participants_to_remove.insert(participant.user_id);
}
}
}
Ok(Some(MoveChannelResult {
participants_to_remove,
participants_to_update,
moved_channels,
previous_participants,
descendent_ids,
}))
})
.await

View File

@ -15,22 +15,18 @@ test_both_dbs!(
async fn test_channel_message_retrieval(db: &Arc<Database>) {
let user = new_test_user(db, "user@example.com").await;
let result = db.create_channel("channel", None, user).await.unwrap();
let channel = db.create_channel("channel", None, user).await.unwrap();
let owner_id = db.create_server("test").await.unwrap().0 as u32;
db.join_channel_chat(
result.channel.id,
rpc::ConnectionId { owner_id, id: 0 },
user,
)
.await
.unwrap();
db.join_channel_chat(channel.id, rpc::ConnectionId { owner_id, id: 0 }, user)
.await
.unwrap();
let mut all_messages = Vec::new();
for i in 0..10 {
all_messages.push(
db.create_channel_message(
result.channel.id,
channel.id,
user,
&i.to_string(),
&[],
@ -45,7 +41,7 @@ async fn test_channel_message_retrieval(db: &Arc<Database>) {
}
let messages = db
.get_channel_messages(result.channel.id, user, 3, None)
.get_channel_messages(channel.id, user, 3, None)
.await
.unwrap()
.into_iter()
@ -55,7 +51,7 @@ async fn test_channel_message_retrieval(db: &Arc<Database>) {
let messages = db
.get_channel_messages(
result.channel.id,
channel.id,
user,
4,
Some(MessageId::from_proto(all_messages[6])),
@ -100,7 +96,7 @@ async fn test_channel_message_nonces(db: &Arc<Database>) {
.await
.unwrap();
// As user A, create messages that re-use the same nonces. The requests
// As user A, create messages that reuse the same nonces. The requests
// succeed, but return the same ids.
let id1 = db
.create_channel_message(
@ -366,12 +362,7 @@ async fn test_channel_message_mentions(db: &Arc<Database>) {
let user_b = new_test_user(db, "user_b@example.com").await;
let user_c = new_test_user(db, "user_c@example.com").await;
let channel = db
.create_channel("channel", None, user_a)
.await
.unwrap()
.channel
.id;
let channel = db.create_channel("channel", None, user_a).await.unwrap().id;
db.invite_channel_member(channel, user_b, user_a, ChannelRole::Member)
.await
.unwrap();

View File

@ -3,14 +3,13 @@ mod connection_pool;
use crate::{
auth::{self, Impersonator},
db::{
self, BufferId, ChannelId, ChannelRole, ChannelsForUser, CreateChannelResult,
CreatedChannelMessage, Database, InviteMemberResult, MembershipUpdated, MessageId,
MoveChannelResult, NotificationId, ProjectId, RemoveChannelMemberResult,
RenameChannelResult, RespondToChannelInvite, RoomId, ServerId, SetChannelVisibilityResult,
User, UserId,
self, BufferId, ChannelId, ChannelRole, ChannelsForUser, CreatedChannelMessage, Database,
InviteMemberResult, MembershipUpdated, MessageId, NotificationId, ProjectId,
RemoveChannelMemberResult, RenameChannelResult, RespondToChannelInvite, RoomId, ServerId,
SetChannelVisibilityResult, User, UserId,
},
executor::Executor,
AppState, Result,
AppState, Error, Result,
};
use anyhow::anyhow;
use async_tungstenite::tungstenite::{
@ -44,7 +43,7 @@ use rpc::{
self, Ack, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo,
RequestMessage, ShareProject, UpdateChannelBufferCollaborators,
},
Connection, ConnectionId, Peer, Receipt, TypedEnvelope,
Connection, ConnectionId, ErrorCode, ErrorCodeExt, ErrorExt, Peer, Receipt, TypedEnvelope,
};
use serde::{Serialize, Serializer};
use std::{
@ -543,12 +542,11 @@ impl Server {
}
}
Err(error) => {
peer.respond_with_error(
receipt,
proto::Error {
message: error.to_string(),
},
)?;
let proto_err = match &error {
Error::Internal(err) => err.to_proto(),
_ => ErrorCode::Internal.message(format!("{}", error)).to_proto(),
};
peer.respond_with_error(receipt, proto_err)?;
Err(error)
}
}
@ -2302,10 +2300,7 @@ async fn create_channel(
let db = session.db().await;
let parent_id = request.parent_id.map(|id| ChannelId::from_proto(id));
let CreateChannelResult {
channel,
participants_to_update,
} = db
let channel = db
.create_channel(&request.name, parent_id, session.user_id)
.await?;
@ -2314,6 +2309,13 @@ async fn create_channel(
parent_id: request.parent_id,
})?;
let participants_to_update;
if let Some(parent) = parent_id {
participants_to_update = db.new_participants_to_notify(parent).await?;
} else {
participants_to_update = vec![];
}
let connection_pool = session.connection_pool().await;
for (user_id, channels) in participants_to_update {
let update = build_channels_update(channels, vec![]);
@ -2573,44 +2575,56 @@ async fn move_channel(
.move_channel(channel_id, to, session.user_id)
.await?;
notify_channel_moved(result, session).await?;
if let Some(result) = result {
let participants_to_update: HashMap<_, _> = session
.db()
.await
.new_participants_to_notify(to.unwrap_or(channel_id))
.await?
.into_iter()
.collect();
let mut moved_channels: HashSet<ChannelId> = HashSet::default();
for id in result.descendent_ids {
moved_channels.insert(id);
}
moved_channels.insert(channel_id);
let mut participants_to_remove: HashSet<UserId> = HashSet::default();
for participant in result.previous_participants {
if participant.kind == proto::channel_member::Kind::AncestorMember {
if !participants_to_update.contains_key(&participant.user_id) {
participants_to_remove.insert(participant.user_id);
}
}
}
let moved_channels: Vec<u64> = moved_channels.iter().map(|id| id.to_proto()).collect();
let connection_pool = session.connection_pool().await;
for (user_id, channels) in participants_to_update {
let mut update = build_channels_update(channels, vec![]);
update.delete_channels = moved_channels.clone();
for connection_id in connection_pool.user_connection_ids(user_id) {
session.peer.send(connection_id, update.clone())?;
}
}
for user_id in participants_to_remove {
let update = proto::UpdateChannels {
delete_channels: moved_channels.clone(),
..Default::default()
};
for connection_id in connection_pool.user_connection_ids(user_id) {
session.peer.send(connection_id, update.clone())?;
}
}
}
response.send(Ack {})?;
Ok(())
}
async fn notify_channel_moved(result: Option<MoveChannelResult>, session: Session) -> Result<()> {
let Some(MoveChannelResult {
participants_to_remove,
participants_to_update,
moved_channels,
}) = result
else {
return Ok(());
};
let moved_channels: Vec<u64> = moved_channels.iter().map(|id| id.to_proto()).collect();
let connection_pool = session.connection_pool().await;
for (user_id, channels) in participants_to_update {
let mut update = build_channels_update(channels, vec![]);
update.delete_channels = moved_channels.clone();
for connection_id in connection_pool.user_connection_ids(user_id) {
session.peer.send(connection_id, update.clone())?;
}
}
for user_id in participants_to_remove {
let update = proto::UpdateChannels {
delete_channels: moved_channels.clone(),
..Default::default()
};
for connection_id in connection_pool.user_connection_ids(user_id) {
session.peer.send(connection_id, update.clone())?;
}
}
Ok(())
}
/// Get the list of channel members
async fn get_channel_members(
request: proto::GetChannelMembers,

View File

@ -343,9 +343,11 @@ impl ChatPanel {
this.pt_3().child(
h_flex()
.text_ui_sm()
.child(div().absolute().child(
Avatar::new(message.sender.avatar_uri.clone()).size(cx.rem_size()),
))
.child(
div().absolute().child(
Avatar::new(message.sender.avatar_uri.clone()).size(rems(1.)),
),
)
.child(
div()
.pl(cx.rem_size() + px(6.0))

View File

@ -22,7 +22,10 @@ use gpui::{
};
use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrev};
use project::{Fs, Project};
use rpc::proto::{self, PeerId};
use rpc::{
proto::{self, PeerId},
ErrorCode, ErrorExt,
};
use serde_derive::{Deserialize, Serialize};
use settings::Settings;
use smallvec::SmallVec;
@ -35,7 +38,7 @@ use ui::{
use util::{maybe, ResultExt, TryFutureExt};
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
notifications::{NotifyResultExt, NotifyTaskExt},
notifications::{DetachAndPromptErr, NotifyResultExt, NotifyTaskExt},
Workspace,
};
@ -879,7 +882,7 @@ impl CollabPanel {
.update(cx, |workspace, cx| {
let app_state = workspace.app_state().clone();
workspace::join_remote_project(project_id, host_user_id, app_state, cx)
.detach_and_log_err(cx);
.detach_and_prompt_err("Failed to join project", cx, |_, _| None);
})
.ok();
}))
@ -1017,7 +1020,12 @@ impl CollabPanel {
)
})
})
.detach_and_notify_err(cx)
.detach_and_prompt_err("Failed to grant write access", cx, |e, _| {
match e.error_code() {
ErrorCode::NeedsCla => Some("This user has not yet signed the CLA at https://zed.dev/cla.".into()),
_ => None,
}
})
}),
)
} else if role == proto::ChannelRole::Member {
@ -1038,7 +1046,7 @@ impl CollabPanel {
)
})
})
.detach_and_notify_err(cx)
.detach_and_prompt_err("Failed to revoke write access", cx, |_, _| None)
}),
)
} else {
@ -1258,7 +1266,11 @@ impl CollabPanel {
app_state,
cx,
)
.detach_and_log_err(cx);
.detach_and_prompt_err(
"Failed to join project",
cx,
|_, _| None,
);
}
}
ListEntry::ParticipantScreen { peer_id, .. } => {
@ -1432,7 +1444,7 @@ impl CollabPanel {
fn leave_call(cx: &mut WindowContext) {
ActiveCall::global(cx)
.update(cx, |call, cx| call.hang_up(cx))
.detach_and_log_err(cx);
.detach_and_prompt_err("Failed to hang up", cx, |_, _| None);
}
fn toggle_contact_finder(&mut self, cx: &mut ViewContext<Self>) {
@ -1534,11 +1546,11 @@ impl CollabPanel {
cx: &mut ViewContext<CollabPanel>,
) {
if let Some(clipboard) = self.channel_clipboard.take() {
self.channel_store.update(cx, |channel_store, cx| {
channel_store
.move_channel(clipboard.channel_id, Some(to_channel_id), cx)
.detach_and_log_err(cx)
})
self.channel_store
.update(cx, |channel_store, cx| {
channel_store.move_channel(clipboard.channel_id, Some(to_channel_id), cx)
})
.detach_and_prompt_err("Failed to move channel", cx, |_, _| None)
}
}
@ -1610,7 +1622,12 @@ impl CollabPanel {
"Are you sure you want to remove the channel \"{}\"?",
channel.name
);
let answer = cx.prompt(PromptLevel::Warning, &prompt_message, &["Remove", "Cancel"]);
let answer = cx.prompt(
PromptLevel::Warning,
&prompt_message,
None,
&["Remove", "Cancel"],
);
cx.spawn(|this, mut cx| async move {
if answer.await? == 0 {
channel_store
@ -1631,7 +1648,12 @@ impl CollabPanel {
"Are you sure you want to remove \"{}\" from your contacts?",
github_login
);
let answer = cx.prompt(PromptLevel::Warning, &prompt_message, &["Remove", "Cancel"]);
let answer = cx.prompt(
PromptLevel::Warning,
&prompt_message,
None,
&["Remove", "Cancel"],
);
cx.spawn(|_, mut cx| async move {
if answer.await? == 0 {
user_store
@ -1641,7 +1663,7 @@ impl CollabPanel {
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
.detach_and_prompt_err("Failed to remove contact", cx, |_, _| None);
}
fn respond_to_contact_request(
@ -1654,7 +1676,7 @@ impl CollabPanel {
.update(cx, |store, cx| {
store.respond_to_contact_request(user_id, accept, cx)
})
.detach_and_log_err(cx);
.detach_and_prompt_err("Failed to respond to contact request", cx, |_, _| None);
}
fn respond_to_channel_invite(
@ -1675,7 +1697,7 @@ impl CollabPanel {
.update(cx, |call, cx| {
call.invite(recipient_user_id, Some(self.project.clone()), cx)
})
.detach_and_log_err(cx);
.detach_and_prompt_err("Call failed", cx, |_, _| None);
}
fn join_channel(&self, channel_id: u64, cx: &mut ViewContext<Self>) {
@ -1691,7 +1713,7 @@ impl CollabPanel {
Some(handle),
cx,
)
.detach_and_log_err(cx)
.detach_and_prompt_err("Failed to join channel", cx, |_, _| None)
}
fn join_channel_chat(&mut self, channel_id: ChannelId, cx: &mut ViewContext<Self>) {
@ -1704,7 +1726,7 @@ impl CollabPanel {
panel.update(cx, |panel, cx| {
panel
.select_channel(channel_id, None, cx)
.detach_and_log_err(cx);
.detach_and_notify_err(cx);
});
}
});
@ -1981,7 +2003,7 @@ impl CollabPanel {
.update(cx, |channel_store, cx| {
channel_store.move_channel(dragged_channel.id, None, cx)
})
.detach_and_log_err(cx)
.detach_and_prompt_err("Failed to move channel", cx, |_, _| None)
}))
})
}
@ -2257,7 +2279,7 @@ impl CollabPanel {
.update(cx, |channel_store, cx| {
channel_store.move_channel(dragged_channel.id, Some(channel_id), cx)
})
.detach_and_log_err(cx)
.detach_and_prompt_err("Failed to move channel", cx, |_, _| None)
}))
.child(
ListItem::new(channel_id as usize)

View File

@ -14,7 +14,7 @@ use rpc::proto::channel_member;
use std::sync::Arc;
use ui::{prelude::*, Avatar, Checkbox, ContextMenu, ListItem, ListItemSpacing};
use util::TryFutureExt;
use workspace::{notifications::NotifyTaskExt, ModalView};
use workspace::{notifications::DetachAndPromptErr, ModalView};
actions!(
channel_modal,
@ -498,7 +498,7 @@ impl ChannelModalDelegate {
cx.notify();
})
})
.detach_and_notify_err(cx);
.detach_and_prompt_err("Failed to update role", cx, |_, _| None);
Some(())
}
@ -530,7 +530,7 @@ impl ChannelModalDelegate {
cx.notify();
})
})
.detach_and_notify_err(cx);
.detach_and_prompt_err("Failed to remove member", cx, |_, _| None);
Some(())
}
@ -556,7 +556,7 @@ impl ChannelModalDelegate {
cx.notify();
})
})
.detach_and_notify_err(cx);
.detach_and_prompt_err("Failed to invite member", cx, |_, _| None);
}
fn show_context_menu(&mut self, ix: usize, cx: &mut ViewContext<Picker<Self>>) {

View File

@ -542,7 +542,9 @@ impl CollabTitlebarItem {
})?
.clone();
Some(Avatar::new(follower.avatar_uri.clone()))
Some(div().mt(-px(4.)).child(
Avatar::new(follower.avatar_uri.clone()).size(rems(0.75)),
))
},
))
.children(if extra_count > 0 {

View File

@ -5,7 +5,6 @@ edition = "2021"
publish = false
license = "GPL-3.0-only"
[features]
default = []
stories = ["dep:itertools", "dep:story"]
@ -15,20 +14,6 @@ path = "src/color.rs"
doctest = true
[dependencies]
# TODO: Clean up dependencies
anyhow.workspace = true
fs = { path = "../fs" }
indexmap = "1.6.2"
parking_lot.workspace = true
refineable.workspace = true
schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
settings = { path = "../settings" }
story = { path = "../story", optional = true }
toml.workspace = true
uuid.workspace = true
util = { path = "../util" }
itertools = { version = "0.11.0", optional = true }
palette = "0.7.3"

View File

@ -59,10 +59,7 @@ pub fn hex_to_hsla(s: &str) -> Result<RGBAColor, String> {
// Expand shorthand formats #RGB and #RGBA to #RRGGBB and #RRGGBBAA
let hex = match hex.len() {
3 => hex
.chars()
.map(|c| c.to_string().repeat(2))
.collect::<String>(),
3 => hex.chars().map(|c| c.to_string().repeat(2)).collect(),
4 => {
let (rgb, alpha) = hex.split_at(3);
let rgb = rgb
@ -80,14 +77,12 @@ pub fn hex_to_hsla(s: &str) -> Result<RGBAColor, String> {
let hex_val =
u32::from_str_radix(&hex, 16).map_err(|_| format!("Invalid hexadecimal string: {}", s))?;
let r = ((hex_val >> 24) & 0xFF) as f32 / 255.0;
let g = ((hex_val >> 16) & 0xFF) as f32 / 255.0;
let b = ((hex_val >> 8) & 0xFF) as f32 / 255.0;
let a = (hex_val & 0xFF) as f32 / 255.0;
let color = RGBAColor { r, g, b, a };
Ok(color)
Ok(RGBAColor {
r: ((hex_val >> 24) & 0xFF) as f32 / 255.0,
g: ((hex_val >> 16) & 0xFF) as f32 / 255.0,
b: ((hex_val >> 8) & 0xFF) as f32 / 255.0,
a: (hex_val & 0xFF) as f32 / 255.0,
})
}
// These derives implement to and from palette's color types.
@ -128,8 +123,7 @@ where
Rgb<S, f32>: FromColorUnclamped<Srgb>,
{
fn from_color_unclamped(color: RGBAColor) -> Self {
let srgb = Srgb::new(color.r, color.g, color.b);
Self::from_color_unclamped(srgb)
Self::from_color_unclamped(Srgb::new(color.r, color.g, color.b))
}
}

View File

@ -88,15 +88,15 @@ impl CopilotCodeVerification {
let connect_button_label = if connect_clicked {
"Waiting for connection..."
} else {
"Connect to Github"
"Connect to GitHub"
};
v_flex()
.flex_1()
.gap_2()
.items_center()
.child(Headline::new("Use Github Copilot in Zed.").size(HeadlineSize::Large))
.child(Headline::new("Use GitHub Copilot in Zed.").size(HeadlineSize::Large))
.child(
Label::new("Using Copilot requires an active subscription on Github.")
Label::new("Using Copilot requires an active subscription on GitHub.")
.color(Color::Muted),
)
.child(Self::render_device_code(data, cx))
@ -139,7 +139,7 @@ impl CopilotCodeVerification {
"You can enable Copilot by connecting your existing license once you have subscribed or renewed your subscription.",
).color(Color::Warning))
.child(
Button::new("copilot-subscribe-button", "Subscribe on Github")
Button::new("copilot-subscribe-button", "Subscribe on GitHub")
.full_width()
.on_click(|_, cx| cx.open_url(COPILOT_SIGN_UP_URL)),
)

View File

@ -13,12 +13,6 @@ pub struct SelectPrevious {
pub replace_newest: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct SelectAllMatches {
#[serde(default)]
pub replace_newest: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct SelectToBeginningOfLine {
#[serde(default)]
@ -81,7 +75,6 @@ impl_actions!(
[
SelectNext,
SelectPrevious,
SelectAllMatches,
SelectToBeginningOfLine,
MovePageUp,
MovePageDown,
@ -128,6 +121,7 @@ gpui::actions!(
DeleteToNextWordEnd,
DeleteToPreviousSubwordStart,
DeleteToPreviousWordStart,
DisplayCursorNames,
DuplicateLine,
ExpandMacroRecursively,
FindAllReferences,
@ -185,6 +179,7 @@ gpui::actions!(
ScrollCursorCenter,
ScrollCursorTop,
SelectAll,
SelectAllMatches,
SelectDown,
SelectLargerSyntaxNode,
SelectLeft,
@ -214,6 +209,5 @@ gpui::actions!(
Undo,
UndoSelection,
UnfoldLines,
DisplayCursorNames
]
);

View File

@ -2502,34 +2502,43 @@ impl Editor {
)
});
// Comment extension on newline is allowed only for cursor selections
let comment_delimiter = language.line_comment_prefix().filter(|_| {
let comment_delimiter = language.line_comment_prefixes().filter(|_| {
let is_comment_extension_enabled =
multi_buffer.settings_at(0, cx).extend_comment_on_newline;
is_cursor && is_comment_extension_enabled
});
let comment_delimiter = if let Some(delimiter) = comment_delimiter {
buffer
.buffer_line_for_row(start_point.row)
.is_some_and(|(snapshot, range)| {
let mut index_of_first_non_whitespace = 0;
let line_starts_with_comment = snapshot
.chars_for_range(range)
.skip_while(|c| {
let should_skip = c.is_whitespace();
if should_skip {
index_of_first_non_whitespace += 1;
}
should_skip
})
.take(delimiter.len())
.eq(delimiter.chars());
let cursor_is_placed_after_comment_marker =
index_of_first_non_whitespace + delimiter.len()
<= start_point.column as usize;
line_starts_with_comment
&& cursor_is_placed_after_comment_marker
let get_comment_delimiter = |delimiters: &[Arc<str>]| {
let max_len_of_delimiter =
delimiters.iter().map(|delimiter| delimiter.len()).max()?;
let (snapshot, range) =
buffer.buffer_line_for_row(start_point.row)?;
let mut index_of_first_non_whitespace = 0;
let comment_candidate = snapshot
.chars_for_range(range)
.skip_while(|c| {
let should_skip = c.is_whitespace();
if should_skip {
index_of_first_non_whitespace += 1;
}
should_skip
})
.then(|| delimiter.clone())
.take(max_len_of_delimiter)
.collect::<String>();
let comment_prefix = delimiters.iter().find(|comment_prefix| {
comment_candidate.starts_with(comment_prefix.as_ref())
})?;
let cursor_is_placed_after_comment_marker =
index_of_first_non_whitespace + comment_prefix.len()
<= start_point.column as usize;
if cursor_is_placed_after_comment_marker {
Some(comment_prefix.clone())
} else {
None
}
};
let comment_delimiter = if let Some(delimiters) = comment_delimiter {
get_comment_delimiter(delimiters)
} else {
None
};
@ -6113,6 +6122,7 @@ impl Editor {
|| (!movement::is_inside_word(&display_map, display_range.start)
&& !movement::is_inside_word(&display_map, display_range.end))
{
// TODO: This is n^2, because we might check all the selections
if selections
.iter()
.find(|selection| selection.range().overlaps(&offset_range))
@ -6222,25 +6232,76 @@ impl Editor {
pub fn select_all_matches(
&mut self,
action: &SelectAllMatches,
_action: &SelectAllMatches,
cx: &mut ViewContext<Self>,
) -> Result<()> {
self.push_to_selection_history();
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
loop {
self.select_next_match_internal(&display_map, action.replace_newest, None, cx)?;
self.select_next_match_internal(&display_map, false, None, cx)?;
let Some(select_next_state) = self.select_next_state.as_mut() else {
return Ok(());
};
if select_next_state.done {
return Ok(());
}
if self
.select_next_state
.as_ref()
.map(|selection_state| selection_state.done)
.unwrap_or(true)
let mut new_selections = self.selections.all::<usize>(cx);
let buffer = &display_map.buffer_snapshot;
let query_matches = select_next_state
.query
.stream_find_iter(buffer.bytes_in_range(0..buffer.len()));
for query_match in query_matches {
let query_match = query_match.unwrap(); // can only fail due to I/O
let offset_range = query_match.start()..query_match.end();
let display_range = offset_range.start.to_display_point(&display_map)
..offset_range.end.to_display_point(&display_map);
if !select_next_state.wordwise
|| (!movement::is_inside_word(&display_map, display_range.start)
&& !movement::is_inside_word(&display_map, display_range.end))
{
break;
self.selections.change_with(cx, |selections| {
new_selections.push(Selection {
id: selections.new_selection_id(),
start: offset_range.start,
end: offset_range.end,
reversed: false,
goal: SelectionGoal::None,
});
});
}
}
new_selections.sort_by_key(|selection| selection.start);
let mut ix = 0;
while ix + 1 < new_selections.len() {
let current_selection = &new_selections[ix];
let next_selection = &new_selections[ix + 1];
if current_selection.range().overlaps(&next_selection.range()) {
if current_selection.id < next_selection.id {
new_selections.remove(ix + 1);
} else {
new_selections.remove(ix);
}
} else {
ix += 1;
}
}
select_next_state.done = true;
self.unfold_ranges(
new_selections.iter().map(|selection| selection.range()),
false,
false,
cx,
);
self.change_selections(Some(Autoscroll::fit()), cx, |selections| {
selections.select(new_selections)
});
Ok(())
}
@ -6509,7 +6570,10 @@ impl Editor {
}
// If the language has line comments, toggle those.
if let Some(full_comment_prefix) = language.line_comment_prefix() {
if let Some(full_comment_prefix) = language
.line_comment_prefixes()
.and_then(|prefixes| prefixes.first())
{
// Split the comment prefix's trailing whitespace into a separate string,
// as that portion won't be used for detecting if a line is a comment.
let comment_prefix = full_comment_prefix.trim_end_matches(' ');
@ -6517,7 +6581,7 @@ impl Editor {
let mut all_selection_lines_are_comments = true;
for row in start_row..=end_row {
if snapshot.is_line_blank(row) && start_row < end_row {
if start_row < end_row && snapshot.is_line_blank(row) {
continue;
}

View File

@ -1942,7 +1942,7 @@ async fn test_newline_comments(cx: &mut gpui::TestAppContext) {
let language = Arc::new(Language::new(
LanguageConfig {
line_comment: Some("//".into()),
line_comments: vec!["//".into()],
..LanguageConfig::default()
},
None,
@ -3820,6 +3820,18 @@ async fn test_select_next(cx: &mut gpui::TestAppContext) {
cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»");
}
#[gpui::test]
async fn test_select_all_matches(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
cx.set_state("abc\nˇabc abc\ndefabc\nabc");
cx.update_editor(|e, cx| e.select_all_matches(&SelectAllMatches::default(), cx))
.unwrap();
cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»");
}
#[gpui::test]
async fn test_select_next_with_multiple_carets(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
@ -5724,7 +5736,7 @@ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) {
let mut cx = EditorTestContext::new(cx).await;
let language = Arc::new(Language::new(
LanguageConfig {
line_comment: Some("// ".into()),
line_comments: vec!["// ".into()],
..Default::default()
},
Some(tree_sitter_rust::language()),
@ -5826,7 +5838,7 @@ async fn test_advance_downward_on_toggle_comment(cx: &mut gpui::TestAppContext)
let language = Arc::new(Language::new(
LanguageConfig {
line_comment: Some("// ".into()),
line_comments: vec!["// ".into()],
..Default::default()
},
Some(tree_sitter_rust::language()),
@ -5981,7 +5993,7 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
let javascript_language = Arc::new(Language::new(
LanguageConfig {
name: "JavaScript".into(),
line_comment: Some("// ".into()),
line_comments: vec!["// ".into()],
..Default::default()
},
Some(tree_sitter_typescript::language_tsx()),

View File

@ -567,7 +567,7 @@ impl EditorElement {
cx,
);
hover_at(editor, Some(point), cx);
Self::update_visible_cursor(editor, point, cx);
Self::update_visible_cursor(editor, point, position_map, cx);
}
None => {
update_inlay_link_and_hover_points(
@ -592,9 +592,10 @@ impl EditorElement {
fn update_visible_cursor(
editor: &mut Editor,
point: DisplayPoint,
position_map: &PositionMap,
cx: &mut ViewContext<Editor>,
) {
let snapshot = editor.snapshot(cx);
let snapshot = &position_map.snapshot;
let Some(hub) = editor.collaboration_hub() else {
return;
};
@ -3581,7 +3582,7 @@ mod tests {
);
// multi-buffer support
// in DisplayPoint co-ordinates, this is what we're dealing with:
// in DisplayPoint coordinates, this is what we're dealing with:
// 0: [[file
// 1: header]]
// 2: aaaaaa

View File

@ -1971,7 +1971,7 @@ pub mod tests {
assert_eq!(
lsp_request_count.load(Ordering::Relaxed),
3,
"Should query for new hints when they got reenabled"
"Should query for new hints when they got re-enabled"
);
assert_eq!(
vec![
@ -1980,7 +1980,7 @@ pub mod tests {
"type hint".to_string(),
],
cached_hint_labels(editor),
"Should get its cached hints fully repopulated after the hints got reenabled"
"Should get its cached hints fully repopulated after the hints got re-enabled"
);
assert_eq!(
vec!["parameter hint".to_string()],
@ -1990,11 +1990,11 @@ pub mod tests {
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(
inlay_cache.allowed_hint_kinds, final_allowed_hint_kinds,
"Cache should update editor settings when hints got reenabled"
"Cache should update editor settings when hints got re-enabled"
);
assert_eq!(
inlay_cache.version, edits_made,
"Cache should update its version after hints got reenabled"
"Cache should update its version after hints got re-enabled"
);
});
@ -2736,7 +2736,7 @@ pub mod tests {
assert_eq!(expected_hints, cached_hint_labels(editor),
"After multibuffer was scrolled to the end, further scrolls up should not bring more hints");
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen during scrolling already scolled buffer");
assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen during scrolling already scrolled buffer");
});
editor_edited.store(true, Ordering::Release);
@ -2762,7 +2762,7 @@ pub mod tests {
assert_eq!(
expected_hints,
cached_hint_labels(editor),
"After multibuffer edit, editor gets scolled back to the last selection; \
"After multibuffer edit, editor gets scrolled back to the last selection; \
all hints should be invalidated and required for all of its visible excerpts"
);
assert_eq!(expected_hints, visible_hint_labels(editor, cx));

View File

@ -15,47 +15,53 @@ actions!(
CopySystemSpecsIntoClipboard,
FileBugReport,
RequestFeature,
OpenZedCommunityRepo
OpenZedRepo
]
);
pub fn init(cx: &mut AppContext) {
// TODO: a way to combine these two into one?
cx.observe_new_views(feedback_modal::FeedbackModal::register)
.detach();
const fn zed_repo_url() -> &'static str {
"https://github.com/zed-industries/zed"
}
cx.observe_new_views(|workspace: &mut Workspace, _| {
const fn request_feature_url() -> &'static str {
"https://github.com/zed-industries/zed/issues/new?assignees=&labels=enhancement%2Ctriage&template=0_feature_request.yml"
}
fn file_bug_report_url(specs: &SystemSpecs) -> String {
format!(
"https://github.com/zed-industries/zed/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml&environment={}",
urlencoding::encode(&specs.to_string())
)
}
pub fn init(cx: &mut AppContext) {
cx.observe_new_views(|workspace: &mut Workspace, cx| {
feedback_modal::FeedbackModal::register(workspace, cx);
workspace
.register_action(|_, _: &CopySystemSpecsIntoClipboard, cx| {
let specs = SystemSpecs::new(&cx).to_string();
let specs = SystemSpecs::new(&cx).to_string();
let prompt = cx.prompt(
PromptLevel::Info,
&format!("Copied into clipboard:\n\n{specs}"),
&["OK"],
);
cx.spawn(|_, _cx| async move {
prompt.await.ok();
})
.detach();
let item = ClipboardItem::new(specs.clone());
cx.write_to_clipboard(item);
let prompt = cx.prompt(
PromptLevel::Info,
"Copied into clipboard",
Some(&specs),
&["OK"],
);
cx.spawn(|_, _cx| async move {
prompt.await.ok();
})
.detach();
cx.write_to_clipboard(ClipboardItem::new(specs.clone()));
})
.register_action(|_, _: &RequestFeature, cx| {
let url = "https://github.com/zed-industries/zed/issues/new?assignees=&labels=enhancement%2Ctriage&template=0_feature_request.yml";
cx.open_url(url);
cx.open_url(request_feature_url());
})
.register_action(move |_, _: &FileBugReport, cx| {
let url = format!(
"https://github.com/zed-industries/zed/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml&environment={}",
urlencoding::encode(&SystemSpecs::new(&cx).to_string())
);
cx.open_url(&url);
cx.open_url(&file_bug_report_url(&SystemSpecs::new(&cx)));
})
.register_action(move |_, _: &OpenZedCommunityRepo, cx| {
let url = "https://github.com/zed-industries/zed";
cx.open_url(&url);
});
.register_action(move |_, _: &OpenZedRepo, cx| {
cx.open_url(zed_repo_url());
});
})
.detach();
}

View File

@ -19,7 +19,7 @@ use ui::{prelude::*, Button, ButtonStyle, IconPosition, Tooltip};
use util::ResultExt;
use workspace::{ModalView, Toast, Workspace};
use crate::{system_specs::SystemSpecs, GiveFeedback, OpenZedCommunityRepo};
use crate::{system_specs::SystemSpecs, GiveFeedback, OpenZedRepo};
// For UI testing purposes
const SEND_SUCCESS_IN_DEV_MODE: bool = true;
@ -97,7 +97,7 @@ impl ModalView for FeedbackModal {
return true;
}
let answer = cx.prompt(PromptLevel::Info, "Discard feedback?", &["Yes", "No"]);
let answer = cx.prompt(PromptLevel::Info, "Discard feedback?", None, &["Yes", "No"]);
cx.spawn(move |this, mut cx| async move {
if answer.await.ok() == Some(0) {
@ -222,6 +222,7 @@ impl FeedbackModal {
let answer = cx.prompt(
PromptLevel::Info,
"Ready to submit your feedback?",
None,
&["Yes, Submit!", "No"],
);
let client = cx.global::<Arc<Client>>().clone();
@ -255,6 +256,7 @@ impl FeedbackModal {
let prompt = cx.prompt(
PromptLevel::Critical,
FEEDBACK_SUBMISSION_ERROR_TEXT,
None,
&["OK"],
);
cx.spawn(|_, _cx| async move {
@ -417,8 +419,7 @@ impl Render for FeedbackModal {
"Submit"
};
let open_community_repo =
cx.listener(|_, _, cx| cx.dispatch_action(Box::new(OpenZedCommunityRepo)));
let open_zed_repo = cx.listener(|_, _, cx| cx.dispatch_action(Box::new(OpenZedRepo)));
v_flex()
.elevation_3(cx)
@ -485,12 +486,12 @@ impl Render for FeedbackModal {
.justify_between()
.gap_1()
.child(
Button::new("community_repository", "Community Repository")
Button::new("zed_repository", "Zed Repository")
.style(ButtonStyle::Transparent)
.icon(IconName::ExternalLink)
.icon_position(IconPosition::End)
.icon_size(IconSize::Small)
.on_click(open_community_repo),
.on_click(open_zed_repo),
)
.child(
h_flex()

View File

@ -11,6 +11,7 @@ use gpui::{
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
use std::{
cmp,
path::{Path, PathBuf},
sync::{
atomic::{self, AtomicBool},
@ -143,16 +144,51 @@ pub struct FileFinderDelegate {
history_items: Vec<FoundPath>,
}
/// Use a custom ordering for file finder: the regular one
/// defines max element with the highest score and the latest alphanumerical path (in case of a tie on other params), e.g:
/// `[{score: 0.5, path = "c/d" }, { score: 0.5, path = "/a/b" }]`
///
/// In the file finder, we would prefer to have the max element with the highest score and the earliest alphanumerical path, e.g:
/// `[{ score: 0.5, path = "/a/b" }, {score: 0.5, path = "c/d" }]`
/// as the files are shown in the project panel lists.
#[derive(Debug, Clone, PartialEq, Eq)]
struct ProjectPanelOrdMatch(PathMatch);

impl Ord for ProjectPanelOrdMatch {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        // Scores are floats, so `partial_cmp` can return `None` (NaN);
        // treat such pairs as equal instead of panicking.
        self.0
            .score
            .partial_cmp(&other.0.score)
            .unwrap_or(cmp::Ordering::Equal)
            .then_with(|| self.0.worktree_id.cmp(&other.0.worktree_id))
            // A smaller distance to the relative ancestor ranks higher,
            // hence the operands are swapped.
            .then_with(|| {
                other
                    .0
                    .distance_to_relative_ancestor
                    .cmp(&self.0.distance_to_relative_ancestor)
            })
            // Reverse the path ordering so that, under max-first sorting,
            // the alphanumerically earliest path wins ties.
            .then_with(|| self.0.path.cmp(&other.0.path).reverse())
    }
}

impl PartialOrd for ProjectPanelOrdMatch {
    // Canonical form: derive `partial_cmp` from the total order defined by
    // `cmp`, rather than the other way around (which needed an `unwrap`).
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
#[derive(Debug, Default)]
struct Matches {
history: Vec<(FoundPath, Option<PathMatch>)>,
search: Vec<PathMatch>,
history: Vec<(FoundPath, Option<ProjectPanelOrdMatch>)>,
search: Vec<ProjectPanelOrdMatch>,
}
#[derive(Debug)]
enum Match<'a> {
History(&'a FoundPath, Option<&'a PathMatch>),
Search(&'a PathMatch),
History(&'a FoundPath, Option<&'a ProjectPanelOrdMatch>),
Search(&'a ProjectPanelOrdMatch),
}
impl Matches {
@ -176,45 +212,44 @@ impl Matches {
&mut self,
history_items: &Vec<FoundPath>,
query: &PathLikeWithPosition<FileSearchQuery>,
mut new_search_matches: Vec<PathMatch>,
new_search_matches: impl Iterator<Item = ProjectPanelOrdMatch>,
extend_old_matches: bool,
) {
let matching_history_paths = matching_history_item_paths(history_items, query);
new_search_matches
.retain(|path_match| !matching_history_paths.contains_key(&path_match.path));
let history_items_to_show = history_items
.iter()
.filter_map(|history_item| {
Some((
history_item.clone(),
Some(
matching_history_paths
.get(&history_item.project.path)?
.clone(),
),
))
})
.collect::<Vec<_>>();
self.history = history_items_to_show;
let new_search_matches = new_search_matches
.filter(|path_match| !matching_history_paths.contains_key(&path_match.0.path));
let history_items_to_show = history_items.iter().filter_map(|history_item| {
Some((
history_item.clone(),
Some(
matching_history_paths
.get(&history_item.project.path)?
.clone(),
),
))
});
self.history.clear();
util::extend_sorted(
&mut self.history,
history_items_to_show,
100,
|(_, a), (_, b)| b.cmp(a),
);
if extend_old_matches {
self.search
.retain(|path_match| !matching_history_paths.contains_key(&path_match.path));
util::extend_sorted(
&mut self.search,
new_search_matches.into_iter(),
100,
|a, b| b.cmp(a),
)
.retain(|path_match| !matching_history_paths.contains_key(&path_match.0.path));
} else {
self.search = new_search_matches;
self.search.clear();
}
util::extend_sorted(&mut self.search, new_search_matches, 100, |a, b| b.cmp(a));
}
}
fn matching_history_item_paths(
history_items: &Vec<FoundPath>,
query: &PathLikeWithPosition<FileSearchQuery>,
) -> HashMap<Arc<Path>, PathMatch> {
) -> HashMap<Arc<Path>, ProjectPanelOrdMatch> {
let history_items_by_worktrees = history_items
.iter()
.filter_map(|found_path| {
@ -257,7 +292,12 @@ fn matching_history_item_paths(
max_results,
)
.into_iter()
.map(|path_match| (Arc::clone(&path_match.path), path_match)),
.map(|path_match| {
(
Arc::clone(&path_match.path),
ProjectPanelOrdMatch(path_match),
)
}),
);
}
matching_history_paths
@ -383,7 +423,9 @@ impl FileFinderDelegate {
&cancel_flag,
cx.background_executor().clone(),
)
.await;
.await
.into_iter()
.map(ProjectPanelOrdMatch);
let did_cancel = cancel_flag.load(atomic::Ordering::Relaxed);
picker
.update(&mut cx, |picker, cx| {
@ -401,7 +443,7 @@ impl FileFinderDelegate {
search_id: usize,
did_cancel: bool,
query: PathLikeWithPosition<FileSearchQuery>,
matches: Vec<PathMatch>,
matches: impl IntoIterator<Item = ProjectPanelOrdMatch>,
cx: &mut ViewContext<Picker<Self>>,
) {
if search_id >= self.latest_search_id {
@ -412,8 +454,12 @@ impl FileFinderDelegate {
.latest_search_query
.as_ref()
.map(|query| query.path_like.path_query());
self.matches
.push_new_matches(&self.history_items, &query, matches, extend_old_matches);
self.matches.push_new_matches(
&self.history_items,
&query,
matches.into_iter(),
extend_old_matches,
);
self.latest_search_query = Some(query);
self.latest_search_did_cancel = did_cancel;
cx.notify();
@ -471,12 +517,12 @@ impl FileFinderDelegate {
if let Some(found_path_match) = found_path_match {
path_match
.positions
.extend(found_path_match.positions.iter())
.extend(found_path_match.0.positions.iter())
}
self.labels_for_path_match(&path_match)
}
Match::Search(path_match) => self.labels_for_path_match(path_match),
Match::Search(path_match) => self.labels_for_path_match(&path_match.0),
};
if file_name_positions.is_empty() {
@ -556,14 +602,14 @@ impl FileFinderDelegate {
if let Some((worktree, relative_path)) =
project.find_local_worktree(query_path, cx)
{
path_matches.push(PathMatch {
score: 0.0,
path_matches.push(ProjectPanelOrdMatch(PathMatch {
score: 1.0,
positions: Vec::new(),
worktree_id: worktree.read(cx).id().to_usize(),
path: Arc::from(relative_path),
path_prefix: "".into(),
distance_to_relative_ancestor: usize::MAX,
});
}));
}
})
.log_err();
@ -724,8 +770,8 @@ impl PickerDelegate for FileFinderDelegate {
Match::Search(m) => split_or_open(
workspace,
ProjectPath {
worktree_id: WorktreeId::from_usize(m.worktree_id),
path: m.path.clone(),
worktree_id: WorktreeId::from_usize(m.0.worktree_id),
path: m.0.path.clone(),
},
cx,
),
@ -805,3 +851,101 @@ impl PickerDelegate for FileFinderDelegate {
)
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a `ProjectPanelOrdMatch` with the given score and path.
    /// All other `PathMatch` fields are irrelevant to the ordering under
    /// test, so they are filled with fixed defaults.
    fn ord_match(score: f64, path: &str) -> ProjectPanelOrdMatch {
        ProjectPanelOrdMatch(PathMatch {
            score,
            positions: Vec::new(),
            worktree_id: 0,
            path: Arc::from(Path::new(path)),
            path_prefix: Arc::from(""),
            distance_to_relative_ancestor: 0,
        })
    }

    #[test]
    fn test_custom_project_search_ordering_in_file_finder() {
        let mut file_finder_sorted_output = vec![
            ord_match(0.5, "b0.5"),
            ord_match(1.0, "c1.0"),
            ord_match(1.0, "a1.0"),
            ord_match(0.5, "a0.5"),
            ord_match(1.0, "b1.0"),
        ];
        // Sort descending: highest score first, score ties broken by the
        // alphanumerically earliest path (see `ProjectPanelOrdMatch`'s
        // `Ord` implementation).
        file_finder_sorted_output.sort_by(|a, b| b.cmp(a));

        assert_eq!(
            file_finder_sorted_output,
            vec![
                ord_match(1.0, "a1.0"),
                ord_match(1.0, "b1.0"),
                ord_match(1.0, "c1.0"),
                ord_match(0.5, "a0.5"),
                ord_match(0.5, "b0.5"),
            ]
        );
    }
}

View File

@ -114,7 +114,7 @@ async fn test_absolute_paths(cx: &mut TestAppContext) {
.await;
picker.update(cx, |picker, _| {
assert_eq!(
collect_search_results(picker),
collect_search_matches(picker).search_only(),
vec![PathBuf::from("a/b/file2.txt")],
"Matching abs path should be the only match"
)
@ -136,7 +136,7 @@ async fn test_absolute_paths(cx: &mut TestAppContext) {
.await;
picker.update(cx, |picker, _| {
assert_eq!(
collect_search_results(picker),
collect_search_matches(picker).search_only(),
Vec::<PathBuf>::new(),
"Mismatching abs path should produce no matches"
)
@ -169,7 +169,7 @@ async fn test_complex_path(cx: &mut TestAppContext) {
picker.update(cx, |picker, _| {
assert_eq!(picker.delegate.matches.len(), 1);
assert_eq!(
collect_search_results(picker),
collect_search_matches(picker).search_only(),
vec![PathBuf::from("其他/S数据表格/task.xlsx")],
)
});
@ -486,7 +486,7 @@ async fn test_single_file_worktrees(cx: &mut TestAppContext) {
assert_eq!(matches.len(), 1);
let (file_name, file_name_positions, full_path, full_path_positions) =
delegate.labels_for_path_match(&matches[0]);
delegate.labels_for_path_match(&matches[0].0);
assert_eq!(file_name, "the-file");
assert_eq!(file_name_positions, &[0, 1, 4]);
assert_eq!(full_path, "the-file");
@ -556,9 +556,9 @@ async fn test_path_distance_ordering(cx: &mut TestAppContext) {
delegate.matches.history.is_empty(),
"Search matches expected"
);
let matches = delegate.matches.search.clone();
assert_eq!(matches[0].path.as_ref(), Path::new("dir2/a.txt"));
assert_eq!(matches[1].path.as_ref(), Path::new("dir1/a.txt"));
let matches = &delegate.matches.search;
assert_eq!(matches[0].0.path.as_ref(), Path::new("dir2/a.txt"));
assert_eq!(matches[1].0.path.as_ref(), Path::new("dir1/a.txt"));
});
}
@ -957,7 +957,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) {
Some(PathBuf::from("/src/test/first.rs"))
));
assert_eq!(delegate.matches.search.len(), 1, "Only one non-history item contains {first_query}, it should be present");
assert_eq!(delegate.matches.search.first().unwrap().path.as_ref(), Path::new("test/fourth.rs"));
assert_eq!(delegate.matches.search.first().unwrap().0.path.as_ref(), Path::new("test/fourth.rs"));
});
let second_query = "fsdasdsa";
@ -1002,10 +1002,65 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) {
Some(PathBuf::from("/src/test/first.rs"))
));
assert_eq!(delegate.matches.search.len(), 1, "Only one non-history item contains {first_query_again}, it should be present, even after non-matching query");
assert_eq!(delegate.matches.search.first().unwrap().path.as_ref(), Path::new("test/fourth.rs"));
assert_eq!(delegate.matches.search.first().unwrap().0.path.as_ref(), Path::new("test/fourth.rs"));
});
}
#[gpui::test]
async fn test_search_sorts_history_items(cx: &mut gpui::TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
.as_fake()
.insert_tree(
"/root",
json!({
"test": {
"1_qw": "// First file that matches the query",
"2_second": "// Second file",
"3_third": "// Third file",
"4_fourth": "// Fourth file",
"5_qwqwqw": "// A file with 3 more matches than the first one",
"6_qwqwqw": "// Same query matches as above, but closer to the end of the list due to the name",
"7_qwqwqw": "// One more, same amount of query matches as above",
}
}),
)
.await;
let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx));
// generate some history to select from
open_close_queried_buffer("1", 1, "1_qw", &workspace, cx).await;
open_close_queried_buffer("2", 1, "2_second", &workspace, cx).await;
open_close_queried_buffer("3", 1, "3_third", &workspace, cx).await;
open_close_queried_buffer("2", 1, "2_second", &workspace, cx).await;
open_close_queried_buffer("6", 1, "6_qwqwqw", &workspace, cx).await;
let finder = open_file_picker(&workspace, cx);
let query = "qw";
finder
.update(cx, |finder, cx| {
finder.delegate.update_matches(query.to_string(), cx)
})
.await;
finder.update(cx, |finder, _| {
let search_matches = collect_search_matches(finder);
assert_eq!(
search_matches.history,
vec![PathBuf::from("test/1_qw"), PathBuf::from("test/6_qwqwqw"),],
);
assert_eq!(
search_matches.search,
vec![
PathBuf::from("test/5_qwqwqw"),
PathBuf::from("test/7_qwqwqw"),
],
);
});
}
#[gpui::test]
async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppContext) {
let app_state = init_test(cx);
@ -1048,14 +1103,14 @@ async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppCo
.matches
.search
.iter()
.map(|path_match| path_match.path.to_path_buf())
.map(|path_match| path_match.0.path.to_path_buf())
.collect::<Vec<_>>();
assert_eq!(
search_entries,
vec![
PathBuf::from("collab_ui/collab_ui.rs"),
PathBuf::from("collab_ui/third.rs"),
PathBuf::from("collab_ui/first.rs"),
PathBuf::from("collab_ui/third.rs"),
PathBuf::from("collab_ui/second.rs"),
],
"Despite all search results having the same directory name, the most matching one should be on top"
@ -1097,7 +1152,7 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext)
.matches
.history
.iter()
.map(|(_, path_match)| path_match.as_ref().expect("should have a path match").path.to_path_buf())
.map(|(_, path_match)| path_match.as_ref().expect("should have a path match").0.path.to_path_buf())
.collect::<Vec<_>>();
assert_eq!(
history_entries,
@ -1124,7 +1179,8 @@ async fn open_close_queried_buffer(
assert_eq!(
finder.delegate.matches.len(),
expected_matches,
"Unexpected number of matches found for query {input}"
"Unexpected number of matches found for query `{input}`, matches: {:?}",
finder.delegate.matches
);
finder.delegate.history_items.clone()
});
@ -1137,7 +1193,7 @@ async fn open_close_queried_buffer(
let active_editor_title = active_editor.read(cx).title(cx);
assert_eq!(
expected_editor_title, active_editor_title,
"Unexpected editor title for query {input}"
"Unexpected editor title for query `{input}`"
);
});
@ -1210,18 +1266,49 @@ fn active_file_picker(
})
}
fn collect_search_results(picker: &Picker<FileFinderDelegate>) -> Vec<PathBuf> {
let matches = &picker.delegate.matches;
assert!(
matches.history.is_empty(),
"Should have no history matches, but got: {:?}",
matches.history
);
let mut results = matches
.search
.iter()
.map(|path_match| Path::new(path_match.path_prefix.as_ref()).join(&path_match.path))
.collect::<Vec<_>>();
results.sort();
results
/// File-finder matches captured for test assertions, split into the
/// history-based and search-based portions, each as plain paths.
#[derive(Debug)]
struct SearchEntries {
    history: Vec<PathBuf>,
    search: Vec<PathBuf>,
}

impl SearchEntries {
    /// Consumes the entries, panicking if any history matches are present,
    /// and returns only the search matches.
    #[track_caller]
    fn search_only(self) -> Vec<PathBuf> {
        let Self { history, search } = self;
        if !history.is_empty() {
            panic!("Should have no history matches, but got: {:?}", history);
        }
        search
    }
}
/// Snapshots the picker's current matches as plain paths, split into the
/// history-based and search-based entries.
fn collect_search_matches(picker: &Picker<FileFinderDelegate>) -> SearchEntries {
    let matches = &picker.delegate.matches;

    let mut history = Vec::with_capacity(matches.history.len());
    for (history_path, path_match) in &matches.history {
        let entry = match path_match {
            // Prefer the fuzzy-match data when present: prefix joined with
            // the matched relative path.
            Some(path_match) => {
                Path::new(path_match.0.path_prefix.as_ref()).join(&path_match.0.path)
            }
            // Otherwise fall back to the recorded history path, absolute
            // when known.
            None => history_path
                .absolute
                .as_deref()
                .unwrap_or_else(|| &history_path.project.path)
                .to_path_buf(),
        };
        history.push(entry);
    }

    let mut search = Vec::with_capacity(matches.search.len());
    for path_match in &matches.search {
        search.push(Path::new(path_match.0.path_prefix.as_ref()).join(&path_match.0.path));
    }

    SearchEntries { history, search }
}

View File

@ -76,7 +76,7 @@ cbindgen = "0.26.0"
media = { path = "../media" }
anyhow.workspace = true
block = "0.1"
cocoa = "0.24"
cocoa = "0.25"
core-foundation = { version = "0.9.3", features = ["with-uuid"] }
core-graphics = "0.22.3"
core-text = "19.2"

View File

@ -0,0 +1,29 @@
use gpui::*;
// Minimal GPUI example: opens a window that renders a greeting.
struct HelloWorld {
    // Name substituted into the "Hello, {}!" greeting.
    text: SharedString,
}
impl Render for HelloWorld {
    // Builds the element tree for this view: a full-size, centered flex
    // container with a green background and white, extra-large text.
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
        div()
            .flex()
            .bg(rgb(0x2e7d32))
            .size_full()
            .justify_center()
            .items_center()
            .text_xl()
            .text_color(rgb(0xffffff))
            .child(format!("Hello, {}!", &self.text))
    }
}
// Entry point: starts the app and opens a default window hosting the view.
fn main() {
    App::new().run(|cx: &mut AppContext| {
        cx.open_window(WindowOptions::default(), |cx| {
            cx.new_view(|_cx| HelloWorld {
                text: "World".into(),
            })
        });
    });
}

View File

@ -574,7 +574,7 @@ impl AppContext {
}
/// Displays a platform modal for selecting a new path where a file can be saved.
/// The provided directory will be used to set the iniital location.
/// The provided directory will be used to set the initial location.
/// When a path is selected, it is relayed asynchronously via the returned oneshot channel.
/// If cancelled, a `None` will be relayed instead.
pub fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver<Option<PathBuf>> {

View File

@ -40,7 +40,7 @@ impl Overlay {
self
}
/// Sets the position in window co-ordinates
/// Sets the position in window coordinates
/// (otherwise the location the overlay is rendered is used)
pub fn position(mut self, anchor: Point<Pixels>) -> Self {
self.anchor_position = Some(anchor);

View File

@ -150,7 +150,13 @@ pub(crate) trait PlatformWindow {
fn as_any_mut(&mut self) -> &mut dyn Any;
fn set_input_handler(&mut self, input_handler: PlatformInputHandler);
fn take_input_handler(&mut self) -> Option<PlatformInputHandler>;
fn prompt(&self, level: PromptLevel, msg: &str, answers: &[&str]) -> oneshot::Receiver<usize>;
fn prompt(
&self,
level: PromptLevel,
msg: &str,
detail: Option<&str>,
answers: &[&str],
) -> oneshot::Receiver<usize>;
fn activate(&self);
fn set_title(&mut self, title: &str);
fn set_edited(&mut self, edited: bool);

View File

@ -534,67 +534,77 @@ impl Platform for MacPlatform {
&self,
options: PathPromptOptions,
) -> oneshot::Receiver<Option<Vec<PathBuf>>> {
unsafe {
let panel = NSOpenPanel::openPanel(nil);
panel.setCanChooseDirectories_(options.directories.to_objc());
panel.setCanChooseFiles_(options.files.to_objc());
panel.setAllowsMultipleSelection_(options.multiple.to_objc());
panel.setResolvesAliases_(false.to_objc());
let (done_tx, done_rx) = oneshot::channel();
let done_tx = Cell::new(Some(done_tx));
let block = ConcreteBlock::new(move |response: NSModalResponse| {
let result = if response == NSModalResponse::NSModalResponseOk {
let mut result = Vec::new();
let urls = panel.URLs();
for i in 0..urls.count() {
let url = urls.objectAtIndex(i);
if url.isFileURL() == YES {
if let Ok(path) = ns_url_to_path(url) {
result.push(path)
let (done_tx, done_rx) = oneshot::channel();
self.foreground_executor()
.spawn(async move {
unsafe {
let panel = NSOpenPanel::openPanel(nil);
panel.setCanChooseDirectories_(options.directories.to_objc());
panel.setCanChooseFiles_(options.files.to_objc());
panel.setAllowsMultipleSelection_(options.multiple.to_objc());
panel.setResolvesAliases_(false.to_objc());
let done_tx = Cell::new(Some(done_tx));
let block = ConcreteBlock::new(move |response: NSModalResponse| {
let result = if response == NSModalResponse::NSModalResponseOk {
let mut result = Vec::new();
let urls = panel.URLs();
for i in 0..urls.count() {
let url = urls.objectAtIndex(i);
if url.isFileURL() == YES {
if let Ok(path) = ns_url_to_path(url) {
result.push(path)
}
}
}
}
}
Some(result)
} else {
None
};
Some(result)
} else {
None
};
if let Some(done_tx) = done_tx.take() {
let _ = done_tx.send(result);
if let Some(done_tx) = done_tx.take() {
let _ = done_tx.send(result);
}
});
let block = block.copy();
let _: () = msg_send![panel, beginWithCompletionHandler: block];
}
});
let block = block.copy();
let _: () = msg_send![panel, beginWithCompletionHandler: block];
done_rx
}
})
.detach();
done_rx
}
fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver<Option<PathBuf>> {
unsafe {
let panel = NSSavePanel::savePanel(nil);
let path = ns_string(directory.to_string_lossy().as_ref());
let url = NSURL::fileURLWithPath_isDirectory_(nil, path, true.to_objc());
panel.setDirectoryURL(url);
let directory = directory.to_owned();
let (done_tx, done_rx) = oneshot::channel();
self.foreground_executor()
.spawn(async move {
unsafe {
let panel = NSSavePanel::savePanel(nil);
let path = ns_string(directory.to_string_lossy().as_ref());
let url = NSURL::fileURLWithPath_isDirectory_(nil, path, true.to_objc());
panel.setDirectoryURL(url);
let (done_tx, done_rx) = oneshot::channel();
let done_tx = Cell::new(Some(done_tx));
let block = ConcreteBlock::new(move |response: NSModalResponse| {
let mut result = None;
if response == NSModalResponse::NSModalResponseOk {
let url = panel.URL();
if url.isFileURL() == YES {
result = ns_url_to_path(panel.URL()).ok()
}
}
let done_tx = Cell::new(Some(done_tx));
let block = ConcreteBlock::new(move |response: NSModalResponse| {
let mut result = None;
if response == NSModalResponse::NSModalResponseOk {
let url = panel.URL();
if url.isFileURL() == YES {
result = ns_url_to_path(panel.URL()).ok()
}
}
if let Some(done_tx) = done_tx.take() {
let _ = done_tx.send(result);
if let Some(done_tx) = done_tx.take() {
let _ = done_tx.send(result);
}
});
let block = block.copy();
let _: () = msg_send![panel, beginWithCompletionHandler: block];
}
});
let block = block.copy();
let _: () = msg_send![panel, beginWithCompletionHandler: block];
done_rx
}
})
.detach();
done_rx
}
fn reveal_path(&self, path: &Path) {

View File

@ -772,7 +772,13 @@ impl PlatformWindow for MacWindow {
self.0.as_ref().lock().input_handler.take()
}
fn prompt(&self, level: PromptLevel, msg: &str, answers: &[&str]) -> oneshot::Receiver<usize> {
fn prompt(
&self,
level: PromptLevel,
msg: &str,
detail: Option<&str>,
answers: &[&str],
) -> oneshot::Receiver<usize> {
        // macOS applies overrides to modal window buttons after they are added.
// Two most important for this logic are:
// * Buttons with "Cancel" title will be displayed as the last buttons in the modal
@ -808,6 +814,9 @@ impl PlatformWindow for MacWindow {
};
let _: () = msg_send![alert, setAlertStyle: alert_style];
let _: () = msg_send![alert, setMessageText: ns_string(msg)];
if let Some(detail) = detail {
let _: () = msg_send![alert, setInformativeText: ns_string(detail)];
}
for (ix, answer) in answers
.iter()

View File

@ -185,6 +185,7 @@ impl PlatformWindow for TestWindow {
&self,
_level: crate::PromptLevel,
_msg: &str,
_detail: Option<&str>,
_answers: &[&str],
) -> futures::channel::oneshot::Receiver<usize> {
self.0

View File

@ -1478,9 +1478,12 @@ impl<'a> WindowContext<'a> {
&self,
level: PromptLevel,
message: &str,
detail: Option<&str>,
answers: &[&str],
) -> oneshot::Receiver<usize> {
self.window.platform_window.prompt(level, message, answers)
self.window
.platform_window
.prompt(level, message, detail, answers)
}
/// Returns all available actions for the focused element.

View File

@ -197,7 +197,7 @@ pub struct Diagnostic {
/// Whether this diagnostic is considered to originate from an analysis of
/// files on disk, as opposed to any unsaved buffer contents. This is a
/// property of a given diagnostic source, and is configured for a given
/// language server via the [LspAdapter::disk_based_diagnostic_sources] method
/// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
/// for the language server.
pub is_disk_based: bool,
/// Whether this diagnostic marks unnecessary code.
@ -236,7 +236,7 @@ pub async fn prepare_completion_documentation(
}
}
/// Documentation associated with a [Completion].
/// Documentation associated with a [`Completion`].
#[derive(Clone, Debug)]
pub enum Documentation {
/// There is no documentation for this completion.
@ -301,7 +301,7 @@ pub enum Operation {
lamport_timestamp: clock::Lamport,
/// Whether the selections are in 'line mode'.
line_mode: bool,
/// The [CursorShape] associated with these selections.
/// The [`CursorShape`] associated with these selections.
cursor_shape: CursorShape,
},
@ -347,7 +347,7 @@ pub enum Event {
/// The file associated with a buffer.
pub trait File: Send + Sync {
/// Returns the [LocalFile] associated with this file, if the
/// Returns the [`LocalFile`] associated with this file, if the
/// file is local.
fn as_local(&self) -> Option<&dyn LocalFile>;
@ -378,7 +378,7 @@ pub trait File: Send + Sync {
/// Returns whether the file has been deleted.
fn is_deleted(&self) -> bool;
/// Converts this file into an [Any] trait object.
/// Converts this file into an [`Any`] trait object.
fn as_any(&self) -> &dyn Any;
/// Converts this file into a protobuf message.
@ -1538,8 +1538,6 @@ impl Buffer {
/// Starts a transaction, providing the current time. Subsequent transactions
/// that occur within a short period of time will be grouped together. This
/// is controlled by the buffer's undo grouping duration.
///
/// See [`Buffer::set_group_interval`].
pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
self.transaction_depth += 1;
if self.was_dirty_before_starting_transaction.is_none() {
@ -1556,8 +1554,6 @@ impl Buffer {
/// Terminates the current transaction, providing the current time. Subsequent transactions
/// that occur within a short period of time will be grouped together. This
/// is controlled by the buffer's undo grouping duration.
///
/// See [`Buffer::set_group_interval`].
pub fn end_transaction_at(
&mut self,
now: Instant,
@ -2420,7 +2416,7 @@ impl BufferSnapshot {
}
/// Iterates over chunks of text in the given range of the buffer. Text is chunked
/// in an arbitrary way due to being stored in a [`rope::Rope`]. The text is also
/// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
/// returned in chunks where each chunk has a single syntax highlighting style and
/// diagnostic status.
pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {

View File

@ -1657,7 +1657,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
let language = Language::new(
LanguageConfig {
name: "JavaScript".into(),
line_comment: Some("// ".into()),
line_comments: vec!["// ".into()],
brackets: BracketPairConfig {
pairs: vec![
BracketPair {
@ -1681,7 +1681,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
overrides: [(
"element".into(),
LanguageConfigOverride {
line_comment: Override::Remove { remove: true },
line_comments: Override::Remove { remove: true },
block_comment: Override::Set(("{/*".into(), "*/}".into())),
..Default::default()
},
@ -1718,7 +1718,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
let snapshot = buffer.snapshot();
let config = snapshot.language_scope_at(0).unwrap();
assert_eq!(config.line_comment_prefix().unwrap().as_ref(), "// ");
assert_eq!(config.line_comment_prefixes().unwrap(), &[Arc::from("// ")]);
// Both bracket pairs are enabled
assert_eq!(
config.brackets().map(|e| e.1).collect::<Vec<_>>(),
@ -1728,7 +1728,10 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
let string_config = snapshot
.language_scope_at(text.find("b\"").unwrap())
.unwrap();
assert_eq!(string_config.line_comment_prefix().unwrap().as_ref(), "// ");
assert_eq!(
string_config.line_comment_prefixes().unwrap(),
&[Arc::from("// ")]
);
// Second bracket pair is disabled
assert_eq!(
string_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
@ -1739,7 +1742,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
let element_config = snapshot
.language_scope_at(text.find("<F>").unwrap())
.unwrap();
assert_eq!(element_config.line_comment_prefix(), None);
assert_eq!(element_config.line_comment_prefixes(), None);
assert_eq!(
element_config.block_comment_delimiters(),
Some((&"{/*".into(), &"*/}".into()))
@ -1753,7 +1756,10 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
let tag_config = snapshot
.language_scope_at(text.find(" d=").unwrap() + 1)
.unwrap();
assert_eq!(tag_config.line_comment_prefix().unwrap().as_ref(), "// ");
assert_eq!(
tag_config.line_comment_prefixes().unwrap(),
&[Arc::from("// ")]
);
assert_eq!(
tag_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
&[true, true]
@ -1765,10 +1771,9 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
.unwrap();
assert_eq!(
expression_in_element_config
.line_comment_prefix()
.unwrap()
.as_ref(),
"// "
.line_comment_prefixes()
.unwrap(),
&[Arc::from("// ")]
);
assert_eq!(
expression_in_element_config
@ -1884,14 +1889,17 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
let snapshot = buffer.snapshot();
let html_config = snapshot.language_scope_at(Point::new(2, 4)).unwrap();
assert_eq!(html_config.line_comment_prefix(), None);
assert_eq!(html_config.line_comment_prefixes(), Some(&vec![]));
assert_eq!(
html_config.block_comment_delimiters(),
Some((&"<!--".into(), &"-->".into()))
);
let ruby_config = snapshot.language_scope_at(Point::new(3, 12)).unwrap();
assert_eq!(ruby_config.line_comment_prefix().unwrap().as_ref(), "# ");
assert_eq!(
ruby_config.line_comment_prefixes().unwrap(),
&[Arc::from("# ")]
);
assert_eq!(ruby_config.block_comment_delimiters(), None);
buffer
@ -2293,7 +2301,7 @@ fn ruby_lang() -> Language {
LanguageConfig {
name: "Ruby".into(),
path_suffixes: vec!["rb".to_string()],
line_comment: Some("# ".into()),
line_comments: vec!["# ".into()],
..Default::default()
},
Some(tree_sitter_ruby::language()),

View File

@ -12,7 +12,7 @@ use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
/// A set of diagnostics associated with a given buffer, provided
/// by a single language server.
///
/// The diagnostics are stored in a [SumTree], which allows this struct
/// The diagnostics are stored in a [`SumTree`], which allows this struct
/// to be cheaply copied, and allows for efficient retrieval of the
/// diagnostics that intersect a given range of the buffer.
#[derive(Clone, Debug, Default)]
@ -21,9 +21,9 @@ pub struct DiagnosticSet {
}
/// A single diagnostic in a set. Generic over its range type, because
/// the diagnostics are stored internally as [Anchor]s, but can be
/// resolved to different coordinates types like [usize] byte offsets or
/// [Point]s.
/// the diagnostics are stored internally as [`Anchor`]s, but can be
/// resolved to different coordinate types like [`usize`] byte offsets or
/// [`Point`](gpui::Point)s.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DiagnosticEntry<T> {
/// The range of the buffer where the diagnostic applies.
@ -52,7 +52,7 @@ pub struct Summary {
}
impl<T> DiagnosticEntry<T> {
/// Returns a raw LSP diagnostic ssed to provide diagnostic context to lsp
/// Returns a raw LSP diagnostic used to provide diagnostic context to LSP
/// codeAction request
pub fn to_lsp_diagnostic_stub(&self) -> lsp::Diagnostic {
let code = self

View File

@ -139,12 +139,11 @@ pub struct CachedLspAdapter {
impl CachedLspAdapter {
pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
let name = adapter.name().await;
let name = adapter.name();
let short_name = adapter.short_name();
let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
let disk_based_diagnostics_progress_token =
adapter.disk_based_diagnostics_progress_token().await;
let language_ids = adapter.language_ids().await;
let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources();
let disk_based_diagnostics_progress_token = adapter.disk_based_diagnostics_progress_token();
let language_ids = adapter.language_ids();
Arc::new(CachedLspAdapter {
name,
@ -261,7 +260,7 @@ pub trait LspAdapterDelegate: Send + Sync {
#[async_trait]
pub trait LspAdapter: 'static + Send + Sync {
async fn name(&self) -> LanguageServerName;
fn name(&self) -> LanguageServerName;
fn short_name(&self) -> &'static str;
@ -299,10 +298,12 @@ pub trait LspAdapter: 'static + Send + Sync {
delegate: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary>;
/// Returns true if a language server can be reinstalled.
/// If language server initialization fails, a reinstallation will be attempted unless the value returned from this method is false.
/// Returns `true` if a language server can be reinstalled.
///
/// If language server initialization fails, a reinstallation will be attempted unless the value returned from this method is `false`.
///
/// Implementations that rely on software already installed on user's system
/// should have [`can_be_reinstalled`] return false.
/// should have [`can_be_reinstalled`](Self::can_be_reinstalled) return `false`.
fn can_be_reinstalled(&self) -> bool {
true
}
@ -314,7 +315,7 @@ pub trait LspAdapter: 'static + Send + Sync {
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
/// A callback called for each [`lsp_types::CompletionItem`] obtained from LSP server.
/// A callback called for each [`lsp::CompletionItem`] obtained from LSP server.
/// Some LspAdapter implementations might want to modify the obtained item to
/// change how it's displayed.
async fn process_completion(&self, _: &mut lsp::CompletionItem) {}
@ -336,8 +337,8 @@ pub trait LspAdapter: 'static + Send + Sync {
None
}
/// Returns initialization options that are going to be sent to a LSP server as a part of [`lsp_types::InitializeParams`]
async fn initialization_options(&self) -> Option<Value> {
/// Returns initialization options that are going to be sent to a LSP server as a part of [`lsp::InitializeParams`]
fn initialization_options(&self) -> Option<Value> {
None
}
@ -356,15 +357,15 @@ pub trait LspAdapter: 'static + Send + Sync {
])
}
async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
fn disk_based_diagnostic_sources(&self) -> Vec<String> {
Default::default()
}
async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
None
}
async fn language_ids(&self) -> HashMap<String, String> {
fn language_ids(&self) -> HashMap<String, String> {
Default::default()
}
@ -417,8 +418,10 @@ pub struct LanguageConfig {
#[serde(default)]
pub collapsed_placeholder: String,
/// A line comment string that is inserted in e.g. `toggle comments` action.
/// A language can have multiple flavours of line comments. All of the provided line comments are
/// used for comment continuations on the next line, but only the first one is used for Editor::ToggleComments.
#[serde(default)]
pub line_comment: Option<Arc<str>>,
pub line_comments: Vec<Arc<str>>,
/// Starting and closing characters of a block comment.
#[serde(default)]
pub block_comment: Option<(Arc<str>, Arc<str>)>,
@ -461,7 +464,7 @@ pub struct LanguageScope {
#[derive(Clone, Deserialize, Default, Debug)]
pub struct LanguageConfigOverride {
#[serde(default)]
pub line_comment: Override<Arc<str>>,
pub line_comments: Override<Vec<Arc<str>>>,
#[serde(default)]
pub block_comment: Override<(Arc<str>, Arc<str>)>,
#[serde(skip_deserializing)]
@ -507,7 +510,7 @@ impl Default for LanguageConfig {
increase_indent_pattern: Default::default(),
decrease_indent_pattern: Default::default(),
autoclose_before: Default::default(),
line_comment: Default::default(),
line_comments: Default::default(),
block_comment: Default::default(),
scope_opt_in_language_servers: Default::default(),
overrides: Default::default(),
@ -585,7 +588,7 @@ impl<'de> Deserialize<'de> for BracketPairConfig {
}
/// Describes a single bracket pair and how an editor should react to e.g. inserting
/// an opening bracket or to a newline character insertion inbetween `start` and `end` characters.
/// an opening bracket or to a newline character insertion in between `start` and `end` characters.
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
pub struct BracketPair {
/// Starting substring for a bracket.
@ -1711,10 +1714,10 @@ impl LanguageScope {
/// Returns line prefix that is inserted in e.g. line continuations or
/// in `toggle comments` action.
pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
pub fn line_comment_prefixes(&self) -> Option<&Vec<Arc<str>>> {
Override::as_option(
self.config_override().map(|o| &o.line_comment),
self.language.config.line_comment.as_ref(),
self.config_override().map(|o| &o.line_comments),
Some(&self.language.config.line_comments),
)
}
@ -1881,7 +1884,7 @@ impl Default for FakeLspAdapter {
#[cfg(any(test, feature = "test-support"))]
#[async_trait]
impl LspAdapter for Arc<FakeLspAdapter> {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName(self.name.into())
}
@ -1919,15 +1922,15 @@ impl LspAdapter for Arc<FakeLspAdapter> {
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
fn disk_based_diagnostic_sources(&self) -> Vec<String> {
self.disk_based_diagnostics_sources.clone()
}
async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
self.disk_based_diagnostics_progress_token.clone()
}
async fn initialization_options(&self) -> Option<Value> {
fn initialization_options(&self) -> Option<Value> {
self.initialization_options.clone()
}

View File

@ -103,7 +103,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
}
}
/// Serializes an [`operation::EditOperation`] to be sent over RPC.
/// Serializes an [`EditOperation`] to be sent over RPC.
pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit {
proto::operation::Edit {
replica_id: operation.timestamp.replica_id as u32,

View File

@ -53,7 +53,7 @@ async-trait.workspace = true
block = "0.1"
bytes = "1.2"
byteorder = "1.4"
cocoa = "0.24"
cocoa = "0.25"
core-foundation = "0.9.3"
core-graphics = "0.22.3"
foreign-types = "0.3"

View File

@ -590,6 +590,7 @@ impl Project {
client.add_model_request_handler(Self::handle_delete_project_entry);
client.add_model_request_handler(Self::handle_expand_project_entry);
client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
client.add_model_request_handler(Self::handle_resolve_completion_documentation);
client.add_model_request_handler(Self::handle_apply_code_action);
client.add_model_request_handler(Self::handle_on_type_formatting);
client.add_model_request_handler(Self::handle_inlay_hints);
@ -974,8 +975,7 @@ impl Project {
// Start all the newly-enabled language servers.
for (worktree, language) in language_servers_to_start {
let worktree_path = worktree.read(cx).abs_path();
self.start_language_servers(&worktree, worktree_path, language, cx);
self.start_language_servers(&worktree, language, cx);
}
// Restart all language servers with changed initialization options.
@ -2774,8 +2774,8 @@ impl Project {
};
if let Some(file) = buffer_file {
let worktree = file.worktree.clone();
if let Some(tree) = worktree.read(cx).as_local() {
self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
if worktree.read(cx).is_local() {
self.start_language_servers(&worktree, new_language, cx);
}
}
}
@ -2783,7 +2783,6 @@ impl Project {
fn start_language_servers(
&mut self,
worktree: &Model<Worktree>,
worktree_path: Arc<Path>,
language: Arc<Language>,
cx: &mut ModelContext<Self>,
) {
@ -2793,22 +2792,14 @@ impl Project {
return;
}
let worktree_id = worktree.read(cx).id();
for adapter in language.lsp_adapters() {
self.start_language_server(
worktree_id,
worktree_path.clone(),
adapter.clone(),
language.clone(),
cx,
);
self.start_language_server(worktree, adapter.clone(), language.clone(), cx);
}
}
fn start_language_server(
&mut self,
worktree_id: WorktreeId,
worktree_path: Arc<Path>,
worktree: &Model<Worktree>,
adapter: Arc<CachedLspAdapter>,
language: Arc<Language>,
cx: &mut ModelContext<Self>,
@ -2817,6 +2808,9 @@ impl Project {
return;
}
let worktree = worktree.read(cx);
let worktree_id = worktree.id();
let worktree_path = worktree.abs_path();
let key = (worktree_id, adapter.name.clone());
if self.language_server_ids.contains_key(&key) {
return;
@ -2949,20 +2943,14 @@ impl Project {
this.update(&mut cx, |this, cx| {
let worktrees = this.worktrees.clone();
for worktree in worktrees {
let worktree = match worktree.upgrade() {
Some(worktree) => worktree.read(cx),
None => continue,
};
let worktree_id = worktree.id();
let root_path = worktree.abs_path();
this.start_language_server(
worktree_id,
root_path,
adapter.clone(),
language.clone(),
cx,
);
if let Some(worktree) = worktree.upgrade() {
this.start_language_server(
&worktree,
adapter.clone(),
language.clone(),
cx,
);
}
}
})
.ok();
@ -3176,7 +3164,7 @@ impl Project {
}
})
.detach();
let mut initialization_options = adapter.adapter.initialization_options().await;
let mut initialization_options = adapter.adapter.initialization_options();
match (&mut initialization_options, override_options) {
(Some(initialization_options), Some(override_options)) => {
merge_json_value_into(override_options, initialization_options);
@ -3332,7 +3320,7 @@ impl Project {
worktree_id: WorktreeId,
adapter_name: LanguageServerName,
cx: &mut ModelContext<Self>,
) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
) -> Task<Vec<WorktreeId>> {
let key = (worktree_id, adapter_name);
if let Some(server_id) = self.language_server_ids.remove(&key) {
log::info!("stopping language server {}", key.1 .0);
@ -3370,8 +3358,6 @@ impl Project {
let server_state = self.language_servers.remove(&server_id);
cx.emit(Event::LanguageServerRemoved(server_id));
cx.spawn(move |this, mut cx| async move {
let mut root_path = None;
let server = match server_state {
Some(LanguageServerState::Starting(task)) => task.await,
Some(LanguageServerState::Running { server, .. }) => Some(server),
@ -3379,7 +3365,6 @@ impl Project {
};
if let Some(server) = server {
root_path = Some(server.root_path().clone());
if let Some(shutdown) = server.shutdown() {
shutdown.await;
}
@ -3393,10 +3378,10 @@ impl Project {
.ok();
}
(root_path, orphaned_worktrees)
orphaned_worktrees
})
} else {
Task::ready((None, Vec::new()))
Task::ready(Vec::new())
}
}
@ -3426,7 +3411,6 @@ impl Project {
None
}
// TODO This will break in the case where the adapter's root paths and worktrees are not equal
fn restart_language_servers(
&mut self,
worktree: Model<Worktree>,
@ -3434,50 +3418,42 @@ impl Project {
cx: &mut ModelContext<Self>,
) {
let worktree_id = worktree.read(cx).id();
let fallback_path = worktree.read(cx).abs_path();
let mut stops = Vec::new();
for adapter in language.lsp_adapters() {
stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
}
if stops.is_empty() {
let stop_tasks = language
.lsp_adapters()
.iter()
.map(|adapter| {
let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx);
(stop_task, adapter.name.clone())
})
.collect::<Vec<_>>();
if stop_tasks.is_empty() {
return;
}
let mut stops = stops.into_iter();
cx.spawn(move |this, mut cx| async move {
let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
for stop in stops {
let (_, worktrees) = stop.await;
orphaned_worktrees.extend_from_slice(&worktrees);
// For each stopped language server, record all of the worktrees with which
// it was associated.
let mut affected_worktrees = Vec::new();
for (stop_task, language_server_name) in stop_tasks {
for affected_worktree_id in stop_task.await {
affected_worktrees.push((affected_worktree_id, language_server_name.clone()));
}
}
let this = match this.upgrade() {
Some(this) => this,
None => return,
};
this.update(&mut cx, |this, cx| {
// Attempt to restart using original server path. Fallback to passed in
// path if we could not retrieve the root path
let root_path = original_root_path
.map(|path_buf| Arc::from(path_buf.as_path()))
.unwrap_or(fallback_path);
this.start_language_servers(&worktree, root_path, language.clone(), cx);
// Restart the language server for the given worktree.
this.start_language_servers(&worktree, language.clone(), cx);
// Lookup new server ids and set them for each of the orphaned worktrees
for adapter in language.lsp_adapters() {
for (affected_worktree_id, language_server_name) in affected_worktrees {
if let Some(new_server_id) = this
.language_server_ids
.get(&(worktree_id, adapter.name.clone()))
.get(&(worktree_id, language_server_name.clone()))
.cloned()
{
for &orphaned_worktree in &orphaned_worktrees {
this.language_server_ids
.insert((orphaned_worktree, adapter.name.clone()), new_server_id);
}
this.language_server_ids
.insert((affected_worktree_id, language_server_name), new_server_id);
}
}
})
@ -7751,6 +7727,40 @@ impl Project {
})
}
async fn handle_resolve_completion_documentation(
this: Model<Self>,
envelope: TypedEnvelope<proto::ResolveCompletionDocumentation>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<proto::ResolveCompletionDocumentationResponse> {
let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?;
let completion = this
.read_with(&mut cx, |this, _| {
let id = LanguageServerId(envelope.payload.language_server_id as usize);
let Some(server) = this.language_server_for_id(id) else {
return Err(anyhow!("No language server {id}"));
};
Ok(server.request::<lsp::request::ResolveCompletionItem>(lsp_completion))
})??
.await?;
let mut is_markdown = false;
let text = match completion.documentation {
Some(lsp::Documentation::String(text)) => text,
Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value })) => {
is_markdown = kind == lsp::MarkupKind::Markdown;
value
}
_ => String::new(),
};
Ok(proto::ResolveCompletionDocumentationResponse { text, is_markdown })
}
async fn handle_apply_code_action(
this: Model<Self>,
envelope: TypedEnvelope<proto::ApplyCodeAction>,

View File

@ -194,12 +194,14 @@ impl AsRef<Path> for RepositoryWorkDirectory {
pub struct WorkDirectoryEntry(ProjectEntryId);
impl WorkDirectoryEntry {
pub(crate) fn relativize(&self, worktree: &Snapshot, path: &Path) -> Option<RepoPath> {
worktree.entry_for_id(self.0).and_then(|entry| {
path.strip_prefix(&entry.path)
.ok()
.map(move |path| path.into())
})
pub(crate) fn relativize(&self, worktree: &Snapshot, path: &Path) -> Result<RepoPath> {
let entry = worktree
.entry_for_id(self.0)
.ok_or_else(|| anyhow!("entry not found"))?;
let path = path
.strip_prefix(&entry.path)
.map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, entry.path))?;
Ok(path.into())
}
}
@ -970,13 +972,15 @@ impl LocalWorktree {
let mut index_task = None;
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) {
let repo_path = repo.work_directory.relativize(&snapshot, &path).unwrap();
if let Some(repo) = snapshot.git_repositories.get(&*repo.work_directory) {
let repo = repo.repo_ptr.clone();
index_task = Some(
cx.background_executor()
.spawn(async move { repo.lock().load_index_text(&repo_path) }),
);
if let Some(repo_path) = repo.work_directory.relativize(&snapshot, &path).log_err()
{
if let Some(git_repo) = snapshot.git_repositories.get(&*repo.work_directory) {
let git_repo = git_repo.repo_ptr.clone();
index_task = Some(
cx.background_executor()
.spawn(async move { git_repo.lock().load_index_text(&repo_path) }),
);
}
}
}

View File

@ -778,6 +778,7 @@ impl ProjectPanel {
let answer = cx.prompt(
PromptLevel::Info,
&format!("Delete {file_name:?}?"),
None,
&["Delete", "Cancel"],
);

View File

@ -197,6 +197,19 @@ message Ack {}
message Error {
string message = 1;
ErrorCode code = 2;
repeated string tags = 3;
}
enum ErrorCode {
Internal = 0;
NoSuchChannel = 1;
Disconnected = 2;
SignedOut = 3;
UpgradeRequired = 4;
Forbidden = 5;
WrongReleaseChannel = 6;
NeedsCla = 7;
}
message Test {

223
crates/rpc/src/error.rs Normal file
View File

@ -0,0 +1,223 @@
//! Some helpers for structured error handling.
//!
//! The helpers defined here allow you to pass type-safe error codes from
//! the collab server to the client, and provide a mechanism for additional
//! structured data alongside the message.
//!
//! When returning an error, it can be as simple as:
//!
//! `return Err(Error::Forbidden.into())`
//!
//! If you'd like to log more context, you can set a message. These messages
//! show up in our logs, but are not shown visibly to users.
//!
//! `return Err(Error::Forbidden.message("not an admin").into())`
//!
//! If you'd like to provide enough context that the UI can render a good error
//! message (or would be helpful to see in a structured format in the logs), you
//! can use .with_tag():
//!
//! `return Err(Error::WrongReleaseChannel.with_tag("required", "stable").into())`
//!
//! When handling an error you can use .error_code() to match which error it was
//! and .error_tag() to read any tags.
//!
//! ```text
//! match err.error_code() {
//!     ErrorCode::Forbidden => alert("I'm sorry I can't do that."),
//!     ErrorCode::WrongReleaseChannel =>
//!         alert(format!("You need to be on the {} release channel.", err.error_tag("required").unwrap())),
//!     ErrorCode::Internal => alert("Sorry, something went wrong"),
//! }
//! ```
//!
use crate::proto;
pub use proto::ErrorCode;
/// ErrorCodeExt provides some helpers for structured error handling.
///
/// The primary implementation is on the proto::ErrorCode to easily convert
/// that into an anyhow::Error, which we use pervasively.
///
/// The RpcError struct provides support for further metadata if needed.
pub trait ErrorCodeExt {
    /// Return an anyhow::Error containing this.
    /// (useful in places where .into() doesn't have enough type information)
    fn anyhow(self) -> anyhow::Error;
    /// Add a message to the error (by default the debug form of the error
    /// code is used). Messages show up in server logs but are not shown to users.
    fn message(self, msg: String) -> RpcError;
    /// Add a tag to the error. Tags are `key=value` pairs that can be used
    /// to send semi-structured data along with the error.
    fn with_tag(self, k: &str, v: &str) -> RpcError;
}
impl ErrorCodeExt for proto::ErrorCode {
fn anyhow(self) -> anyhow::Error {
self.into()
}
fn message(self, msg: String) -> RpcError {
let err: RpcError = self.into();
err.message(msg)
}
fn with_tag(self, k: &str, v: &str) -> RpcError {
let err: RpcError = self.into();
err.with_tag(k, v)
}
}
/// ErrorExt provides helpers for structured error handling.
///
/// The primary implementation is on the anyhow::Error, which is
/// what we use throughout our codebase. Under the hood these methods
/// delegate to a downcast [`RpcError`] when the error contains one,
/// and fall back to internal-error defaults otherwise.
pub trait ErrorExt {
    /// error_code() returns the ErrorCode (or ErrorCode::Internal if there is none)
    fn error_code(&self) -> proto::ErrorCode;
    /// error_tag() returns the value of the tag with the given key, if any.
    fn error_tag(&self, k: &str) -> Option<&str>;
    /// to_proto() converts the error into a proto::Error
    fn to_proto(&self) -> proto::Error;
}
impl ErrorExt for anyhow::Error {
    /// The structured code carried by a downcast [`RpcError`], or
    /// `ErrorCode::Internal` for a plain anyhow error.
    fn error_code(&self) -> proto::ErrorCode {
        self.downcast_ref::<RpcError>()
            .map_or(proto::ErrorCode::Internal, |rpc_error| rpc_error.code)
    }
    /// The value of tag `k`, if this wraps an [`RpcError`] carrying it.
    fn error_tag(&self, k: &str) -> Option<&str> {
        self.downcast_ref::<RpcError>()
            .and_then(|rpc_error| rpc_error.error_tag(k))
    }
    /// Serialize for the wire; a non-`RpcError` becomes an `Internal` error
    /// whose message is the error's display text.
    fn to_proto(&self) -> proto::Error {
        match self.downcast_ref::<RpcError>() {
            Some(rpc_error) => rpc_error.to_proto(),
            None => ErrorCode::Internal.message(format!("{}", self)).to_proto(),
        }
    }
}
impl From<proto::ErrorCode> for anyhow::Error {
fn from(value: proto::ErrorCode) -> Self {
RpcError {
request: None,
code: value,
msg: format!("{:?}", value).to_string(),
tags: Default::default(),
}
.into()
}
}
#[derive(Clone, Debug)]
pub struct RpcError {
    request: Option<String>, // name of the RPC request that failed, if known
    msg: String,             // human-readable message (logged, not shown to users)
    code: proto::ErrorCode,  // machine-readable code clients can match on
    tags: Vec<String>,       // semi-structured `key=value` metadata
}
/// RpcError is a structured error type that is returned by the collab server.
/// In addition to a message, it lets you set a specific ErrorCode, and attach
/// small amounts of metadata to help the client handle the error appropriately.
///
/// This struct is not typically used directly, as we pass anyhow::Error around
/// in the app; however it is useful for chaining .message() and .with_tag() on
/// ErrorCode.
// NOTE(review): this doc comment describes `RpcError` itself; consider moving it
// above the `struct RpcError` declaration so rustdoc attaches it to the type.
impl RpcError {
    /// from_proto converts a proto::Error into an anyhow::Error containing
    /// an RpcError.
    ///
    /// `request` is the name of the RPC request that failed; it is kept so the
    /// Display impl can include it for log correlation.
    pub fn from_proto(error: &proto::Error, request: &str) -> anyhow::Error {
        RpcError {
            request: Some(request.to_string()),
            code: error.code(),
            msg: error.message.clone(),
            tags: error.tags.clone(),
        }
        .into()
    }
}
impl ErrorCodeExt for RpcError {
    /// Replace the default (debug-formatted code) message.
    fn message(mut self, msg: String) -> RpcError {
        self.msg = msg;
        self
    }
    /// Append a `k=v` tag. Duplicate keys are allowed; lookup via
    /// `error_tag` scans tags in insertion order.
    fn with_tag(mut self, k: &str, v: &str) -> RpcError {
        self.tags.push(format!("{}={}", k, v));
        self
    }
    /// Wrap into an anyhow::Error (via anyhow's blanket `From<E: Error>` impl).
    fn anyhow(self) -> anyhow::Error {
        self.into()
    }
}
impl ErrorExt for RpcError {
    /// Returns the value of the first tag whose key is `k`, if any.
    ///
    /// Tags are stored as `key=value` strings (see [`ErrorCodeExt::with_tag`]).
    /// A malformed tag with no `=` separator is skipped; the previous
    /// `split('=')` loop instead returned `None` for the whole search when
    /// such a tag happened to equal `k`, hiding later well-formed tags.
    fn error_tag(&self, k: &str) -> Option<&str> {
        self.tags.iter().find_map(|tag| match tag.split_once('=') {
            Some((key, value)) if key == k => Some(value),
            _ => None,
        })
    }
    /// The structured error code attached to this error.
    fn error_code(&self) -> proto::ErrorCode {
        self.code
    }
    /// Serialize into the wire representation.
    fn to_proto(&self) -> proto::Error {
        proto::Error {
            code: self.code as i32,
            message: self.msg.clone(),
            tags: self.tags.clone(),
        }
    }
}
impl std::error::Error for RpcError {
    /// RpcError is a leaf error: it never wraps an underlying cause.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        None
    }
}
impl std::fmt::Display for RpcError {
    /// Formats as `RPC request <name> failed: <msg>` when the request name is
    /// known, or just the message otherwise, followed by any `k=v` tags,
    /// each preceded by a space.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match &self.request {
            Some(request) => write!(f, "RPC request {} failed: {}", request, self.msg)?,
            None => write!(f, "{}", self.msg)?,
        }
        for tag in &self.tags {
            write!(f, " {}", tag)?;
        }
        Ok(())
    }
}
/// Builds an [`RpcError`] whose message is the debug form of the code
/// (e.g. `"Forbidden"`), with no request name or tags attached.
impl From<proto::ErrorCode> for RpcError {
    fn from(code: proto::ErrorCode) -> Self {
        RpcError {
            request: None,
            code,
            // `format!` already returns a `String`; the previous
            // `.to_string()` call here was redundant.
            msg: format!("{:?}", code),
            tags: Default::default(),
        }
    }
}

View File

@ -1,3 +1,5 @@
use crate::{ErrorCode, ErrorCodeExt, ErrorExt, RpcError};
use super::{
proto::{self, AnyTypedEnvelope, EnvelopedMessage, MessageStream, PeerId, RequestMessage},
Connection,
@ -423,11 +425,7 @@ impl Peer {
let (response, _barrier) = rx.await.map_err(|_| anyhow!("connection was closed"))?;
if let Some(proto::envelope::Payload::Error(error)) = &response.payload {
Err(anyhow!(
"RPC request {} failed - {}",
T::NAME,
error.message
))
Err(RpcError::from_proto(&error, T::NAME))
} else {
Ok(TypedEnvelope {
message_id: response.id,
@ -516,9 +514,12 @@ impl Peer {
envelope: Box<dyn AnyTypedEnvelope>,
) -> Result<()> {
let connection = self.connection_state(envelope.sender_id())?;
let response = proto::Error {
message: format!("message {} was not handled", envelope.payload_type_name()),
};
let response = ErrorCode::Internal
.message(format!(
"message {} was not handled",
envelope.payload_type_name()
))
.to_proto();
let message_id = connection
.next_message_id
.fetch_add(1, atomic::Ordering::SeqCst);
@ -692,17 +693,17 @@ mod tests {
server
.send(
server_to_client_conn_id,
proto::Error {
message: "message 1".to_string(),
},
ErrorCode::Internal
.message("message 1".to_string())
.to_proto(),
)
.unwrap();
server
.send(
server_to_client_conn_id,
proto::Error {
message: "message 2".to_string(),
},
ErrorCode::Internal
.message("message 2".to_string())
.to_proto(),
)
.unwrap();
server.respond(request.receipt(), proto::Ack {}).unwrap();
@ -797,17 +798,17 @@ mod tests {
server
.send(
server_to_client_conn_id,
proto::Error {
message: "message 1".to_string(),
},
ErrorCode::Internal
.message("message 1".to_string())
.to_proto(),
)
.unwrap();
server
.send(
server_to_client_conn_id,
proto::Error {
message: "message 2".to_string(),
},
ErrorCode::Internal
.message("message 2".to_string())
.to_proto(),
)
.unwrap();
server.respond(request1.receipt(), proto::Ack {}).unwrap();

View File

@ -1,10 +1,12 @@
pub mod auth;
mod conn;
mod error;
mod notification;
mod peer;
pub mod proto;
pub use conn::Connection;
pub use error::*;
pub use notification::*;
pub use peer::*;
mod macros;

View File

@ -1,3 +1,5 @@
mod registrar;
use crate::{
history::SearchHistory,
mode::{next_mode, SearchMode},
@ -29,6 +31,9 @@ use workspace::{
ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
};
pub use registrar::DivRegistrar;
use registrar::{ForDeployed, ForDismissed, SearchActionsRegistrar, WithResults};
#[derive(PartialEq, Clone, Deserialize)]
pub struct Deploy {
pub focus: bool,
@ -422,230 +427,59 @@ impl ToolbarItemView for BufferSearchBar {
}
}
/// Registrar inverts the dependency between search and its downstream user, allowing said downstream user to register search action without knowing exactly what those actions are.
pub trait SearchActionsRegistrar {
fn register_handler<A: Action>(
&mut self,
callback: fn(&mut BufferSearchBar, &A, &mut ViewContext<BufferSearchBar>),
);
fn register_handler_for_dismissed_search<A: Action>(
&mut self,
callback: fn(&mut BufferSearchBar, &A, &mut ViewContext<BufferSearchBar>),
);
}
type GetSearchBar<T> =
for<'a, 'b> fn(&'a T, &'a mut ViewContext<'b, T>) -> Option<View<BufferSearchBar>>;
/// Registers search actions on a div that can be taken out.
pub struct DivRegistrar<'a, 'b, T: 'static> {
div: Option<Div>,
cx: &'a mut ViewContext<'b, T>,
search_getter: GetSearchBar<T>,
}
impl<'a, 'b, T: 'static> DivRegistrar<'a, 'b, T> {
pub fn new(search_getter: GetSearchBar<T>, cx: &'a mut ViewContext<'b, T>) -> Self {
Self {
div: Some(div()),
cx,
search_getter,
}
}
pub fn into_div(self) -> Div {
// This option is always Some; it's an option in the first place because we want to call methods
// on div that require ownership.
self.div.unwrap()
}
}
impl<T: 'static> SearchActionsRegistrar for DivRegistrar<'_, '_, T> {
fn register_handler<A: Action>(
&mut self,
callback: fn(&mut BufferSearchBar, &A, &mut ViewContext<BufferSearchBar>),
) {
let getter = self.search_getter;
self.div = self.div.take().map(|div| {
div.on_action(self.cx.listener(move |this, action, cx| {
let should_notify = (getter)(this, cx)
.clone()
.map(|search_bar| {
search_bar.update(cx, |search_bar, cx| {
if search_bar.is_dismissed()
|| search_bar.active_searchable_item.is_none()
{
false
} else {
callback(search_bar, action, cx);
true
}
})
})
.unwrap_or(false);
if should_notify {
cx.notify();
} else {
cx.propagate();
}
}))
});
}
fn register_handler_for_dismissed_search<A: Action>(
&mut self,
callback: fn(&mut BufferSearchBar, &A, &mut ViewContext<BufferSearchBar>),
) {
let getter = self.search_getter;
self.div = self.div.take().map(|div| {
div.on_action(self.cx.listener(move |this, action, cx| {
let should_notify = (getter)(this, cx)
.clone()
.map(|search_bar| {
search_bar.update(cx, |search_bar, cx| {
if search_bar.is_dismissed() {
callback(search_bar, action, cx);
true
} else {
false
}
})
})
.unwrap_or(false);
if should_notify {
cx.notify();
} else {
cx.propagate();
}
}))
});
}
}
/// Register actions for an active pane.
impl SearchActionsRegistrar for Workspace {
fn register_handler<A: Action>(
&mut self,
callback: fn(&mut BufferSearchBar, &A, &mut ViewContext<BufferSearchBar>),
) {
self.register_action(move |workspace, action: &A, cx| {
if workspace.has_active_modal(cx) {
cx.propagate();
return;
}
let pane = workspace.active_pane();
pane.update(cx, move |this, cx| {
this.toolbar().update(cx, move |this, cx| {
if let Some(search_bar) = this.item_of_type::<BufferSearchBar>() {
let should_notify = search_bar.update(cx, move |search_bar, cx| {
if search_bar.is_dismissed()
|| search_bar.active_searchable_item.is_none()
{
false
} else {
callback(search_bar, action, cx);
true
}
});
if should_notify {
cx.notify();
} else {
cx.propagate();
}
}
})
});
});
}
fn register_handler_for_dismissed_search<A: Action>(
&mut self,
callback: fn(&mut BufferSearchBar, &A, &mut ViewContext<BufferSearchBar>),
) {
self.register_action(move |workspace, action: &A, cx| {
if workspace.has_active_modal(cx) {
cx.propagate();
return;
}
let pane = workspace.active_pane();
pane.update(cx, move |this, cx| {
this.toolbar().update(cx, move |this, cx| {
if let Some(search_bar) = this.item_of_type::<BufferSearchBar>() {
let should_notify = search_bar.update(cx, move |search_bar, cx| {
if search_bar.is_dismissed() {
callback(search_bar, action, cx);
true
} else {
false
}
});
if should_notify {
cx.notify();
} else {
cx.propagate();
}
}
})
});
});
}
}
impl BufferSearchBar {
pub fn register(registrar: &mut impl SearchActionsRegistrar) {
registrar.register_handler(|this, action: &ToggleCaseSensitive, cx| {
registrar.register_handler(ForDeployed(|this, action: &ToggleCaseSensitive, cx| {
if this.supported_options().case {
this.toggle_case_sensitive(action, cx);
}
});
registrar.register_handler(|this, action: &ToggleWholeWord, cx| {
}));
registrar.register_handler(ForDeployed(|this, action: &ToggleWholeWord, cx| {
if this.supported_options().word {
this.toggle_whole_word(action, cx);
}
});
registrar.register_handler(|this, action: &ToggleReplace, cx| {
}));
registrar.register_handler(ForDeployed(|this, action: &ToggleReplace, cx| {
if this.supported_options().replacement {
this.toggle_replace(action, cx);
}
});
registrar.register_handler(|this, _: &ActivateRegexMode, cx| {
}));
registrar.register_handler(ForDeployed(|this, _: &ActivateRegexMode, cx| {
if this.supported_options().regex {
this.activate_search_mode(SearchMode::Regex, cx);
}
});
registrar.register_handler(|this, _: &ActivateTextMode, cx| {
}));
registrar.register_handler(ForDeployed(|this, _: &ActivateTextMode, cx| {
this.activate_search_mode(SearchMode::Text, cx);
});
registrar.register_handler(|this, action: &CycleMode, cx| {
}));
registrar.register_handler(ForDeployed(|this, action: &CycleMode, cx| {
if this.supported_options().regex {
// If regex is not supported then search has just one mode (text) - in that case there's no point in supporting
// cycling.
this.cycle_mode(action, cx)
}
});
registrar.register_handler(|this, action: &SelectNextMatch, cx| {
}));
registrar.register_handler(WithResults(|this, action: &SelectNextMatch, cx| {
this.select_next_match(action, cx);
});
registrar.register_handler(|this, action: &SelectPrevMatch, cx| {
}));
registrar.register_handler(WithResults(|this, action: &SelectPrevMatch, cx| {
this.select_prev_match(action, cx);
});
registrar.register_handler(|this, action: &SelectAllMatches, cx| {
}));
registrar.register_handler(WithResults(|this, action: &SelectAllMatches, cx| {
this.select_all_matches(action, cx);
});
registrar.register_handler(|this, _: &editor::actions::Cancel, cx| {
}));
registrar.register_handler(ForDeployed(|this, _: &editor::actions::Cancel, cx| {
this.dismiss(&Dismiss, cx);
});
}));
// register deploy buffer search for both search bar states, since we want to focus into the search bar
// when the deploy action is triggered in the buffer.
registrar.register_handler(|this, deploy, cx| {
registrar.register_handler(ForDeployed(|this, deploy, cx| {
this.deploy(deploy, cx);
});
registrar.register_handler_for_dismissed_search(|this, deploy, cx| {
}));
registrar.register_handler(ForDismissed(|this, deploy, cx| {
this.deploy(deploy, cx);
})
}))
}
pub fn new(cx: &mut ViewContext<Self>) -> Self {
@ -930,7 +764,7 @@ impl BufferSearchBar {
event: &editor::EditorEvent,
cx: &mut ViewContext<Self>,
) {
if let editor::EditorEvent::Edited { .. } = event {
if let editor::EditorEvent::Edited = event {
self.query_contains_error = false;
self.clear_matches(cx);
let search = self.update_matches(cx);

View File

@ -0,0 +1,172 @@
use gpui::{div, Action, Div, InteractiveElement, View, ViewContext};
use workspace::Workspace;
use crate::BufferSearchBar;
/// Registrar inverts the dependency between search and its downstream user, allowing said downstream user to register search action without knowing exactly what those actions are.
pub trait SearchActionsRegistrar {
    /// Installs `callback` as the handler for action `A`; the callback's
    /// [`ActionExecutor`] decides whether the search bar's state permits running it.
    fn register_handler<A: Action>(&mut self, callback: impl ActionExecutor<A>);
}
// Signature shared by all search-bar action callbacks.
type SearchBarActionCallback<A> = fn(&mut BufferSearchBar, &A, &mut ViewContext<BufferSearchBar>);
// How a host view locates its buffer search bar, if it has one.
type GetSearchBar<T> =
    for<'a, 'b> fn(&'a T, &'a mut ViewContext<'b, T>) -> Option<View<BufferSearchBar>>;
/// Registers search actions on a div that can be taken out.
pub struct DivRegistrar<'a, 'b, T: 'static> {
    div: Option<Div>, // always `Some`; `Option` only so handlers can be attached by value
    cx: &'a mut ViewContext<'b, T>,
    search_getter: GetSearchBar<T>, // how to find the search bar from the host view
}
impl<'a, 'b, T: 'static> DivRegistrar<'a, 'b, T> {
    /// Creates a registrar that will attach search action handlers to a fresh `div`.
    pub fn new(search_getter: GetSearchBar<T>, cx: &'a mut ViewContext<'b, T>) -> Self {
        Self {
            div: Some(div()),
            cx,
            search_getter,
        }
    }
    /// Consumes the registrar, returning the div with all registered handlers attached.
    pub fn into_div(self) -> Div {
        // This option is always Some; it's an option in the first place because we want to call methods
        // on div that require ownership.
        self.div.unwrap()
    }
}
impl<T: 'static> SearchActionsRegistrar for DivRegistrar<'_, '_, T> {
    fn register_handler<A: Action>(&mut self, callback: impl ActionExecutor<A>) {
        let getter = self.search_getter;
        // Temporarily take the div out so the ownership-consuming `on_action`
        // can be called, then put the resulting div back.
        self.div = self.div.take().map(|div| {
            div.on_action(self.cx.listener(move |this, action, cx| {
                let should_notify = (getter)(this, cx)
                    .clone()
                    .map(|search_bar| {
                        search_bar.update(cx, |search_bar, cx| {
                            callback.execute(search_bar, action, cx)
                        })
                    })
                    .unwrap_or(false);
                if should_notify {
                    cx.notify();
                } else {
                    // Not handled here; let the action bubble up to other handlers.
                    cx.propagate();
                }
            }))
        });
    }
}
/// Register actions for an active pane.
impl SearchActionsRegistrar for Workspace {
fn register_handler<A: Action>(&mut self, callback: impl ActionExecutor<A>) {
self.register_action(move |workspace, action: &A, cx| {
if workspace.has_active_modal(cx) {
cx.propagate();
return;
}
let pane = workspace.active_pane();
let callback = callback.clone();
pane.update(cx, |this, cx| {
this.toolbar().update(cx, move |this, cx| {
if let Some(search_bar) = this.item_of_type::<BufferSearchBar>() {
let should_notify = search_bar.update(cx, move |search_bar, cx| {
callback.execute(search_bar, action, cx)
});
if should_notify {
cx.notify();
} else {
cx.propagate();
}
}
})
});
});
}
}
type DidHandleAction = bool;
/// Potentially executes the underlying action if some preconditions are met (e.g. buffer search bar is visible)
pub trait ActionExecutor<A: Action>: 'static + Clone {
fn execute(
&self,
search_bar: &mut BufferSearchBar,
action: &A,
cx: &mut ViewContext<BufferSearchBar>,
) -> DidHandleAction;
}
/// Run an action when the search bar has been dismissed from the panel.
pub struct ForDismissed<A>(pub(super) SearchBarActionCallback<A>);
impl<A> Clone for ForDismissed<A> {
fn clone(&self) -> Self {
Self(self.0)
}
}
impl<A: Action> ActionExecutor<A> for ForDismissed<A> {
fn execute(
&self,
search_bar: &mut BufferSearchBar,
action: &A,
cx: &mut ViewContext<BufferSearchBar>,
) -> DidHandleAction {
if search_bar.is_dismissed() {
self.0(search_bar, action, cx);
true
} else {
false
}
}
}
/// Run an action when the search bar is deployed.
pub struct ForDeployed<A>(pub(super) SearchBarActionCallback<A>);
impl<A> Clone for ForDeployed<A> {
fn clone(&self) -> Self {
Self(self.0)
}
}
impl<A: Action> ActionExecutor<A> for ForDeployed<A> {
fn execute(
&self,
search_bar: &mut BufferSearchBar,
action: &A,
cx: &mut ViewContext<BufferSearchBar>,
) -> DidHandleAction {
if search_bar.is_dismissed() || search_bar.active_searchable_item.is_none() {
false
} else {
self.0(search_bar, action, cx);
true
}
}
}
/// Run an action when the search bar has any matches, regardless of whether it
/// is visible or not.
pub struct WithResults<A>(pub(super) SearchBarActionCallback<A>);
impl<A> Clone for WithResults<A> {
fn clone(&self) -> Self {
Self(self.0)
}
}
impl<A: Action> ActionExecutor<A> for WithResults<A> {
fn execute(
&self,
search_bar: &mut BufferSearchBar,
action: &A,
cx: &mut ViewContext<BufferSearchBar>,
) -> DidHandleAction {
if search_bar.active_match_index.is_some() {
self.0(search_bar, action, cx);
true
} else {
false
}
}
}

View File

@ -88,6 +88,12 @@ pub fn init(cx: &mut AppContext) {
register_workspace_action(workspace, move |search_bar, action: &CycleMode, cx| {
search_bar.cycle_mode(action, cx)
});
register_workspace_action(
workspace,
move |search_bar, action: &SelectPrevMatch, cx| {
search_bar.select_prev_match(action, cx)
},
);
register_workspace_action(
workspace,
move |search_bar, action: &SelectNextMatch, cx| {
@ -746,6 +752,7 @@ impl ProjectSearchView {
cx.prompt(
PromptLevel::Info,
prompt_text.as_str(),
None,
&["Continue", "Cancel"],
)
})?;
@ -1549,7 +1556,7 @@ impl ProjectSearchBar {
}
}
pub fn select_next_match(&mut self, _: &SelectNextMatch, cx: &mut ViewContext<Self>) {
fn select_next_match(&mut self, _: &SelectNextMatch, cx: &mut ViewContext<Self>) {
if let Some(search) = self.active_project_search.as_ref() {
search.update(cx, |this, cx| {
this.select_match(Direction::Next, cx);

View File

@ -18,7 +18,7 @@ db = { path = "../db" }
theme = { path = "../theme" }
util = { path = "../util" }
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "33306142195b354ef3485ca2b1d8a85dfc6605ca" }
alacritty_terminal = "0.21"
procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
smallvec.workspace = true
smol.workspace = true

View File

@ -1,5 +1,4 @@
use alacritty_terminal::term::color::Rgb as AlacRgb;
use alacritty_terminal::vte::ansi::Rgb as AlacRgb;
use gpui::Rgba;
//Convenience method to convert from a GPUI color to an alacritty Rgb
@ -8,5 +7,5 @@ pub fn to_alac_rgb(color: impl Into<Rgba>) -> AlacRgb {
let r = ((color.r * color.a) * 255.) as u8;
let g = ((color.g * color.a) * 255.) as u8;
let b = ((color.b * color.a) * 255.) as u8;
AlacRgb::new(r, g, b)
AlacRgb { r, g, b }
}

View File

@ -3,8 +3,6 @@ pub use alacritty_terminal;
pub mod terminal_settings;
use alacritty_terminal::{
ansi::{ClearMode, Handler},
config::{Config, Program, PtyConfig, Scrolling},
event::{Event as AlacTermEvent, EventListener, Notify, WindowSize},
event_loop::{EventLoop, Msg, Notifier},
grid::{Dimensions, Scroll as AlacScroll},
@ -13,11 +11,11 @@ use alacritty_terminal::{
sync::FairMutex,
term::{
cell::Cell,
color::Rgb,
search::{Match, RegexIter, RegexSearch},
RenderableCursor, TermMode,
Config, RenderableCursor, TermMode,
},
tty::{self, setup_env},
vte::ansi::{ClearMode, Handler, NamedPrivateMode, PrivateMode, Rgb},
Term,
};
use anyhow::{bail, Result};
@ -58,7 +56,6 @@ use gpui::{
};
use crate::mappings::{colors::to_alac_rgb, keys::to_esc_str};
use lazy_static::lazy_static;
actions!(
terminal,
@ -75,15 +72,6 @@ const DEBUG_TERMINAL_HEIGHT: Pixels = px(30.);
const DEBUG_CELL_WIDTH: Pixels = px(5.);
const DEBUG_LINE_HEIGHT: Pixels = px(5.);
lazy_static! {
// Regex Copied from alacritty's ui_config.rs and modified its declaration slightly:
// * avoid Rust-specific escaping.
// * use more strict regex for `file://` protocol matching: original regex has `file:` inside, but we want to avoid matching `some::file::module` strings.
static ref URL_REGEX: RegexSearch = RegexSearch::new(r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`]+"#).unwrap();
static ref WORD_REGEX: RegexSearch = RegexSearch::new(r#"[\w.\[\]:/@\-~]+"#).unwrap();
}
///Upward flowing events, for changing the title and such
#[derive(Clone, Debug)]
pub enum Event {
@ -289,66 +277,70 @@ impl TerminalBuilder {
pub fn new(
working_directory: Option<PathBuf>,
shell: Shell,
mut env: HashMap<String, String>,
env: HashMap<String, String>,
blink_settings: Option<TerminalBlink>,
alternate_scroll: AlternateScroll,
window: AnyWindowHandle,
) -> Result<TerminalBuilder> {
let pty_config = {
let pty_options = {
let alac_shell = match shell.clone() {
Shell::System => None,
Shell::Program(program) => Some(Program::Just(program)),
Shell::WithArguments { program, args } => Some(Program::WithArgs { program, args }),
Shell::Program(program) => {
Some(alacritty_terminal::tty::Shell::new(program, Vec::new()))
}
Shell::WithArguments { program, args } => {
Some(alacritty_terminal::tty::Shell::new(program, args))
}
};
PtyConfig {
alacritty_terminal::tty::Options {
shell: alac_shell,
working_directory: working_directory.clone(),
hold: false,
}
};
//TODO: Properly set the current locale,
env.insert("LC_ALL".to_string(), "en_US.UTF-8".to_string());
env.insert("ZED_TERM".to_string(), true.to_string());
// First, setup Alacritty's env
setup_env();
let alac_scrolling = Scrolling::default();
// alac_scrolling.set_history((BACK_BUFFER_SIZE * 2) as u32);
// Then setup configured environment variables
for (key, value) in env {
std::env::set_var(key, value);
}
//TODO: Properly set the current locale,
std::env::set_var("LC_ALL", "en_US.UTF-8");
std::env::set_var("ZED_TERM", "true");
let config = Config {
pty_config: pty_config.clone(),
env,
scrolling: alac_scrolling,
scrolling_history: 10000,
..Default::default()
};
setup_env(&config);
//Spawn a task so the Alacritty EventLoop can communicate with us in a view context
//TODO: Remove with a bounded sender which can be dispatched on &self
let (events_tx, events_rx) = unbounded();
//Set up the terminal...
let mut term = Term::new(
&config,
config,
&TerminalSize::default(),
ZedListener(events_tx.clone()),
);
//Start off blinking if we need to
if let Some(TerminalBlink::On) = blink_settings {
term.set_mode(alacritty_terminal::ansi::Mode::BlinkingCursor)
term.set_private_mode(PrivateMode::Named(NamedPrivateMode::BlinkingCursor));
}
//Alacritty defaults to alternate scrolling being on, so we just need to turn it off.
if let AlternateScroll::Off = alternate_scroll {
term.unset_mode(alacritty_terminal::ansi::Mode::AlternateScroll)
term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll));
}
let term = Arc::new(FairMutex::new(term));
//Setup the pty...
let pty = match tty::new(
&pty_config,
&pty_options,
TerminalSize::default().into(),
window.window_id().as_u64(),
) {
@ -370,13 +362,16 @@ impl TerminalBuilder {
term.clone(),
ZedListener(events_tx.clone()),
pty,
pty_config.hold,
pty_options.hold,
false,
);
//Kick things off
let pty_tx = event_loop.channel();
let _io_thread = event_loop.spawn();
let _io_thread = event_loop.spawn(); // DANGER
let url_regex = RegexSearch::new(r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`]+"#).unwrap();
let word_regex = RegexSearch::new(r#"[\w.\[\]:/@\-~]+"#).unwrap();
let terminal = Terminal {
pty_tx: Notifier(pty_tx),
@ -396,6 +391,8 @@ impl TerminalBuilder {
selection_phase: SelectionPhase::Ended,
cmd_pressed: false,
hovered_word: false,
url_regex,
word_regex,
};
Ok(TerminalBuilder {
@ -514,7 +511,7 @@ impl Default for TerminalContent {
selection_text: Default::default(),
selection: Default::default(),
cursor: RenderableCursor {
shape: alacritty_terminal::ansi::CursorShape::Block,
shape: alacritty_terminal::vte::ansi::CursorShape::Block,
point: AlacPoint::new(Line(0), Column(0)),
},
cursor_char: Default::default(),
@ -550,6 +547,8 @@ pub struct Terminal {
selection_phase: SelectionPhase,
cmd_pressed: bool,
hovered_word: bool,
url_regex: RegexSearch,
word_regex: RegexSearch,
}
impl Terminal {
@ -760,7 +759,7 @@ impl Terminal {
let url_match = min_index..=max_index;
Some((url, true, url_match))
} else if let Some(word_match) = regex_match_at(term, point, &WORD_REGEX) {
} else if let Some(word_match) = regex_match_at(term, point, &mut self.word_regex) {
let maybe_url_or_path =
term.bounds_to_string(*word_match.start(), *word_match.end());
let original_match = word_match.clone();
@ -777,7 +776,7 @@ impl Terminal {
(word_match, maybe_url_or_path)
};
let is_url = match regex_match_at(term, point, &URL_REGEX) {
let is_url = match regex_match_at(term, point, &mut self.url_regex) {
Some(url_match) => {
// `]` is a valid symbol in the `file://` URL, so the regex match will include it
// consider that when ensuring that the URL match is the same as the original word
@ -1275,14 +1274,14 @@ impl Terminal {
pub fn find_matches(
&mut self,
searcher: RegexSearch,
mut searcher: RegexSearch,
cx: &mut ModelContext<Self>,
) -> Task<Vec<RangeInclusive<AlacPoint>>> {
let term = self.term.clone();
cx.background_executor().spawn(async move {
let term = term.lock();
all_search_matches(&term, &searcher).collect()
all_search_matches(&term, &mut searcher).collect()
})
}
@ -1332,7 +1331,7 @@ impl EventEmitter<Event> for Terminal {}
/// Based on alacritty/src/display/hint.rs > regex_match_at
/// Retrieve the match, if the specified point is inside the content matching the regex.
fn regex_match_at<T>(term: &Term<T>, point: AlacPoint, regex: &RegexSearch) -> Option<Match> {
fn regex_match_at<T>(term: &Term<T>, point: AlacPoint, regex: &mut RegexSearch) -> Option<Match> {
visible_regex_match_iter(term, regex).find(|rm| rm.contains(&point))
}
@ -1340,7 +1339,7 @@ fn regex_match_at<T>(term: &Term<T>, point: AlacPoint, regex: &RegexSearch) -> O
/// Iterate over all visible regex matches.
pub fn visible_regex_match_iter<'a, T>(
term: &'a Term<T>,
regex: &'a RegexSearch,
regex: &'a mut RegexSearch,
) -> impl Iterator<Item = Match> + 'a {
let viewport_start = Line(-(term.grid().display_offset() as i32));
let viewport_end = viewport_start + term.bottommost_line();
@ -1362,7 +1361,7 @@ fn make_selection(range: &RangeInclusive<AlacPoint>) -> Selection {
fn all_search_matches<'a, T>(
term: &'a Term<T>,
regex: &'a RegexSearch,
regex: &'a mut RegexSearch,
) -> impl Iterator<Item = Match> + 'a {
let start = AlacPoint::new(term.grid().topmost_line(), Column(0));
let end = AlacPoint::new(term.grid().bottommost_line(), term.grid().last_column());

View File

@ -11,12 +11,11 @@ use itertools::Itertools;
use language::CursorShape;
use settings::Settings;
use terminal::{
alacritty_terminal::ansi::NamedColor,
alacritty_terminal::{
ansi::{Color as AnsiColor, Color::Named, CursorShape as AlacCursorShape},
grid::Dimensions,
index::Point as AlacPoint,
term::{cell::Flags, TermMode},
vte::ansi::{Color as AnsiColor, Color::Named, CursorShape as AlacCursorShape, NamedColor},
},
terminal_settings::TerminalSettings,
IndexedCell, Terminal, TerminalContent, TerminalSize,
@ -308,7 +307,7 @@ impl TerminalElement {
/// Converts the Alacritty cell styles to GPUI text styles and background color.
fn cell_style(
indexed: &IndexedCell,
fg: terminal::alacritty_terminal::ansi::Color,
fg: terminal::alacritty_terminal::vte::ansi::Color,
// bg: terminal::alacritty_terminal::ansi::Color,
colors: &Theme,
text_style: &TextStyle,
@ -998,11 +997,11 @@ fn to_highlighted_range_lines(
}
/// Converts a 2, 8, or 24 bit color ANSI color to the GPUI equivalent.
fn convert_color(fg: &terminal::alacritty_terminal::ansi::Color, theme: &Theme) -> Hsla {
fn convert_color(fg: &terminal::alacritty_terminal::vte::ansi::Color, theme: &Theme) -> Hsla {
let colors = theme.colors();
match fg {
// Named and theme defined colors
terminal::alacritty_terminal::ansi::Color::Named(n) => match n {
terminal::alacritty_terminal::vte::ansi::Color::Named(n) => match n {
NamedColor::Black => colors.terminal_ansi_black,
NamedColor::Red => colors.terminal_ansi_red,
NamedColor::Green => colors.terminal_ansi_green,
@ -1034,11 +1033,11 @@ fn convert_color(fg: &terminal::alacritty_terminal::ansi::Color, theme: &Theme)
NamedColor::DimForeground => colors.terminal_dim_foreground,
},
// 'True' colors
terminal::alacritty_terminal::ansi::Color::Spec(rgb) => {
terminal::alacritty_terminal::vte::ansi::Color::Spec(rgb) => {
terminal::rgba_color(rgb.r, rgb.g, rgb.b)
}
// 8 bit, indexed colors
terminal::alacritty_terminal::ansi::Color::Indexed(i) => {
terminal::alacritty_terminal::vte::ansi::Color::Indexed(i) => {
terminal::get_color_at_index(*i as usize, theme)
}
}

View File

@ -911,7 +911,7 @@ impl SearchableItem for TerminalView {
}
}
///Get's the working directory for the given workspace, respecting the user's settings.
///Gets the working directory for the given workspace, respecting the user's settings.
pub fn get_working_directory(
workspace: &Workspace,
cx: &AppContext,
@ -932,7 +932,7 @@ pub fn get_working_directory(
res.or_else(home_dir)
}
///Get's the first project's home directory, or the home directory
///Gets the first project's home directory, or the home directory
fn first_project_directory(workspace: &Workspace, cx: &AppContext) -> Option<PathBuf> {
workspace
.worktrees(cx)

View File

@ -2634,7 +2634,7 @@ impl Default for LineEnding {
return Self::Unix;
#[cfg(not(unix))]
return Self::CRLF;
return Self::Windows;
}
}

View File

@ -236,7 +236,7 @@ pub struct ThemeColors {
#[derive(Refineable, Clone)]
pub struct ThemeStyles {
pub system: SystemColors,
/// An array of colors used for theme elements that iterrate through a series of colors.
/// An array of colors used for theme elements that iterate through a series of colors.
///
/// Example: Player colors, rainbow brackets and indent guides, etc.
pub accents: Vec<Hsla>,

View File

@ -66,7 +66,7 @@ Copy that json file into the theme family directory and tidy up the filenames as
A LICENSE file is required to import a theme family. Failing to provide a complete text license will cause it to be skipped when the import is run.
If the theme only provices a license code (e.g. MIT, Apache 2.0, etc.) then put that code into the LICENSE file.
If the theme only provides a license code (e.g. MIT, Apache 2.0, etc.) then put that code into the LICENSE file.
If no license is provided, either contact the theme creator or don't add the theme.

View File

@ -27,7 +27,7 @@ pub enum AvatarShape {
#[derive(IntoElement)]
pub struct Avatar {
image: Img,
size: Option<Pixels>,
size: Option<AbsoluteLength>,
border_color: Option<Hsla>,
indicator: Option<AnyElement>,
}
@ -82,8 +82,8 @@ impl Avatar {
}
/// Size overrides the avatar size. By default they are 1rem.
pub fn size(mut self, size: impl Into<Option<Pixels>>) -> Self {
self.size = size.into();
pub fn size<L: Into<AbsoluteLength>>(mut self, size: impl Into<Option<L>>) -> Self {
self.size = size.into().map(Into::into);
self
}
@ -105,8 +105,8 @@ impl RenderOnce for Avatar {
px(0.)
};
let image_size = self.size.unwrap_or_else(|| cx.rem_size());
let container_size = image_size + border_width * 2.;
let image_size = self.size.unwrap_or_else(|| rems(1.).into());
let container_size = image_size.to_pixels(cx.rem_size()) + border_width * 2.;
div()
.size(container_size)

View File

@ -59,7 +59,7 @@ pub async fn latest_github_release(
Err(_) => {
log::error!(
"Error deserializing Github API response text: {:?}",
"Error deserializing GitHub API response text: {:?}",
String::from_utf8_lossy(body.as_slice())
);
return Err(anyhow!("error deserializing latest release"));

View File

@ -5,7 +5,7 @@ use serde_derive::Deserialize;
use workspace::{SaveIntent, Workspace};
use crate::{
motion::{EndOfDocument, Motion},
motion::{EndOfDocument, Motion, StartOfDocument},
normal::{
move_cursor,
search::{FindCommand, ReplaceCommand},
@ -235,6 +235,8 @@ pub fn command_interceptor(mut query: &str, _: &AppContext) -> Option<CommandInt
// goto (other ranges handled under _ => )
"$" => ("$", EndOfDocument.boxed_clone()),
"%" => ("%", EndOfDocument.boxed_clone()),
"0" => ("0", StartOfDocument.boxed_clone()),
_ => {
if query.starts_with("/") || query.starts_with("?") {
@ -280,7 +282,7 @@ fn generate_positions(string: &str, query: &str) -> Vec<usize> {
return positions;
};
for (i, c) in string.chars().enumerate() {
for (i, c) in string.char_indices() {
if c == current {
positions.push(i);
if let Some(c) = chars.next() {

View File

@ -1,8 +1,8 @@
use crate::{Toast, Workspace};
use collections::HashMap;
use gpui::{
AnyView, AppContext, AsyncWindowContext, DismissEvent, Entity, EntityId, EventEmitter, Render,
Task, View, ViewContext, VisualContext, WindowContext,
AnyView, AppContext, AsyncWindowContext, DismissEvent, Entity, EntityId, EventEmitter,
PromptLevel, Render, Task, View, ViewContext, VisualContext, WindowContext,
};
use std::{any::TypeId, ops::DerefMut};
@ -299,7 +299,7 @@ pub trait NotifyTaskExt {
impl<R, E> NotifyTaskExt for Task<Result<R, E>>
where
E: std::fmt::Debug + 'static,
E: std::fmt::Debug + Sized + 'static,
R: 'static,
{
fn detach_and_notify_err(self, cx: &mut WindowContext) {
@ -307,3 +307,39 @@ where
.detach();
}
}
pub trait DetachAndPromptErr {
fn detach_and_prompt_err(
self,
msg: &str,
cx: &mut WindowContext,
f: impl FnOnce(&anyhow::Error, &mut WindowContext) -> Option<String> + 'static,
);
}
impl<R> DetachAndPromptErr for Task<anyhow::Result<R>>
where
R: 'static,
{
fn detach_and_prompt_err(
self,
msg: &str,
cx: &mut WindowContext,
f: impl FnOnce(&anyhow::Error, &mut WindowContext) -> Option<String> + 'static,
) {
let msg = msg.to_owned();
cx.spawn(|mut cx| async move {
if let Err(err) = self.await {
log::error!("{err:?}");
if let Ok(prompt) = cx.update(|cx| {
let detail = f(&err, cx)
.unwrap_or_else(|| format!("{err:?}. Please try again.", err = err));
cx.prompt(PromptLevel::Critical, &msg, Some(&detail), &["Ok"])
}) {
prompt.await.ok();
}
}
})
.detach();
}
}

View File

@ -870,7 +870,7 @@ impl Pane {
items: &mut dyn Iterator<Item = &Box<dyn ItemHandle>>,
all_dirty_items: usize,
cx: &AppContext,
) -> String {
) -> (String, String) {
/// Quantity of item paths displayed in prompt prior to cutoff..
const FILE_NAMES_CUTOFF_POINT: usize = 10;
let mut file_names: Vec<_> = items
@ -894,10 +894,12 @@ impl Pane {
file_names.push(format!(".. {} files not shown", not_shown_files).into());
}
}
let file_names = file_names.join("\n");
format!(
"Do you want to save changes to the following {} files?\n{file_names}",
all_dirty_items
(
format!(
"Do you want to save changes to the following {} files?",
all_dirty_items
),
file_names.join("\n"),
)
}
@ -929,11 +931,12 @@ impl Pane {
cx.spawn(|pane, mut cx| async move {
if save_intent == SaveIntent::Close && dirty_items.len() > 1 {
let answer = pane.update(&mut cx, |_, cx| {
let prompt =
let (prompt, detail) =
Self::file_names_for_prompt(&mut dirty_items.iter(), dirty_items.len(), cx);
cx.prompt(
PromptLevel::Warning,
&prompt,
Some(&detail),
&["Save all", "Discard all", "Cancel"],
)
})?;
@ -1131,6 +1134,7 @@ impl Pane {
cx.prompt(
PromptLevel::Warning,
CONFLICT_MESSAGE,
None,
&["Overwrite", "Discard", "Cancel"],
)
})?;
@ -1154,6 +1158,7 @@ impl Pane {
cx.prompt(
PromptLevel::Warning,
&prompt,
None,
&["Save", "Don't Save", "Cancel"],
)
})?;

View File

@ -14,8 +14,8 @@ mod workspace_settings;
use anyhow::{anyhow, Context as _, Result};
use call::ActiveCall;
use client::{
proto::{self, PeerId},
Client, Status, TypedEnvelope, UserStore,
proto::{self, ErrorCode, PeerId},
Client, ErrorExt, Status, TypedEnvelope, UserStore,
};
use collections::{hash_map, HashMap, HashSet};
use dock::{Dock, DockPosition, Panel, PanelButtons, PanelHandle};
@ -30,8 +30,8 @@ use gpui::{
DragMoveEvent, Element, ElementContext, Entity, EntityId, EventEmitter, FocusHandle,
FocusableView, GlobalPixels, InteractiveElement, IntoElement, KeyContext, LayoutId,
ManagedView, Model, ModelContext, ParentElement, PathPromptOptions, Pixels, Point, PromptLevel,
Render, Size, Styled, Subscription, Task, View, ViewContext, VisualContext, WeakView,
WindowBounds, WindowContext, WindowHandle, WindowOptions,
Render, SharedString, Size, Styled, Subscription, Task, View, ViewContext, VisualContext,
WeakView, WindowBounds, WindowContext, WindowHandle, WindowOptions,
};
use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem};
use itertools::Itertools;
@ -1159,6 +1159,7 @@ impl Workspace {
cx.prompt(
PromptLevel::Warning,
"Do you want to leave the current call?",
None,
&["Close window and hang up", "Cancel"],
)
})?;
@ -1214,7 +1215,7 @@ impl Workspace {
// Override save mode and display "Save all files" prompt
if save_intent == SaveIntent::Close && dirty_items.len() > 1 {
let answer = workspace.update(&mut cx, |_, cx| {
let prompt = Pane::file_names_for_prompt(
let (prompt, detail) = Pane::file_names_for_prompt(
&mut dirty_items.iter().map(|(_, handle)| handle),
dirty_items.len(),
cx,
@ -1222,6 +1223,7 @@ impl Workspace {
cx.prompt(
PromptLevel::Warning,
&prompt,
Some(&detail),
&["Save all", "Discard all", "Cancel"],
)
})?;
@ -3887,13 +3889,16 @@ async fn join_channel_internal(
if should_prompt {
if let Some(workspace) = requesting_window {
let answer = workspace.update(cx, |_, cx| {
cx.prompt(
PromptLevel::Warning,
"Leaving this call will unshare your current project.\nDo you want to switch channels?",
&["Yes, Join Channel", "Cancel"],
)
})?.await;
let answer = workspace
.update(cx, |_, cx| {
cx.prompt(
PromptLevel::Warning,
"Do you want to switch channels?",
Some("Leaving this call will unshare your current project."),
&["Yes, Join Channel", "Cancel"],
)
})?
.await;
if answer == Ok(1) {
return Ok(false);
@ -3919,10 +3924,10 @@ async fn join_channel_internal(
| Status::Reconnecting
| Status::Reauthenticating => continue,
Status::Connected { .. } => break 'outer,
Status::SignedOut => return Err(anyhow!("not signed in")),
Status::UpgradeRequired => return Err(anyhow!("zed is out of date")),
Status::SignedOut => return Err(ErrorCode::SignedOut.into()),
Status::UpgradeRequired => return Err(ErrorCode::UpgradeRequired.into()),
Status::ConnectionError | Status::ConnectionLost | Status::ReconnectionError { .. } => {
return Err(anyhow!("zed is offline"))
return Err(ErrorCode::Disconnected.into())
}
}
}
@ -3995,9 +4000,27 @@ pub fn join_channel(
if let Some(active_window) = active_window {
active_window
.update(&mut cx, |_, cx| {
let detail: SharedString = match err.error_code() {
ErrorCode::SignedOut => {
"Please sign in to continue.".into()
},
ErrorCode::UpgradeRequired => {
"Your are running an unsupported version of Zed. Please update to continue.".into()
},
ErrorCode::NoSuchChannel => {
"No matching channel was found. Please check the link and try again.".into()
},
ErrorCode::Forbidden => {
"This channel is private, and you do not have access. Please ask someone to add you and try again.".into()
},
ErrorCode::Disconnected => "Please check your internet connection and try again.".into(),
ErrorCode::WrongReleaseChannel => format!("Others in the channel are using the {} release of Zed. Please switch to join this call.", err.error_tag("required").unwrap_or("other")).into(),
_ => format!("{}\n\nPlease try again.", err).into(),
};
cx.prompt(
PromptLevel::Critical,
&format!("Failed to join channel: {}", err),
"Failed to join channel",
Some(&detail),
&["Ok"],
)
})?
@ -4224,6 +4247,7 @@ pub fn restart(_: &Restart, cx: &mut AppContext) {
cx.prompt(
PromptLevel::Info,
"Are you sure you want to restart?",
None,
&["Restart", "Cancel"],
)
})

View File

@ -121,6 +121,7 @@ tree-sitter-elixir.workspace = true
tree-sitter-elm.workspace = true
tree-sitter-embedded-template.workspace = true
tree-sitter-glsl.workspace = true
tree-sitter-gleam.workspace = true
tree-sitter-go.workspace = true
tree-sitter-heex.workspace = true
tree-sitter-json.workspace = true
@ -142,6 +143,7 @@ tree-sitter-nix.workspace = true
tree-sitter-nu.workspace = true
tree-sitter-vue.workspace = true
tree-sitter-uiua.workspace = true
tree-sitter-zig.workspace = true
url = "2.2"
urlencoding = "2.1.2"

View File

@ -7,11 +7,13 @@ use settings::Settings;
use std::{borrow::Cow, str, sync::Arc};
use util::{asset_str, paths::PLUGINS_DIR};
use self::elixir::ElixirSettings;
use self::{deno::DenoSettings, elixir::ElixirSettings};
mod c;
mod css;
mod deno;
mod elixir;
mod gleam;
mod go;
mod haskell;
mod html;
@ -30,6 +32,7 @@ mod typescript;
mod uiua;
mod vue;
mod yaml;
mod zig;
// 1. Add tree-sitter-{language} parser to zed crate
// 2. Create a language directory in zed/crates/zed/src/languages and add the language to init function below
@ -51,6 +54,7 @@ pub fn init(
cx: &mut AppContext,
) {
ElixirSettings::register(cx);
DenoSettings::register(cx);
let language = |name, grammar, adapters| {
languages.register(name, load_config(name), grammar, adapters, load_queries)
@ -100,11 +104,21 @@ pub fn init(
),
}
language(
"gleam",
tree_sitter_gleam::language(),
vec![Arc::new(gleam::GleamLspAdapter)],
);
language(
"go",
tree_sitter_go::language(),
vec![Arc::new(go::GoLspAdapter)],
);
language(
"zig",
tree_sitter_zig::language(),
vec![Arc::new(zig::ZlsAdapter)],
);
language(
"heex",
tree_sitter_heex::language(),
@ -135,32 +149,59 @@ pub fn init(
vec![Arc::new(rust::RustLspAdapter)],
);
language("toml", tree_sitter_toml::language(), vec![]);
language(
"tsx",
tree_sitter_typescript::language_tsx(),
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
);
language(
"typescript",
tree_sitter_typescript::language_typescript(),
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
],
);
language(
"javascript",
tree_sitter_typescript::language_tsx(),
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
);
match &DenoSettings::get(None, cx).enable {
true => {
language(
"tsx",
tree_sitter_typescript::language_tsx(),
vec![
Arc::new(deno::DenoLspAdapter::new()),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
);
language(
"typescript",
tree_sitter_typescript::language_typescript(),
vec![Arc::new(deno::DenoLspAdapter::new())],
);
language(
"javascript",
tree_sitter_typescript::language_tsx(),
vec![
Arc::new(deno::DenoLspAdapter::new()),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
);
}
false => {
language(
"tsx",
tree_sitter_typescript::language_tsx(),
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
);
language(
"typescript",
tree_sitter_typescript::language_typescript(),
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
],
);
language(
"javascript",
tree_sitter_typescript::language_tsx(),
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
);
}
}
language("haskell", tree_sitter_haskell::language(), vec![]);
language(
"html",

View File

@ -1,6 +1,6 @@
name = "Shell Script"
path_suffixes = ["sh", "bash", "bashrc", "bash_profile", "bash_aliases", "bash_logout", "profile", "zsh", "zshrc", "zshenv", "zsh_profile", "zsh_aliases", "zsh_histfile", "zlogin", "zprofile"]
line_comment = "# "
line_comments = ["# "]
first_line_pattern = "^#!.*\\b(?:ba|z)?sh\\b"
brackets = [
{ start = "[", end = "]", close = true, newline = false },

View File

@ -15,7 +15,7 @@ pub struct CLspAdapter;
#[async_trait]
impl super::LspAdapter for CLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("clangd".into())
}

View File

@ -1,6 +1,6 @@
name = "C"
path_suffixes = ["c"]
line_comment = "// "
line_comments = ["// "]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },

View File

@ -1,6 +1,6 @@
name = "C++"
path_suffixes = ["cc", "cpp", "h", "hpp", "cxx", "hxx", "inl"]
line_comment = "// "
line_comments = ["// "]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },

View File

@ -33,7 +33,7 @@ impl CssLspAdapter {
#[async_trait]
impl LspAdapter for CssLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("vscode-css-language-server".into())
}
@ -91,7 +91,7 @@ impl LspAdapter for CssLspAdapter {
get_cached_server_binary(container_dir, &*self.node).await
}
async fn initialization_options(&self) -> Option<serde_json::Value> {
fn initialization_options(&self) -> Option<serde_json::Value> {
Some(json!({
"provideFormatter": true
}))

View File

@ -0,0 +1,223 @@
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use collections::HashMap;
use futures::StreamExt;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary};
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
use serde_json::json;
use settings::Settings;
use smol::{fs, fs::File};
use std::{any::Any, env::consts, ffi::OsString, path::PathBuf, sync::Arc};
use util::{fs::remove_matching, github::latest_github_release};
use util::{github::GitHubLspBinaryVersion, ResultExt};
#[derive(Clone, Serialize, Deserialize, JsonSchema)]
pub struct DenoSettings {
pub enable: bool,
}
#[derive(Clone, Serialize, Default, Deserialize, JsonSchema)]
pub struct DenoSettingsContent {
enable: Option<bool>,
}
impl Settings for DenoSettings {
    /// Key under which these settings live in the settings JSON.
    const KEY: Option<&'static str> = Some("deno");

    type FileContent = DenoSettingsContent;

    /// Produces the effective settings by JSON-merging user values over the
    /// defaults.
    fn load(
        default_value: &Self::FileContent,
        user_values: &[&Self::FileContent],
        _: &mut gpui::AppContext,
    ) -> Result<Self>
    where
        Self: Sized,
    {
        Self::load_via_json_merge(default_value, user_values)
    }
}
/// Command-line arguments used to start the `deno` executable in
/// language-server mode (`deno lsp`).
fn deno_server_binary_arguments() -> Vec<OsString> {
    let mut args: Vec<OsString> = Vec::with_capacity(1);
    args.push("lsp".into());
    args
}
/// LSP adapter that downloads, caches, and launches the Deno language server.
pub struct DenoLspAdapter {}

impl DenoLspAdapter {
    /// Creates a new adapter; the adapter itself holds no state.
    pub fn new() -> Self {
        DenoLspAdapter {}
    }
}
#[async_trait]
impl LspAdapter for DenoLspAdapter {
    /// Name identifying this language server.
    fn name(&self) -> LanguageServerName {
        LanguageServerName("deno-language-server".into())
    }

    /// Abbreviated name for this adapter.
    fn short_name(&self) -> &'static str {
        "deno-ts"
    }

    /// Queries GitHub for the newest Deno release and returns its metadata
    /// (release name + asset download URL) as an opaque boxed value.
    /// NOTE(review): the asset name hardcodes `apple-darwin`, so only macOS
    /// builds resolve here — confirm other platforms are handled elsewhere.
    async fn fetch_latest_server_version(
        &self,
        delegate: &dyn LspAdapterDelegate,
    ) -> Result<Box<dyn 'static + Send + Any>> {
        let release = latest_github_release("denoland/deno", false, delegate.http_client()).await?;
        let asset_name = format!("deno-{}-apple-darwin.zip", consts::ARCH);
        let asset = release
            .assets
            .iter()
            .find(|asset| asset.name == asset_name)
            .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
        let version = GitHubLspBinaryVersion {
            name: release.name,
            url: asset.browser_download_url.clone(),
        };
        Ok(Box::new(version) as Box<_>)
    }

    /// Downloads and unzips the given release into a versioned subdirectory of
    /// `container_dir`, unless that version's binary is already present.
    /// After a successful unzip, everything except the fresh version dir is
    /// removed from `container_dir`.
    async fn fetch_server_binary(
        &self,
        version: Box<dyn 'static + Send + Any>,
        container_dir: PathBuf,
        delegate: &dyn LspAdapterDelegate,
    ) -> Result<LanguageServerBinary> {
        let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
        let zip_path = container_dir.join(format!("deno_{}.zip", version.name));
        let version_dir = container_dir.join(format!("deno_{}", version.name));
        let binary_path = version_dir.join("deno");
        // Skip the download entirely when this version's binary already exists.
        if fs::metadata(&binary_path).await.is_err() {
            let mut response = delegate
                .http_client()
                .get(&version.url, Default::default(), true)
                .await
                .context("error downloading release")?;
            // NOTE(review): the zip file is created before the status check,
            // so a failed download can leave an empty zip behind.
            let mut file = File::create(&zip_path).await?;
            if !response.status().is_success() {
                Err(anyhow!(
                    "download failed with status {}",
                    response.status().to_string()
                ))?;
            }
            futures::io::copy(response.body_mut(), &mut file).await?;
            // Shell out to `unzip`; the archive contents land in `version_dir`.
            let unzip_status = smol::process::Command::new("unzip")
                .current_dir(&container_dir)
                .arg(&zip_path)
                .arg("-d")
                .arg(&version_dir)
                .output()
                .await?
                .status;
            if !unzip_status.success() {
                Err(anyhow!("failed to unzip deno archive"))?;
            }
            // Clean out everything except the freshly unpacked version dir
            // (including the downloaded zip).
            remove_matching(&container_dir, |entry| entry != version_dir).await;
        }
        Ok(LanguageServerBinary {
            path: binary_path,
            arguments: deno_server_binary_arguments(),
        })
    }

    /// Returns a previously downloaded binary from `container_dir`, if any.
    async fn cached_server_binary(
        &self,
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir).await
    }

    /// Code-action categories requested from the Deno server.
    fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
        Some(vec![
            CodeActionKind::QUICKFIX,
            CodeActionKind::REFACTOR,
            CodeActionKind::REFACTOR_EXTRACT,
            CodeActionKind::SOURCE,
        ])
    }

    /// Builds a syntax-highlighted label for a completion item, choosing a
    /// highlight by the item's LSP kind and appending the detail text when
    /// present.
    async fn label_for_completion(
        &self,
        item: &lsp::CompletionItem,
        language: &Arc<language::Language>,
    ) -> Option<language::CodeLabel> {
        use lsp::CompletionItemKind as Kind;
        // `len` is taken before `detail` is appended, so the highlight run and
        // filter range cover only the original label text.
        let len = item.label.len();
        let grammar = language.grammar()?;
        let highlight_id = match item.kind? {
            Kind::CLASS | Kind::INTERFACE => grammar.highlight_id_for_name("type"),
            Kind::CONSTRUCTOR => grammar.highlight_id_for_name("type"),
            Kind::CONSTANT => grammar.highlight_id_for_name("constant"),
            Kind::FUNCTION | Kind::METHOD => grammar.highlight_id_for_name("function"),
            Kind::PROPERTY | Kind::FIELD => grammar.highlight_id_for_name("property"),
            _ => None,
        }?;
        let text = match &item.detail {
            Some(detail) => format!("{} {}", item.label, detail),
            None => item.label.clone(),
        };
        Some(language::CodeLabel {
            text,
            runs: vec![(0..len, highlight_id)],
            filter_range: 0..len,
        })
    }

    fn initialization_options(&self) -> Option<serde_json::Value> {
        Some(json!({
            "provideFormatter": true,
        }))
    }

    /// Maps editor language names to the language IDs the Deno server expects.
    fn language_ids(&self) -> HashMap<String, String> {
        HashMap::from_iter([
            ("TypeScript".into(), "typescript".into()),
            ("JavaScript".into(), "javascript".into()),
            ("TSX".into(), "typescriptreact".into()),
        ])
    }
}
async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> {
(|| async move {
let mut last = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
last = Some(entry?.path());
}
match last {
Some(path) if path.is_dir() => {
let binary = path.join("deno");
if fs::metadata(&binary).await.is_ok() {
return Ok(LanguageServerBinary {
path: binary,
arguments: deno_server_binary_arguments(),
});
}
}
_ => {}
}
Err(anyhow!("no cached binary"))
})()
.await
.log_err()
}

View File

@ -67,7 +67,7 @@ pub struct ElixirLspAdapter;
#[async_trait]
impl LspAdapter for ElixirLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("elixir-ls".into())
}
@ -301,7 +301,7 @@ pub struct NextLspAdapter;
#[async_trait]
impl LspAdapter for NextLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("next-ls".into())
}
@ -452,7 +452,7 @@ pub struct LocalLspAdapter {
#[async_trait]
impl LspAdapter for LocalLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("local-ls".into())
}

View File

@ -1,6 +1,6 @@
name = "Elixir"
path_suffixes = ["ex", "exs"]
line_comment = "# "
line_comments = ["# "]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },

View File

@ -1,6 +1,6 @@
name = "Elm"
path_suffixes = ["elm"]
line_comment = "-- "
line_comments = ["-- "]
block_comment = ["{- ", " -}"]
brackets = [
{ start = "{", end = "}", close = true, newline = true },

View File

@ -0,0 +1,118 @@
use std::any::Any;
use std::ffi::OsString;
use std::path::PathBuf;
use anyhow::{anyhow, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
use futures::io::BufReader;
use futures::StreamExt;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use smol::fs;
use util::github::{latest_github_release, GitHubLspBinaryVersion};
use util::{async_maybe, ResultExt};
/// Arguments that put the `gleam` binary into language-server mode
/// (`gleam lsp`).
fn server_binary_arguments() -> Vec<OsString> {
    std::iter::once(OsString::from("lsp")).collect()
}
/// LSP adapter for Gleam; the language server ships inside the `gleam` CLI
/// binary and is started via its `lsp` subcommand.
pub struct GleamLspAdapter;
#[async_trait]
impl LspAdapter for GleamLspAdapter {
    /// Name identifying this language server.
    fn name(&self) -> LanguageServerName {
        LanguageServerName("gleam".into())
    }

    /// Abbreviated name for this adapter.
    fn short_name(&self) -> &'static str {
        "gleam"
    }
    /// Queries GitHub for the newest Gleam release and returns its name and
    /// download URL as an opaque boxed value.
    /// NOTE(review): the asset name hardcodes `apple-darwin`, so only macOS
    /// builds resolve here — confirm other platforms are handled elsewhere.
    async fn fetch_latest_server_version(
        &self,
        delegate: &dyn LspAdapterDelegate,
    ) -> Result<Box<dyn 'static + Send + Any>> {
        let release =
            latest_github_release("gleam-lang/gleam", false, delegate.http_client()).await?;
        let asset_name = format!(
            "gleam-{version}-{arch}-apple-darwin.tar.gz",
            version = release.name,
            arch = std::env::consts::ARCH
        );
        let asset = release
            .assets
            .iter()
            .find(|asset| asset.name == asset_name)
            .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
        Ok(Box::new(GitHubLspBinaryVersion {
            name: release.name,
            url: asset.browser_download_url.clone(),
        }))
    }
    /// Downloads and unpacks the release tarball into `container_dir`, unless
    /// a `gleam` binary is already present there.
    async fn fetch_server_binary(
        &self,
        version: Box<dyn 'static + Send + Any>,
        container_dir: PathBuf,
        delegate: &dyn LspAdapterDelegate,
    ) -> Result<LanguageServerBinary> {
        let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
        let binary_path = container_dir.join("gleam");
        // Only download when the binary is missing (metadata lookup fails).
        if fs::metadata(&binary_path).await.is_err() {
            let mut response = delegate
                .http_client()
                .get(&version.url, Default::default(), true)
                .await
                .map_err(|err| anyhow!("error downloading release: {}", err))?;
            // NOTE(review): the HTTP status is never checked here (unlike
            // other adapters) — a failed download may unpack garbage.
            // Stream-decompress the gzipped tarball straight into the
            // container directory.
            let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
            let archive = Archive::new(decompressed_bytes);
            archive.unpack(container_dir).await?;
        }
        Ok(LanguageServerBinary {
            path: binary_path,
            arguments: server_binary_arguments(),
        })
    }
    /// Returns a previously downloaded `gleam` binary from `container_dir`,
    /// if one exists.
    async fn cached_server_binary(
        &self,
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir).await
    }
    /// Like `cached_server_binary`, but with the arguments replaced by
    /// `--version` so the binary can be invoked as an installation check.
    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir)
            .await
            .map(|mut binary| {
                binary.arguments = vec!["--version".into()];
                binary
            })
    }
/// Looks for a previously downloaded `gleam` binary in `container_dir`.
///
/// Scans the directory and uses the last entry yielded; errors (including an
/// empty directory) are logged and collapsed into `None`.
async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> {
    async_maybe!({
        // Remember the last directory entry seen while scanning.
        let mut newest = None;
        let mut dir_entries = fs::read_dir(&container_dir).await?;
        while let Some(entry) = dir_entries.next().await {
            newest = Some(entry?.path());
        }
        let path = newest.ok_or_else(|| anyhow!("no cached binary"))?;
        anyhow::Ok(LanguageServerBinary {
            path,
            arguments: server_binary_arguments(),
        })
    })
    .await
    .log_err()
}

View File

@ -0,0 +1,10 @@
# Language configuration for Gleam (https://gleam.run).
name = "Gleam"
path_suffixes = ["gleam"]
# Both the regular (`// `) and documentation (`/// `) comment prefixes are
# configured as line comments.
line_comments = ["// ", "/// "]
autoclose_before = ";:.,=}])>"
brackets = [
    { start = "{", end = "}", close = true, newline = true },
    { start = "[", end = "]", close = true, newline = true },
    { start = "(", end = ")", close = true, newline = true },
    # Quotes are not auto-paired inside existing strings or comments.
    { start = "\"", end = "\"", close = true, newline = false, not_in = ["string", "comment"] },
]

View File

@ -0,0 +1,130 @@
; Syntax-highlighting queries for the Gleam tree-sitter grammar.

; Comments
(module_comment) @comment
(statement_comment) @comment
(comment) @comment

; Constants
(constant
  name: (identifier) @constant)

; Modules
(module) @module
(import alias: (identifier) @module)
(remote_type_identifier
  module: (identifier) @module)
(remote_constructor_name
  module: (identifier) @module)
((field_access
  record: (identifier) @module
  field: (label) @function)
 (#is-not? local))

; Functions
(unqualified_import (identifier) @function)
(unqualified_import "type" (type_identifier) @type)
(unqualified_import (type_identifier) @constructor)
(function
  name: (identifier) @function)
(external_function
  name: (identifier) @function)
(function_parameter
  name: (identifier) @variable.parameter)
((function_call
   function: (identifier) @function)
 (#is-not? local))
((binary_expression
   operator: "|>"
   right: (identifier) @function)
 (#is-not? local))

; "Properties"
; Assumed to be intended to refer to a name for a field; something that comes
; before ":" or after "."
; e.g. record field names, tuple indices, names for named arguments, etc
(label) @property
(tuple_access
  index: (integer) @property)

; Attributes
(attribute
  "@" @attribute
  name: (identifier) @attribute)
(attribute_value (identifier) @constant)

; Type names
(remote_type_identifier) @type
(type_identifier) @type

; Data constructors
(constructor_name) @constructor

; Literals
(string) @string
((escape_sequence) @warning
 ; Deprecated in v0.33.0-rc2:
 (#eq? @warning "\\e"))
(escape_sequence) @string.escape
(bit_string_segment_option) @function.builtin
(integer) @number
(float) @number

; Reserved identifiers
; TODO: when tree-sitter supports `#any-of?` in the Rust bindings,
; refactor this to use `#any-of?` rather than `#match?`
((identifier) @warning
 (#match? @warning "^(auto|delegate|derive|else|implement|macro|test|echo)$"))

; Variables
(identifier) @variable
(discard) @comment.unused

; Keywords
[
  (visibility_modifier) ; "pub"
  (opacity_modifier) ; "opaque"
  "as"
  "assert"
  "case"
  "const"
  ; DEPRECATED: 'external' was removed in v0.30.
  "external"
  "fn"
  "if"
  "import"
  "let"
  "panic"
  "todo"
  "type"
  "use"
] @keyword

; Operators
(binary_expression
  operator: _ @operator)
(boolean_negation "!" @operator)
(integer_negation "-" @operator)

; Punctuation
[
  "("
  ")"
  "["
  "]"
  "{"
  "}"
  "<<"
  ">>"
] @punctuation.bracket
[
  "."
  ","
  ;; Controversial -- maybe some are operators?
  ":"
  "#"
  "="
  "->"
  ".."
  "-"
  "<-"
] @punctuation.delimiter

View File

@ -0,0 +1,4 @@
; Outline query for Gleam: captures each `function` node as an outline item,
; with its name, plus the optional `pub` modifier and the `fn` keyword as
; surrounding context.
(function
  (visibility_modifier)? @context
  "fn" @context
  name: (_) @name) @item

View File

@ -1,6 +1,6 @@
name = "GLSL"
path_suffixes = ["vert", "frag", "tesc", "tese", "geom", "comp"]
line_comment = "// "
line_comments = ["// "]
block_comment = ["/* ", " */"]
brackets = [
{ start = "{", end = "}", close = true, newline = true },

View File

@ -33,7 +33,7 @@ lazy_static! {
#[async_trait]
impl super::LspAdapter for GoLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("gopls".into())
}

View File

@ -1,6 +1,6 @@
name = "Go"
path_suffixes = ["go"]
line_comment = "// "
line_comments = ["// "]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },

View File

@ -33,7 +33,7 @@ impl HtmlLspAdapter {
#[async_trait]
impl LspAdapter for HtmlLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("vscode-html-language-server".into())
}
@ -91,7 +91,7 @@ impl LspAdapter for HtmlLspAdapter {
get_cached_server_binary(container_dir, &*self.node).await
}
async fn initialization_options(&self) -> Option<serde_json::Value> {
fn initialization_options(&self) -> Option<serde_json::Value> {
Some(json!({
"provideFormatter": true
}))

View File

@ -1,5 +1,5 @@
name = "HTML"
path_suffixes = ["html"]
path_suffixes = ["html", "htm", "shtml"]
autoclose_before = ">})"
block_comment = ["<!-- ", " -->"]
brackets = [

View File

@ -1,7 +1,7 @@
name = "JavaScript"
path_suffixes = ["js", "jsx", "mjs", "cjs"]
first_line_pattern = '^#!.*\bnode\b'
line_comment = "// "
line_comments = ["// "]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
@ -18,7 +18,7 @@ scope_opt_in_language_servers = ["tailwindcss-language-server"]
prettier_parser_name = "babel"
[overrides.element]
line_comment = { remove = true }
line_comments = { remove = true }
block_comment = ["{/* ", " */}"]
[overrides.string]

View File

@ -38,7 +38,7 @@ impl JsonLspAdapter {
#[async_trait]
impl LspAdapter for JsonLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("json-language-server".into())
}
@ -96,7 +96,7 @@ impl LspAdapter for JsonLspAdapter {
get_cached_server_binary(container_dir, &*self.node).await
}
async fn initialization_options(&self) -> Option<serde_json::Value> {
fn initialization_options(&self) -> Option<serde_json::Value> {
Some(json!({
"provideFormatter": true
}))
@ -140,7 +140,7 @@ impl LspAdapter for JsonLspAdapter {
})
}
async fn language_ids(&self) -> HashMap<String, String> {
fn language_ids(&self) -> HashMap<String, String> {
[("JSON".into(), "jsonc".into())].into_iter().collect()
}
}

View File

@ -1,6 +1,6 @@
name = "JSON"
path_suffixes = ["json"]
line_comment = "// "
line_comments = ["// "]
autoclose_before = ",]}"
brackets = [
{ start = "{", end = "}", close = true, newline = true },

View File

@ -18,7 +18,7 @@ pub struct LuaLspAdapter;
#[async_trait]
impl super::LspAdapter for LuaLspAdapter {
async fn name(&self) -> LanguageServerName {
fn name(&self) -> LanguageServerName {
LanguageServerName("lua-language-server".into())
}

View File

@ -1,6 +1,6 @@
name = "Lua"
path_suffixes = ["lua"]
line_comment = "-- "
line_comments = ["-- "]
autoclose_before = ",]}"
brackets = [
{ start = "{", end = "}", close = true, newline = true },

Some files were not shown because too many files have changed in this diff Show More