mirror of
https://github.com/zed-industries/zed.git
synced 2024-11-07 20:39:04 +03:00
Remove 2 suffix for lsp, language, fuzzy
Co-authored-by: Mikayla <mikayla@zed.dev>
This commit is contained in:
parent
c5a1950522
commit
9f99e58834
206
Cargo.lock
generated
206
Cargo.lock
generated
@ -11,7 +11,7 @@ dependencies = [
|
|||||||
"editor",
|
"editor",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"project",
|
"project",
|
||||||
"settings2",
|
"settings2",
|
||||||
"smallvec",
|
"smallvec",
|
||||||
@ -84,7 +84,7 @@ dependencies = [
|
|||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"isahc",
|
"isahc",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"matrixmultiply",
|
"matrixmultiply",
|
||||||
@ -315,7 +315,7 @@ dependencies = [
|
|||||||
"gpui2",
|
"gpui2",
|
||||||
"indoc",
|
"indoc",
|
||||||
"isahc",
|
"isahc",
|
||||||
"language2",
|
"language",
|
||||||
"log",
|
"log",
|
||||||
"menu2",
|
"menu2",
|
||||||
"multi_buffer",
|
"multi_buffer",
|
||||||
@ -1025,7 +1025,7 @@ dependencies = [
|
|||||||
"editor",
|
"editor",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"itertools 0.10.5",
|
"itertools 0.10.5",
|
||||||
"language2",
|
"language",
|
||||||
"outline",
|
"outline",
|
||||||
"project",
|
"project",
|
||||||
"search",
|
"search",
|
||||||
@ -1152,7 +1152,7 @@ dependencies = [
|
|||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"image",
|
"image",
|
||||||
"language2",
|
"language",
|
||||||
"live_kit_client2",
|
"live_kit_client2",
|
||||||
"log",
|
"log",
|
||||||
"media",
|
"media",
|
||||||
@ -1278,7 +1278,7 @@ dependencies = [
|
|||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"image",
|
"image",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
@ -1659,13 +1659,13 @@ dependencies = [
|
|||||||
"gpui2",
|
"gpui2",
|
||||||
"hyper",
|
"hyper",
|
||||||
"indoc",
|
"indoc",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"lipsum",
|
"lipsum",
|
||||||
"live_kit_client2",
|
"live_kit_client2",
|
||||||
"live_kit_server",
|
"live_kit_server",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"nanoid",
|
"nanoid",
|
||||||
"node_runtime",
|
"node_runtime",
|
||||||
"notifications2",
|
"notifications2",
|
||||||
@ -1719,9 +1719,9 @@ dependencies = [
|
|||||||
"feature_flags",
|
"feature_flags",
|
||||||
"feedback",
|
"feedback",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"menu2",
|
"menu2",
|
||||||
@ -1787,10 +1787,10 @@ dependencies = [
|
|||||||
"ctor",
|
"ctor",
|
||||||
"editor",
|
"editor",
|
||||||
"env_logger",
|
"env_logger",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"go_to_line",
|
"go_to_line",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"menu2",
|
"menu2",
|
||||||
"picker",
|
"picker",
|
||||||
"project",
|
"project",
|
||||||
@ -1889,9 +1889,9 @@ dependencies = [
|
|||||||
"fs",
|
"fs",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"node_runtime",
|
"node_runtime",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
"rpc2",
|
"rpc2",
|
||||||
@ -1914,7 +1914,7 @@ dependencies = [
|
|||||||
"fs2",
|
"fs2",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"settings2",
|
"settings2",
|
||||||
"smol",
|
"smol",
|
||||||
"theme2",
|
"theme2",
|
||||||
@ -2438,9 +2438,9 @@ dependencies = [
|
|||||||
"editor",
|
"editor",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"postage",
|
"postage",
|
||||||
"project",
|
"project",
|
||||||
"schemars",
|
"schemars",
|
||||||
@ -2603,15 +2603,15 @@ dependencies = [
|
|||||||
"db2",
|
"db2",
|
||||||
"env_logger",
|
"env_logger",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"git3",
|
"git3",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"indoc",
|
"indoc",
|
||||||
"itertools 0.10.5",
|
"itertools 0.10.5",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"multi_buffer",
|
"multi_buffer",
|
||||||
"ordered-float 2.10.0",
|
"ordered-float 2.10.0",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
@ -2830,7 +2830,7 @@ dependencies = [
|
|||||||
"gpui2",
|
"gpui2",
|
||||||
"human_bytes",
|
"human_bytes",
|
||||||
"isahc",
|
"isahc",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"menu2",
|
"menu2",
|
||||||
@ -2860,9 +2860,9 @@ dependencies = [
|
|||||||
"ctor",
|
"ctor",
|
||||||
"editor",
|
"editor",
|
||||||
"env_logger",
|
"env_logger",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"menu2",
|
"menu2",
|
||||||
"picker",
|
"picker",
|
||||||
"postage",
|
"postage",
|
||||||
@ -3250,7 +3250,7 @@ dependencies = [
|
|||||||
name = "fuzzy"
|
name = "fuzzy"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"gpui",
|
"gpui2",
|
||||||
"util",
|
"util",
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -3263,14 +3263,6 @@ dependencies = [
|
|||||||
"thread_local",
|
"thread_local",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "fuzzy2"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"gpui2",
|
|
||||||
"util",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "generic-array"
|
name = "generic-array"
|
||||||
version = "0.14.7"
|
version = "0.14.7"
|
||||||
@ -4191,61 +4183,6 @@ dependencies = [
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "language"
|
name = "language"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
|
||||||
"anyhow",
|
|
||||||
"async-broadcast",
|
|
||||||
"async-trait",
|
|
||||||
"client",
|
|
||||||
"clock",
|
|
||||||
"collections",
|
|
||||||
"ctor",
|
|
||||||
"env_logger",
|
|
||||||
"fs",
|
|
||||||
"futures 0.3.28",
|
|
||||||
"fuzzy",
|
|
||||||
"git",
|
|
||||||
"globset",
|
|
||||||
"gpui",
|
|
||||||
"indoc",
|
|
||||||
"lazy_static",
|
|
||||||
"log",
|
|
||||||
"lsp",
|
|
||||||
"parking_lot 0.11.2",
|
|
||||||
"postage",
|
|
||||||
"pulldown-cmark",
|
|
||||||
"rand 0.8.5",
|
|
||||||
"regex",
|
|
||||||
"rpc",
|
|
||||||
"schemars",
|
|
||||||
"serde",
|
|
||||||
"serde_derive",
|
|
||||||
"serde_json",
|
|
||||||
"settings",
|
|
||||||
"similar",
|
|
||||||
"smallvec",
|
|
||||||
"smol",
|
|
||||||
"sum_tree",
|
|
||||||
"text",
|
|
||||||
"theme",
|
|
||||||
"tree-sitter",
|
|
||||||
"tree-sitter-elixir",
|
|
||||||
"tree-sitter-embedded-template",
|
|
||||||
"tree-sitter-heex",
|
|
||||||
"tree-sitter-html",
|
|
||||||
"tree-sitter-json 0.20.0",
|
|
||||||
"tree-sitter-markdown",
|
|
||||||
"tree-sitter-python",
|
|
||||||
"tree-sitter-ruby",
|
|
||||||
"tree-sitter-rust",
|
|
||||||
"tree-sitter-typescript",
|
|
||||||
"unicase",
|
|
||||||
"unindent",
|
|
||||||
"util",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "language2"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"async-broadcast",
|
"async-broadcast",
|
||||||
@ -4256,14 +4193,14 @@ dependencies = [
|
|||||||
"ctor",
|
"ctor",
|
||||||
"env_logger",
|
"env_logger",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"git3",
|
"git3",
|
||||||
"globset",
|
"globset",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"indoc",
|
"indoc",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
"postage",
|
"postage",
|
||||||
"pulldown-cmark",
|
"pulldown-cmark",
|
||||||
@ -4303,9 +4240,9 @@ version = "0.1.0"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"editor",
|
"editor",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"picker",
|
"picker",
|
||||||
"project",
|
"project",
|
||||||
"settings2",
|
"settings2",
|
||||||
@ -4326,8 +4263,8 @@ dependencies = [
|
|||||||
"env_logger",
|
"env_logger",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"project",
|
"project",
|
||||||
"serde",
|
"serde",
|
||||||
"settings2",
|
"settings2",
|
||||||
@ -4599,7 +4536,7 @@ dependencies = [
|
|||||||
"ctor",
|
"ctor",
|
||||||
"env_logger",
|
"env_logger",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui",
|
"gpui2",
|
||||||
"log",
|
"log",
|
||||||
"lsp-types",
|
"lsp-types",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
@ -4624,29 +4561,6 @@ dependencies = [
|
|||||||
"url",
|
"url",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "lsp2"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"anyhow",
|
|
||||||
"async-pipe",
|
|
||||||
"collections",
|
|
||||||
"ctor",
|
|
||||||
"env_logger",
|
|
||||||
"futures 0.3.28",
|
|
||||||
"gpui2",
|
|
||||||
"log",
|
|
||||||
"lsp-types",
|
|
||||||
"parking_lot 0.11.2",
|
|
||||||
"postage",
|
|
||||||
"serde",
|
|
||||||
"serde_derive",
|
|
||||||
"serde_json",
|
|
||||||
"smol",
|
|
||||||
"unindent",
|
|
||||||
"util",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mach"
|
name = "mach"
|
||||||
version = "0.3.2"
|
version = "0.3.2"
|
||||||
@ -4954,10 +4868,10 @@ dependencies = [
|
|||||||
"gpui2",
|
"gpui2",
|
||||||
"indoc",
|
"indoc",
|
||||||
"itertools 0.10.5",
|
"itertools 0.10.5",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"ordered-float 2.10.0",
|
"ordered-float 2.10.0",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
"postage",
|
"postage",
|
||||||
@ -5563,9 +5477,9 @@ name = "outline"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"editor",
|
"editor",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"ordered-float 2.10.0",
|
"ordered-float 2.10.0",
|
||||||
"picker",
|
"picker",
|
||||||
"postage",
|
"postage",
|
||||||
@ -5994,9 +5908,9 @@ dependencies = [
|
|||||||
"fs2",
|
"fs2",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"node_runtime",
|
"node_runtime",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
"serde",
|
"serde",
|
||||||
@ -6106,17 +6020,17 @@ dependencies = [
|
|||||||
"fs2",
|
"fs2",
|
||||||
"fsevent",
|
"fsevent",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"git2",
|
"git2",
|
||||||
"git3",
|
"git3",
|
||||||
"globset",
|
"globset",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"ignore",
|
"ignore",
|
||||||
"itertools 0.10.5",
|
"itertools 0.10.5",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"node_runtime",
|
"node_runtime",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
"postage",
|
"postage",
|
||||||
@ -6154,7 +6068,7 @@ dependencies = [
|
|||||||
"editor",
|
"editor",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"menu2",
|
"menu2",
|
||||||
"postage",
|
"postage",
|
||||||
"pretty_assertions",
|
"pretty_assertions",
|
||||||
@ -6180,10 +6094,10 @@ dependencies = [
|
|||||||
"anyhow",
|
"anyhow",
|
||||||
"editor",
|
"editor",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"ordered-float 2.10.0",
|
"ordered-float 2.10.0",
|
||||||
"picker",
|
"picker",
|
||||||
"postage",
|
"postage",
|
||||||
@ -6531,9 +6445,9 @@ version = "0.1.0"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"editor",
|
"editor",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"ordered-float 2.10.0",
|
"ordered-float 2.10.0",
|
||||||
"picker",
|
"picker",
|
||||||
"postage",
|
"postage",
|
||||||
@ -6730,7 +6644,7 @@ dependencies = [
|
|||||||
"collections",
|
"collections",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"pulldown-cmark",
|
"pulldown-cmark",
|
||||||
"smallvec",
|
"smallvec",
|
||||||
@ -7356,7 +7270,7 @@ dependencies = [
|
|||||||
"editor",
|
"editor",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"log",
|
"log",
|
||||||
"menu2",
|
"menu2",
|
||||||
"postage",
|
"postage",
|
||||||
@ -7466,7 +7380,7 @@ dependencies = [
|
|||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"globset",
|
"globset",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"ndarray",
|
"ndarray",
|
||||||
@ -8280,11 +8194,11 @@ dependencies = [
|
|||||||
"clap 4.4.4",
|
"clap 4.4.4",
|
||||||
"dialoguer",
|
"dialoguer",
|
||||||
"editor",
|
"editor",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"indoc",
|
"indoc",
|
||||||
"itertools 0.11.0",
|
"itertools 0.11.0",
|
||||||
"language2",
|
"language",
|
||||||
"log",
|
"log",
|
||||||
"menu2",
|
"menu2",
|
||||||
"picker",
|
"picker",
|
||||||
@ -8646,7 +8560,7 @@ dependencies = [
|
|||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"itertools 0.10.5",
|
"itertools 0.10.5",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"libc",
|
"libc",
|
||||||
"mio-extras",
|
"mio-extras",
|
||||||
@ -8791,7 +8705,7 @@ dependencies = [
|
|||||||
"editor",
|
"editor",
|
||||||
"feature_flags",
|
"feature_flags",
|
||||||
"fs2",
|
"fs2",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"log",
|
"log",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
@ -9861,7 +9775,7 @@ version = "0.1.0"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"fs2",
|
"fs2",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"picker",
|
"picker",
|
||||||
"ui2",
|
"ui2",
|
||||||
@ -9890,9 +9804,9 @@ dependencies = [
|
|||||||
"gpui2",
|
"gpui2",
|
||||||
"indoc",
|
"indoc",
|
||||||
"itertools 0.10.5",
|
"itertools 0.10.5",
|
||||||
"language2",
|
"language",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"nvim-rs",
|
"nvim-rs",
|
||||||
"parking_lot 0.11.2",
|
"parking_lot 0.11.2",
|
||||||
"project",
|
"project",
|
||||||
@ -10305,7 +10219,7 @@ dependencies = [
|
|||||||
"db2",
|
"db2",
|
||||||
"editor",
|
"editor",
|
||||||
"fs2",
|
"fs2",
|
||||||
"fuzzy2",
|
"fuzzy",
|
||||||
"gpui2",
|
"gpui2",
|
||||||
"install_cli",
|
"install_cli",
|
||||||
"log",
|
"log",
|
||||||
@ -10579,7 +10493,7 @@ dependencies = [
|
|||||||
"indoc",
|
"indoc",
|
||||||
"install_cli",
|
"install_cli",
|
||||||
"itertools 0.10.5",
|
"itertools 0.10.5",
|
||||||
"language2",
|
"language",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"node_runtime",
|
"node_runtime",
|
||||||
@ -10722,13 +10636,13 @@ dependencies = [
|
|||||||
"install_cli",
|
"install_cli",
|
||||||
"isahc",
|
"isahc",
|
||||||
"journal",
|
"journal",
|
||||||
"language2",
|
"language",
|
||||||
"language_selector",
|
"language_selector",
|
||||||
"language_tools",
|
"language_tools",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"libc",
|
"libc",
|
||||||
"log",
|
"log",
|
||||||
"lsp2",
|
"lsp",
|
||||||
"menu2",
|
"menu2",
|
||||||
"node_runtime",
|
"node_runtime",
|
||||||
"notifications2",
|
"notifications2",
|
||||||
|
@ -38,7 +38,6 @@ members = [
|
|||||||
"crates/fs2",
|
"crates/fs2",
|
||||||
"crates/fsevent",
|
"crates/fsevent",
|
||||||
"crates/fuzzy",
|
"crates/fuzzy",
|
||||||
"crates/fuzzy2",
|
|
||||||
"crates/git",
|
"crates/git",
|
||||||
"crates/go_to_line",
|
"crates/go_to_line",
|
||||||
"crates/gpui",
|
"crates/gpui",
|
||||||
@ -49,13 +48,11 @@ members = [
|
|||||||
"crates/journal",
|
"crates/journal",
|
||||||
"crates/journal",
|
"crates/journal",
|
||||||
"crates/language",
|
"crates/language",
|
||||||
"crates/language2",
|
|
||||||
"crates/language_selector",
|
"crates/language_selector",
|
||||||
"crates/language_tools",
|
"crates/language_tools",
|
||||||
"crates/live_kit_client",
|
"crates/live_kit_client",
|
||||||
"crates/live_kit_server",
|
"crates/live_kit_server",
|
||||||
"crates/lsp",
|
"crates/lsp",
|
||||||
"crates/lsp2",
|
|
||||||
"crates/media",
|
"crates/media",
|
||||||
"crates/menu",
|
"crates/menu",
|
||||||
"crates/menu2",
|
"crates/menu2",
|
||||||
|
@ -11,7 +11,7 @@ doctest = false
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
auto_update = { path = "../auto_update" }
|
auto_update = { path = "../auto_update" }
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
language = { path = "../language2", package = "language2" }
|
language = { path = "../language" }
|
||||||
gpui = { path = "../gpui2", package = "gpui2" }
|
gpui = { path = "../gpui2", package = "gpui2" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
settings = { path = "../settings2", package = "settings2" }
|
settings = { path = "../settings2", package = "settings2" }
|
||||||
|
@ -14,7 +14,7 @@ test-support = []
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
async-trait.workspace = true
|
async-trait.workspace = true
|
||||||
anyhow.workspace = true
|
anyhow.workspace = true
|
||||||
futures.workspace = true
|
futures.workspace = true
|
||||||
|
@ -15,7 +15,7 @@ collections = { path = "../collections"}
|
|||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
fs = { package = "fs2", path = "../fs2" }
|
fs = { package = "fs2", path = "../fs2" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
menu = { package = "menu2", path = "../menu2" }
|
menu = { package = "menu2", path = "../menu2" }
|
||||||
multi_buffer = { path = "../multi_buffer" }
|
multi_buffer = { path = "../multi_buffer" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
|
@ -13,7 +13,7 @@ collections = { path = "../collections" }
|
|||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
ui = { package = "ui2", path = "../ui2" }
|
ui = { package = "ui2", path = "../ui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
search = { path = "../search" }
|
search = { path = "../search" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
|
@ -26,7 +26,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
|
|||||||
log.workspace = true
|
log.workspace = true
|
||||||
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2" }
|
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2" }
|
||||||
fs = { package = "fs2", path = "../fs2" }
|
fs = { package = "fs2", path = "../fs2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
media = { path = "../media" }
|
media = { path = "../media" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
@ -46,7 +46,7 @@ smallvec.workspace = true
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
client = { package = "client2", path = "../client2", features = ["test-support"] }
|
client = { package = "client2", path = "../client2", features = ["test-support"] }
|
||||||
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
|
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
collections = { path = "../collections", features = ["test-support"] }
|
collections = { path = "../collections", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
|
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
|
||||||
|
@ -19,7 +19,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
|
|||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
rpc = { package = "rpc2", path = "../rpc2" }
|
rpc = { package = "rpc2", path = "../rpc2" }
|
||||||
text = { package = "text2", path = "../text2" }
|
text = { package = "text2", path = "../text2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
feature_flags = { path = "../feature_flags" }
|
feature_flags = { path = "../feature_flags" }
|
||||||
sum_tree = { path = "../sum_tree" }
|
sum_tree = { path = "../sum_tree" }
|
||||||
|
@ -67,11 +67,11 @@ call = { package = "call2", path = "../call2", features = ["test-support"] }
|
|||||||
client = { package = "client2", path = "../client2", features = ["test-support"] }
|
client = { package = "client2", path = "../client2", features = ["test-support"] }
|
||||||
channel = { package = "channel2", path = "../channel2" }
|
channel = { package = "channel2", path = "../channel2" }
|
||||||
editor = { path = "../editor", features = ["test-support"] }
|
editor = { path = "../editor", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
|
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
|
||||||
git = { package = "git3", path = "../git3", features = ["test-support"] }
|
git = { package = "git3", path = "../git3", features = ["test-support"] }
|
||||||
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
|
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
node_runtime = { path = "../node_runtime" }
|
node_runtime = { path = "../node_runtime" }
|
||||||
notifications = { package = "notifications2", path = "../notifications2", features = ["test-support"] }
|
notifications = { package = "notifications2", path = "../notifications2", features = ["test-support"] }
|
||||||
|
|
||||||
|
@ -33,9 +33,9 @@ collections = { path = "../collections" }
|
|||||||
# drag_and_drop = { path = "../drag_and_drop" }
|
# drag_and_drop = { path = "../drag_and_drop" }
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
feedback = { path = "../feedback" }
|
feedback = { path = "../feedback" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
menu = { package = "menu2", path = "../menu2" }
|
menu = { package = "menu2", path = "../menu2" }
|
||||||
notifications = { package = "notifications2", path = "../notifications2" }
|
notifications = { package = "notifications2", path = "../notifications2" }
|
||||||
rich_text = { path = "../rich_text" }
|
rich_text = { path = "../rich_text" }
|
||||||
|
@ -11,7 +11,7 @@ doctest = false
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
picker = { path = "../picker" }
|
picker = { path = "../picker" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
@ -27,7 +27,7 @@ serde.workspace = true
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
editor = { path = "../editor", features = ["test-support"] }
|
editor = { path = "../editor", features = ["test-support"] }
|
||||||
language = { package="language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
project = { path = "../project", features = ["test-support"] }
|
project = { path = "../project", features = ["test-support"] }
|
||||||
menu = { package = "menu2", path = "../menu2" }
|
menu = { package = "menu2", path = "../menu2" }
|
||||||
go_to_line = { path = "../go_to_line" }
|
go_to_line = { path = "../go_to_line" }
|
||||||
|
@ -22,10 +22,10 @@ test-support = [
|
|||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
# context_menu = { path = "../context_menu" }
|
# context_menu = { path = "../context_menu" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
theme = { package = "theme2", path = "../theme2" }
|
theme = { package = "theme2", path = "../theme2" }
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
lsp = { path = "../lsp" }
|
||||||
node_runtime = { path = "../node_runtime"}
|
node_runtime = { path = "../node_runtime"}
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
ui = { package = "ui2", path = "../ui2" }
|
ui = { package = "ui2", path = "../ui2" }
|
||||||
@ -44,8 +44,8 @@ clock = { path = "../clock" }
|
|||||||
collections = { path = "../collections", features = ["test-support"] }
|
collections = { path = "../collections", features = ["test-support"] }
|
||||||
fs = { path = "../fs", features = ["test-support"] }
|
fs = { path = "../fs", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
|
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
|
||||||
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
|
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
|
||||||
util = { path = "../util", features = ["test-support"] }
|
util = { path = "../util", features = ["test-support"] }
|
||||||
|
@ -14,7 +14,7 @@ editor = { path = "../editor" }
|
|||||||
fs = { package = "fs2", path = "../fs2" }
|
fs = { package = "fs2", path = "../fs2" }
|
||||||
zed_actions = { path = "../zed_actions"}
|
zed_actions = { path = "../zed_actions"}
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
theme = { package = "theme2", path = "../theme2" }
|
theme = { package = "theme2", path = "../theme2" }
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
|
@ -13,8 +13,8 @@ collections = { path = "../collections" }
|
|||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
ui = { package = "ui2", path = "../ui2" }
|
ui = { package = "ui2", path = "../ui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
lsp = { path = "../lsp" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
theme = { package = "theme2", path = "../theme2" }
|
theme = { package = "theme2", path = "../theme2" }
|
||||||
@ -33,8 +33,8 @@ postage.workspace = true
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
client = { package = "client2", path = "../client2", features = ["test-support"] }
|
client = { package = "client2", path = "../client2", features = ["test-support"] }
|
||||||
editor = { path = "../editor", features = ["test-support"] }
|
editor = { path = "../editor", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
workspace = {path = "../workspace", features = ["test-support"] }
|
workspace = {path = "../workspace", features = ["test-support"] }
|
||||||
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
|
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
|
||||||
|
@ -29,11 +29,11 @@ copilot = { path = "../copilot" }
|
|||||||
db = { package="db2", path = "../db2" }
|
db = { package="db2", path = "../db2" }
|
||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
# context_menu = { path = "../context_menu" }
|
# context_menu = { path = "../context_menu" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
git = { package = "git3", path = "../git3" }
|
git = { package = "git3", path = "../git3" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
lsp = { path = "../lsp" }
|
||||||
multi_buffer = { path = "../multi_buffer" }
|
multi_buffer = { path = "../multi_buffer" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
rpc = { package = "rpc2", path = "../rpc2" }
|
rpc = { package = "rpc2", path = "../rpc2" }
|
||||||
@ -74,8 +74,8 @@ tree-sitter-typescript = { workspace = true, optional = true }
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
copilot = { path = "../copilot", features = ["test-support"] }
|
copilot = { path = "../copilot", features = ["test-support"] }
|
||||||
text = { package="text2", path = "../text2", features = ["test-support"] }
|
text = { package="text2", path = "../text2", features = ["test-support"] }
|
||||||
language = { package="language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
util = { path = "../util", features = ["test-support"] }
|
util = { path = "../util", features = ["test-support"] }
|
||||||
project = { path = "../project", features = ["test-support"] }
|
project = { path = "../project", features = ["test-support"] }
|
||||||
|
@ -15,7 +15,7 @@ client = { package = "client2", path = "../client2" }
|
|||||||
db = { package = "db2", path = "../db2" }
|
db = { package = "db2", path = "../db2" }
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
menu = { package = "menu2", path = "../menu2" }
|
menu = { package = "menu2", path = "../menu2" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
search = { path = "../search" }
|
search = { path = "../search" }
|
||||||
|
@ -11,7 +11,7 @@ doctest = false
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
menu = { package = "menu2", path = "../menu2" }
|
menu = { package = "menu2", path = "../menu2" }
|
||||||
picker = { path = "../picker" }
|
picker = { path = "../picker" }
|
||||||
@ -28,7 +28,7 @@ serde.workspace = true
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
editor = { path = "../editor", features = ["test-support"] }
|
editor = { path = "../editor", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
workspace = { path = "../workspace", features = ["test-support"] }
|
workspace = { path = "../workspace", features = ["test-support"] }
|
||||||
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
|
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
|
||||||
|
|
||||||
|
@ -9,5 +9,5 @@ path = "src/fuzzy.rs"
|
|||||||
doctest = false
|
doctest = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
gpui = { path = "../gpui" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
use gpui::BackgroundExecutor;
|
||||||
use std::{
|
use std::{
|
||||||
borrow::Cow,
|
borrow::Cow,
|
||||||
cmp::{self, Ordering},
|
cmp::{self, Ordering},
|
||||||
@ -5,8 +6,6 @@ use std::{
|
|||||||
sync::{atomic::AtomicBool, Arc},
|
sync::{atomic::AtomicBool, Arc},
|
||||||
};
|
};
|
||||||
|
|
||||||
use gpui::executor;
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
matcher::{Match, MatchCandidate, Matcher},
|
matcher::{Match, MatchCandidate, Matcher},
|
||||||
CharBag,
|
CharBag,
|
||||||
@ -135,7 +134,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
|
|||||||
smart_case: bool,
|
smart_case: bool,
|
||||||
max_results: usize,
|
max_results: usize,
|
||||||
cancel_flag: &AtomicBool,
|
cancel_flag: &AtomicBool,
|
||||||
background: Arc<executor::Background>,
|
executor: BackgroundExecutor,
|
||||||
) -> Vec<PathMatch> {
|
) -> Vec<PathMatch> {
|
||||||
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
|
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
|
||||||
if path_count == 0 {
|
if path_count == 0 {
|
||||||
@ -149,13 +148,13 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
|
|||||||
let query = &query;
|
let query = &query;
|
||||||
let query_char_bag = CharBag::from(&lowercase_query[..]);
|
let query_char_bag = CharBag::from(&lowercase_query[..]);
|
||||||
|
|
||||||
let num_cpus = background.num_cpus().min(path_count);
|
let num_cpus = executor.num_cpus().min(path_count);
|
||||||
let segment_size = (path_count + num_cpus - 1) / num_cpus;
|
let segment_size = (path_count + num_cpus - 1) / num_cpus;
|
||||||
let mut segment_results = (0..num_cpus)
|
let mut segment_results = (0..num_cpus)
|
||||||
.map(|_| Vec::with_capacity(max_results))
|
.map(|_| Vec::with_capacity(max_results))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
background
|
executor
|
||||||
.scoped(|scope| {
|
.scoped(|scope| {
|
||||||
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
|
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
|
||||||
let relative_to = relative_to.clone();
|
let relative_to = relative_to.clone();
|
||||||
|
@ -1,15 +1,15 @@
|
|||||||
use std::{
|
|
||||||
borrow::Cow,
|
|
||||||
cmp::{self, Ordering},
|
|
||||||
sync::{atomic::AtomicBool, Arc},
|
|
||||||
};
|
|
||||||
|
|
||||||
use gpui::executor;
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
matcher::{Match, MatchCandidate, Matcher},
|
matcher::{Match, MatchCandidate, Matcher},
|
||||||
CharBag,
|
CharBag,
|
||||||
};
|
};
|
||||||
|
use gpui::BackgroundExecutor;
|
||||||
|
use std::{
|
||||||
|
borrow::Cow,
|
||||||
|
cmp::{self, Ordering},
|
||||||
|
iter,
|
||||||
|
ops::Range,
|
||||||
|
sync::atomic::AtomicBool,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct StringMatchCandidate {
|
pub struct StringMatchCandidate {
|
||||||
@ -56,6 +56,32 @@ pub struct StringMatch {
|
|||||||
pub string: String,
|
pub string: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl StringMatch {
|
||||||
|
pub fn ranges<'a>(&'a self) -> impl 'a + Iterator<Item = Range<usize>> {
|
||||||
|
let mut positions = self.positions.iter().peekable();
|
||||||
|
iter::from_fn(move || {
|
||||||
|
while let Some(start) = positions.next().copied() {
|
||||||
|
let mut end = start + self.char_len_at_index(start);
|
||||||
|
while let Some(next_start) = positions.peek() {
|
||||||
|
if end == **next_start {
|
||||||
|
end += self.char_len_at_index(end);
|
||||||
|
positions.next();
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Some(start..end);
|
||||||
|
}
|
||||||
|
None
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn char_len_at_index(&self, ix: usize) -> usize {
|
||||||
|
self.string[ix..].chars().next().unwrap().len_utf8()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl PartialEq for StringMatch {
|
impl PartialEq for StringMatch {
|
||||||
fn eq(&self, other: &Self) -> bool {
|
fn eq(&self, other: &Self) -> bool {
|
||||||
self.cmp(other).is_eq()
|
self.cmp(other).is_eq()
|
||||||
@ -85,7 +111,7 @@ pub async fn match_strings(
|
|||||||
smart_case: bool,
|
smart_case: bool,
|
||||||
max_results: usize,
|
max_results: usize,
|
||||||
cancel_flag: &AtomicBool,
|
cancel_flag: &AtomicBool,
|
||||||
background: Arc<executor::Background>,
|
executor: BackgroundExecutor,
|
||||||
) -> Vec<StringMatch> {
|
) -> Vec<StringMatch> {
|
||||||
if candidates.is_empty() || max_results == 0 {
|
if candidates.is_empty() || max_results == 0 {
|
||||||
return Default::default();
|
return Default::default();
|
||||||
@ -110,13 +136,13 @@ pub async fn match_strings(
|
|||||||
let query = &query;
|
let query = &query;
|
||||||
let query_char_bag = CharBag::from(&lowercase_query[..]);
|
let query_char_bag = CharBag::from(&lowercase_query[..]);
|
||||||
|
|
||||||
let num_cpus = background.num_cpus().min(candidates.len());
|
let num_cpus = executor.num_cpus().min(candidates.len());
|
||||||
let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
|
let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
|
||||||
let mut segment_results = (0..num_cpus)
|
let mut segment_results = (0..num_cpus)
|
||||||
.map(|_| Vec::with_capacity(max_results.min(candidates.len())))
|
.map(|_| Vec::with_capacity(max_results.min(candidates.len())))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
background
|
executor
|
||||||
.scoped(|scope| {
|
.scoped(|scope| {
|
||||||
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
|
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
|
||||||
let cancel_flag = &cancel_flag;
|
let cancel_flag = &cancel_flag;
|
||||||
|
@ -1,13 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "fuzzy2"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
publish = false
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
path = "src/fuzzy2.rs"
|
|
||||||
doctest = false
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
|
||||||
util = { path = "../util" }
|
|
@ -1,63 +0,0 @@
|
|||||||
use std::iter::FromIterator;
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
|
|
||||||
pub struct CharBag(u64);
|
|
||||||
|
|
||||||
impl CharBag {
|
|
||||||
pub fn is_superset(self, other: CharBag) -> bool {
|
|
||||||
self.0 & other.0 == other.0
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert(&mut self, c: char) {
|
|
||||||
let c = c.to_ascii_lowercase();
|
|
||||||
if ('a'..='z').contains(&c) {
|
|
||||||
let mut count = self.0;
|
|
||||||
let idx = c as u8 - b'a';
|
|
||||||
count >>= idx * 2;
|
|
||||||
count = ((count << 1) | 1) & 3;
|
|
||||||
count <<= idx * 2;
|
|
||||||
self.0 |= count;
|
|
||||||
} else if ('0'..='9').contains(&c) {
|
|
||||||
let idx = c as u8 - b'0';
|
|
||||||
self.0 |= 1 << (idx + 52);
|
|
||||||
} else if c == '-' {
|
|
||||||
self.0 |= 1 << 62;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Extend<char> for CharBag {
|
|
||||||
fn extend<T: IntoIterator<Item = char>>(&mut self, iter: T) {
|
|
||||||
for c in iter {
|
|
||||||
self.insert(c);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromIterator<char> for CharBag {
|
|
||||||
fn from_iter<T: IntoIterator<Item = char>>(iter: T) -> Self {
|
|
||||||
let mut result = Self::default();
|
|
||||||
result.extend(iter);
|
|
||||||
result
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&str> for CharBag {
|
|
||||||
fn from(s: &str) -> Self {
|
|
||||||
let mut bag = Self(0);
|
|
||||||
for c in s.chars() {
|
|
||||||
bag.insert(c);
|
|
||||||
}
|
|
||||||
bag
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&[char]> for CharBag {
|
|
||||||
fn from(chars: &[char]) -> Self {
|
|
||||||
let mut bag = Self(0);
|
|
||||||
for c in chars {
|
|
||||||
bag.insert(*c);
|
|
||||||
}
|
|
||||||
bag
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,10 +0,0 @@
|
|||||||
mod char_bag;
|
|
||||||
mod matcher;
|
|
||||||
mod paths;
|
|
||||||
mod strings;
|
|
||||||
|
|
||||||
pub use char_bag::CharBag;
|
|
||||||
pub use paths::{
|
|
||||||
match_fixed_path_set, match_path_sets, PathMatch, PathMatchCandidate, PathMatchCandidateSet,
|
|
||||||
};
|
|
||||||
pub use strings::{match_strings, StringMatch, StringMatchCandidate};
|
|
@ -1,464 +0,0 @@
|
|||||||
use std::{
|
|
||||||
borrow::Cow,
|
|
||||||
sync::atomic::{self, AtomicBool},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::CharBag;
|
|
||||||
|
|
||||||
const BASE_DISTANCE_PENALTY: f64 = 0.6;
|
|
||||||
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
|
|
||||||
const MIN_DISTANCE_PENALTY: f64 = 0.2;
|
|
||||||
|
|
||||||
pub struct Matcher<'a> {
|
|
||||||
query: &'a [char],
|
|
||||||
lowercase_query: &'a [char],
|
|
||||||
query_char_bag: CharBag,
|
|
||||||
smart_case: bool,
|
|
||||||
max_results: usize,
|
|
||||||
min_score: f64,
|
|
||||||
match_positions: Vec<usize>,
|
|
||||||
last_positions: Vec<usize>,
|
|
||||||
score_matrix: Vec<Option<f64>>,
|
|
||||||
best_position_matrix: Vec<usize>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait Match: Ord {
|
|
||||||
fn score(&self) -> f64;
|
|
||||||
fn set_positions(&mut self, positions: Vec<usize>);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait MatchCandidate {
|
|
||||||
fn has_chars(&self, bag: CharBag) -> bool;
|
|
||||||
fn to_string(&self) -> Cow<'_, str>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Matcher<'a> {
|
|
||||||
pub fn new(
|
|
||||||
query: &'a [char],
|
|
||||||
lowercase_query: &'a [char],
|
|
||||||
query_char_bag: CharBag,
|
|
||||||
smart_case: bool,
|
|
||||||
max_results: usize,
|
|
||||||
) -> Self {
|
|
||||||
Self {
|
|
||||||
query,
|
|
||||||
lowercase_query,
|
|
||||||
query_char_bag,
|
|
||||||
min_score: 0.0,
|
|
||||||
last_positions: vec![0; query.len()],
|
|
||||||
match_positions: vec![0; query.len()],
|
|
||||||
score_matrix: Vec::new(),
|
|
||||||
best_position_matrix: Vec::new(),
|
|
||||||
smart_case,
|
|
||||||
max_results,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn match_candidates<C: MatchCandidate, R, F>(
|
|
||||||
&mut self,
|
|
||||||
prefix: &[char],
|
|
||||||
lowercase_prefix: &[char],
|
|
||||||
candidates: impl Iterator<Item = C>,
|
|
||||||
results: &mut Vec<R>,
|
|
||||||
cancel_flag: &AtomicBool,
|
|
||||||
build_match: F,
|
|
||||||
) where
|
|
||||||
R: Match,
|
|
||||||
F: Fn(&C, f64) -> R,
|
|
||||||
{
|
|
||||||
let mut candidate_chars = Vec::new();
|
|
||||||
let mut lowercase_candidate_chars = Vec::new();
|
|
||||||
|
|
||||||
for candidate in candidates {
|
|
||||||
if !candidate.has_chars(self.query_char_bag) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if cancel_flag.load(atomic::Ordering::Relaxed) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
candidate_chars.clear();
|
|
||||||
lowercase_candidate_chars.clear();
|
|
||||||
for c in candidate.to_string().chars() {
|
|
||||||
candidate_chars.push(c);
|
|
||||||
lowercase_candidate_chars.push(c.to_ascii_lowercase());
|
|
||||||
}
|
|
||||||
|
|
||||||
if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len());
|
|
||||||
self.score_matrix.clear();
|
|
||||||
self.score_matrix.resize(matrix_len, None);
|
|
||||||
self.best_position_matrix.clear();
|
|
||||||
self.best_position_matrix.resize(matrix_len, 0);
|
|
||||||
|
|
||||||
let score = self.score_match(
|
|
||||||
&candidate_chars,
|
|
||||||
&lowercase_candidate_chars,
|
|
||||||
prefix,
|
|
||||||
lowercase_prefix,
|
|
||||||
);
|
|
||||||
|
|
||||||
if score > 0.0 {
|
|
||||||
let mut mat = build_match(&candidate, score);
|
|
||||||
if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) {
|
|
||||||
if results.len() < self.max_results {
|
|
||||||
mat.set_positions(self.match_positions.clone());
|
|
||||||
results.insert(i, mat);
|
|
||||||
} else if i < results.len() {
|
|
||||||
results.pop();
|
|
||||||
mat.set_positions(self.match_positions.clone());
|
|
||||||
results.insert(i, mat);
|
|
||||||
}
|
|
||||||
if results.len() == self.max_results {
|
|
||||||
self.min_score = results.last().unwrap().score();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn find_last_positions(
|
|
||||||
&mut self,
|
|
||||||
lowercase_prefix: &[char],
|
|
||||||
lowercase_candidate: &[char],
|
|
||||||
) -> bool {
|
|
||||||
let mut lowercase_prefix = lowercase_prefix.iter();
|
|
||||||
let mut lowercase_candidate = lowercase_candidate.iter();
|
|
||||||
for (i, char) in self.lowercase_query.iter().enumerate().rev() {
|
|
||||||
if let Some(j) = lowercase_candidate.rposition(|c| c == char) {
|
|
||||||
self.last_positions[i] = j + lowercase_prefix.len();
|
|
||||||
} else if let Some(j) = lowercase_prefix.rposition(|c| c == char) {
|
|
||||||
self.last_positions[i] = j;
|
|
||||||
} else {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn score_match(
|
|
||||||
&mut self,
|
|
||||||
path: &[char],
|
|
||||||
path_cased: &[char],
|
|
||||||
prefix: &[char],
|
|
||||||
lowercase_prefix: &[char],
|
|
||||||
) -> f64 {
|
|
||||||
let score = self.recursive_score_match(
|
|
||||||
path,
|
|
||||||
path_cased,
|
|
||||||
prefix,
|
|
||||||
lowercase_prefix,
|
|
||||||
0,
|
|
||||||
0,
|
|
||||||
self.query.len() as f64,
|
|
||||||
) * self.query.len() as f64;
|
|
||||||
|
|
||||||
if score <= 0.0 {
|
|
||||||
return 0.0;
|
|
||||||
}
|
|
||||||
|
|
||||||
let path_len = prefix.len() + path.len();
|
|
||||||
let mut cur_start = 0;
|
|
||||||
let mut byte_ix = 0;
|
|
||||||
let mut char_ix = 0;
|
|
||||||
for i in 0..self.query.len() {
|
|
||||||
let match_char_ix = self.best_position_matrix[i * path_len + cur_start];
|
|
||||||
while char_ix < match_char_ix {
|
|
||||||
let ch = prefix
|
|
||||||
.get(char_ix)
|
|
||||||
.or_else(|| path.get(char_ix - prefix.len()))
|
|
||||||
.unwrap();
|
|
||||||
byte_ix += ch.len_utf8();
|
|
||||||
char_ix += 1;
|
|
||||||
}
|
|
||||||
cur_start = match_char_ix + 1;
|
|
||||||
self.match_positions[i] = byte_ix;
|
|
||||||
}
|
|
||||||
|
|
||||||
score
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
fn recursive_score_match(
|
|
||||||
&mut self,
|
|
||||||
path: &[char],
|
|
||||||
path_cased: &[char],
|
|
||||||
prefix: &[char],
|
|
||||||
lowercase_prefix: &[char],
|
|
||||||
query_idx: usize,
|
|
||||||
path_idx: usize,
|
|
||||||
cur_score: f64,
|
|
||||||
) -> f64 {
|
|
||||||
if query_idx == self.query.len() {
|
|
||||||
return 1.0;
|
|
||||||
}
|
|
||||||
|
|
||||||
let path_len = prefix.len() + path.len();
|
|
||||||
|
|
||||||
if let Some(memoized) = self.score_matrix[query_idx * path_len + path_idx] {
|
|
||||||
return memoized;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut score = 0.0;
|
|
||||||
let mut best_position = 0;
|
|
||||||
|
|
||||||
let query_char = self.lowercase_query[query_idx];
|
|
||||||
let limit = self.last_positions[query_idx];
|
|
||||||
|
|
||||||
let mut last_slash = 0;
|
|
||||||
for j in path_idx..=limit {
|
|
||||||
let path_char = if j < prefix.len() {
|
|
||||||
lowercase_prefix[j]
|
|
||||||
} else {
|
|
||||||
path_cased[j - prefix.len()]
|
|
||||||
};
|
|
||||||
let is_path_sep = path_char == '/' || path_char == '\\';
|
|
||||||
|
|
||||||
if query_idx == 0 && is_path_sep {
|
|
||||||
last_slash = j;
|
|
||||||
}
|
|
||||||
|
|
||||||
if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
|
|
||||||
let curr = if j < prefix.len() {
|
|
||||||
prefix[j]
|
|
||||||
} else {
|
|
||||||
path[j - prefix.len()]
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut char_score = 1.0;
|
|
||||||
if j > path_idx {
|
|
||||||
let last = if j - 1 < prefix.len() {
|
|
||||||
prefix[j - 1]
|
|
||||||
} else {
|
|
||||||
path[j - 1 - prefix.len()]
|
|
||||||
};
|
|
||||||
|
|
||||||
if last == '/' {
|
|
||||||
char_score = 0.9;
|
|
||||||
} else if (last == '-' || last == '_' || last == ' ' || last.is_numeric())
|
|
||||||
|| (last.is_lowercase() && curr.is_uppercase())
|
|
||||||
{
|
|
||||||
char_score = 0.8;
|
|
||||||
} else if last == '.' {
|
|
||||||
char_score = 0.7;
|
|
||||||
} else if query_idx == 0 {
|
|
||||||
char_score = BASE_DISTANCE_PENALTY;
|
|
||||||
} else {
|
|
||||||
char_score = MIN_DISTANCE_PENALTY.max(
|
|
||||||
BASE_DISTANCE_PENALTY
|
|
||||||
- (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply a severe penalty if the case doesn't match.
|
|
||||||
// This will make the exact matches have higher score than the case-insensitive and the
|
|
||||||
// path insensitive matches.
|
|
||||||
if (self.smart_case || curr == '/') && self.query[query_idx] != curr {
|
|
||||||
char_score *= 0.001;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut multiplier = char_score;
|
|
||||||
|
|
||||||
// Scale the score based on how deep within the path we found the match.
|
|
||||||
if query_idx == 0 {
|
|
||||||
multiplier /= ((prefix.len() + path.len()) - last_slash) as f64;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut next_score = 1.0;
|
|
||||||
if self.min_score > 0.0 {
|
|
||||||
next_score = cur_score * multiplier;
|
|
||||||
// Scores only decrease. If we can't pass the previous best, bail
|
|
||||||
if next_score < self.min_score {
|
|
||||||
// Ensure that score is non-zero so we use it in the memo table.
|
|
||||||
if score == 0.0 {
|
|
||||||
score = 1e-18;
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let new_score = self.recursive_score_match(
|
|
||||||
path,
|
|
||||||
path_cased,
|
|
||||||
prefix,
|
|
||||||
lowercase_prefix,
|
|
||||||
query_idx + 1,
|
|
||||||
j + 1,
|
|
||||||
next_score,
|
|
||||||
) * multiplier;
|
|
||||||
|
|
||||||
if new_score > score {
|
|
||||||
score = new_score;
|
|
||||||
best_position = j;
|
|
||||||
// Optimization: can't score better than 1.
|
|
||||||
if new_score == 1.0 {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if best_position != 0 {
|
|
||||||
self.best_position_matrix[query_idx * path_len + path_idx] = best_position;
|
|
||||||
}
|
|
||||||
|
|
||||||
self.score_matrix[query_idx * path_len + path_idx] = Some(score);
|
|
||||||
score
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use crate::{PathMatch, PathMatchCandidate};
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
use std::{
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
sync::Arc,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_get_last_positions() {
|
|
||||||
let mut query: &[char] = &['d', 'c'];
|
|
||||||
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
|
|
||||||
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
|
|
||||||
assert!(!result);
|
|
||||||
|
|
||||||
query = &['c', 'd'];
|
|
||||||
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
|
|
||||||
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
|
|
||||||
assert!(result);
|
|
||||||
assert_eq!(matcher.last_positions, vec![2, 4]);
|
|
||||||
|
|
||||||
query = &['z', '/', 'z', 'f'];
|
|
||||||
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
|
|
||||||
let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']);
|
|
||||||
assert!(result);
|
|
||||||
assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_match_path_entries() {
|
|
||||||
let paths = vec![
|
|
||||||
"",
|
|
||||||
"a",
|
|
||||||
"ab",
|
|
||||||
"abC",
|
|
||||||
"abcd",
|
|
||||||
"alphabravocharlie",
|
|
||||||
"AlphaBravoCharlie",
|
|
||||||
"thisisatestdir",
|
|
||||||
"/////ThisIsATestDir",
|
|
||||||
"/this/is/a/test/dir",
|
|
||||||
"/test/tiatd",
|
|
||||||
];
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
match_single_path_query("abc", false, &paths),
|
|
||||||
vec![
|
|
||||||
("abC", vec![0, 1, 2]),
|
|
||||||
("abcd", vec![0, 1, 2]),
|
|
||||||
("AlphaBravoCharlie", vec![0, 5, 10]),
|
|
||||||
("alphabravocharlie", vec![4, 5, 10]),
|
|
||||||
]
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
match_single_path_query("t/i/a/t/d", false, &paths),
|
|
||||||
vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),]
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
match_single_path_query("tiatd", false, &paths),
|
|
||||||
vec![
|
|
||||||
("/test/tiatd", vec![6, 7, 8, 9, 10]),
|
|
||||||
("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]),
|
|
||||||
("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]),
|
|
||||||
("thisisatestdir", vec![0, 2, 6, 7, 11]),
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_match_multibyte_path_entries() {
|
|
||||||
let paths = vec!["aαbβ/cγdδ", "αβγδ/bcde", "c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", "/d/🆒/h"];
|
|
||||||
assert_eq!("1️⃣".len(), 7);
|
|
||||||
assert_eq!(
|
|
||||||
match_single_path_query("bcd", false, &paths),
|
|
||||||
vec![
|
|
||||||
("αβγδ/bcde", vec![9, 10, 11]),
|
|
||||||
("aαbβ/cγdδ", vec![3, 7, 10]),
|
|
||||||
]
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
match_single_path_query("cde", false, &paths),
|
|
||||||
vec![
|
|
||||||
("αβγδ/bcde", vec![10, 11, 12]),
|
|
||||||
("c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", vec![0, 23, 46]),
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn match_single_path_query<'a>(
|
|
||||||
query: &str,
|
|
||||||
smart_case: bool,
|
|
||||||
paths: &[&'a str],
|
|
||||||
) -> Vec<(&'a str, Vec<usize>)> {
|
|
||||||
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let query = query.chars().collect::<Vec<_>>();
|
|
||||||
let query_chars = CharBag::from(&lowercase_query[..]);
|
|
||||||
|
|
||||||
let path_arcs: Vec<Arc<Path>> = paths
|
|
||||||
.iter()
|
|
||||||
.map(|path| Arc::from(PathBuf::from(path)))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
let mut path_entries = Vec::new();
|
|
||||||
for (i, path) in paths.iter().enumerate() {
|
|
||||||
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let char_bag = CharBag::from(lowercase_path.as_slice());
|
|
||||||
path_entries.push(PathMatchCandidate {
|
|
||||||
char_bag,
|
|
||||||
path: &path_arcs[i],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100);
|
|
||||||
|
|
||||||
let cancel_flag = AtomicBool::new(false);
|
|
||||||
let mut results = Vec::new();
|
|
||||||
|
|
||||||
matcher.match_candidates(
|
|
||||||
&[],
|
|
||||||
&[],
|
|
||||||
path_entries.into_iter(),
|
|
||||||
&mut results,
|
|
||||||
&cancel_flag,
|
|
||||||
|candidate, score| PathMatch {
|
|
||||||
score,
|
|
||||||
worktree_id: 0,
|
|
||||||
positions: Vec::new(),
|
|
||||||
path: Arc::from(candidate.path),
|
|
||||||
path_prefix: "".into(),
|
|
||||||
distance_to_relative_ancestor: usize::MAX,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
results
|
|
||||||
.into_iter()
|
|
||||||
.map(|result| {
|
|
||||||
(
|
|
||||||
paths
|
|
||||||
.iter()
|
|
||||||
.copied()
|
|
||||||
.find(|p| result.path.as_ref() == Path::new(p))
|
|
||||||
.unwrap(),
|
|
||||||
result.positions,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,257 +0,0 @@
|
|||||||
use gpui::BackgroundExecutor;
|
|
||||||
use std::{
|
|
||||||
borrow::Cow,
|
|
||||||
cmp::{self, Ordering},
|
|
||||||
path::Path,
|
|
||||||
sync::{atomic::AtomicBool, Arc},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
matcher::{Match, MatchCandidate, Matcher},
|
|
||||||
CharBag,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct PathMatchCandidate<'a> {
|
|
||||||
pub path: &'a Path,
|
|
||||||
pub char_bag: CharBag,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct PathMatch {
|
|
||||||
pub score: f64,
|
|
||||||
pub positions: Vec<usize>,
|
|
||||||
pub worktree_id: usize,
|
|
||||||
pub path: Arc<Path>,
|
|
||||||
pub path_prefix: Arc<str>,
|
|
||||||
/// Number of steps removed from a shared parent with the relative path
|
|
||||||
/// Used to order closer paths first in the search list
|
|
||||||
pub distance_to_relative_ancestor: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait PathMatchCandidateSet<'a>: Send + Sync {
|
|
||||||
type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
|
|
||||||
fn id(&self) -> usize;
|
|
||||||
fn len(&self) -> usize;
|
|
||||||
fn is_empty(&self) -> bool {
|
|
||||||
self.len() == 0
|
|
||||||
}
|
|
||||||
fn prefix(&self) -> Arc<str>;
|
|
||||||
fn candidates(&'a self, start: usize) -> Self::Candidates;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Match for PathMatch {
|
|
||||||
fn score(&self) -> f64 {
|
|
||||||
self.score
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set_positions(&mut self, positions: Vec<usize>) {
|
|
||||||
self.positions = positions;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> MatchCandidate for PathMatchCandidate<'a> {
|
|
||||||
fn has_chars(&self, bag: CharBag) -> bool {
|
|
||||||
self.char_bag.is_superset(bag)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn to_string(&self) -> Cow<'a, str> {
|
|
||||||
self.path.to_string_lossy()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for PathMatch {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.cmp(other).is_eq()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Eq for PathMatch {}
|
|
||||||
|
|
||||||
impl PartialOrd for PathMatch {
|
|
||||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
|
||||||
Some(self.cmp(other))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Ord for PathMatch {
|
|
||||||
fn cmp(&self, other: &Self) -> Ordering {
|
|
||||||
self.score
|
|
||||||
.partial_cmp(&other.score)
|
|
||||||
.unwrap_or(Ordering::Equal)
|
|
||||||
.then_with(|| self.worktree_id.cmp(&other.worktree_id))
|
|
||||||
.then_with(|| {
|
|
||||||
other
|
|
||||||
.distance_to_relative_ancestor
|
|
||||||
.cmp(&self.distance_to_relative_ancestor)
|
|
||||||
})
|
|
||||||
.then_with(|| self.path.cmp(&other.path))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn match_fixed_path_set(
|
|
||||||
candidates: Vec<PathMatchCandidate>,
|
|
||||||
worktree_id: usize,
|
|
||||||
query: &str,
|
|
||||||
smart_case: bool,
|
|
||||||
max_results: usize,
|
|
||||||
) -> Vec<PathMatch> {
|
|
||||||
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let query = query.chars().collect::<Vec<_>>();
|
|
||||||
let query_char_bag = CharBag::from(&lowercase_query[..]);
|
|
||||||
|
|
||||||
let mut matcher = Matcher::new(
|
|
||||||
&query,
|
|
||||||
&lowercase_query,
|
|
||||||
query_char_bag,
|
|
||||||
smart_case,
|
|
||||||
max_results,
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut results = Vec::new();
|
|
||||||
matcher.match_candidates(
|
|
||||||
&[],
|
|
||||||
&[],
|
|
||||||
candidates.into_iter(),
|
|
||||||
&mut results,
|
|
||||||
&AtomicBool::new(false),
|
|
||||||
|candidate, score| PathMatch {
|
|
||||||
score,
|
|
||||||
worktree_id,
|
|
||||||
positions: Vec::new(),
|
|
||||||
path: Arc::from(candidate.path),
|
|
||||||
path_prefix: Arc::from(""),
|
|
||||||
distance_to_relative_ancestor: usize::MAX,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
results
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
|
|
||||||
candidate_sets: &'a [Set],
|
|
||||||
query: &str,
|
|
||||||
relative_to: Option<Arc<Path>>,
|
|
||||||
smart_case: bool,
|
|
||||||
max_results: usize,
|
|
||||||
cancel_flag: &AtomicBool,
|
|
||||||
executor: BackgroundExecutor,
|
|
||||||
) -> Vec<PathMatch> {
|
|
||||||
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
|
|
||||||
if path_count == 0 {
|
|
||||||
return Vec::new();
|
|
||||||
}
|
|
||||||
|
|
||||||
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let query = query.chars().collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let lowercase_query = &lowercase_query;
|
|
||||||
let query = &query;
|
|
||||||
let query_char_bag = CharBag::from(&lowercase_query[..]);
|
|
||||||
|
|
||||||
let num_cpus = executor.num_cpus().min(path_count);
|
|
||||||
let segment_size = (path_count + num_cpus - 1) / num_cpus;
|
|
||||||
let mut segment_results = (0..num_cpus)
|
|
||||||
.map(|_| Vec::with_capacity(max_results))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
executor
|
|
||||||
.scoped(|scope| {
|
|
||||||
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
|
|
||||||
let relative_to = relative_to.clone();
|
|
||||||
scope.spawn(async move {
|
|
||||||
let segment_start = segment_idx * segment_size;
|
|
||||||
let segment_end = segment_start + segment_size;
|
|
||||||
let mut matcher = Matcher::new(
|
|
||||||
query,
|
|
||||||
lowercase_query,
|
|
||||||
query_char_bag,
|
|
||||||
smart_case,
|
|
||||||
max_results,
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut tree_start = 0;
|
|
||||||
for candidate_set in candidate_sets {
|
|
||||||
let tree_end = tree_start + candidate_set.len();
|
|
||||||
|
|
||||||
if tree_start < segment_end && segment_start < tree_end {
|
|
||||||
let start = cmp::max(tree_start, segment_start) - tree_start;
|
|
||||||
let end = cmp::min(tree_end, segment_end) - tree_start;
|
|
||||||
let candidates = candidate_set.candidates(start).take(end - start);
|
|
||||||
|
|
||||||
let worktree_id = candidate_set.id();
|
|
||||||
let prefix = candidate_set.prefix().chars().collect::<Vec<_>>();
|
|
||||||
let lowercase_prefix = prefix
|
|
||||||
.iter()
|
|
||||||
.map(|c| c.to_ascii_lowercase())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
matcher.match_candidates(
|
|
||||||
&prefix,
|
|
||||||
&lowercase_prefix,
|
|
||||||
candidates,
|
|
||||||
results,
|
|
||||||
cancel_flag,
|
|
||||||
|candidate, score| PathMatch {
|
|
||||||
score,
|
|
||||||
worktree_id,
|
|
||||||
positions: Vec::new(),
|
|
||||||
path: Arc::from(candidate.path),
|
|
||||||
path_prefix: candidate_set.prefix(),
|
|
||||||
distance_to_relative_ancestor: relative_to.as_ref().map_or(
|
|
||||||
usize::MAX,
|
|
||||||
|relative_to| {
|
|
||||||
distance_between_paths(
|
|
||||||
candidate.path.as_ref(),
|
|
||||||
relative_to.as_ref(),
|
|
||||||
)
|
|
||||||
},
|
|
||||||
),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if tree_end >= segment_end {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
tree_start = tree_end;
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let mut results = Vec::new();
|
|
||||||
for segment_result in segment_results {
|
|
||||||
if results.is_empty() {
|
|
||||||
results = segment_result;
|
|
||||||
} else {
|
|
||||||
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
results
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Compute the distance from a given path to some other path
|
|
||||||
/// If there is no shared path, returns usize::MAX
|
|
||||||
fn distance_between_paths(path: &Path, relative_to: &Path) -> usize {
|
|
||||||
let mut path_components = path.components();
|
|
||||||
let mut relative_components = relative_to.components();
|
|
||||||
|
|
||||||
while path_components
|
|
||||||
.next()
|
|
||||||
.zip(relative_components.next())
|
|
||||||
.map(|(path_component, relative_component)| path_component == relative_component)
|
|
||||||
.unwrap_or_default()
|
|
||||||
{}
|
|
||||||
path_components.count() + relative_components.count() + 1
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use super::distance_between_paths;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_distance_between_paths_empty() {
|
|
||||||
distance_between_paths(Path::new(""), Path::new(""));
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,187 +0,0 @@
|
|||||||
use crate::{
|
|
||||||
matcher::{Match, MatchCandidate, Matcher},
|
|
||||||
CharBag,
|
|
||||||
};
|
|
||||||
use gpui::BackgroundExecutor;
|
|
||||||
use std::{
|
|
||||||
borrow::Cow,
|
|
||||||
cmp::{self, Ordering},
|
|
||||||
iter,
|
|
||||||
ops::Range,
|
|
||||||
sync::atomic::AtomicBool,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct StringMatchCandidate {
|
|
||||||
pub id: usize,
|
|
||||||
pub string: String,
|
|
||||||
pub char_bag: CharBag,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Match for StringMatch {
|
|
||||||
fn score(&self) -> f64 {
|
|
||||||
self.score
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set_positions(&mut self, positions: Vec<usize>) {
|
|
||||||
self.positions = positions;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl StringMatchCandidate {
|
|
||||||
pub fn new(id: usize, string: String) -> Self {
|
|
||||||
Self {
|
|
||||||
id,
|
|
||||||
char_bag: CharBag::from(string.as_str()),
|
|
||||||
string,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> MatchCandidate for &'a StringMatchCandidate {
|
|
||||||
fn has_chars(&self, bag: CharBag) -> bool {
|
|
||||||
self.char_bag.is_superset(bag)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn to_string(&self) -> Cow<'a, str> {
|
|
||||||
self.string.as_str().into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct StringMatch {
|
|
||||||
pub candidate_id: usize,
|
|
||||||
pub score: f64,
|
|
||||||
pub positions: Vec<usize>,
|
|
||||||
pub string: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl StringMatch {
|
|
||||||
pub fn ranges<'a>(&'a self) -> impl 'a + Iterator<Item = Range<usize>> {
|
|
||||||
let mut positions = self.positions.iter().peekable();
|
|
||||||
iter::from_fn(move || {
|
|
||||||
while let Some(start) = positions.next().copied() {
|
|
||||||
let mut end = start + self.char_len_at_index(start);
|
|
||||||
while let Some(next_start) = positions.peek() {
|
|
||||||
if end == **next_start {
|
|
||||||
end += self.char_len_at_index(end);
|
|
||||||
positions.next();
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return Some(start..end);
|
|
||||||
}
|
|
||||||
None
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn char_len_at_index(&self, ix: usize) -> usize {
|
|
||||||
self.string[ix..].chars().next().unwrap().len_utf8()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for StringMatch {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.cmp(other).is_eq()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Eq for StringMatch {}
|
|
||||||
|
|
||||||
impl PartialOrd for StringMatch {
|
|
||||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
|
||||||
Some(self.cmp(other))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Ord for StringMatch {
|
|
||||||
fn cmp(&self, other: &Self) -> Ordering {
|
|
||||||
self.score
|
|
||||||
.partial_cmp(&other.score)
|
|
||||||
.unwrap_or(Ordering::Equal)
|
|
||||||
.then_with(|| self.candidate_id.cmp(&other.candidate_id))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn match_strings(
|
|
||||||
candidates: &[StringMatchCandidate],
|
|
||||||
query: &str,
|
|
||||||
smart_case: bool,
|
|
||||||
max_results: usize,
|
|
||||||
cancel_flag: &AtomicBool,
|
|
||||||
executor: BackgroundExecutor,
|
|
||||||
) -> Vec<StringMatch> {
|
|
||||||
if candidates.is_empty() || max_results == 0 {
|
|
||||||
return Default::default();
|
|
||||||
}
|
|
||||||
|
|
||||||
if query.is_empty() {
|
|
||||||
return candidates
|
|
||||||
.iter()
|
|
||||||
.map(|candidate| StringMatch {
|
|
||||||
candidate_id: candidate.id,
|
|
||||||
score: 0.,
|
|
||||||
positions: Default::default(),
|
|
||||||
string: candidate.string.clone(),
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
}
|
|
||||||
|
|
||||||
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let query = query.chars().collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let lowercase_query = &lowercase_query;
|
|
||||||
let query = &query;
|
|
||||||
let query_char_bag = CharBag::from(&lowercase_query[..]);
|
|
||||||
|
|
||||||
let num_cpus = executor.num_cpus().min(candidates.len());
|
|
||||||
let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
|
|
||||||
let mut segment_results = (0..num_cpus)
|
|
||||||
.map(|_| Vec::with_capacity(max_results.min(candidates.len())))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
executor
|
|
||||||
.scoped(|scope| {
|
|
||||||
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
|
|
||||||
let cancel_flag = &cancel_flag;
|
|
||||||
scope.spawn(async move {
|
|
||||||
let segment_start = cmp::min(segment_idx * segment_size, candidates.len());
|
|
||||||
let segment_end = cmp::min(segment_start + segment_size, candidates.len());
|
|
||||||
let mut matcher = Matcher::new(
|
|
||||||
query,
|
|
||||||
lowercase_query,
|
|
||||||
query_char_bag,
|
|
||||||
smart_case,
|
|
||||||
max_results,
|
|
||||||
);
|
|
||||||
|
|
||||||
matcher.match_candidates(
|
|
||||||
&[],
|
|
||||||
&[],
|
|
||||||
candidates[segment_start..segment_end].iter(),
|
|
||||||
results,
|
|
||||||
cancel_flag,
|
|
||||||
|candidate, score| StringMatch {
|
|
||||||
candidate_id: candidate.id,
|
|
||||||
score,
|
|
||||||
positions: Vec::new(),
|
|
||||||
string: candidate.string.to_string(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let mut results = Vec::new();
|
|
||||||
for segment_result in segment_results {
|
|
||||||
if results.is_empty() {
|
|
||||||
results = segment_result;
|
|
||||||
} else {
|
|
||||||
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
results
|
|
||||||
}
|
|
@ -24,16 +24,15 @@ test-support = [
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
clock = { path = "../clock" }
|
clock = { path = "../clock" }
|
||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
fuzzy = { path = "../fuzzy" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
fs = { path = "../fs" }
|
git = { package = "git3", path = "../git3" }
|
||||||
git = { path = "../git" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
gpui = { path = "../gpui" }
|
|
||||||
lsp = { path = "../lsp" }
|
lsp = { path = "../lsp" }
|
||||||
rpc = { path = "../rpc" }
|
rpc = { package = "rpc2", path = "../rpc2" }
|
||||||
settings = { path = "../settings" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
sum_tree = { path = "../sum_tree" }
|
sum_tree = { path = "../sum_tree" }
|
||||||
text = { path = "../text" }
|
text = { package = "text2", path = "../text2" }
|
||||||
theme = { path = "../theme" }
|
theme = { package = "theme2", path = "../theme2" }
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
|
|
||||||
anyhow.workspace = true
|
anyhow.workspace = true
|
||||||
@ -45,7 +44,6 @@ lazy_static.workspace = true
|
|||||||
log.workspace = true
|
log.workspace = true
|
||||||
parking_lot.workspace = true
|
parking_lot.workspace = true
|
||||||
postage.workspace = true
|
postage.workspace = true
|
||||||
pulldown-cmark = { version = "0.9.2", default-features = false }
|
|
||||||
regex.workspace = true
|
regex.workspace = true
|
||||||
schemars.workspace = true
|
schemars.workspace = true
|
||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
@ -60,14 +58,15 @@ unicase = "2.6"
|
|||||||
rand = { workspace = true, optional = true }
|
rand = { workspace = true, optional = true }
|
||||||
tree-sitter-rust = { workspace = true, optional = true }
|
tree-sitter-rust = { workspace = true, optional = true }
|
||||||
tree-sitter-typescript = { workspace = true, optional = true }
|
tree-sitter-typescript = { workspace = true, optional = true }
|
||||||
|
pulldown-cmark = { version = "0.9.2", default-features = false }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
client = { path = "../client", features = ["test-support"] }
|
client = { package = "client2", path = "../client2", features = ["test-support"] }
|
||||||
collections = { path = "../collections", features = ["test-support"] }
|
collections = { path = "../collections", features = ["test-support"] }
|
||||||
gpui = { path = "../gpui", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
lsp = { path = "../lsp", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
text = { path = "../text", features = ["test-support"] }
|
text = { package = "text2", path = "../text2", features = ["test-support"] }
|
||||||
settings = { path = "../settings", features = ["test-support"] }
|
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
|
||||||
util = { path = "../util", features = ["test-support"] }
|
util = { path = "../util", features = ["test-support"] }
|
||||||
ctor.workspace = true
|
ctor.workspace = true
|
||||||
env_logger.workspace = true
|
env_logger.workspace = true
|
||||||
|
@ -18,7 +18,8 @@ use crate::{
|
|||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
pub use clock::ReplicaId;
|
pub use clock::ReplicaId;
|
||||||
use futures::channel::oneshot;
|
use futures::channel::oneshot;
|
||||||
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
|
use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
|
||||||
|
use lazy_static::lazy_static;
|
||||||
use lsp::LanguageServerId;
|
use lsp::LanguageServerId;
|
||||||
use parking_lot::Mutex;
|
use parking_lot::Mutex;
|
||||||
use similar::{ChangeTag, TextDiff};
|
use similar::{ChangeTag, TextDiff};
|
||||||
@ -52,14 +53,23 @@ pub use {tree_sitter_rust, tree_sitter_typescript};
|
|||||||
|
|
||||||
pub use lsp::DiagnosticSeverity;
|
pub use lsp::DiagnosticSeverity;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
|
||||||
|
}
|
||||||
|
|
||||||
pub struct Buffer {
|
pub struct Buffer {
|
||||||
text: TextBuffer,
|
text: TextBuffer,
|
||||||
diff_base: Option<String>,
|
diff_base: Option<String>,
|
||||||
git_diff: git::diff::BufferDiff,
|
git_diff: git::diff::BufferDiff,
|
||||||
file: Option<Arc<dyn File>>,
|
file: Option<Arc<dyn File>>,
|
||||||
saved_version: clock::Global,
|
/// The mtime of the file when this buffer was last loaded from
|
||||||
saved_version_fingerprint: RopeFingerprint,
|
/// or saved to disk.
|
||||||
saved_mtime: SystemTime,
|
saved_mtime: SystemTime,
|
||||||
|
/// The version vector when this buffer was last loaded from
|
||||||
|
/// or saved to disk.
|
||||||
|
saved_version: clock::Global,
|
||||||
|
/// A hash of the current contents of the buffer's file.
|
||||||
|
file_fingerprint: RopeFingerprint,
|
||||||
transaction_depth: usize,
|
transaction_depth: usize,
|
||||||
was_dirty_before_starting_transaction: Option<bool>,
|
was_dirty_before_starting_transaction: Option<bool>,
|
||||||
reload_task: Option<Task<Result<()>>>,
|
reload_task: Option<Task<Result<()>>>,
|
||||||
@ -190,8 +200,8 @@ pub struct Completion {
|
|||||||
pub old_range: Range<Anchor>,
|
pub old_range: Range<Anchor>,
|
||||||
pub new_text: String,
|
pub new_text: String,
|
||||||
pub label: CodeLabel,
|
pub label: CodeLabel,
|
||||||
pub documentation: Option<Documentation>,
|
|
||||||
pub server_id: LanguageServerId,
|
pub server_id: LanguageServerId,
|
||||||
|
pub documentation: Option<Documentation>,
|
||||||
pub lsp_completion: lsp::CompletionItem,
|
pub lsp_completion: lsp::CompletionItem,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -422,8 +432,7 @@ impl Buffer {
|
|||||||
.ok_or_else(|| anyhow!("missing line_ending"))?,
|
.ok_or_else(|| anyhow!("missing line_ending"))?,
|
||||||
));
|
));
|
||||||
this.saved_version = proto::deserialize_version(&message.saved_version);
|
this.saved_version = proto::deserialize_version(&message.saved_version);
|
||||||
this.saved_version_fingerprint =
|
this.file_fingerprint = proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
|
||||||
proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
|
|
||||||
this.saved_mtime = message
|
this.saved_mtime = message
|
||||||
.saved_mtime
|
.saved_mtime
|
||||||
.ok_or_else(|| anyhow!("invalid saved_mtime"))?
|
.ok_or_else(|| anyhow!("invalid saved_mtime"))?
|
||||||
@ -439,7 +448,7 @@ impl Buffer {
|
|||||||
diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
|
diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
|
||||||
line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
|
line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
|
||||||
saved_version: proto::serialize_version(&self.saved_version),
|
saved_version: proto::serialize_version(&self.saved_version),
|
||||||
saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
|
saved_version_fingerprint: proto::serialize_fingerprint(self.file_fingerprint),
|
||||||
saved_mtime: Some(self.saved_mtime.into()),
|
saved_mtime: Some(self.saved_mtime.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -477,7 +486,7 @@ impl Buffer {
|
|||||||
));
|
));
|
||||||
|
|
||||||
let text_operations = self.text.operations().clone();
|
let text_operations = self.text.operations().clone();
|
||||||
cx.background().spawn(async move {
|
cx.background_executor().spawn(async move {
|
||||||
let since = since.unwrap_or_default();
|
let since = since.unwrap_or_default();
|
||||||
operations.extend(
|
operations.extend(
|
||||||
text_operations
|
text_operations
|
||||||
@ -509,7 +518,7 @@ impl Buffer {
|
|||||||
Self {
|
Self {
|
||||||
saved_mtime,
|
saved_mtime,
|
||||||
saved_version: buffer.version(),
|
saved_version: buffer.version(),
|
||||||
saved_version_fingerprint: buffer.as_rope().fingerprint(),
|
file_fingerprint: buffer.as_rope().fingerprint(),
|
||||||
reload_task: None,
|
reload_task: None,
|
||||||
transaction_depth: 0,
|
transaction_depth: 0,
|
||||||
was_dirty_before_starting_transaction: None,
|
was_dirty_before_starting_transaction: None,
|
||||||
@ -576,7 +585,7 @@ impl Buffer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
|
pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
|
||||||
self.saved_version_fingerprint
|
self.file_fingerprint
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn saved_mtime(&self) -> SystemTime {
|
pub fn saved_mtime(&self) -> SystemTime {
|
||||||
@ -604,7 +613,7 @@ impl Buffer {
|
|||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
) {
|
) {
|
||||||
self.saved_version = version;
|
self.saved_version = version;
|
||||||
self.saved_version_fingerprint = fingerprint;
|
self.file_fingerprint = fingerprint;
|
||||||
self.saved_mtime = mtime;
|
self.saved_mtime = mtime;
|
||||||
cx.emit(Event::Saved);
|
cx.emit(Event::Saved);
|
||||||
cx.notify();
|
cx.notify();
|
||||||
@ -620,13 +629,14 @@ impl Buffer {
|
|||||||
let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
|
let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
|
||||||
let file = this.file.as_ref()?.as_local()?;
|
let file = this.file.as_ref()?.as_local()?;
|
||||||
Some((file.mtime(), file.load(cx)))
|
Some((file.mtime(), file.load(cx)))
|
||||||
}) else {
|
})?
|
||||||
|
else {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
};
|
};
|
||||||
|
|
||||||
let new_text = new_text.await?;
|
let new_text = new_text.await?;
|
||||||
let diff = this
|
let diff = this
|
||||||
.update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))
|
.update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
|
||||||
.await;
|
.await;
|
||||||
this.update(&mut cx, |this, cx| {
|
this.update(&mut cx, |this, cx| {
|
||||||
if this.version() == diff.base_version {
|
if this.version() == diff.base_version {
|
||||||
@ -652,8 +662,7 @@ impl Buffer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
this.reload_task.take();
|
this.reload_task.take();
|
||||||
});
|
})
|
||||||
Ok(())
|
|
||||||
}));
|
}));
|
||||||
rx
|
rx
|
||||||
}
|
}
|
||||||
@ -667,14 +676,14 @@ impl Buffer {
|
|||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
) {
|
) {
|
||||||
self.saved_version = version;
|
self.saved_version = version;
|
||||||
self.saved_version_fingerprint = fingerprint;
|
self.file_fingerprint = fingerprint;
|
||||||
self.text.set_line_ending(line_ending);
|
self.text.set_line_ending(line_ending);
|
||||||
self.saved_mtime = mtime;
|
self.saved_mtime = mtime;
|
||||||
if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
|
if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
|
||||||
file.buffer_reloaded(
|
file.buffer_reloaded(
|
||||||
self.remote_id(),
|
self.remote_id(),
|
||||||
&self.saved_version,
|
&self.saved_version,
|
||||||
self.saved_version_fingerprint,
|
self.file_fingerprint,
|
||||||
self.line_ending(),
|
self.line_ending(),
|
||||||
self.saved_mtime,
|
self.saved_mtime,
|
||||||
cx,
|
cx,
|
||||||
@ -736,20 +745,18 @@ impl Buffer {
|
|||||||
let snapshot = self.snapshot();
|
let snapshot = self.snapshot();
|
||||||
|
|
||||||
let mut diff = self.git_diff.clone();
|
let mut diff = self.git_diff.clone();
|
||||||
let diff = cx.background().spawn(async move {
|
let diff = cx.background_executor().spawn(async move {
|
||||||
diff.update(&diff_base, &snapshot).await;
|
diff.update(&diff_base, &snapshot).await;
|
||||||
diff
|
diff
|
||||||
});
|
});
|
||||||
|
|
||||||
let handle = cx.weak_handle();
|
Some(cx.spawn(|this, mut cx| async move {
|
||||||
Some(cx.spawn_weak(|_, mut cx| async move {
|
|
||||||
let buffer_diff = diff.await;
|
let buffer_diff = diff.await;
|
||||||
if let Some(this) = handle.upgrade(&mut cx) {
|
this.update(&mut cx, |this, _| {
|
||||||
this.update(&mut cx, |this, _| {
|
this.git_diff = buffer_diff;
|
||||||
this.git_diff = buffer_diff;
|
this.git_diff_update_count += 1;
|
||||||
this.git_diff_update_count += 1;
|
})
|
||||||
})
|
.ok();
|
||||||
}
|
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -847,7 +854,7 @@ impl Buffer {
|
|||||||
let mut syntax_snapshot = syntax_map.snapshot();
|
let mut syntax_snapshot = syntax_map.snapshot();
|
||||||
drop(syntax_map);
|
drop(syntax_map);
|
||||||
|
|
||||||
let parse_task = cx.background().spawn({
|
let parse_task = cx.background_executor().spawn({
|
||||||
let language = language.clone();
|
let language = language.clone();
|
||||||
let language_registry = language_registry.clone();
|
let language_registry = language_registry.clone();
|
||||||
async move {
|
async move {
|
||||||
@ -857,7 +864,7 @@ impl Buffer {
|
|||||||
});
|
});
|
||||||
|
|
||||||
match cx
|
match cx
|
||||||
.background()
|
.background_executor()
|
||||||
.block_with_timeout(self.sync_parse_timeout, parse_task)
|
.block_with_timeout(self.sync_parse_timeout, parse_task)
|
||||||
{
|
{
|
||||||
Ok(new_syntax_snapshot) => {
|
Ok(new_syntax_snapshot) => {
|
||||||
@ -886,7 +893,8 @@ impl Buffer {
|
|||||||
if parse_again {
|
if parse_again {
|
||||||
this.reparse(cx);
|
this.reparse(cx);
|
||||||
}
|
}
|
||||||
});
|
})
|
||||||
|
.ok();
|
||||||
})
|
})
|
||||||
.detach();
|
.detach();
|
||||||
}
|
}
|
||||||
@ -919,9 +927,9 @@ impl Buffer {
|
|||||||
|
|
||||||
fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
|
fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
|
||||||
if let Some(indent_sizes) = self.compute_autoindents() {
|
if let Some(indent_sizes) = self.compute_autoindents() {
|
||||||
let indent_sizes = cx.background().spawn(indent_sizes);
|
let indent_sizes = cx.background_executor().spawn(indent_sizes);
|
||||||
match cx
|
match cx
|
||||||
.background()
|
.background_executor()
|
||||||
.block_with_timeout(Duration::from_micros(500), indent_sizes)
|
.block_with_timeout(Duration::from_micros(500), indent_sizes)
|
||||||
{
|
{
|
||||||
Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
|
Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
|
||||||
@ -930,7 +938,8 @@ impl Buffer {
|
|||||||
let indent_sizes = indent_sizes.await;
|
let indent_sizes = indent_sizes.await;
|
||||||
this.update(&mut cx, |this, cx| {
|
this.update(&mut cx, |this, cx| {
|
||||||
this.apply_autoindents(indent_sizes, cx);
|
this.apply_autoindents(indent_sizes, cx);
|
||||||
});
|
})
|
||||||
|
.ok();
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1169,36 +1178,72 @@ impl Buffer {
|
|||||||
pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
|
pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
|
||||||
let old_text = self.as_rope().clone();
|
let old_text = self.as_rope().clone();
|
||||||
let base_version = self.version();
|
let base_version = self.version();
|
||||||
cx.background().spawn(async move {
|
cx.background_executor()
|
||||||
let old_text = old_text.to_string();
|
.spawn_labeled(*BUFFER_DIFF_TASK, async move {
|
||||||
let line_ending = LineEnding::detect(&new_text);
|
let old_text = old_text.to_string();
|
||||||
LineEnding::normalize(&mut new_text);
|
let line_ending = LineEnding::detect(&new_text);
|
||||||
let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
|
LineEnding::normalize(&mut new_text);
|
||||||
let mut edits = Vec::new();
|
|
||||||
let mut offset = 0;
|
let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
|
||||||
let empty: Arc<str> = "".into();
|
let empty: Arc<str> = "".into();
|
||||||
for change in diff.iter_all_changes() {
|
|
||||||
let value = change.value();
|
let mut edits = Vec::new();
|
||||||
let end_offset = offset + value.len();
|
let mut old_offset = 0;
|
||||||
match change.tag() {
|
let mut new_offset = 0;
|
||||||
ChangeTag::Equal => {
|
let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
|
||||||
offset = end_offset;
|
for change in diff.iter_all_changes().map(Some).chain([None]) {
|
||||||
|
if let Some(change) = &change {
|
||||||
|
let len = change.value().len();
|
||||||
|
match change.tag() {
|
||||||
|
ChangeTag::Equal => {
|
||||||
|
old_offset += len;
|
||||||
|
new_offset += len;
|
||||||
|
}
|
||||||
|
ChangeTag::Delete => {
|
||||||
|
let old_end_offset = old_offset + len;
|
||||||
|
if let Some((last_old_range, _)) = &mut last_edit {
|
||||||
|
last_old_range.end = old_end_offset;
|
||||||
|
} else {
|
||||||
|
last_edit =
|
||||||
|
Some((old_offset..old_end_offset, new_offset..new_offset));
|
||||||
|
}
|
||||||
|
old_offset = old_end_offset;
|
||||||
|
}
|
||||||
|
ChangeTag::Insert => {
|
||||||
|
let new_end_offset = new_offset + len;
|
||||||
|
if let Some((_, last_new_range)) = &mut last_edit {
|
||||||
|
last_new_range.end = new_end_offset;
|
||||||
|
} else {
|
||||||
|
last_edit =
|
||||||
|
Some((old_offset..old_offset, new_offset..new_end_offset));
|
||||||
|
}
|
||||||
|
new_offset = new_end_offset;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
ChangeTag::Delete => {
|
|
||||||
edits.push((offset..end_offset, empty.clone()));
|
if let Some((old_range, new_range)) = &last_edit {
|
||||||
offset = end_offset;
|
if old_offset > old_range.end
|
||||||
}
|
|| new_offset > new_range.end
|
||||||
ChangeTag::Insert => {
|
|| change.is_none()
|
||||||
edits.push((offset..offset, value.into()));
|
{
|
||||||
|
let text = if new_range.is_empty() {
|
||||||
|
empty.clone()
|
||||||
|
} else {
|
||||||
|
new_text[new_range.clone()].into()
|
||||||
|
};
|
||||||
|
edits.push((old_range.clone(), text));
|
||||||
|
last_edit.take();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
Diff {
|
Diff {
|
||||||
base_version,
|
base_version,
|
||||||
line_ending,
|
line_ending,
|
||||||
edits,
|
edits,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Spawn a background task that searches the buffer for any whitespace
|
/// Spawn a background task that searches the buffer for any whitespace
|
||||||
@ -1207,7 +1252,7 @@ impl Buffer {
|
|||||||
let old_text = self.as_rope().clone();
|
let old_text = self.as_rope().clone();
|
||||||
let line_ending = self.line_ending();
|
let line_ending = self.line_ending();
|
||||||
let base_version = self.version();
|
let base_version = self.version();
|
||||||
cx.background().spawn(async move {
|
cx.background_executor().spawn(async move {
|
||||||
let ranges = trailing_whitespace_ranges(&old_text);
|
let ranges = trailing_whitespace_ranges(&old_text);
|
||||||
let empty = Arc::<str>::from("");
|
let empty = Arc::<str>::from("");
|
||||||
Diff {
|
Diff {
|
||||||
@ -1282,12 +1327,12 @@ impl Buffer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_dirty(&self) -> bool {
|
pub fn is_dirty(&self) -> bool {
|
||||||
self.saved_version_fingerprint != self.as_rope().fingerprint()
|
self.file_fingerprint != self.as_rope().fingerprint()
|
||||||
|| self.file.as_ref().map_or(false, |file| file.is_deleted())
|
|| self.file.as_ref().map_or(false, |file| file.is_deleted())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn has_conflict(&self) -> bool {
|
pub fn has_conflict(&self) -> bool {
|
||||||
self.saved_version_fingerprint != self.as_rope().fingerprint()
|
self.file_fingerprint != self.as_rope().fingerprint()
|
||||||
&& self
|
&& self
|
||||||
.file
|
.file
|
||||||
.as_ref()
|
.as_ref()
|
||||||
@ -1458,95 +1503,82 @@ impl Buffer {
|
|||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Non-generic part hoisted out to reduce LLVM IR size.
|
self.start_transaction();
|
||||||
fn tail(
|
self.pending_autoindent.take();
|
||||||
this: &mut Buffer,
|
let autoindent_request = autoindent_mode
|
||||||
edits: Vec<(Range<usize>, Arc<str>)>,
|
.and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
|
||||||
autoindent_mode: Option<AutoindentMode>,
|
|
||||||
cx: &mut ModelContext<Buffer>,
|
|
||||||
) -> Option<clock::Lamport> {
|
|
||||||
this.start_transaction();
|
|
||||||
this.pending_autoindent.take();
|
|
||||||
let autoindent_request = autoindent_mode
|
|
||||||
.and_then(|mode| this.language.as_ref().map(|_| (this.snapshot(), mode)));
|
|
||||||
|
|
||||||
let edit_operation = this.text.edit(edits.iter().cloned());
|
let edit_operation = self.text.edit(edits.iter().cloned());
|
||||||
let edit_id = edit_operation.timestamp();
|
let edit_id = edit_operation.timestamp();
|
||||||
|
|
||||||
if let Some((before_edit, mode)) = autoindent_request {
|
if let Some((before_edit, mode)) = autoindent_request {
|
||||||
let mut delta = 0isize;
|
let mut delta = 0isize;
|
||||||
let entries = edits
|
let entries = edits
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.zip(&edit_operation.as_edit().unwrap().new_text)
|
.zip(&edit_operation.as_edit().unwrap().new_text)
|
||||||
.map(|((ix, (range, _)), new_text)| {
|
.map(|((ix, (range, _)), new_text)| {
|
||||||
let new_text_length = new_text.len();
|
let new_text_length = new_text.len();
|
||||||
let old_start = range.start.to_point(&before_edit);
|
let old_start = range.start.to_point(&before_edit);
|
||||||
let new_start = (delta + range.start as isize) as usize;
|
let new_start = (delta + range.start as isize) as usize;
|
||||||
delta +=
|
delta += new_text_length as isize - (range.end as isize - range.start as isize);
|
||||||
new_text_length as isize - (range.end as isize - range.start as isize);
|
|
||||||
|
|
||||||
let mut range_of_insertion_to_indent = 0..new_text_length;
|
let mut range_of_insertion_to_indent = 0..new_text_length;
|
||||||
let mut first_line_is_new = false;
|
let mut first_line_is_new = false;
|
||||||
let mut original_indent_column = None;
|
let mut original_indent_column = None;
|
||||||
|
|
||||||
// When inserting an entire line at the beginning of an existing line,
|
// When inserting an entire line at the beginning of an existing line,
|
||||||
// treat the insertion as new.
|
// treat the insertion as new.
|
||||||
if new_text.contains('\n')
|
if new_text.contains('\n')
|
||||||
&& old_start.column
|
&& old_start.column <= before_edit.indent_size_for_line(old_start.row).len
|
||||||
<= before_edit.indent_size_for_line(old_start.row).len
|
{
|
||||||
{
|
first_line_is_new = true;
|
||||||
first_line_is_new = true;
|
}
|
||||||
|
|
||||||
|
// When inserting text starting with a newline, avoid auto-indenting the
|
||||||
|
// previous line.
|
||||||
|
if new_text.starts_with('\n') {
|
||||||
|
range_of_insertion_to_indent.start += 1;
|
||||||
|
first_line_is_new = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Avoid auto-indenting after the insertion.
|
||||||
|
if let AutoindentMode::Block {
|
||||||
|
original_indent_columns,
|
||||||
|
} = &mode
|
||||||
|
{
|
||||||
|
original_indent_column =
|
||||||
|
Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
|
||||||
|
indent_size_for_text(
|
||||||
|
new_text[range_of_insertion_to_indent.clone()].chars(),
|
||||||
|
)
|
||||||
|
.len
|
||||||
|
}));
|
||||||
|
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
||||||
|
range_of_insertion_to_indent.end -= 1;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// When inserting text starting with a newline, avoid auto-indenting the
|
AutoindentRequestEntry {
|
||||||
// previous line.
|
first_line_is_new,
|
||||||
if new_text.starts_with('\n') {
|
original_indent_column,
|
||||||
range_of_insertion_to_indent.start += 1;
|
indent_size: before_edit.language_indent_size_at(range.start, cx),
|
||||||
first_line_is_new = true;
|
range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
|
||||||
}
|
..self.anchor_after(new_start + range_of_insertion_to_indent.end),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
// Avoid auto-indenting after the insertion.
|
self.autoindent_requests.push(Arc::new(AutoindentRequest {
|
||||||
if let AutoindentMode::Block {
|
before_edit,
|
||||||
original_indent_columns,
|
entries,
|
||||||
} = &mode
|
is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
|
||||||
{
|
}));
|
||||||
original_indent_column = Some(
|
|
||||||
original_indent_columns.get(ix).copied().unwrap_or_else(|| {
|
|
||||||
indent_size_for_text(
|
|
||||||
new_text[range_of_insertion_to_indent.clone()].chars(),
|
|
||||||
)
|
|
||||||
.len
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
|
||||||
range_of_insertion_to_indent.end -= 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
AutoindentRequestEntry {
|
|
||||||
first_line_is_new,
|
|
||||||
original_indent_column,
|
|
||||||
indent_size: before_edit.language_indent_size_at(range.start, cx),
|
|
||||||
range: this
|
|
||||||
.anchor_before(new_start + range_of_insertion_to_indent.start)
|
|
||||||
..this.anchor_after(new_start + range_of_insertion_to_indent.end),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
this.autoindent_requests.push(Arc::new(AutoindentRequest {
|
|
||||||
before_edit,
|
|
||||||
entries,
|
|
||||||
is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
this.end_transaction(cx);
|
|
||||||
this.send_operation(Operation::Buffer(edit_operation), cx);
|
|
||||||
Some(edit_id)
|
|
||||||
}
|
}
|
||||||
tail(self, edits, autoindent_mode, cx)
|
|
||||||
|
self.end_transaction(cx);
|
||||||
|
self.send_operation(Operation::Buffer(edit_operation), cx);
|
||||||
|
Some(edit_id)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn did_edit(
|
fn did_edit(
|
||||||
@ -1879,9 +1911,7 @@ impl Buffer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Entity for Buffer {
|
impl EventEmitter<Event> for Buffer {}
|
||||||
type Event = Event;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for Buffer {
|
impl Deref for Buffer {
|
||||||
type Target = TextBuffer;
|
type Target = TextBuffer;
|
||||||
|
@ -1,25 +1,25 @@
|
|||||||
|
use super::*;
|
||||||
use crate::language_settings::{
|
use crate::language_settings::{
|
||||||
AllLanguageSettings, AllLanguageSettingsContent, LanguageSettingsContent,
|
AllLanguageSettings, AllLanguageSettingsContent, LanguageSettingsContent,
|
||||||
};
|
};
|
||||||
|
use crate::Buffer;
|
||||||
use super::*;
|
|
||||||
use clock::ReplicaId;
|
use clock::ReplicaId;
|
||||||
use collections::BTreeMap;
|
use collections::BTreeMap;
|
||||||
use gpui::{AppContext, ModelHandle};
|
use gpui::{AppContext, Model};
|
||||||
|
use gpui::{Context, TestAppContext};
|
||||||
use indoc::indoc;
|
use indoc::indoc;
|
||||||
use proto::deserialize_operation;
|
use proto::deserialize_operation;
|
||||||
use rand::prelude::*;
|
use rand::prelude::*;
|
||||||
use regex::RegexBuilder;
|
use regex::RegexBuilder;
|
||||||
use settings::SettingsStore;
|
use settings::SettingsStore;
|
||||||
use std::{
|
use std::{
|
||||||
cell::RefCell,
|
|
||||||
env,
|
env,
|
||||||
ops::Range,
|
ops::Range,
|
||||||
rc::Rc,
|
|
||||||
time::{Duration, Instant},
|
time::{Duration, Instant},
|
||||||
};
|
};
|
||||||
use text::network::Network;
|
use text::network::Network;
|
||||||
use text::LineEnding;
|
use text::LineEnding;
|
||||||
|
use text::{Point, ToPoint};
|
||||||
use unindent::Unindent as _;
|
use unindent::Unindent as _;
|
||||||
use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};
|
use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};
|
||||||
|
|
||||||
@ -42,8 +42,8 @@ fn init_logger() {
|
|||||||
fn test_line_endings(cx: &mut gpui::AppContext) {
|
fn test_line_endings(cx: &mut gpui::AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "one\r\ntwo\rthree")
|
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "one\r\ntwo\rthree")
|
||||||
.with_language(Arc::new(rust_lang()), cx);
|
.with_language(Arc::new(rust_lang()), cx);
|
||||||
assert_eq!(buffer.text(), "one\ntwo\nthree");
|
assert_eq!(buffer.text(), "one\ntwo\nthree");
|
||||||
assert_eq!(buffer.line_ending(), LineEnding::Windows);
|
assert_eq!(buffer.line_ending(), LineEnding::Windows);
|
||||||
@ -135,24 +135,24 @@ fn test_select_language() {
|
|||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
fn test_edit_events(cx: &mut gpui::AppContext) {
|
fn test_edit_events(cx: &mut gpui::AppContext) {
|
||||||
let mut now = Instant::now();
|
let mut now = Instant::now();
|
||||||
let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
|
let buffer_1_events = Arc::new(Mutex::new(Vec::new()));
|
||||||
let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
|
let buffer_2_events = Arc::new(Mutex::new(Vec::new()));
|
||||||
|
|
||||||
let buffer1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcdef"));
|
let buffer1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcdef"));
|
||||||
let buffer2 = cx.add_model(|cx| Buffer::new(1, cx.model_id() as u64, "abcdef"));
|
let buffer2 = cx.new_model(|cx| Buffer::new(1, cx.entity_id().as_u64(), "abcdef"));
|
||||||
let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
|
let buffer1_ops = Arc::new(Mutex::new(Vec::new()));
|
||||||
buffer1.update(cx, {
|
buffer1.update(cx, {
|
||||||
let buffer1_ops = buffer1_ops.clone();
|
let buffer1_ops = buffer1_ops.clone();
|
||||||
|buffer, cx| {
|
|buffer, cx| {
|
||||||
let buffer_1_events = buffer_1_events.clone();
|
let buffer_1_events = buffer_1_events.clone();
|
||||||
cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
|
cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
|
||||||
Event::Operation(op) => buffer1_ops.borrow_mut().push(op),
|
Event::Operation(op) => buffer1_ops.lock().push(op),
|
||||||
event => buffer_1_events.borrow_mut().push(event),
|
event => buffer_1_events.lock().push(event),
|
||||||
})
|
})
|
||||||
.detach();
|
.detach();
|
||||||
let buffer_2_events = buffer_2_events.clone();
|
let buffer_2_events = buffer_2_events.clone();
|
||||||
cx.subscribe(&buffer2, move |_, _, event, _| {
|
cx.subscribe(&buffer2, move |_, _, event, _| {
|
||||||
buffer_2_events.borrow_mut().push(event.clone())
|
buffer_2_events.lock().push(event.clone())
|
||||||
})
|
})
|
||||||
.detach();
|
.detach();
|
||||||
|
|
||||||
@ -179,12 +179,10 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
|
|||||||
// Incorporating a set of remote ops emits a single edited event,
|
// Incorporating a set of remote ops emits a single edited event,
|
||||||
// followed by a dirty changed event.
|
// followed by a dirty changed event.
|
||||||
buffer2.update(cx, |buffer, cx| {
|
buffer2.update(cx, |buffer, cx| {
|
||||||
buffer
|
buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap();
|
||||||
.apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
|
|
||||||
.unwrap();
|
|
||||||
});
|
});
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
mem::take(&mut *buffer_1_events.borrow_mut()),
|
mem::take(&mut *buffer_1_events.lock()),
|
||||||
vec![
|
vec![
|
||||||
Event::Edited,
|
Event::Edited,
|
||||||
Event::DirtyChanged,
|
Event::DirtyChanged,
|
||||||
@ -193,7 +191,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
|
|||||||
]
|
]
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
mem::take(&mut *buffer_2_events.borrow_mut()),
|
mem::take(&mut *buffer_2_events.lock()),
|
||||||
vec![Event::Edited, Event::DirtyChanged]
|
vec![Event::Edited, Event::DirtyChanged]
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -205,28 +203,26 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
|
|||||||
// Incorporating the remote ops again emits a single edited event,
|
// Incorporating the remote ops again emits a single edited event,
|
||||||
// followed by a dirty changed event.
|
// followed by a dirty changed event.
|
||||||
buffer2.update(cx, |buffer, cx| {
|
buffer2.update(cx, |buffer, cx| {
|
||||||
buffer
|
buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap();
|
||||||
.apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
|
|
||||||
.unwrap();
|
|
||||||
});
|
});
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
mem::take(&mut *buffer_1_events.borrow_mut()),
|
mem::take(&mut *buffer_1_events.lock()),
|
||||||
vec![Event::Edited, Event::DirtyChanged,]
|
vec![Event::Edited, Event::DirtyChanged,]
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
mem::take(&mut *buffer_2_events.borrow_mut()),
|
mem::take(&mut *buffer_2_events.lock()),
|
||||||
vec![Event::Edited, Event::DirtyChanged]
|
vec![Event::Edited, Event::DirtyChanged]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
|
async fn test_apply_diff(cx: &mut TestAppContext) {
|
||||||
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
|
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
|
||||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
|
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
|
||||||
let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
|
let anchor = buffer.update(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
|
||||||
|
|
||||||
let text = "a\nccc\ndddd\nffffff\n";
|
let text = "a\nccc\ndddd\nffffff\n";
|
||||||
let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
|
let diff = buffer.update(cx, |b, cx| b.diff(text.into(), cx)).await;
|
||||||
buffer.update(cx, |buffer, cx| {
|
buffer.update(cx, |buffer, cx| {
|
||||||
buffer.apply_diff(diff, cx).unwrap();
|
buffer.apply_diff(diff, cx).unwrap();
|
||||||
assert_eq!(buffer.text(), text);
|
assert_eq!(buffer.text(), text);
|
||||||
@ -234,7 +230,7 @@ async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
|
let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
|
||||||
let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
|
let diff = buffer.update(cx, |b, cx| b.diff(text.into(), cx)).await;
|
||||||
buffer.update(cx, |buffer, cx| {
|
buffer.update(cx, |buffer, cx| {
|
||||||
buffer.apply_diff(diff, cx).unwrap();
|
buffer.apply_diff(diff, cx).unwrap();
|
||||||
assert_eq!(buffer.text(), text);
|
assert_eq!(buffer.text(), text);
|
||||||
@ -254,15 +250,15 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
|
|||||||
]
|
]
|
||||||
.join("\n");
|
.join("\n");
|
||||||
|
|
||||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
|
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
|
||||||
|
|
||||||
// Spawn a task to format the buffer's whitespace.
|
// Spawn a task to format the buffer's whitespace.
|
||||||
// Pause so that the foratting task starts running.
|
// Pause so that the foratting task starts running.
|
||||||
let format = buffer.read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
|
let format = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
|
||||||
smol::future::yield_now().await;
|
smol::future::yield_now().await;
|
||||||
|
|
||||||
// Edit the buffer while the normalization task is running.
|
// Edit the buffer while the normalization task is running.
|
||||||
let version_before_edit = buffer.read_with(cx, |buffer, _| buffer.version());
|
let version_before_edit = buffer.update(cx, |buffer, _| buffer.version());
|
||||||
buffer.update(cx, |buffer, cx| {
|
buffer.update(cx, |buffer, cx| {
|
||||||
buffer.edit(
|
buffer.edit(
|
||||||
[
|
[
|
||||||
@ -318,12 +314,13 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
|
|||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
||||||
let text = "fn a() {}";
|
let text = "fn a() {}";
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.new_model(|cx| {
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
|
||||||
});
|
});
|
||||||
|
|
||||||
// Wait for the initial text to parse
|
// Wait for the initial text to parse
|
||||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
cx.executor().run_until_parked();
|
||||||
|
assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
get_tree_sexp(&buffer, cx),
|
get_tree_sexp(&buffer, cx),
|
||||||
concat!(
|
concat!(
|
||||||
@ -354,7 +351,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||||||
assert_eq!(buf.text(), "fn a(b: C) { d; }");
|
assert_eq!(buf.text(), "fn a(b: C) { d; }");
|
||||||
assert!(buf.is_parsing());
|
assert!(buf.is_parsing());
|
||||||
});
|
});
|
||||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
cx.executor().run_until_parked();
|
||||||
|
assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
get_tree_sexp(&buffer, cx),
|
get_tree_sexp(&buffer, cx),
|
||||||
concat!(
|
concat!(
|
||||||
@ -386,7 +384,7 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||||||
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
|
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
|
||||||
assert!(buf.is_parsing());
|
assert!(buf.is_parsing());
|
||||||
});
|
});
|
||||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
cx.executor().run_until_parked();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
get_tree_sexp(&buffer, cx),
|
get_tree_sexp(&buffer, cx),
|
||||||
concat!(
|
concat!(
|
||||||
@ -408,7 +406,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||||||
assert_eq!(buf.text(), "fn a() {}");
|
assert_eq!(buf.text(), "fn a() {}");
|
||||||
assert!(buf.is_parsing());
|
assert!(buf.is_parsing());
|
||||||
});
|
});
|
||||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
|
||||||
|
cx.executor().run_until_parked();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
get_tree_sexp(&buffer, cx),
|
get_tree_sexp(&buffer, cx),
|
||||||
concat!(
|
concat!(
|
||||||
@ -426,7 +425,7 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||||||
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
|
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
|
||||||
assert!(buf.is_parsing());
|
assert!(buf.is_parsing());
|
||||||
});
|
});
|
||||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
cx.executor().run_until_parked();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
get_tree_sexp(&buffer, cx),
|
get_tree_sexp(&buffer, cx),
|
||||||
concat!(
|
concat!(
|
||||||
@ -443,15 +442,15 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||||||
|
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
|
async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.new_model(|cx| {
|
||||||
let mut buffer =
|
let mut buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, "{}").with_language(Arc::new(rust_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), "{}").with_language(Arc::new(rust_lang()), cx);
|
||||||
buffer.set_sync_parse_timeout(Duration::ZERO);
|
buffer.set_sync_parse_timeout(Duration::ZERO);
|
||||||
buffer
|
buffer
|
||||||
});
|
});
|
||||||
|
|
||||||
// Wait for the initial text to parse
|
// Wait for the initial text to parse
|
||||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
cx.executor().run_until_parked();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
get_tree_sexp(&buffer, cx),
|
get_tree_sexp(&buffer, cx),
|
||||||
"(source_file (expression_statement (block)))"
|
"(source_file (expression_statement (block)))"
|
||||||
@ -460,7 +459,7 @@ async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
|
|||||||
buffer.update(cx, |buffer, cx| {
|
buffer.update(cx, |buffer, cx| {
|
||||||
buffer.set_language(Some(Arc::new(json_lang())), cx)
|
buffer.set_language(Some(Arc::new(json_lang())), cx)
|
||||||
});
|
});
|
||||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
cx.executor().run_until_parked();
|
||||||
assert_eq!(get_tree_sexp(&buffer, cx), "(document (object))");
|
assert_eq!(get_tree_sexp(&buffer, cx), "(document (object))");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -493,11 +492,11 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||||||
"#
|
"#
|
||||||
.unindent();
|
.unindent();
|
||||||
|
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.new_model(|cx| {
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
|
||||||
});
|
});
|
||||||
let outline = buffer
|
let outline = buffer
|
||||||
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
|
.update(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -560,7 +559,7 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||||||
cx: &'a gpui::TestAppContext,
|
cx: &'a gpui::TestAppContext,
|
||||||
) -> Vec<(&'a str, Vec<usize>)> {
|
) -> Vec<(&'a str, Vec<usize>)> {
|
||||||
let matches = cx
|
let matches = cx
|
||||||
.read(|cx| outline.search(query, cx.background().clone()))
|
.update(|cx| outline.search(query, cx.background_executor().clone()))
|
||||||
.await;
|
.await;
|
||||||
matches
|
matches
|
||||||
.into_iter()
|
.into_iter()
|
||||||
@ -579,11 +578,11 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
|
|||||||
"#
|
"#
|
||||||
.unindent();
|
.unindent();
|
||||||
|
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.new_model(|cx| {
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
|
||||||
});
|
});
|
||||||
let outline = buffer
|
let outline = buffer
|
||||||
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
|
.update(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -617,10 +616,10 @@ async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
|
|||||||
"#
|
"#
|
||||||
.unindent();
|
.unindent();
|
||||||
|
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.new_model(|cx| {
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx)
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(language), cx)
|
||||||
});
|
});
|
||||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||||
|
|
||||||
// extra context nodes are included in the outline.
|
// extra context nodes are included in the outline.
|
||||||
let outline = snapshot.outline(None).unwrap();
|
let outline = snapshot.outline(None).unwrap();
|
||||||
@ -661,10 +660,10 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
|
|||||||
"#
|
"#
|
||||||
.unindent();
|
.unindent();
|
||||||
|
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.new_model(|cx| {
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
|
||||||
});
|
});
|
||||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||||
|
|
||||||
// point is at the start of an item
|
// point is at the start of an item
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -882,10 +881,10 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &
|
|||||||
|
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
|
fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = "fn a() { b(|c| {}) }";
|
let text = "fn a() { b(|c| {}) }";
|
||||||
let buffer =
|
let buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||||
let snapshot = buffer.snapshot();
|
let snapshot = buffer.snapshot();
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -923,10 +922,10 @@ fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
|
|||||||
fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
|
fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = "fn a() {}";
|
let text = "fn a() {}";
|
||||||
let mut buffer =
|
let mut buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||||
|
|
||||||
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
|
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
|
||||||
assert_eq!(buffer.text(), "fn a() {\n \n}");
|
assert_eq!(buffer.text(), "fn a() {\n \n}");
|
||||||
@ -966,10 +965,10 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
|
|||||||
settings.defaults.hard_tabs = Some(true);
|
settings.defaults.hard_tabs = Some(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = "fn a() {}";
|
let text = "fn a() {}";
|
||||||
let mut buffer =
|
let mut buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||||
|
|
||||||
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
|
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
|
||||||
assert_eq!(buffer.text(), "fn a() {\n\t\n}");
|
assert_eq!(buffer.text(), "fn a() {\n\t\n}");
|
||||||
@ -1007,10 +1006,11 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
|
|||||||
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppContext) {
|
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
|
let entity_id = cx.entity_id();
|
||||||
let mut buffer = Buffer::new(
|
let mut buffer = Buffer::new(
|
||||||
0,
|
0,
|
||||||
cx.model_id() as u64,
|
entity_id.as_u64(),
|
||||||
"
|
"
|
||||||
fn a() {
|
fn a() {
|
||||||
c;
|
c;
|
||||||
@ -1080,10 +1080,12 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
|||||||
buffer
|
buffer
|
||||||
});
|
});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
|
eprintln!("second buffer: {:?}", cx.entity_id());
|
||||||
|
|
||||||
let mut buffer = Buffer::new(
|
let mut buffer = Buffer::new(
|
||||||
0,
|
0,
|
||||||
cx.model_id() as u64,
|
cx.entity_id().as_u64(),
|
||||||
"
|
"
|
||||||
fn a() {
|
fn a() {
|
||||||
b();
|
b();
|
||||||
@ -1137,16 +1139,18 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
|||||||
);
|
);
|
||||||
buffer
|
buffer
|
||||||
});
|
});
|
||||||
|
|
||||||
|
eprintln!("DONE");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut AppContext) {
|
fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let mut buffer = Buffer::new(
|
let mut buffer = Buffer::new(
|
||||||
0,
|
0,
|
||||||
cx.model_id() as u64,
|
cx.entity_id().as_u64(),
|
||||||
"
|
"
|
||||||
fn a() {
|
fn a() {
|
||||||
i
|
i
|
||||||
@ -1205,10 +1209,10 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap
|
|||||||
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
|
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let mut buffer = Buffer::new(
|
let mut buffer = Buffer::new(
|
||||||
0,
|
0,
|
||||||
cx.model_id() as u64,
|
cx.entity_id().as_u64(),
|
||||||
"
|
"
|
||||||
fn a() {}
|
fn a() {}
|
||||||
"
|
"
|
||||||
@ -1262,10 +1266,10 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
|
|||||||
fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
|
fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = "a\nb";
|
let text = "a\nb";
|
||||||
let mut buffer =
|
let mut buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||||
buffer.edit(
|
buffer.edit(
|
||||||
[(0..1, "\n"), (2..3, "\n")],
|
[(0..1, "\n"), (2..3, "\n")],
|
||||||
Some(AutoindentMode::EachLine),
|
Some(AutoindentMode::EachLine),
|
||||||
@ -1280,7 +1284,7 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
|
|||||||
fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
|
fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = "
|
let text = "
|
||||||
const a: usize = 1;
|
const a: usize = 1;
|
||||||
fn b() {
|
fn b() {
|
||||||
@ -1292,7 +1296,7 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
|
|||||||
.unindent();
|
.unindent();
|
||||||
|
|
||||||
let mut buffer =
|
let mut buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||||
buffer.edit(
|
buffer.edit(
|
||||||
[(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
|
[(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
|
||||||
Some(AutoindentMode::EachLine),
|
Some(AutoindentMode::EachLine),
|
||||||
@ -1322,7 +1326,7 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
|
|||||||
fn test_autoindent_block_mode(cx: &mut AppContext) {
|
fn test_autoindent_block_mode(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = r#"
|
let text = r#"
|
||||||
fn a() {
|
fn a() {
|
||||||
b();
|
b();
|
||||||
@ -1330,7 +1334,7 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
|
|||||||
"#
|
"#
|
||||||
.unindent();
|
.unindent();
|
||||||
let mut buffer =
|
let mut buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||||
|
|
||||||
// When this text was copied, both of the quotation marks were at the same
|
// When this text was copied, both of the quotation marks were at the same
|
||||||
// indent level, but the indentation of the first line was not included in
|
// indent level, but the indentation of the first line was not included in
|
||||||
@ -1406,7 +1410,7 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
|
|||||||
fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContext) {
|
fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = r#"
|
let text = r#"
|
||||||
fn a() {
|
fn a() {
|
||||||
if b() {
|
if b() {
|
||||||
@ -1416,7 +1420,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
|
|||||||
"#
|
"#
|
||||||
.unindent();
|
.unindent();
|
||||||
let mut buffer =
|
let mut buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||||
|
|
||||||
// The original indent columns are not known, so this text is
|
// The original indent columns are not known, so this text is
|
||||||
// auto-indented in a block as if the first line was copied in
|
// auto-indented in a block as if the first line was copied in
|
||||||
@ -1486,7 +1490,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
|
|||||||
fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
|
fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = "
|
let text = "
|
||||||
* one
|
* one
|
||||||
- a
|
- a
|
||||||
@ -1495,7 +1499,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
|
|||||||
"
|
"
|
||||||
.unindent();
|
.unindent();
|
||||||
|
|
||||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text).with_language(
|
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text).with_language(
|
||||||
Arc::new(Language::new(
|
Arc::new(Language::new(
|
||||||
LanguageConfig {
|
LanguageConfig {
|
||||||
name: "Markdown".into(),
|
name: "Markdown".into(),
|
||||||
@ -1555,7 +1559,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
|
|||||||
language_registry.add(html_language.clone());
|
language_registry.add(html_language.clone());
|
||||||
language_registry.add(javascript_language.clone());
|
language_registry.add(javascript_language.clone());
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let (text, ranges) = marked_text_ranges(
|
let (text, ranges) = marked_text_ranges(
|
||||||
&"
|
&"
|
||||||
<div>ˇ
|
<div>ˇ
|
||||||
@ -1571,7 +1575,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
|
|||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
|
|
||||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
|
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
|
||||||
buffer.set_language_registry(language_registry);
|
buffer.set_language_registry(language_registry);
|
||||||
buffer.set_language(Some(html_language), cx);
|
buffer.set_language(Some(html_language), cx);
|
||||||
buffer.edit(
|
buffer.edit(
|
||||||
@ -1606,9 +1610,9 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
|
|||||||
settings.defaults.tab_size = Some(2.try_into().unwrap());
|
settings.defaults.tab_size = Some(2.try_into().unwrap());
|
||||||
});
|
});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let mut buffer =
|
let mut buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, "").with_language(Arc::new(ruby_lang()), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), "").with_language(Arc::new(ruby_lang()), cx);
|
||||||
|
|
||||||
let text = r#"
|
let text = r#"
|
||||||
class C
|
class C
|
||||||
@ -1649,7 +1653,7 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
|
|||||||
fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
|
fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let language = Language::new(
|
let language = Language::new(
|
||||||
LanguageConfig {
|
LanguageConfig {
|
||||||
name: "JavaScript".into(),
|
name: "JavaScript".into(),
|
||||||
@ -1710,7 +1714,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
|
|||||||
.unindent();
|
.unindent();
|
||||||
|
|
||||||
let buffer =
|
let buffer =
|
||||||
Buffer::new(0, cx.model_id() as u64, &text).with_language(Arc::new(language), cx);
|
Buffer::new(0, cx.entity_id().as_u64(), &text).with_language(Arc::new(language), cx);
|
||||||
let snapshot = buffer.snapshot();
|
let snapshot = buffer.snapshot();
|
||||||
|
|
||||||
let config = snapshot.language_scope_at(0).unwrap();
|
let config = snapshot.language_scope_at(0).unwrap();
|
||||||
@ -1782,7 +1786,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
|
|||||||
fn test_language_scope_at_with_rust(cx: &mut AppContext) {
|
fn test_language_scope_at_with_rust(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let language = Language::new(
|
let language = Language::new(
|
||||||
LanguageConfig {
|
LanguageConfig {
|
||||||
name: "Rust".into(),
|
name: "Rust".into(),
|
||||||
@ -1822,7 +1826,7 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
|
|||||||
"#
|
"#
|
||||||
.unindent();
|
.unindent();
|
||||||
|
|
||||||
let buffer = Buffer::new(0, cx.model_id() as u64, text.clone())
|
let buffer = Buffer::new(0, cx.entity_id().as_u64(), text.clone())
|
||||||
.with_language(Arc::new(language), cx);
|
.with_language(Arc::new(language), cx);
|
||||||
let snapshot = buffer.snapshot();
|
let snapshot = buffer.snapshot();
|
||||||
|
|
||||||
@ -1850,7 +1854,7 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
|
|||||||
fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
|
fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
|
||||||
init_settings(cx, |_| {});
|
init_settings(cx, |_| {});
|
||||||
|
|
||||||
cx.add_model(|cx| {
|
cx.new_model(|cx| {
|
||||||
let text = r#"
|
let text = r#"
|
||||||
<ol>
|
<ol>
|
||||||
<% people.each do |person| %>
|
<% people.each do |person| %>
|
||||||
@ -1867,7 +1871,7 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
|
|||||||
language_registry.add(Arc::new(html_lang()));
|
language_registry.add(Arc::new(html_lang()));
|
||||||
language_registry.add(Arc::new(erb_lang()));
|
language_registry.add(Arc::new(erb_lang()));
|
||||||
|
|
||||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
|
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
|
||||||
buffer.set_language_registry(language_registry.clone());
|
buffer.set_language_registry(language_registry.clone());
|
||||||
buffer.set_language(
|
buffer.set_language(
|
||||||
language_registry
|
language_registry
|
||||||
@ -1898,8 +1902,8 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
|
|||||||
fn test_serialization(cx: &mut gpui::AppContext) {
|
fn test_serialization(cx: &mut gpui::AppContext) {
|
||||||
let mut now = Instant::now();
|
let mut now = Instant::now();
|
||||||
|
|
||||||
let buffer1 = cx.add_model(|cx| {
|
let buffer1 = cx.new_model(|cx| {
|
||||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "abc");
|
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "abc");
|
||||||
buffer.edit([(3..3, "D")], None, cx);
|
buffer.edit([(3..3, "D")], None, cx);
|
||||||
|
|
||||||
now += Duration::from_secs(1);
|
now += Duration::from_secs(1);
|
||||||
@ -1919,9 +1923,9 @@ fn test_serialization(cx: &mut gpui::AppContext) {
|
|||||||
|
|
||||||
let state = buffer1.read(cx).to_proto();
|
let state = buffer1.read(cx).to_proto();
|
||||||
let ops = cx
|
let ops = cx
|
||||||
.background()
|
.background_executor()
|
||||||
.block(buffer1.read(cx).serialize_ops(None, cx));
|
.block(buffer1.read(cx).serialize_ops(None, cx));
|
||||||
let buffer2 = cx.add_model(|cx| {
|
let buffer2 = cx.new_model(|cx| {
|
||||||
let mut buffer = Buffer::from_proto(1, state, None).unwrap();
|
let mut buffer = Buffer::from_proto(1, state, None).unwrap();
|
||||||
buffer
|
buffer
|
||||||
.apply_ops(
|
.apply_ops(
|
||||||
@ -1953,14 +1957,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||||||
.collect::<String>();
|
.collect::<String>();
|
||||||
let mut replica_ids = Vec::new();
|
let mut replica_ids = Vec::new();
|
||||||
let mut buffers = Vec::new();
|
let mut buffers = Vec::new();
|
||||||
let network = Rc::new(RefCell::new(Network::new(rng.clone())));
|
let network = Arc::new(Mutex::new(Network::new(rng.clone())));
|
||||||
let base_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text.as_str()));
|
let base_buffer =
|
||||||
|
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), base_text.as_str()));
|
||||||
|
|
||||||
for i in 0..rng.gen_range(min_peers..=max_peers) {
|
for i in 0..rng.gen_range(min_peers..=max_peers) {
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.new_model(|cx| {
|
||||||
let state = base_buffer.read(cx).to_proto();
|
let state = base_buffer.read(cx).to_proto();
|
||||||
let ops = cx
|
let ops = cx
|
||||||
.background()
|
.background_executor()
|
||||||
.block(base_buffer.read(cx).serialize_ops(None, cx));
|
.block(base_buffer.read(cx).serialize_ops(None, cx));
|
||||||
let mut buffer = Buffer::from_proto(i as ReplicaId, state, None).unwrap();
|
let mut buffer = Buffer::from_proto(i as ReplicaId, state, None).unwrap();
|
||||||
buffer
|
buffer
|
||||||
@ -1975,16 +1980,17 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||||||
cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
|
cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
|
||||||
if let Event::Operation(op) = event {
|
if let Event::Operation(op) = event {
|
||||||
network
|
network
|
||||||
.borrow_mut()
|
.lock()
|
||||||
.broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]);
|
.broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]);
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.detach();
|
.detach();
|
||||||
buffer
|
buffer
|
||||||
});
|
});
|
||||||
|
|
||||||
buffers.push(buffer);
|
buffers.push(buffer);
|
||||||
replica_ids.push(i as ReplicaId);
|
replica_ids.push(i as ReplicaId);
|
||||||
network.borrow_mut().add_peer(i as ReplicaId);
|
network.lock().add_peer(i as ReplicaId);
|
||||||
log::info!("Adding initial peer with replica id {}", i);
|
log::info!("Adding initial peer with replica id {}", i);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2065,7 +2071,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||||||
50..=59 if replica_ids.len() < max_peers => {
|
50..=59 if replica_ids.len() < max_peers => {
|
||||||
let old_buffer_state = buffer.read(cx).to_proto();
|
let old_buffer_state = buffer.read(cx).to_proto();
|
||||||
let old_buffer_ops = cx
|
let old_buffer_ops = cx
|
||||||
.background()
|
.background_executor()
|
||||||
.block(buffer.read(cx).serialize_ops(None, cx));
|
.block(buffer.read(cx).serialize_ops(None, cx));
|
||||||
let new_replica_id = (0..=replica_ids.len() as ReplicaId)
|
let new_replica_id = (0..=replica_ids.len() as ReplicaId)
|
||||||
.filter(|replica_id| *replica_id != buffer.read(cx).replica_id())
|
.filter(|replica_id| *replica_id != buffer.read(cx).replica_id())
|
||||||
@ -2076,7 +2082,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||||||
new_replica_id,
|
new_replica_id,
|
||||||
replica_id
|
replica_id
|
||||||
);
|
);
|
||||||
new_buffer = Some(cx.add_model(|cx| {
|
new_buffer = Some(cx.new_model(|cx| {
|
||||||
let mut new_buffer =
|
let mut new_buffer =
|
||||||
Buffer::from_proto(new_replica_id, old_buffer_state, None).unwrap();
|
Buffer::from_proto(new_replica_id, old_buffer_state, None).unwrap();
|
||||||
new_buffer
|
new_buffer
|
||||||
@ -2096,7 +2102,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||||||
let network = network.clone();
|
let network = network.clone();
|
||||||
cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
|
cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
|
||||||
if let Event::Operation(op) = event {
|
if let Event::Operation(op) = event {
|
||||||
network.borrow_mut().broadcast(
|
network.lock().broadcast(
|
||||||
buffer.replica_id(),
|
buffer.replica_id(),
|
||||||
vec![proto::serialize_operation(op)],
|
vec![proto::serialize_operation(op)],
|
||||||
);
|
);
|
||||||
@ -2105,15 +2111,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||||||
.detach();
|
.detach();
|
||||||
new_buffer
|
new_buffer
|
||||||
}));
|
}));
|
||||||
network.borrow_mut().replicate(replica_id, new_replica_id);
|
network.lock().replicate(replica_id, new_replica_id);
|
||||||
|
|
||||||
if new_replica_id as usize == replica_ids.len() {
|
if new_replica_id as usize == replica_ids.len() {
|
||||||
replica_ids.push(new_replica_id);
|
replica_ids.push(new_replica_id);
|
||||||
} else {
|
} else {
|
||||||
let new_buffer = new_buffer.take().unwrap();
|
let new_buffer = new_buffer.take().unwrap();
|
||||||
while network.borrow().has_unreceived(new_replica_id) {
|
while network.lock().has_unreceived(new_replica_id) {
|
||||||
let ops = network
|
let ops = network
|
||||||
.borrow_mut()
|
.lock()
|
||||||
.receive(new_replica_id)
|
.receive(new_replica_id)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|op| proto::deserialize_operation(op).unwrap());
|
.map(|op| proto::deserialize_operation(op).unwrap());
|
||||||
@ -2140,9 +2146,9 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||||||
});
|
});
|
||||||
mutation_count -= 1;
|
mutation_count -= 1;
|
||||||
}
|
}
|
||||||
_ if network.borrow().has_unreceived(replica_id) => {
|
_ if network.lock().has_unreceived(replica_id) => {
|
||||||
let ops = network
|
let ops = network
|
||||||
.borrow_mut()
|
.lock()
|
||||||
.receive(replica_id)
|
.receive(replica_id)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|op| proto::deserialize_operation(op).unwrap());
|
.map(|op| proto::deserialize_operation(op).unwrap());
|
||||||
@ -2167,7 +2173,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||||||
buffer.read(cx).check_invariants();
|
buffer.read(cx).check_invariants();
|
||||||
}
|
}
|
||||||
|
|
||||||
if mutation_count == 0 && network.borrow().is_idle() {
|
if mutation_count == 0 && network.lock().is_idle() {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -2438,8 +2444,8 @@ fn javascript_lang() -> Language {
|
|||||||
.unwrap()
|
.unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
|
fn get_tree_sexp(buffer: &Model<Buffer>, cx: &mut gpui::TestAppContext) -> String {
|
||||||
buffer.read_with(cx, |buffer, _| {
|
buffer.update(cx, |buffer, _| {
|
||||||
let snapshot = buffer.snapshot();
|
let snapshot = buffer.snapshot();
|
||||||
let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
|
let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
|
||||||
layers[0].node().to_sexp()
|
layers[0].node().to_sexp()
|
||||||
@ -2454,8 +2460,8 @@ fn assert_bracket_pairs(
|
|||||||
cx: &mut AppContext,
|
cx: &mut AppContext,
|
||||||
) {
|
) {
|
||||||
let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
|
let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.new_model(|cx| {
|
||||||
Buffer::new(0, cx.model_id() as u64, expected_text.clone())
|
Buffer::new(0, cx.entity_id().as_u64(), expected_text.clone())
|
||||||
.with_language(Arc::new(language), cx)
|
.with_language(Arc::new(language), cx)
|
||||||
});
|
});
|
||||||
let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());
|
let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());
|
||||||
@ -2478,9 +2484,10 @@ fn assert_bracket_pairs(
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn init_settings(cx: &mut AppContext, f: fn(&mut AllLanguageSettingsContent)) {
|
fn init_settings(cx: &mut AppContext, f: fn(&mut AllLanguageSettingsContent)) {
|
||||||
cx.set_global(SettingsStore::test(cx));
|
let settings_store = SettingsStore::test(cx);
|
||||||
|
cx.set_global(settings_store);
|
||||||
crate::init(cx);
|
crate::init(cx);
|
||||||
cx.update_global::<SettingsStore, _, _>(|settings, cx| {
|
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||||
settings.update_user_settings::<AllLanguageSettings>(cx, f);
|
settings.update_user_settings::<AllLanguageSettings>(cx, f);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
use gpui::fonts::HighlightStyle;
|
use gpui::HighlightStyle;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use theme::SyntaxTheme;
|
use theme::SyntaxTheme;
|
||||||
|
|
||||||
@ -79,23 +79,23 @@ impl Default for HighlightId {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use gpui::color::Color;
|
use gpui::rgba;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_highlight_map() {
|
fn test_highlight_map() {
|
||||||
let theme = SyntaxTheme::new(
|
let theme = SyntaxTheme {
|
||||||
[
|
highlights: [
|
||||||
("function", Color::from_u32(0x100000ff)),
|
("function", rgba(0x100000ff)),
|
||||||
("function.method", Color::from_u32(0x200000ff)),
|
("function.method", rgba(0x200000ff)),
|
||||||
("function.async", Color::from_u32(0x300000ff)),
|
("function.async", rgba(0x300000ff)),
|
||||||
("variable.builtin.self.rust", Color::from_u32(0x400000ff)),
|
("variable.builtin.self.rust", rgba(0x400000ff)),
|
||||||
("variable.builtin", Color::from_u32(0x500000ff)),
|
("variable.builtin", rgba(0x500000ff)),
|
||||||
("variable", Color::from_u32(0x600000ff)),
|
("variable", rgba(0x600000ff)),
|
||||||
]
|
]
|
||||||
.iter()
|
.iter()
|
||||||
.map(|(name, color)| (name.to_string(), (*color).into()))
|
.map(|(name, color)| (name.to_string(), (*color).into()))
|
||||||
.collect(),
|
.collect(),
|
||||||
);
|
};
|
||||||
|
|
||||||
let capture_names = &[
|
let capture_names = &[
|
||||||
"function.special",
|
"function.special",
|
||||||
|
@ -2,13 +2,13 @@ mod buffer;
|
|||||||
mod diagnostic_set;
|
mod diagnostic_set;
|
||||||
mod highlight_map;
|
mod highlight_map;
|
||||||
pub mod language_settings;
|
pub mod language_settings;
|
||||||
pub mod markdown;
|
|
||||||
mod outline;
|
mod outline;
|
||||||
pub mod proto;
|
pub mod proto;
|
||||||
mod syntax_map;
|
mod syntax_map;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod buffer_tests;
|
mod buffer_tests;
|
||||||
|
pub mod markdown;
|
||||||
|
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
@ -18,7 +18,7 @@ use futures::{
|
|||||||
future::{BoxFuture, Shared},
|
future::{BoxFuture, Shared},
|
||||||
FutureExt, TryFutureExt as _,
|
FutureExt, TryFutureExt as _,
|
||||||
};
|
};
|
||||||
use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
|
use gpui::{AppContext, AsyncAppContext, BackgroundExecutor, Task};
|
||||||
pub use highlight_map::HighlightMap;
|
pub use highlight_map::HighlightMap;
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
use lsp::{CodeActionKind, LanguageServerBinary};
|
use lsp::{CodeActionKind, LanguageServerBinary};
|
||||||
@ -44,7 +44,7 @@ use std::{
|
|||||||
};
|
};
|
||||||
use syntax_map::SyntaxSnapshot;
|
use syntax_map::SyntaxSnapshot;
|
||||||
use theme::{SyntaxTheme, Theme};
|
use theme::{SyntaxTheme, Theme};
|
||||||
use tree_sitter::{self, Query};
|
use tree_sitter::{self, wasmtime, Query, WasmStore};
|
||||||
use unicase::UniCase;
|
use unicase::UniCase;
|
||||||
use util::{http::HttpClient, paths::PathExt};
|
use util::{http::HttpClient, paths::PathExt};
|
||||||
use util::{post_inc, ResultExt, TryFutureExt as _, UnwrapFuture};
|
use util::{post_inc, ResultExt, TryFutureExt as _, UnwrapFuture};
|
||||||
@ -84,10 +84,15 @@ impl LspBinaryStatusSender {
|
|||||||
}
|
}
|
||||||
|
|
||||||
thread_local! {
|
thread_local! {
|
||||||
static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
|
static PARSER: RefCell<Parser> = {
|
||||||
|
let mut parser = Parser::new();
|
||||||
|
parser.set_wasm_store(WasmStore::new(WASM_ENGINE.clone()).unwrap()).unwrap();
|
||||||
|
RefCell::new(parser)
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
|
pub static ref WASM_ENGINE: wasmtime::Engine = wasmtime::Engine::default();
|
||||||
pub static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
|
pub static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
|
||||||
pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
|
pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
|
||||||
LanguageConfig {
|
LanguageConfig {
|
||||||
@ -111,6 +116,7 @@ pub struct LanguageServerName(pub Arc<str>);
|
|||||||
pub struct CachedLspAdapter {
|
pub struct CachedLspAdapter {
|
||||||
pub name: LanguageServerName,
|
pub name: LanguageServerName,
|
||||||
pub short_name: &'static str,
|
pub short_name: &'static str,
|
||||||
|
pub initialization_options: Option<Value>,
|
||||||
pub disk_based_diagnostic_sources: Vec<String>,
|
pub disk_based_diagnostic_sources: Vec<String>,
|
||||||
pub disk_based_diagnostics_progress_token: Option<String>,
|
pub disk_based_diagnostics_progress_token: Option<String>,
|
||||||
pub language_ids: HashMap<String, String>,
|
pub language_ids: HashMap<String, String>,
|
||||||
@ -122,6 +128,7 @@ impl CachedLspAdapter {
|
|||||||
pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
|
pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
|
||||||
let name = adapter.name().await;
|
let name = adapter.name().await;
|
||||||
let short_name = adapter.short_name();
|
let short_name = adapter.short_name();
|
||||||
|
let initialization_options = adapter.initialization_options().await;
|
||||||
let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
|
let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
|
||||||
let disk_based_diagnostics_progress_token =
|
let disk_based_diagnostics_progress_token =
|
||||||
adapter.disk_based_diagnostics_progress_token().await;
|
adapter.disk_based_diagnostics_progress_token().await;
|
||||||
@ -130,6 +137,7 @@ impl CachedLspAdapter {
|
|||||||
Arc::new(CachedLspAdapter {
|
Arc::new(CachedLspAdapter {
|
||||||
name,
|
name,
|
||||||
short_name,
|
short_name,
|
||||||
|
initialization_options,
|
||||||
disk_based_diagnostic_sources,
|
disk_based_diagnostic_sources,
|
||||||
disk_based_diagnostics_progress_token,
|
disk_based_diagnostics_progress_token,
|
||||||
language_ids,
|
language_ids,
|
||||||
@ -357,6 +365,7 @@ pub struct CodeLabel {
|
|||||||
#[derive(Clone, Deserialize)]
|
#[derive(Clone, Deserialize)]
|
||||||
pub struct LanguageConfig {
|
pub struct LanguageConfig {
|
||||||
pub name: Arc<str>,
|
pub name: Arc<str>,
|
||||||
|
pub grammar_name: Option<Arc<str>>,
|
||||||
pub path_suffixes: Vec<String>,
|
pub path_suffixes: Vec<String>,
|
||||||
pub brackets: BracketPairConfig,
|
pub brackets: BracketPairConfig,
|
||||||
#[serde(default, deserialize_with = "deserialize_regex")]
|
#[serde(default, deserialize_with = "deserialize_regex")]
|
||||||
@ -443,6 +452,7 @@ impl Default for LanguageConfig {
|
|||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
name: "".into(),
|
name: "".into(),
|
||||||
|
grammar_name: None,
|
||||||
path_suffixes: Default::default(),
|
path_suffixes: Default::default(),
|
||||||
brackets: Default::default(),
|
brackets: Default::default(),
|
||||||
auto_indent_using_last_non_empty_line: auto_indent_using_last_non_empty_line_default(),
|
auto_indent_using_last_non_empty_line: auto_indent_using_last_non_empty_line_default(),
|
||||||
@ -617,14 +627,25 @@ type AvailableLanguageId = usize;
|
|||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
struct AvailableLanguage {
|
struct AvailableLanguage {
|
||||||
id: AvailableLanguageId,
|
id: AvailableLanguageId,
|
||||||
path: &'static str,
|
|
||||||
config: LanguageConfig,
|
config: LanguageConfig,
|
||||||
grammar: tree_sitter::Language,
|
grammar: AvailableGrammar,
|
||||||
lsp_adapters: Vec<Arc<dyn LspAdapter>>,
|
lsp_adapters: Vec<Arc<dyn LspAdapter>>,
|
||||||
get_queries: fn(&str) -> LanguageQueries,
|
|
||||||
loaded: bool,
|
loaded: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
enum AvailableGrammar {
|
||||||
|
Native {
|
||||||
|
grammar: tree_sitter::Language,
|
||||||
|
asset_dir: &'static str,
|
||||||
|
get_queries: fn(&str) -> LanguageQueries,
|
||||||
|
},
|
||||||
|
Wasm {
|
||||||
|
grammar_name: Arc<str>,
|
||||||
|
path: Arc<Path>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
pub struct LanguageRegistry {
|
pub struct LanguageRegistry {
|
||||||
state: RwLock<LanguageRegistryState>,
|
state: RwLock<LanguageRegistryState>,
|
||||||
language_server_download_dir: Option<Arc<Path>>,
|
language_server_download_dir: Option<Arc<Path>>,
|
||||||
@ -633,7 +654,7 @@ pub struct LanguageRegistry {
|
|||||||
lsp_binary_paths: Mutex<
|
lsp_binary_paths: Mutex<
|
||||||
HashMap<LanguageServerName, Shared<Task<Result<LanguageServerBinary, Arc<anyhow::Error>>>>>,
|
HashMap<LanguageServerName, Shared<Task<Result<LanguageServerBinary, Arc<anyhow::Error>>>>>,
|
||||||
>,
|
>,
|
||||||
executor: Option<Arc<Background>>,
|
executor: Option<BackgroundExecutor>,
|
||||||
lsp_binary_status_tx: LspBinaryStatusSender,
|
lsp_binary_status_tx: LspBinaryStatusSender,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -682,7 +703,7 @@ impl LanguageRegistry {
|
|||||||
Self::new(Task::ready(()))
|
Self::new(Task::ready(()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_executor(&mut self, executor: Arc<Background>) {
|
pub fn set_executor(&mut self, executor: BackgroundExecutor) {
|
||||||
self.executor = Some(executor);
|
self.executor = Some(executor);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -696,7 +717,7 @@ impl LanguageRegistry {
|
|||||||
|
|
||||||
pub fn register(
|
pub fn register(
|
||||||
&self,
|
&self,
|
||||||
path: &'static str,
|
asset_dir: &'static str,
|
||||||
config: LanguageConfig,
|
config: LanguageConfig,
|
||||||
grammar: tree_sitter::Language,
|
grammar: tree_sitter::Language,
|
||||||
lsp_adapters: Vec<Arc<dyn LspAdapter>>,
|
lsp_adapters: Vec<Arc<dyn LspAdapter>>,
|
||||||
@ -705,11 +726,24 @@ impl LanguageRegistry {
|
|||||||
let state = &mut *self.state.write();
|
let state = &mut *self.state.write();
|
||||||
state.available_languages.push(AvailableLanguage {
|
state.available_languages.push(AvailableLanguage {
|
||||||
id: post_inc(&mut state.next_available_language_id),
|
id: post_inc(&mut state.next_available_language_id),
|
||||||
path,
|
|
||||||
config,
|
config,
|
||||||
grammar,
|
grammar: AvailableGrammar::Native {
|
||||||
|
grammar,
|
||||||
|
get_queries,
|
||||||
|
asset_dir,
|
||||||
|
},
|
||||||
lsp_adapters,
|
lsp_adapters,
|
||||||
get_queries,
|
loaded: false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_wasm(&self, path: Arc<Path>, grammar_name: Arc<str>, config: LanguageConfig) {
|
||||||
|
let state = &mut *self.state.write();
|
||||||
|
state.available_languages.push(AvailableLanguage {
|
||||||
|
id: post_inc(&mut state.next_available_language_id),
|
||||||
|
config,
|
||||||
|
grammar: AvailableGrammar::Wasm { grammar_name, path },
|
||||||
|
lsp_adapters: Vec::new(),
|
||||||
loaded: false,
|
loaded: false,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -749,7 +783,7 @@ impl LanguageRegistry {
|
|||||||
let mut state = self.state.write();
|
let mut state = self.state.write();
|
||||||
state.theme = Some(theme.clone());
|
state.theme = Some(theme.clone());
|
||||||
for language in &state.languages {
|
for language in &state.languages {
|
||||||
language.set_theme(&theme.editor.syntax);
|
language.set_theme(&theme.syntax());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -834,13 +868,43 @@ impl LanguageRegistry {
|
|||||||
executor
|
executor
|
||||||
.spawn(async move {
|
.spawn(async move {
|
||||||
let id = language.id;
|
let id = language.id;
|
||||||
let queries = (language.get_queries)(&language.path);
|
let name = language.config.name.clone();
|
||||||
let language =
|
let language = async {
|
||||||
Language::new(language.config, Some(language.grammar))
|
let (grammar, queries) = match language.grammar {
|
||||||
|
AvailableGrammar::Native {
|
||||||
|
grammar,
|
||||||
|
asset_dir,
|
||||||
|
get_queries,
|
||||||
|
} => (grammar, (get_queries)(asset_dir)),
|
||||||
|
AvailableGrammar::Wasm { grammar_name, path } => {
|
||||||
|
let mut wasm_path = path.join(grammar_name.as_ref());
|
||||||
|
wasm_path.set_extension("wasm");
|
||||||
|
let wasm_bytes = std::fs::read(&wasm_path)?;
|
||||||
|
let grammar = PARSER.with(|parser| {
|
||||||
|
let mut parser = parser.borrow_mut();
|
||||||
|
let mut store = parser.take_wasm_store().unwrap();
|
||||||
|
let grammar =
|
||||||
|
store.load_language(&grammar_name, &wasm_bytes);
|
||||||
|
parser.set_wasm_store(store).unwrap();
|
||||||
|
grammar
|
||||||
|
})?;
|
||||||
|
let mut queries = LanguageQueries::default();
|
||||||
|
if let Ok(contents) = std::fs::read_to_string(
|
||||||
|
&path.join("highlights.scm"),
|
||||||
|
) {
|
||||||
|
queries.highlights = Some(contents.into());
|
||||||
|
}
|
||||||
|
(grammar, queries)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Language::new(language.config, Some(grammar))
|
||||||
.with_lsp_adapters(language.lsp_adapters)
|
.with_lsp_adapters(language.lsp_adapters)
|
||||||
.await;
|
.await
|
||||||
let name = language.name();
|
.with_queries(queries)
|
||||||
match language.with_queries(queries) {
|
}
|
||||||
|
.await;
|
||||||
|
|
||||||
|
match language {
|
||||||
Ok(language) => {
|
Ok(language) => {
|
||||||
let language = Arc::new(language);
|
let language = Arc::new(language);
|
||||||
let mut state = this.state.write();
|
let mut state = this.state.write();
|
||||||
@ -918,7 +982,7 @@ impl LanguageRegistry {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let servers_tx = servers_tx.clone();
|
let servers_tx = servers_tx.clone();
|
||||||
cx.background()
|
cx.background_executor()
|
||||||
.spawn(async move {
|
.spawn(async move {
|
||||||
if fake_server
|
if fake_server
|
||||||
.try_receive_notification::<lsp::notification::Initialized>()
|
.try_receive_notification::<lsp::notification::Initialized>()
|
||||||
@ -955,18 +1019,22 @@ impl LanguageRegistry {
|
|||||||
|
|
||||||
let task = {
|
let task = {
|
||||||
let container_dir = container_dir.clone();
|
let container_dir = container_dir.clone();
|
||||||
cx.spawn(|mut cx| async move {
|
cx.spawn(move |mut cx| async move {
|
||||||
login_shell_env_loaded.await;
|
login_shell_env_loaded.await;
|
||||||
|
|
||||||
let mut lock = this.lsp_binary_paths.lock();
|
let entry = this
|
||||||
let entry = lock
|
.lsp_binary_paths
|
||||||
|
.lock()
|
||||||
.entry(adapter.name.clone())
|
.entry(adapter.name.clone())
|
||||||
.or_insert_with(|| {
|
.or_insert_with(|| {
|
||||||
|
let adapter = adapter.clone();
|
||||||
|
let language = language.clone();
|
||||||
|
let delegate = delegate.clone();
|
||||||
cx.spawn(|cx| {
|
cx.spawn(|cx| {
|
||||||
get_binary(
|
get_binary(
|
||||||
adapter.clone(),
|
adapter,
|
||||||
language.clone(),
|
language,
|
||||||
delegate.clone(),
|
delegate,
|
||||||
container_dir,
|
container_dir,
|
||||||
lsp_binary_statuses,
|
lsp_binary_statuses,
|
||||||
cx,
|
cx,
|
||||||
@ -976,9 +1044,8 @@ impl LanguageRegistry {
|
|||||||
.shared()
|
.shared()
|
||||||
})
|
})
|
||||||
.clone();
|
.clone();
|
||||||
drop(lock);
|
|
||||||
|
|
||||||
let binary = match entry.clone().await {
|
let binary = match entry.await {
|
||||||
Ok(binary) => binary,
|
Ok(binary) => binary,
|
||||||
Err(err) => anyhow::bail!("{err}"),
|
Err(err) => anyhow::bail!("{err}"),
|
||||||
};
|
};
|
||||||
@ -1047,7 +1114,7 @@ impl LanguageRegistryState {
|
|||||||
|
|
||||||
fn add(&mut self, language: Arc<Language>) {
|
fn add(&mut self, language: Arc<Language>) {
|
||||||
if let Some(theme) = self.theme.as_ref() {
|
if let Some(theme) = self.theme.as_ref() {
|
||||||
language.set_theme(&theme.editor.syntax);
|
language.set_theme(&theme.syntax());
|
||||||
}
|
}
|
||||||
self.languages.push(language);
|
self.languages.push(language);
|
||||||
self.version += 1;
|
self.version += 1;
|
||||||
@ -1387,9 +1454,9 @@ impl Language {
|
|||||||
let query = Query::new(&self.grammar_mut().ts_language, source)?;
|
let query = Query::new(&self.grammar_mut().ts_language, source)?;
|
||||||
|
|
||||||
let mut override_configs_by_id = HashMap::default();
|
let mut override_configs_by_id = HashMap::default();
|
||||||
for (ix, name) in query.capture_names().iter().copied().enumerate() {
|
for (ix, name) in query.capture_names().iter().enumerate() {
|
||||||
if !name.starts_with('_') {
|
if !name.starts_with('_') {
|
||||||
let value = self.config.overrides.remove(name).unwrap_or_default();
|
let value = self.config.overrides.remove(*name).unwrap_or_default();
|
||||||
for server_name in &value.opt_into_language_servers {
|
for server_name in &value.opt_into_language_servers {
|
||||||
if !self
|
if !self
|
||||||
.config
|
.config
|
||||||
@ -1400,7 +1467,7 @@ impl Language {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
override_configs_by_id.insert(ix as u32, (name.into(), value));
|
override_configs_by_id.insert(ix as u32, (name.to_string(), value));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1855,7 +1922,8 @@ mod tests {
|
|||||||
#[gpui::test(iterations = 10)]
|
#[gpui::test(iterations = 10)]
|
||||||
async fn test_first_line_pattern(cx: &mut TestAppContext) {
|
async fn test_first_line_pattern(cx: &mut TestAppContext) {
|
||||||
let mut languages = LanguageRegistry::test();
|
let mut languages = LanguageRegistry::test();
|
||||||
languages.set_executor(cx.background());
|
|
||||||
|
languages.set_executor(cx.executor());
|
||||||
let languages = Arc::new(languages);
|
let languages = Arc::new(languages);
|
||||||
languages.register(
|
languages.register(
|
||||||
"/javascript",
|
"/javascript",
|
||||||
@ -1892,7 +1960,7 @@ mod tests {
|
|||||||
#[gpui::test(iterations = 10)]
|
#[gpui::test(iterations = 10)]
|
||||||
async fn test_language_loading(cx: &mut TestAppContext) {
|
async fn test_language_loading(cx: &mut TestAppContext) {
|
||||||
let mut languages = LanguageRegistry::test();
|
let mut languages = LanguageRegistry::test();
|
||||||
languages.set_executor(cx.background());
|
languages.set_executor(cx.executor());
|
||||||
let languages = Arc::new(languages);
|
let languages = Arc::new(languages);
|
||||||
languages.register(
|
languages.register(
|
||||||
"/JSON",
|
"/JSON",
|
||||||
|
@ -8,10 +8,11 @@ use schemars::{
|
|||||||
JsonSchema,
|
JsonSchema,
|
||||||
};
|
};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use settings::Settings;
|
||||||
use std::{num::NonZeroU32, path::Path, sync::Arc};
|
use std::{num::NonZeroU32, path::Path, sync::Arc};
|
||||||
|
|
||||||
pub fn init(cx: &mut AppContext) {
|
pub fn init(cx: &mut AppContext) {
|
||||||
settings::register::<AllLanguageSettings>(cx);
|
AllLanguageSettings::register(cx);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn language_settings<'a>(
|
pub fn language_settings<'a>(
|
||||||
@ -28,7 +29,7 @@ pub fn all_language_settings<'a>(
|
|||||||
cx: &'a AppContext,
|
cx: &'a AppContext,
|
||||||
) -> &'a AllLanguageSettings {
|
) -> &'a AllLanguageSettings {
|
||||||
let location = file.map(|f| (f.worktree_id(), f.path().as_ref()));
|
let location = file.map(|f| (f.worktree_id(), f.path().as_ref()));
|
||||||
settings::get_local(location, cx)
|
AllLanguageSettings::get(location, cx)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@ -254,7 +255,7 @@ impl InlayHintKind {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl settings::Setting for AllLanguageSettings {
|
impl settings::Settings for AllLanguageSettings {
|
||||||
const KEY: Option<&'static str> = None;
|
const KEY: Option<&'static str> = None;
|
||||||
|
|
||||||
type FileContent = AllLanguageSettingsContent;
|
type FileContent = AllLanguageSettingsContent;
|
||||||
@ -262,7 +263,7 @@ impl settings::Setting for AllLanguageSettings {
|
|||||||
fn load(
|
fn load(
|
||||||
default_value: &Self::FileContent,
|
default_value: &Self::FileContent,
|
||||||
user_settings: &[&Self::FileContent],
|
user_settings: &[&Self::FileContent],
|
||||||
_: &AppContext,
|
_: &mut AppContext,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
// A default is provided for all settings.
|
// A default is provided for all settings.
|
||||||
let mut defaults: LanguageSettings =
|
let mut defaults: LanguageSettings =
|
||||||
|
@ -2,7 +2,7 @@ use std::sync::Arc;
|
|||||||
use std::{ops::Range, path::PathBuf};
|
use std::{ops::Range, path::PathBuf};
|
||||||
|
|
||||||
use crate::{HighlightId, Language, LanguageRegistry};
|
use crate::{HighlightId, Language, LanguageRegistry};
|
||||||
use gpui::fonts::{self, HighlightStyle, Weight};
|
use gpui::{px, FontStyle, FontWeight, HighlightStyle, UnderlineStyle};
|
||||||
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
|
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@ -26,18 +26,18 @@ impl MarkdownHighlight {
|
|||||||
let mut highlight = HighlightStyle::default();
|
let mut highlight = HighlightStyle::default();
|
||||||
|
|
||||||
if style.italic {
|
if style.italic {
|
||||||
highlight.italic = Some(true);
|
highlight.font_style = Some(FontStyle::Italic);
|
||||||
}
|
}
|
||||||
|
|
||||||
if style.underline {
|
if style.underline {
|
||||||
highlight.underline = Some(fonts::Underline {
|
highlight.underline = Some(UnderlineStyle {
|
||||||
thickness: 1.0.into(),
|
thickness: px(1.),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if style.weight != fonts::Weight::default() {
|
if style.weight != FontWeight::default() {
|
||||||
highlight.weight = Some(style.weight);
|
highlight.font_weight = Some(style.weight);
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(highlight)
|
Some(highlight)
|
||||||
@ -52,7 +52,7 @@ impl MarkdownHighlight {
|
|||||||
pub struct MarkdownHighlightStyle {
|
pub struct MarkdownHighlightStyle {
|
||||||
pub italic: bool,
|
pub italic: bool,
|
||||||
pub underline: bool,
|
pub underline: bool,
|
||||||
pub weight: Weight,
|
pub weight: FontWeight,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@ -138,7 +138,7 @@ pub async fn parse_markdown_block(
|
|||||||
let mut style = MarkdownHighlightStyle::default();
|
let mut style = MarkdownHighlightStyle::default();
|
||||||
|
|
||||||
if bold_depth > 0 {
|
if bold_depth > 0 {
|
||||||
style.weight = Weight::BOLD;
|
style.weight = FontWeight::BOLD;
|
||||||
}
|
}
|
||||||
|
|
||||||
if italic_depth > 0 {
|
if italic_depth > 0 {
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
use fuzzy::{StringMatch, StringMatchCandidate};
|
use fuzzy::{StringMatch, StringMatchCandidate};
|
||||||
use gpui::{executor::Background, fonts::HighlightStyle};
|
use gpui::{BackgroundExecutor, HighlightStyle};
|
||||||
use std::{ops::Range, sync::Arc};
|
use std::ops::Range;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Outline<T> {
|
pub struct Outline<T> {
|
||||||
@ -57,7 +57,7 @@ impl<T> Outline<T> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn search(&self, query: &str, executor: Arc<Background>) -> Vec<StringMatch> {
|
pub async fn search(&self, query: &str, executor: BackgroundExecutor) -> Vec<StringMatch> {
|
||||||
let query = query.trim_start();
|
let query = query.trim_start();
|
||||||
let is_path_query = query.contains(' ');
|
let is_path_query = query.contains(' ');
|
||||||
let smart_case = query.chars().any(|c| c.is_uppercase());
|
let smart_case = query.chars().any(|c| c.is_uppercase());
|
||||||
@ -81,6 +81,7 @@ impl<T> Outline<T> {
|
|||||||
let mut prev_item_ix = 0;
|
let mut prev_item_ix = 0;
|
||||||
for mut string_match in matches {
|
for mut string_match in matches {
|
||||||
let outline_match = &self.items[string_match.candidate_id];
|
let outline_match = &self.items[string_match.candidate_id];
|
||||||
|
string_match.string = outline_match.text.clone();
|
||||||
|
|
||||||
if is_path_query {
|
if is_path_query {
|
||||||
let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
|
let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
|
||||||
|
@ -7,7 +7,6 @@ use futures::FutureExt;
|
|||||||
use parking_lot::Mutex;
|
use parking_lot::Mutex;
|
||||||
use std::{
|
use std::{
|
||||||
borrow::Cow,
|
borrow::Cow,
|
||||||
cell::RefCell,
|
|
||||||
cmp::{self, Ordering, Reverse},
|
cmp::{self, Ordering, Reverse},
|
||||||
collections::BinaryHeap,
|
collections::BinaryHeap,
|
||||||
fmt, iter,
|
fmt, iter,
|
||||||
@ -16,13 +15,9 @@ use std::{
|
|||||||
};
|
};
|
||||||
use sum_tree::{Bias, SeekTarget, SumTree};
|
use sum_tree::{Bias, SeekTarget, SumTree};
|
||||||
use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
|
use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
|
||||||
use tree_sitter::{
|
use tree_sitter::{Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree};
|
||||||
Node, Parser, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree,
|
|
||||||
};
|
|
||||||
|
|
||||||
thread_local! {
|
use super::PARSER;
|
||||||
static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
|
|
||||||
}
|
|
||||||
|
|
||||||
static QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Mutex::new(vec![]);
|
static QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Mutex::new(vec![]);
|
||||||
|
|
||||||
|
@ -1,86 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "language2"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
publish = false
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
path = "src/language2.rs"
|
|
||||||
doctest = false
|
|
||||||
|
|
||||||
[features]
|
|
||||||
test-support = [
|
|
||||||
"rand",
|
|
||||||
"client/test-support",
|
|
||||||
"collections/test-support",
|
|
||||||
"lsp/test-support",
|
|
||||||
"text/test-support",
|
|
||||||
"tree-sitter-rust",
|
|
||||||
"tree-sitter-typescript",
|
|
||||||
"settings/test-support",
|
|
||||||
"util/test-support",
|
|
||||||
]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
clock = { path = "../clock" }
|
|
||||||
collections = { path = "../collections" }
|
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
|
||||||
git = { package = "git3", path = "../git3" }
|
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
|
||||||
rpc = { package = "rpc2", path = "../rpc2" }
|
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
|
||||||
sum_tree = { path = "../sum_tree" }
|
|
||||||
text = { package = "text2", path = "../text2" }
|
|
||||||
theme = { package = "theme2", path = "../theme2" }
|
|
||||||
util = { path = "../util" }
|
|
||||||
|
|
||||||
anyhow.workspace = true
|
|
||||||
async-broadcast = "0.4"
|
|
||||||
async-trait.workspace = true
|
|
||||||
futures.workspace = true
|
|
||||||
globset.workspace = true
|
|
||||||
lazy_static.workspace = true
|
|
||||||
log.workspace = true
|
|
||||||
parking_lot.workspace = true
|
|
||||||
postage.workspace = true
|
|
||||||
regex.workspace = true
|
|
||||||
schemars.workspace = true
|
|
||||||
serde.workspace = true
|
|
||||||
serde_derive.workspace = true
|
|
||||||
serde_json.workspace = true
|
|
||||||
similar = "1.3"
|
|
||||||
smallvec.workspace = true
|
|
||||||
smol.workspace = true
|
|
||||||
tree-sitter.workspace = true
|
|
||||||
unicase = "2.6"
|
|
||||||
|
|
||||||
rand = { workspace = true, optional = true }
|
|
||||||
tree-sitter-rust = { workspace = true, optional = true }
|
|
||||||
tree-sitter-typescript = { workspace = true, optional = true }
|
|
||||||
pulldown-cmark = { version = "0.9.2", default-features = false }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
client = { package = "client2", path = "../client2", features = ["test-support"] }
|
|
||||||
collections = { path = "../collections", features = ["test-support"] }
|
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
|
||||||
text = { package = "text2", path = "../text2", features = ["test-support"] }
|
|
||||||
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
|
|
||||||
util = { path = "../util", features = ["test-support"] }
|
|
||||||
ctor.workspace = true
|
|
||||||
env_logger.workspace = true
|
|
||||||
indoc.workspace = true
|
|
||||||
rand.workspace = true
|
|
||||||
unindent.workspace = true
|
|
||||||
|
|
||||||
tree-sitter-embedded-template.workspace = true
|
|
||||||
tree-sitter-html.workspace = true
|
|
||||||
tree-sitter-json.workspace = true
|
|
||||||
tree-sitter-markdown.workspace = true
|
|
||||||
tree-sitter-rust.workspace = true
|
|
||||||
tree-sitter-python.workspace = true
|
|
||||||
tree-sitter-typescript.workspace = true
|
|
||||||
tree-sitter-ruby.workspace = true
|
|
||||||
tree-sitter-elixir.workspace = true
|
|
||||||
tree-sitter-heex.workspace = true
|
|
@ -1,5 +0,0 @@
|
|||||||
fn main() {
|
|
||||||
if let Ok(bundled) = std::env::var("ZED_BUNDLE") {
|
|
||||||
println!("cargo:rustc-env=ZED_BUNDLE={}", bundled);
|
|
||||||
}
|
|
||||||
}
|
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -1,236 +0,0 @@
|
|||||||
use crate::Diagnostic;
|
|
||||||
use collections::HashMap;
|
|
||||||
use lsp::LanguageServerId;
|
|
||||||
use std::{
|
|
||||||
cmp::{Ordering, Reverse},
|
|
||||||
iter,
|
|
||||||
ops::Range,
|
|
||||||
};
|
|
||||||
use sum_tree::{self, Bias, SumTree};
|
|
||||||
use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default)]
|
|
||||||
pub struct DiagnosticSet {
|
|
||||||
diagnostics: SumTree<DiagnosticEntry<Anchor>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
|
||||||
pub struct DiagnosticEntry<T> {
|
|
||||||
pub range: Range<T>,
|
|
||||||
pub diagnostic: Diagnostic,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct DiagnosticGroup<T> {
|
|
||||||
pub entries: Vec<DiagnosticEntry<T>>,
|
|
||||||
pub primary_ix: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Summary {
|
|
||||||
start: Anchor,
|
|
||||||
end: Anchor,
|
|
||||||
min_start: Anchor,
|
|
||||||
max_end: Anchor,
|
|
||||||
count: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> DiagnosticEntry<T> {
|
|
||||||
// Used to provide diagnostic context to lsp codeAction request
|
|
||||||
pub fn to_lsp_diagnostic_stub(&self) -> lsp::Diagnostic {
|
|
||||||
let code = self
|
|
||||||
.diagnostic
|
|
||||||
.code
|
|
||||||
.clone()
|
|
||||||
.map(lsp::NumberOrString::String);
|
|
||||||
|
|
||||||
lsp::Diagnostic {
|
|
||||||
code,
|
|
||||||
severity: Some(self.diagnostic.severity),
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DiagnosticSet {
|
|
||||||
pub fn from_sorted_entries<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
|
|
||||||
{
|
|
||||||
Self {
|
|
||||||
diagnostics: SumTree::from_iter(iter, buffer),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
|
|
||||||
{
|
|
||||||
let mut entries = iter.into_iter().collect::<Vec<_>>();
|
|
||||||
entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
|
|
||||||
Self {
|
|
||||||
diagnostics: SumTree::from_iter(
|
|
||||||
entries.into_iter().map(|entry| DiagnosticEntry {
|
|
||||||
range: buffer.anchor_before(entry.range.start)
|
|
||||||
..buffer.anchor_before(entry.range.end),
|
|
||||||
diagnostic: entry.diagnostic,
|
|
||||||
}),
|
|
||||||
buffer,
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn len(&self) -> usize {
|
|
||||||
self.diagnostics.summary().count
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
|
|
||||||
self.diagnostics.iter()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn range<'a, T, O>(
|
|
||||||
&'a self,
|
|
||||||
range: Range<T>,
|
|
||||||
buffer: &'a text::BufferSnapshot,
|
|
||||||
inclusive: bool,
|
|
||||||
reversed: bool,
|
|
||||||
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
|
|
||||||
where
|
|
||||||
T: 'a + ToOffset,
|
|
||||||
O: FromAnchor,
|
|
||||||
{
|
|
||||||
let end_bias = if inclusive { Bias::Right } else { Bias::Left };
|
|
||||||
let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
|
|
||||||
let mut cursor = self.diagnostics.filter::<_, ()>({
|
|
||||||
move |summary: &Summary| {
|
|
||||||
let start_cmp = range.start.cmp(&summary.max_end, buffer);
|
|
||||||
let end_cmp = range.end.cmp(&summary.min_start, buffer);
|
|
||||||
if inclusive {
|
|
||||||
start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
|
|
||||||
} else {
|
|
||||||
start_cmp == Ordering::Less && end_cmp == Ordering::Greater
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if reversed {
|
|
||||||
cursor.prev(buffer);
|
|
||||||
} else {
|
|
||||||
cursor.next(buffer);
|
|
||||||
}
|
|
||||||
iter::from_fn({
|
|
||||||
move || {
|
|
||||||
if let Some(diagnostic) = cursor.item() {
|
|
||||||
if reversed {
|
|
||||||
cursor.prev(buffer);
|
|
||||||
} else {
|
|
||||||
cursor.next(buffer);
|
|
||||||
}
|
|
||||||
Some(diagnostic.resolve(buffer))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn groups(
|
|
||||||
&self,
|
|
||||||
language_server_id: LanguageServerId,
|
|
||||||
output: &mut Vec<(LanguageServerId, DiagnosticGroup<Anchor>)>,
|
|
||||||
buffer: &text::BufferSnapshot,
|
|
||||||
) {
|
|
||||||
let mut groups = HashMap::default();
|
|
||||||
for entry in self.diagnostics.iter() {
|
|
||||||
groups
|
|
||||||
.entry(entry.diagnostic.group_id)
|
|
||||||
.or_insert(Vec::new())
|
|
||||||
.push(entry.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
let start_ix = output.len();
|
|
||||||
output.extend(groups.into_values().filter_map(|mut entries| {
|
|
||||||
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer));
|
|
||||||
entries
|
|
||||||
.iter()
|
|
||||||
.position(|entry| entry.diagnostic.is_primary)
|
|
||||||
.map(|primary_ix| {
|
|
||||||
(
|
|
||||||
language_server_id,
|
|
||||||
DiagnosticGroup {
|
|
||||||
entries,
|
|
||||||
primary_ix,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}));
|
|
||||||
output[start_ix..].sort_unstable_by(|(id_a, group_a), (id_b, group_b)| {
|
|
||||||
group_a.entries[group_a.primary_ix]
|
|
||||||
.range
|
|
||||||
.start
|
|
||||||
.cmp(&group_b.entries[group_b.primary_ix].range.start, buffer)
|
|
||||||
.then_with(|| id_a.cmp(&id_b))
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn group<'a, O: FromAnchor>(
|
|
||||||
&'a self,
|
|
||||||
group_id: usize,
|
|
||||||
buffer: &'a text::BufferSnapshot,
|
|
||||||
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
|
|
||||||
self.iter()
|
|
||||||
.filter(move |entry| entry.diagnostic.group_id == group_id)
|
|
||||||
.map(|entry| entry.resolve(buffer))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl sum_tree::Item for DiagnosticEntry<Anchor> {
|
|
||||||
type Summary = Summary;
|
|
||||||
|
|
||||||
fn summary(&self) -> Self::Summary {
|
|
||||||
Summary {
|
|
||||||
start: self.range.start,
|
|
||||||
end: self.range.end,
|
|
||||||
min_start: self.range.start,
|
|
||||||
max_end: self.range.end,
|
|
||||||
count: 1,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DiagnosticEntry<Anchor> {
|
|
||||||
pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticEntry<O> {
|
|
||||||
DiagnosticEntry {
|
|
||||||
range: O::from_anchor(&self.range.start, buffer)
|
|
||||||
..O::from_anchor(&self.range.end, buffer),
|
|
||||||
diagnostic: self.diagnostic.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Summary {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
start: Anchor::MIN,
|
|
||||||
end: Anchor::MAX,
|
|
||||||
min_start: Anchor::MAX,
|
|
||||||
max_end: Anchor::MIN,
|
|
||||||
count: 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl sum_tree::Summary for Summary {
|
|
||||||
type Context = text::BufferSnapshot;
|
|
||||||
|
|
||||||
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
|
|
||||||
if other.min_start.cmp(&self.min_start, buffer).is_lt() {
|
|
||||||
self.min_start = other.min_start;
|
|
||||||
}
|
|
||||||
if other.max_end.cmp(&self.max_end, buffer).is_gt() {
|
|
||||||
self.max_end = other.max_end;
|
|
||||||
}
|
|
||||||
self.start = other.start;
|
|
||||||
self.end = other.end;
|
|
||||||
self.count += other.count;
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,111 +0,0 @@
|
|||||||
use gpui::HighlightStyle;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use theme::SyntaxTheme;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct HighlightMap(Arc<[HighlightId]>);
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
|
||||||
pub struct HighlightId(pub u32);
|
|
||||||
|
|
||||||
const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);
|
|
||||||
|
|
||||||
impl HighlightMap {
|
|
||||||
pub fn new(capture_names: &[&str], theme: &SyntaxTheme) -> Self {
|
|
||||||
// For each capture name in the highlight query, find the longest
|
|
||||||
// key in the theme's syntax styles that matches all of the
|
|
||||||
// dot-separated components of the capture name.
|
|
||||||
HighlightMap(
|
|
||||||
capture_names
|
|
||||||
.iter()
|
|
||||||
.map(|capture_name| {
|
|
||||||
theme
|
|
||||||
.highlights
|
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.filter_map(|(i, (key, _))| {
|
|
||||||
let mut len = 0;
|
|
||||||
let capture_parts = capture_name.split('.');
|
|
||||||
for key_part in key.split('.') {
|
|
||||||
if capture_parts.clone().any(|part| part == key_part) {
|
|
||||||
len += 1;
|
|
||||||
} else {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some((i, len))
|
|
||||||
})
|
|
||||||
.max_by_key(|(_, len)| *len)
|
|
||||||
.map_or(DEFAULT_SYNTAX_HIGHLIGHT_ID, |(i, _)| HighlightId(i as u32))
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(&self, capture_id: u32) -> HighlightId {
|
|
||||||
self.0
|
|
||||||
.get(capture_id as usize)
|
|
||||||
.copied()
|
|
||||||
.unwrap_or(DEFAULT_SYNTAX_HIGHLIGHT_ID)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HighlightId {
|
|
||||||
pub fn is_default(&self) -> bool {
|
|
||||||
*self == DEFAULT_SYNTAX_HIGHLIGHT_ID
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn style(&self, theme: &SyntaxTheme) -> Option<HighlightStyle> {
|
|
||||||
theme.highlights.get(self.0 as usize).map(|entry| entry.1)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn name<'a>(&self, theme: &'a SyntaxTheme) -> Option<&'a str> {
|
|
||||||
theme.highlights.get(self.0 as usize).map(|e| e.0.as_str())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for HighlightMap {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self(Arc::new([]))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for HighlightId {
|
|
||||||
fn default() -> Self {
|
|
||||||
DEFAULT_SYNTAX_HIGHLIGHT_ID
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use gpui::rgba;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_highlight_map() {
|
|
||||||
let theme = SyntaxTheme {
|
|
||||||
highlights: [
|
|
||||||
("function", rgba(0x100000ff)),
|
|
||||||
("function.method", rgba(0x200000ff)),
|
|
||||||
("function.async", rgba(0x300000ff)),
|
|
||||||
("variable.builtin.self.rust", rgba(0x400000ff)),
|
|
||||||
("variable.builtin", rgba(0x500000ff)),
|
|
||||||
("variable", rgba(0x600000ff)),
|
|
||||||
]
|
|
||||||
.iter()
|
|
||||||
.map(|(name, color)| (name.to_string(), (*color).into()))
|
|
||||||
.collect(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let capture_names = &[
|
|
||||||
"function.special",
|
|
||||||
"function.async.rust",
|
|
||||||
"variable.builtin.self",
|
|
||||||
];
|
|
||||||
|
|
||||||
let map = HighlightMap::new(capture_names, &theme);
|
|
||||||
assert_eq!(map.get(0).name(&theme), Some("function"));
|
|
||||||
assert_eq!(map.get(1).name(&theme), Some("function.async"));
|
|
||||||
assert_eq!(map.get(2).name(&theme), Some("variable.builtin"));
|
|
||||||
}
|
|
||||||
}
|
|
File diff suppressed because it is too large
Load Diff
@ -1,431 +0,0 @@
|
|||||||
use crate::{File, Language};
|
|
||||||
use anyhow::Result;
|
|
||||||
use collections::{HashMap, HashSet};
|
|
||||||
use globset::GlobMatcher;
|
|
||||||
use gpui::AppContext;
|
|
||||||
use schemars::{
|
|
||||||
schema::{InstanceType, ObjectValidation, Schema, SchemaObject},
|
|
||||||
JsonSchema,
|
|
||||||
};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use settings::Settings;
|
|
||||||
use std::{num::NonZeroU32, path::Path, sync::Arc};
|
|
||||||
|
|
||||||
pub fn init(cx: &mut AppContext) {
|
|
||||||
AllLanguageSettings::register(cx);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn language_settings<'a>(
|
|
||||||
language: Option<&Arc<Language>>,
|
|
||||||
file: Option<&Arc<dyn File>>,
|
|
||||||
cx: &'a AppContext,
|
|
||||||
) -> &'a LanguageSettings {
|
|
||||||
let language_name = language.map(|l| l.name());
|
|
||||||
all_language_settings(file, cx).language(language_name.as_deref())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn all_language_settings<'a>(
|
|
||||||
file: Option<&Arc<dyn File>>,
|
|
||||||
cx: &'a AppContext,
|
|
||||||
) -> &'a AllLanguageSettings {
|
|
||||||
let location = file.map(|f| (f.worktree_id(), f.path().as_ref()));
|
|
||||||
AllLanguageSettings::get(location, cx)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct AllLanguageSettings {
|
|
||||||
pub copilot: CopilotSettings,
|
|
||||||
defaults: LanguageSettings,
|
|
||||||
languages: HashMap<Arc<str>, LanguageSettings>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize)]
|
|
||||||
pub struct LanguageSettings {
|
|
||||||
pub tab_size: NonZeroU32,
|
|
||||||
pub hard_tabs: bool,
|
|
||||||
pub soft_wrap: SoftWrap,
|
|
||||||
pub preferred_line_length: u32,
|
|
||||||
pub show_wrap_guides: bool,
|
|
||||||
pub wrap_guides: Vec<usize>,
|
|
||||||
pub format_on_save: FormatOnSave,
|
|
||||||
pub remove_trailing_whitespace_on_save: bool,
|
|
||||||
pub ensure_final_newline_on_save: bool,
|
|
||||||
pub formatter: Formatter,
|
|
||||||
pub prettier: HashMap<String, serde_json::Value>,
|
|
||||||
pub enable_language_server: bool,
|
|
||||||
pub show_copilot_suggestions: bool,
|
|
||||||
pub show_whitespaces: ShowWhitespaceSetting,
|
|
||||||
pub extend_comment_on_newline: bool,
|
|
||||||
pub inlay_hints: InlayHintSettings,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default)]
|
|
||||||
pub struct CopilotSettings {
|
|
||||||
pub feature_enabled: bool,
|
|
||||||
pub disabled_globs: Vec<GlobMatcher>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
|
|
||||||
pub struct AllLanguageSettingsContent {
|
|
||||||
#[serde(default)]
|
|
||||||
pub features: Option<FeaturesContent>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub copilot: Option<CopilotSettingsContent>,
|
|
||||||
#[serde(flatten)]
|
|
||||||
pub defaults: LanguageSettingsContent,
|
|
||||||
#[serde(default, alias = "language_overrides")]
|
|
||||||
pub languages: HashMap<Arc<str>, LanguageSettingsContent>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
|
|
||||||
pub struct LanguageSettingsContent {
|
|
||||||
#[serde(default)]
|
|
||||||
pub tab_size: Option<NonZeroU32>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub hard_tabs: Option<bool>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub soft_wrap: Option<SoftWrap>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub preferred_line_length: Option<u32>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub show_wrap_guides: Option<bool>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub wrap_guides: Option<Vec<usize>>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub format_on_save: Option<FormatOnSave>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub remove_trailing_whitespace_on_save: Option<bool>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub ensure_final_newline_on_save: Option<bool>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub formatter: Option<Formatter>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub prettier: Option<HashMap<String, serde_json::Value>>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub enable_language_server: Option<bool>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub show_copilot_suggestions: Option<bool>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub show_whitespaces: Option<ShowWhitespaceSetting>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub extend_comment_on_newline: Option<bool>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub inlay_hints: Option<InlayHintSettings>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
|
||||||
pub struct CopilotSettingsContent {
|
|
||||||
#[serde(default)]
|
|
||||||
pub disabled_globs: Option<Vec<String>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
|
|
||||||
#[serde(rename_all = "snake_case")]
|
|
||||||
pub struct FeaturesContent {
|
|
||||||
pub copilot: Option<bool>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
|
||||||
#[serde(rename_all = "snake_case")]
|
|
||||||
pub enum SoftWrap {
|
|
||||||
None,
|
|
||||||
EditorWidth,
|
|
||||||
PreferredLineLength,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
|
||||||
#[serde(rename_all = "snake_case")]
|
|
||||||
pub enum FormatOnSave {
|
|
||||||
On,
|
|
||||||
Off,
|
|
||||||
LanguageServer,
|
|
||||||
External {
|
|
||||||
command: Arc<str>,
|
|
||||||
arguments: Arc<[String]>,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
|
||||||
#[serde(rename_all = "snake_case")]
|
|
||||||
pub enum ShowWhitespaceSetting {
|
|
||||||
Selection,
|
|
||||||
None,
|
|
||||||
All,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
|
||||||
#[serde(rename_all = "snake_case")]
|
|
||||||
pub enum Formatter {
|
|
||||||
#[default]
|
|
||||||
Auto,
|
|
||||||
LanguageServer,
|
|
||||||
Prettier,
|
|
||||||
External {
|
|
||||||
command: Arc<str>,
|
|
||||||
arguments: Arc<[String]>,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
|
|
||||||
pub struct InlayHintSettings {
|
|
||||||
#[serde(default)]
|
|
||||||
pub enabled: bool,
|
|
||||||
#[serde(default = "default_true")]
|
|
||||||
pub show_type_hints: bool,
|
|
||||||
#[serde(default = "default_true")]
|
|
||||||
pub show_parameter_hints: bool,
|
|
||||||
#[serde(default = "default_true")]
|
|
||||||
pub show_other_hints: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_true() -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
impl InlayHintSettings {
|
|
||||||
pub fn enabled_inlay_hint_kinds(&self) -> HashSet<Option<InlayHintKind>> {
|
|
||||||
let mut kinds = HashSet::default();
|
|
||||||
if self.show_type_hints {
|
|
||||||
kinds.insert(Some(InlayHintKind::Type));
|
|
||||||
}
|
|
||||||
if self.show_parameter_hints {
|
|
||||||
kinds.insert(Some(InlayHintKind::Parameter));
|
|
||||||
}
|
|
||||||
if self.show_other_hints {
|
|
||||||
kinds.insert(None);
|
|
||||||
}
|
|
||||||
kinds
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AllLanguageSettings {
|
|
||||||
pub fn language<'a>(&'a self, language_name: Option<&str>) -> &'a LanguageSettings {
|
|
||||||
if let Some(name) = language_name {
|
|
||||||
if let Some(overrides) = self.languages.get(name) {
|
|
||||||
return overrides;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
&self.defaults
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn copilot_enabled_for_path(&self, path: &Path) -> bool {
|
|
||||||
!self
|
|
||||||
.copilot
|
|
||||||
.disabled_globs
|
|
||||||
.iter()
|
|
||||||
.any(|glob| glob.is_match(path))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn copilot_enabled(&self, language: Option<&Arc<Language>>, path: Option<&Path>) -> bool {
|
|
||||||
if !self.copilot.feature_enabled {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(path) = path {
|
|
||||||
if !self.copilot_enabled_for_path(path) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
self.language(language.map(|l| l.name()).as_deref())
|
|
||||||
.show_copilot_suggestions
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The kind of an LSP inlay hint, used to filter which hints are shown.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum InlayHintKind {
    /// A type annotation hint.
    Type,
    /// A parameter name hint.
    Parameter,
}
|
|
||||||
|
|
||||||
impl InlayHintKind {
|
|
||||||
pub fn from_name(name: &str) -> Option<Self> {
|
|
||||||
match name {
|
|
||||||
"type" => Some(InlayHintKind::Type),
|
|
||||||
"parameter" => Some(InlayHintKind::Parameter),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn name(&self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
InlayHintKind::Type => "type",
|
|
||||||
InlayHintKind::Parameter => "parameter",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl settings::Settings for AllLanguageSettings {
    // These settings live at the root of the settings file, not under a key.
    const KEY: Option<&'static str> = None;

    type FileContent = AllLanguageSettingsContent;

    /// Resolves the final settings by layering each user settings file, in
    /// order, over the defaults: globals first, then per-language overrides.
    fn load(
        default_value: &Self::FileContent,
        user_settings: &[&Self::FileContent],
        _: &mut AppContext,
    ) -> Result<Self> {
        // A default is provided for all settings.
        // Round-trip through JSON to convert the optional "content" form into
        // the fully-populated `LanguageSettings` (fails if a default is missing).
        let mut defaults: LanguageSettings =
            serde_json::from_value(serde_json::to_value(&default_value.defaults)?)?;

        // Seed per-language settings from the defaults plus the default file's
        // language-specific overrides.
        let mut languages = HashMap::default();
        for (language_name, settings) in &default_value.languages {
            let mut language_settings = defaults.clone();
            merge_settings(&mut language_settings, &settings);
            languages.insert(language_name.clone(), language_settings);
        }

        // Copilot settings must have defaults; error out otherwise.
        let mut copilot_enabled = default_value
            .features
            .as_ref()
            .and_then(|f| f.copilot)
            .ok_or_else(Self::missing_default)?;
        let mut copilot_globs = default_value
            .copilot
            .as_ref()
            .and_then(|c| c.disabled_globs.as_ref())
            .ok_or_else(Self::missing_default)?;

        for user_settings in user_settings {
            // Later files win: each user file may replace the copilot flag/globs.
            if let Some(copilot) = user_settings.features.as_ref().and_then(|f| f.copilot) {
                copilot_enabled = copilot;
            }
            if let Some(globs) = user_settings
                .copilot
                .as_ref()
                .and_then(|f| f.disabled_globs.as_ref())
            {
                copilot_globs = globs;
            }

            // A user's global settings override the default global settings and
            // all default language-specific settings.
            merge_settings(&mut defaults, &user_settings.defaults);
            for language_settings in languages.values_mut() {
                merge_settings(language_settings, &user_settings.defaults);
            }

            // A user's language-specific settings override default language-specific settings.
            for (language_name, user_language_settings) in &user_settings.languages {
                merge_settings(
                    languages
                        .entry(language_name.clone())
                        .or_insert_with(|| defaults.clone()),
                    &user_language_settings,
                );
            }
        }

        Ok(Self {
            copilot: CopilotSettings {
                feature_enabled: copilot_enabled,
                // Invalid glob patterns are silently dropped rather than failing the load.
                disabled_globs: copilot_globs
                    .iter()
                    .filter_map(|g| Some(globset::Glob::new(g).ok()?.compile_matcher()))
                    .collect(),
            },
            defaults,
            languages,
        })
    }

    /// Extends the derived JSON schema with a `languages` map so language names
    /// known at runtime get completions in the settings file.
    fn json_schema(
        generator: &mut schemars::gen::SchemaGenerator,
        params: &settings::SettingsJsonSchemaParams,
        _: &AppContext,
    ) -> schemars::schema::RootSchema {
        let mut root_schema = generator.root_schema_for::<Self::FileContent>();

        // Create a schema for a 'languages overrides' object, associating editor
        // settings with specific languages.
        assert!(root_schema
            .definitions
            .contains_key("LanguageSettingsContent"));

        // Map each known language name to the LanguageSettingsContent schema.
        let languages_object_schema = SchemaObject {
            instance_type: Some(InstanceType::Object.into()),
            object: Some(Box::new(ObjectValidation {
                properties: params
                    .language_names
                    .iter()
                    .map(|name| {
                        (
                            name.clone(),
                            Schema::new_ref("#/definitions/LanguageSettingsContent".into()),
                        )
                    })
                    .collect(),
                ..Default::default()
            })),
            ..Default::default()
        };

        root_schema
            .definitions
            .extend([("Languages".into(), languages_object_schema.into())]);

        root_schema
            .schema
            .object
            .as_mut()
            .unwrap()
            .properties
            .extend([
                (
                    "languages".to_owned(),
                    Schema::new_ref("#/definitions/Languages".into()),
                ),
                // For backward compatibility
                (
                    "language_overrides".to_owned(),
                    Schema::new_ref("#/definitions/Languages".into()),
                ),
            ]);

        root_schema
    }
}
|
|
||||||
|
|
||||||
/// Copies every field that is present (`Some`) in `src` onto `settings`,
/// leaving fields that were not specified untouched.
fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent) {
    merge(&mut settings.tab_size, src.tab_size);
    merge(&mut settings.hard_tabs, src.hard_tabs);
    merge(&mut settings.soft_wrap, src.soft_wrap);
    merge(&mut settings.show_wrap_guides, src.show_wrap_guides);
    merge(&mut settings.wrap_guides, src.wrap_guides.clone());

    merge(
        &mut settings.preferred_line_length,
        src.preferred_line_length,
    );
    merge(&mut settings.formatter, src.formatter.clone());
    merge(&mut settings.prettier, src.prettier.clone());
    merge(&mut settings.format_on_save, src.format_on_save.clone());
    merge(
        &mut settings.remove_trailing_whitespace_on_save,
        src.remove_trailing_whitespace_on_save,
    );
    merge(
        &mut settings.ensure_final_newline_on_save,
        src.ensure_final_newline_on_save,
    );
    merge(
        &mut settings.enable_language_server,
        src.enable_language_server,
    );
    merge(
        &mut settings.show_copilot_suggestions,
        src.show_copilot_suggestions,
    );
    merge(&mut settings.show_whitespaces, src.show_whitespaces);
    merge(
        &mut settings.extend_comment_on_newline,
        src.extend_comment_on_newline,
    );
    merge(&mut settings.inlay_hints, src.inlay_hints);
    // Overwrites `target` only when a value was actually provided.
    fn merge<T>(target: &mut T, value: Option<T>) {
        if let Some(value) = value {
            *target = value;
        }
    }
}
|
|
@ -1,301 +0,0 @@
|
|||||||
use std::sync::Arc;
|
|
||||||
use std::{ops::Range, path::PathBuf};
|
|
||||||
|
|
||||||
use crate::{HighlightId, Language, LanguageRegistry};
|
|
||||||
use gpui::{px, FontStyle, FontWeight, HighlightStyle, UnderlineStyle};
|
|
||||||
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
|
|
||||||
|
|
||||||
/// The result of rendering markdown to plain text plus styling metadata.
#[derive(Debug, Clone)]
pub struct ParsedMarkdown {
    /// The rendered plain text.
    pub text: String,
    /// Styled spans of `text` (bold/italic/underline or syntax highlighting).
    pub highlights: Vec<(Range<usize>, MarkdownHighlight)>,
    /// Byte ranges of `text` covered by `regions`, index-aligned with it.
    pub region_ranges: Vec<Range<usize>>,
    /// Interactive/code regions (links, inline code) found while parsing.
    pub regions: Vec<ParsedRegion>,
}
|
|
||||||
|
|
||||||
/// A highlight applied to a span of parsed markdown text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MarkdownHighlight {
    /// Inline markdown styling (bold, italic, underline).
    Style(MarkdownHighlightStyle),
    /// Syntax highlighting from a language grammar, keyed into the theme.
    Code(HighlightId),
}
|
|
||||||
|
|
||||||
impl MarkdownHighlight {
|
|
||||||
pub fn to_highlight_style(&self, theme: &theme::SyntaxTheme) -> Option<HighlightStyle> {
|
|
||||||
match self {
|
|
||||||
MarkdownHighlight::Style(style) => {
|
|
||||||
let mut highlight = HighlightStyle::default();
|
|
||||||
|
|
||||||
if style.italic {
|
|
||||||
highlight.font_style = Some(FontStyle::Italic);
|
|
||||||
}
|
|
||||||
|
|
||||||
if style.underline {
|
|
||||||
highlight.underline = Some(UnderlineStyle {
|
|
||||||
thickness: px(1.),
|
|
||||||
..Default::default()
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if style.weight != FontWeight::default() {
|
|
||||||
highlight.font_weight = Some(style.weight);
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(highlight)
|
|
||||||
}
|
|
||||||
|
|
||||||
MarkdownHighlight::Code(id) => id.style(theme),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Inline text styling accumulated while walking markdown events.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct MarkdownHighlightStyle {
    /// Emphasis (`*text*`).
    pub italic: bool,
    /// Underline, used to mark link text.
    pub underline: bool,
    /// Font weight; bold for strong emphasis and headings.
    pub weight: FontWeight,
}
|
|
||||||
|
|
||||||
/// A span of parsed markdown with special behavior: inline code and/or a link.
#[derive(Debug, Clone)]
pub struct ParsedRegion {
    /// Whether the region is inline code (rendered in a code style).
    pub code: bool,
    /// The link target, if the region is clickable.
    pub link: Option<Link>,
}
|
|
||||||
|
|
||||||
/// A link target recognized in markdown text.
#[derive(Debug, Clone)]
pub enum Link {
    /// An http(s) URL.
    Web { url: String },
    /// An absolute filesystem path.
    Path { path: PathBuf },
}
|
|
||||||
|
|
||||||
impl Link {
|
|
||||||
fn identify(text: String) -> Option<Link> {
|
|
||||||
if text.starts_with("http") {
|
|
||||||
return Some(Link::Web { url: text });
|
|
||||||
}
|
|
||||||
|
|
||||||
let path = PathBuf::from(text);
|
|
||||||
if path.is_absolute() {
|
|
||||||
return Some(Link::Path { path });
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn parse_markdown(
|
|
||||||
markdown: &str,
|
|
||||||
language_registry: &Arc<LanguageRegistry>,
|
|
||||||
language: Option<Arc<Language>>,
|
|
||||||
) -> ParsedMarkdown {
|
|
||||||
let mut text = String::new();
|
|
||||||
let mut highlights = Vec::new();
|
|
||||||
let mut region_ranges = Vec::new();
|
|
||||||
let mut regions = Vec::new();
|
|
||||||
|
|
||||||
parse_markdown_block(
|
|
||||||
markdown,
|
|
||||||
language_registry,
|
|
||||||
language,
|
|
||||||
&mut text,
|
|
||||||
&mut highlights,
|
|
||||||
&mut region_ranges,
|
|
||||||
&mut regions,
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
ParsedMarkdown {
|
|
||||||
text,
|
|
||||||
highlights,
|
|
||||||
region_ranges,
|
|
||||||
regions,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Walks pulldown-cmark events for `markdown`, appending rendered plain text
/// to `text` and recording highlights, clickable/code region ranges, and
/// region metadata into the remaining output parameters.
pub async fn parse_markdown_block(
    markdown: &str,
    language_registry: &Arc<LanguageRegistry>,
    language: Option<Arc<Language>>,
    text: &mut String,
    highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
    region_ranges: &mut Vec<Range<usize>>,
    regions: &mut Vec<ParsedRegion>,
) {
    // Nesting depths: bold/italic can nest, so counters rather than booleans.
    let mut bold_depth = 0;
    let mut italic_depth = 0;
    // URL of the link currently being emitted, if inside a link tag.
    let mut link_url = None;
    // Language of the fenced code block currently being emitted, if any.
    let mut current_language = None;
    // One entry per open list: (ordered-list counter, item-has-content flag).
    let mut list_stack = Vec::new();

    for event in Parser::new_ext(&markdown, Options::all()) {
        // Start offset of whatever this event appends to `text`.
        let prev_len = text.len();
        match event {
            Event::Text(t) => {
                if let Some(language) = &current_language {
                    // Inside a fenced code block: syntax-highlight instead of styling.
                    highlight_code(text, highlights, t.as_ref(), language);
                } else {
                    text.push_str(t.as_ref());

                    let mut style = MarkdownHighlightStyle::default();

                    if bold_depth > 0 {
                        style.weight = FontWeight::BOLD;
                    }

                    if italic_depth > 0 {
                        style.italic = true;
                    }

                    // Link text becomes an underlined clickable region.
                    if let Some(link) = link_url.clone().and_then(|u| Link::identify(u)) {
                        region_ranges.push(prev_len..text.len());
                        regions.push(ParsedRegion {
                            code: false,
                            link: Some(link),
                        });
                        style.underline = true;
                    }

                    if style != MarkdownHighlightStyle::default() {
                        // Coalesce with the previous highlight when it is
                        // adjacent and identically styled.
                        let mut new_highlight = true;
                        if let Some((last_range, MarkdownHighlight::Style(last_style))) =
                            highlights.last_mut()
                        {
                            if last_range.end == prev_len && last_style == &style {
                                last_range.end = text.len();
                                new_highlight = false;
                            }
                        }
                        if new_highlight {
                            let range = prev_len..text.len();
                            highlights.push((range, MarkdownHighlight::Style(style)));
                        }
                    }
                }
            }

            Event::Code(t) => {
                // Inline code: always a region; underlined only when it's a link.
                text.push_str(t.as_ref());
                region_ranges.push(prev_len..text.len());

                let link = link_url.clone().and_then(|u| Link::identify(u));
                if link.is_some() {
                    highlights.push((
                        prev_len..text.len(),
                        MarkdownHighlight::Style(MarkdownHighlightStyle {
                            underline: true,
                            ..Default::default()
                        }),
                    ));
                }
                regions.push(ParsedRegion { code: true, link });
            }

            Event::Start(tag) => match tag {
                Tag::Paragraph => new_paragraph(text, &mut list_stack),

                Tag::Heading(_, _, _) => {
                    // Headings render as bold paragraphs.
                    new_paragraph(text, &mut list_stack);
                    bold_depth += 1;
                }

                Tag::CodeBlock(kind) => {
                    new_paragraph(text, &mut list_stack);
                    // Fenced blocks look up their language by name; indented
                    // blocks fall back to the document's language, if any.
                    current_language = if let CodeBlockKind::Fenced(language) = kind {
                        language_registry
                            .language_for_name(language.as_ref())
                            .await
                            .ok()
                    } else {
                        language.clone()
                    }
                }

                Tag::Emphasis => italic_depth += 1,

                Tag::Strong => bold_depth += 1,

                Tag::Link(_, url, _) => link_url = Some(url.to_string()),

                Tag::List(number) => {
                    list_stack.push((number, false));
                }

                Tag::Item => {
                    // Emit the list bullet/number, indented by nesting depth.
                    let len = list_stack.len();
                    if let Some((list_number, has_content)) = list_stack.last_mut() {
                        *has_content = false;
                        if !text.is_empty() && !text.ends_with('\n') {
                            text.push('\n');
                        }
                        for _ in 0..len - 1 {
                            text.push_str("  ");
                        }
                        if let Some(number) = list_number {
                            text.push_str(&format!("{}. ", number));
                            *number += 1;
                            *has_content = false;
                        } else {
                            text.push_str("- ");
                        }
                    }
                }

                _ => {}
            },

            Event::End(tag) => match tag {
                Tag::Heading(_, _, _) => bold_depth -= 1,
                Tag::CodeBlock(_) => current_language = None,
                Tag::Emphasis => italic_depth -= 1,
                Tag::Strong => bold_depth -= 1,
                Tag::Link(_, _, _) => link_url = None,
                Tag::List(_) => drop(list_stack.pop()),
                _ => {}
            },

            Event::HardBreak => text.push('\n'),

            Event::SoftBreak => text.push(' '),

            _ => {}
        }
    }
}
|
|
||||||
|
|
||||||
pub fn highlight_code(
|
|
||||||
text: &mut String,
|
|
||||||
highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
|
|
||||||
content: &str,
|
|
||||||
language: &Arc<Language>,
|
|
||||||
) {
|
|
||||||
let prev_len = text.len();
|
|
||||||
text.push_str(content);
|
|
||||||
for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) {
|
|
||||||
let highlight = MarkdownHighlight::Code(highlight_id);
|
|
||||||
highlights.push((prev_len + range.start..prev_len + range.end, highlight));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Starts a new paragraph in the rendered text, handling list indentation.
///
/// Inside a list item the first paragraph stays on the bullet's line; any
/// later paragraph gets a blank line and is indented to align with the item.
pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
    let mut continuation_of_list_item = false;
    if let Some((_, has_content)) = list_stack.last_mut() {
        if *has_content {
            continuation_of_list_item = true;
        } else {
            // First paragraph of the item: mark it and keep the bullet's line.
            *has_content = true;
            return;
        }
    }

    if !text.is_empty() {
        if !text.ends_with('\n') {
            text.push('\n');
        }
        text.push('\n');
    }

    // Two spaces of indent per level of list nesting beyond the first.
    text.push_str(&"  ".repeat(list_stack.len().saturating_sub(1)));
    if continuation_of_list_item {
        text.push_str("  ");
    }
}
|
|
@ -1,139 +0,0 @@
|
|||||||
use fuzzy::{StringMatch, StringMatchCandidate};
|
|
||||||
use gpui::{BackgroundExecutor, HighlightStyle};
|
|
||||||
use std::ops::Range;
|
|
||||||
|
|
||||||
/// A searchable outline of symbols, with precomputed fuzzy-match candidates
/// for both bare symbol names and full "path" strings (ancestor names joined
/// with spaces).
#[derive(Debug)]
pub struct Outline<T> {
    /// The outline entries, in document order.
    pub items: Vec<OutlineItem<T>>,
    // Fuzzy-match candidates built from each item's name ranges only.
    candidates: Vec<StringMatchCandidate>,
    // Fuzzy-match candidates for each item prefixed by its ancestors' text.
    path_candidates: Vec<StringMatchCandidate>,
    // Byte length of the ancestor prefix in each path candidate, index-aligned.
    path_candidate_prefixes: Vec<usize>,
}
|
|
||||||
|
|
||||||
/// One entry in an outline (a symbol), generic over the position type.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct OutlineItem<T> {
    /// Nesting depth within the outline tree.
    pub depth: usize,
    /// The symbol's span in the source buffer.
    pub range: Range<T>,
    /// Display text for the entry.
    pub text: String,
    /// Syntax-highlight spans within `text`.
    pub highlight_ranges: Vec<(Range<usize>, HighlightStyle)>,
    /// Spans of `text` that form the symbol's name (used for fuzzy matching).
    pub name_ranges: Vec<Range<usize>>,
}
|
|
||||||
|
|
||||||
impl<T> Outline<T> {
    /// Builds an outline from items in document order, precomputing fuzzy
    /// candidates for name-only queries and space-separated path queries.
    pub fn new(items: Vec<OutlineItem<T>>) -> Self {
        let mut candidates = Vec::new();
        let mut path_candidates = Vec::new();
        let mut path_candidate_prefixes = Vec::new();
        // Running "ancestor path" text; `path_stack` records its length at
        // each depth so it can be truncated when the tree pops back up.
        let mut path_text = String::new();
        let mut path_stack = Vec::new();

        for (id, item) in items.iter().enumerate() {
            if item.depth < path_stack.len() {
                // Left one or more nesting levels: drop the deeper ancestors.
                path_stack.truncate(item.depth);
                path_text.truncate(path_stack.last().copied().unwrap_or(0));
            }
            if !path_text.is_empty() {
                path_text.push(' ');
            }
            path_candidate_prefixes.push(path_text.len());
            path_text.push_str(&item.text);
            path_stack.push(path_text.len());

            // The name-only candidate concatenates just the name ranges.
            let candidate_text = item
                .name_ranges
                .iter()
                .map(|range| &item.text[range.start as usize..range.end as usize])
                .collect::<String>();

            path_candidates.push(StringMatchCandidate::new(id, path_text.clone()));
            candidates.push(StringMatchCandidate::new(id, candidate_text));
        }

        Self {
            candidates,
            path_candidates,
            path_candidate_prefixes,
            items,
        }
    }

    /// Fuzzy-searches the outline. Queries containing a space match against
    /// full ancestor paths; otherwise only symbol names are matched. Results
    /// are returned in document order, with non-matching ancestor entries
    /// inserted so the matches render as a tree.
    pub async fn search(&self, query: &str, executor: BackgroundExecutor) -> Vec<StringMatch> {
        let query = query.trim_start();
        let is_path_query = query.contains(' ');
        let smart_case = query.chars().any(|c| c.is_uppercase());
        let mut matches = fuzzy::match_strings(
            if is_path_query {
                &self.path_candidates
            } else {
                &self.candidates
            },
            query,
            smart_case,
            100,
            &Default::default(),
            executor.clone(),
        )
        .await;
        // Document order, so ancestors can be stitched in below.
        matches.sort_unstable_by_key(|m| m.candidate_id);

        let mut tree_matches = Vec::new();

        let mut prev_item_ix = 0;
        for mut string_match in matches {
            let outline_match = &self.items[string_match.candidate_id];
            string_match.string = outline_match.text.clone();

            if is_path_query {
                // Keep only positions inside this item's own text and shift
                // them past the ancestor prefix.
                let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
                string_match
                    .positions
                    .retain(|position| *position >= prefix_len);
                for position in &mut string_match.positions {
                    *position -= prefix_len;
                }
            } else {
                // Positions refer to the concatenated name ranges; map each
                // back to its offset within the item's display text.
                let mut name_ranges = outline_match.name_ranges.iter();
                let mut name_range = name_ranges.next().unwrap();
                let mut preceding_ranges_len = 0;
                for position in &mut string_match.positions {
                    while *position >= preceding_ranges_len + name_range.len() as usize {
                        preceding_ranges_len += name_range.len();
                        name_range = name_ranges.next().unwrap();
                    }
                    *position = name_range.start as usize + (*position - preceding_ranges_len);
                }
            }

            // Walk backwards over the items skipped since the previous match,
            // inserting each ancestor (one per depth level) before this match.
            let insertion_ix = tree_matches.len();
            let mut cur_depth = outline_match.depth;
            for (ix, item) in self.items[prev_item_ix..string_match.candidate_id]
                .iter()
                .enumerate()
                .rev()
            {
                if cur_depth == 0 {
                    break;
                }

                let candidate_index = ix + prev_item_ix;
                if item.depth == cur_depth - 1 {
                    // Ancestor placeholder: no score or positions of its own.
                    tree_matches.insert(
                        insertion_ix,
                        StringMatch {
                            candidate_id: candidate_index,
                            score: Default::default(),
                            positions: Default::default(),
                            string: Default::default(),
                        },
                    );
                    cur_depth -= 1;
                }
            }

            prev_item_ix = string_match.candidate_id + 1;
            tree_matches.push(string_match);
        }

        tree_matches
    }
}
|
|
@ -1,590 +0,0 @@
|
|||||||
use crate::{
|
|
||||||
diagnostic_set::DiagnosticEntry, CodeAction, CodeLabel, Completion, CursorShape, Diagnostic,
|
|
||||||
Language,
|
|
||||||
};
|
|
||||||
use anyhow::{anyhow, Result};
|
|
||||||
use clock::ReplicaId;
|
|
||||||
use lsp::{DiagnosticSeverity, LanguageServerId};
|
|
||||||
use rpc::proto;
|
|
||||||
use std::{ops::Range, sync::Arc};
|
|
||||||
use text::*;
|
|
||||||
|
|
||||||
pub use proto::{BufferState, Operation};
|
|
||||||
|
|
||||||
/// Serializes a rope fingerprint as a hex string for the wire protocol.
pub fn serialize_fingerprint(fingerprint: RopeFingerprint) -> String {
    fingerprint.to_hex()
}
|
|
||||||
|
|
||||||
pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
|
|
||||||
RopeFingerprint::from_hex(fingerprint)
|
|
||||||
.map_err(|error| anyhow!("invalid fingerprint: {}", error))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts a protobuf line-ending value into the text crate's representation.
pub fn deserialize_line_ending(message: proto::LineEnding) -> text::LineEnding {
    match message {
        proto::LineEnding::Unix => text::LineEnding::Unix,
        proto::LineEnding::Windows => text::LineEnding::Windows,
    }
}
|
|
||||||
|
|
||||||
/// Converts the text crate's line-ending representation into its protobuf form.
pub fn serialize_line_ending(message: text::LineEnding) -> proto::LineEnding {
    match message {
        text::LineEnding::Unix => proto::LineEnding::Unix,
        text::LineEnding::Windows => proto::LineEnding::Windows,
    }
}
|
|
||||||
|
|
||||||
/// Serializes a buffer CRDT operation into its protobuf wire representation.
/// Replica ids and lamport timestamps are flattened into the message fields.
pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
    proto::Operation {
        variant: Some(match operation {
            crate::Operation::Buffer(text::Operation::Edit(edit)) => {
                proto::operation::Variant::Edit(serialize_edit_operation(edit))
            }

            crate::Operation::Buffer(text::Operation::Undo(undo)) => {
                proto::operation::Variant::Undo(proto::operation::Undo {
                    replica_id: undo.timestamp.replica_id as u32,
                    lamport_timestamp: undo.timestamp.value,
                    version: serialize_version(&undo.version),
                    // Per-edit undo counts: how many times each edit was undone.
                    counts: undo
                        .counts
                        .iter()
                        .map(|(edit_id, count)| proto::UndoCount {
                            replica_id: edit_id.replica_id as u32,
                            lamport_timestamp: edit_id.value,
                            count: *count,
                        })
                        .collect(),
                })
            }

            crate::Operation::UpdateSelections {
                selections,
                line_mode,
                lamport_timestamp,
                cursor_shape,
            } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
                replica_id: lamport_timestamp.replica_id as u32,
                lamport_timestamp: lamport_timestamp.value,
                selections: serialize_selections(selections),
                line_mode: *line_mode,
                cursor_shape: serialize_cursor_shape(cursor_shape) as i32,
            }),

            crate::Operation::UpdateDiagnostics {
                lamport_timestamp,
                server_id,
                diagnostics,
            } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
                replica_id: lamport_timestamp.replica_id as u32,
                lamport_timestamp: lamport_timestamp.value,
                server_id: server_id.0 as u64,
                diagnostics: serialize_diagnostics(diagnostics.iter()),
            }),

            crate::Operation::UpdateCompletionTriggers {
                triggers,
                lamport_timestamp,
            } => proto::operation::Variant::UpdateCompletionTriggers(
                proto::operation::UpdateCompletionTriggers {
                    replica_id: lamport_timestamp.replica_id as u32,
                    lamport_timestamp: lamport_timestamp.value,
                    triggers: triggers.clone(),
                },
            ),
        }),
    }
}
|
|
||||||
|
|
||||||
/// Serializes a text edit operation (its id, version vector, edited ranges,
/// and replacement texts) into the protobuf edit message.
pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit {
    proto::operation::Edit {
        replica_id: operation.timestamp.replica_id as u32,
        lamport_timestamp: operation.timestamp.value,
        version: serialize_version(&operation.version),
        ranges: operation.ranges.iter().map(serialize_range).collect(),
        new_text: operation
            .new_text
            .iter()
            .map(|text| text.to_string())
            .collect(),
    }
}
|
|
||||||
|
|
||||||
/// Serializes one undo-map entry: an edit id plus, for each undo operation
/// that affected it, how many times it was undone.
pub fn serialize_undo_map_entry(
    (edit_id, counts): (&clock::Lamport, &[(clock::Lamport, u32)]),
) -> proto::UndoMapEntry {
    proto::UndoMapEntry {
        replica_id: edit_id.replica_id as u32,
        local_timestamp: edit_id.value,
        counts: counts
            .iter()
            .map(|(undo_id, count)| proto::UndoCount {
                replica_id: undo_id.replica_id as u32,
                lamport_timestamp: undo_id.value,
                count: *count,
            })
            .collect(),
    }
}
|
|
||||||
|
|
||||||
/// Splits `operations` into fixed-size chunks for transmission.
///
/// Always yields at least one chunk; the final chunk may be empty when the
/// input length is a multiple of the chunk size (or the input is empty).
pub fn split_operations(
    mut operations: Vec<proto::Operation>,
) -> impl Iterator<Item = Vec<proto::Operation>> {
    // Small chunks in tests exercise the chunking logic.
    #[cfg(any(test, feature = "test-support"))]
    const CHUNK_SIZE: usize = 5;

    #[cfg(not(any(test, feature = "test-support")))]
    const CHUNK_SIZE: usize = 100;

    let mut finished = false;
    std::iter::from_fn(move || {
        if finished {
            None
        } else {
            let take = CHUNK_SIZE.min(operations.len());
            let chunk: Vec<_> = operations.drain(..take).collect();
            // Stop after yielding the first empty chunk.
            finished = chunk.is_empty();
            Some(chunk)
        }
    })
}
|
|
||||||
|
|
||||||
/// Serializes a shared set of selections into protobuf selection messages.
pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
    selections.iter().map(serialize_selection).collect()
}
|
|
||||||
|
|
||||||
/// Serializes a single selection. Single-buffer selections have no excerpt,
/// so `excerpt_id` is always 0 here.
pub fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
    proto::Selection {
        id: selection.id as u64,
        start: Some(proto::EditorAnchor {
            anchor: Some(serialize_anchor(&selection.start)),
            excerpt_id: 0,
        }),
        end: Some(proto::EditorAnchor {
            anchor: Some(serialize_anchor(&selection.end)),
            excerpt_id: 0,
        }),
        reversed: selection.reversed,
    }
}
|
|
||||||
|
|
||||||
/// Converts a cursor shape into its protobuf enum value.
pub fn serialize_cursor_shape(cursor_shape: &CursorShape) -> proto::CursorShape {
    match cursor_shape {
        CursorShape::Bar => proto::CursorShape::CursorBar,
        CursorShape::Block => proto::CursorShape::CursorBlock,
        CursorShape::Underscore => proto::CursorShape::CursorUnderscore,
        CursorShape::Hollow => proto::CursorShape::CursorHollow,
    }
}
|
|
||||||
|
|
||||||
/// Converts a protobuf cursor-shape enum value back into a `CursorShape`.
pub fn deserialize_cursor_shape(cursor_shape: proto::CursorShape) -> CursorShape {
    match cursor_shape {
        proto::CursorShape::CursorBar => CursorShape::Bar,
        proto::CursorShape::CursorBlock => CursorShape::Block,
        proto::CursorShape::CursorUnderscore => CursorShape::Underscore,
        proto::CursorShape::CursorHollow => CursorShape::Hollow,
    }
}
|
|
||||||
|
|
||||||
/// Serializes diagnostic entries into protobuf messages, mapping LSP
/// severities onto the protocol's severity enum (unknown values become None).
pub fn serialize_diagnostics<'a>(
    diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
) -> Vec<proto::Diagnostic> {
    diagnostics
        .into_iter()
        .map(|entry| proto::Diagnostic {
            source: entry.diagnostic.source.clone(),
            start: Some(serialize_anchor(&entry.range.start)),
            end: Some(serialize_anchor(&entry.range.end)),
            message: entry.diagnostic.message.clone(),
            severity: match entry.diagnostic.severity {
                DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
                DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
                DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
                DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
                _ => proto::diagnostic::Severity::None,
            } as i32,
            group_id: entry.diagnostic.group_id as u64,
            is_primary: entry.diagnostic.is_primary,
            is_valid: entry.diagnostic.is_valid,
            code: entry.diagnostic.code.clone(),
            is_disk_based: entry.diagnostic.is_disk_based,
            is_unnecessary: entry.diagnostic.is_unnecessary,
        })
        .collect()
}
|
|
||||||
|
|
||||||
/// Serializes a text anchor (its originating edit id, offset, and bias) into
/// the protobuf anchor message.
pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
    proto::Anchor {
        replica_id: anchor.timestamp.replica_id as u32,
        timestamp: anchor.timestamp.value,
        offset: anchor.offset as u64,
        bias: match anchor.bias {
            Bias::Left => proto::Bias::Left as i32,
            Bias::Right => proto::Bias::Right as i32,
        },
        buffer_id: anchor.buffer_id,
    }
}
|
|
||||||
|
|
||||||
// This behavior is currently copied in the collab database, for snapshotting channel notes
|
|
||||||
pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
|
|
||||||
Ok(
|
|
||||||
match message
|
|
||||||
.variant
|
|
||||||
.ok_or_else(|| anyhow!("missing operation variant"))?
|
|
||||||
{
|
|
||||||
proto::operation::Variant::Edit(edit) => {
|
|
||||||
crate::Operation::Buffer(text::Operation::Edit(deserialize_edit_operation(edit)))
|
|
||||||
}
|
|
||||||
proto::operation::Variant::Undo(undo) => {
|
|
||||||
crate::Operation::Buffer(text::Operation::Undo(UndoOperation {
|
|
||||||
timestamp: clock::Lamport {
|
|
||||||
replica_id: undo.replica_id as ReplicaId,
|
|
||||||
value: undo.lamport_timestamp,
|
|
||||||
},
|
|
||||||
version: deserialize_version(&undo.version),
|
|
||||||
counts: undo
|
|
||||||
.counts
|
|
||||||
.into_iter()
|
|
||||||
.map(|c| {
|
|
||||||
(
|
|
||||||
clock::Lamport {
|
|
||||||
replica_id: c.replica_id as ReplicaId,
|
|
||||||
value: c.lamport_timestamp,
|
|
||||||
},
|
|
||||||
c.count,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
proto::operation::Variant::UpdateSelections(message) => {
|
|
||||||
let selections = message
|
|
||||||
.selections
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|selection| {
|
|
||||||
Some(Selection {
|
|
||||||
id: selection.id as usize,
|
|
||||||
start: deserialize_anchor(selection.start?.anchor?)?,
|
|
||||||
end: deserialize_anchor(selection.end?.anchor?)?,
|
|
||||||
reversed: selection.reversed,
|
|
||||||
goal: SelectionGoal::None,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
crate::Operation::UpdateSelections {
|
|
||||||
lamport_timestamp: clock::Lamport {
|
|
||||||
replica_id: message.replica_id as ReplicaId,
|
|
||||||
value: message.lamport_timestamp,
|
|
||||||
},
|
|
||||||
selections: Arc::from(selections),
|
|
||||||
line_mode: message.line_mode,
|
|
||||||
cursor_shape: deserialize_cursor_shape(
|
|
||||||
proto::CursorShape::from_i32(message.cursor_shape)
|
|
||||||
.ok_or_else(|| anyhow!("Missing cursor shape"))?,
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
proto::operation::Variant::UpdateDiagnostics(message) => {
|
|
||||||
crate::Operation::UpdateDiagnostics {
|
|
||||||
lamport_timestamp: clock::Lamport {
|
|
||||||
replica_id: message.replica_id as ReplicaId,
|
|
||||||
value: message.lamport_timestamp,
|
|
||||||
},
|
|
||||||
server_id: LanguageServerId(message.server_id as usize),
|
|
||||||
diagnostics: deserialize_diagnostics(message.diagnostics),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
proto::operation::Variant::UpdateCompletionTriggers(message) => {
|
|
||||||
crate::Operation::UpdateCompletionTriggers {
|
|
||||||
triggers: message.triggers,
|
|
||||||
lamport_timestamp: clock::Lamport {
|
|
||||||
replica_id: message.replica_id as ReplicaId,
|
|
||||||
value: message.lamport_timestamp,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation {
|
|
||||||
EditOperation {
|
|
||||||
timestamp: clock::Lamport {
|
|
||||||
replica_id: edit.replica_id as ReplicaId,
|
|
||||||
value: edit.lamport_timestamp,
|
|
||||||
},
|
|
||||||
version: deserialize_version(&edit.version),
|
|
||||||
ranges: edit.ranges.into_iter().map(deserialize_range).collect(),
|
|
||||||
new_text: edit.new_text.into_iter().map(Arc::from).collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_undo_map_entry(
|
|
||||||
entry: proto::UndoMapEntry,
|
|
||||||
) -> (clock::Lamport, Vec<(clock::Lamport, u32)>) {
|
|
||||||
(
|
|
||||||
clock::Lamport {
|
|
||||||
replica_id: entry.replica_id as u16,
|
|
||||||
value: entry.local_timestamp,
|
|
||||||
},
|
|
||||||
entry
|
|
||||||
.counts
|
|
||||||
.into_iter()
|
|
||||||
.map(|undo_count| {
|
|
||||||
(
|
|
||||||
clock::Lamport {
|
|
||||||
replica_id: undo_count.replica_id as u16,
|
|
||||||
value: undo_count.lamport_timestamp,
|
|
||||||
},
|
|
||||||
undo_count.count,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selection<Anchor>]> {
|
|
||||||
Arc::from(
|
|
||||||
selections
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(deserialize_selection)
|
|
||||||
.collect::<Vec<_>>(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_selection(selection: proto::Selection) -> Option<Selection<Anchor>> {
|
|
||||||
Some(Selection {
|
|
||||||
id: selection.id as usize,
|
|
||||||
start: deserialize_anchor(selection.start?.anchor?)?,
|
|
||||||
end: deserialize_anchor(selection.end?.anchor?)?,
|
|
||||||
reversed: selection.reversed,
|
|
||||||
goal: SelectionGoal::None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_diagnostics(
|
|
||||||
diagnostics: Vec<proto::Diagnostic>,
|
|
||||||
) -> Arc<[DiagnosticEntry<Anchor>]> {
|
|
||||||
diagnostics
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|diagnostic| {
|
|
||||||
Some(DiagnosticEntry {
|
|
||||||
range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
|
|
||||||
diagnostic: Diagnostic {
|
|
||||||
source: diagnostic.source,
|
|
||||||
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
|
|
||||||
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
|
|
||||||
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
|
|
||||||
proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION,
|
|
||||||
proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
|
|
||||||
proto::diagnostic::Severity::None => return None,
|
|
||||||
},
|
|
||||||
message: diagnostic.message,
|
|
||||||
group_id: diagnostic.group_id as usize,
|
|
||||||
code: diagnostic.code,
|
|
||||||
is_valid: diagnostic.is_valid,
|
|
||||||
is_primary: diagnostic.is_primary,
|
|
||||||
is_disk_based: diagnostic.is_disk_based,
|
|
||||||
is_unnecessary: diagnostic.is_unnecessary,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
|
|
||||||
Some(Anchor {
|
|
||||||
timestamp: clock::Lamport {
|
|
||||||
replica_id: anchor.replica_id as ReplicaId,
|
|
||||||
value: anchor.timestamp,
|
|
||||||
},
|
|
||||||
offset: anchor.offset as usize,
|
|
||||||
bias: match proto::Bias::from_i32(anchor.bias)? {
|
|
||||||
proto::Bias::Left => Bias::Left,
|
|
||||||
proto::Bias::Right => Bias::Right,
|
|
||||||
},
|
|
||||||
buffer_id: anchor.buffer_id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option<clock::Lamport> {
|
|
||||||
let replica_id;
|
|
||||||
let value;
|
|
||||||
match operation.variant.as_ref()? {
|
|
||||||
proto::operation::Variant::Edit(op) => {
|
|
||||||
replica_id = op.replica_id;
|
|
||||||
value = op.lamport_timestamp;
|
|
||||||
}
|
|
||||||
proto::operation::Variant::Undo(op) => {
|
|
||||||
replica_id = op.replica_id;
|
|
||||||
value = op.lamport_timestamp;
|
|
||||||
}
|
|
||||||
proto::operation::Variant::UpdateDiagnostics(op) => {
|
|
||||||
replica_id = op.replica_id;
|
|
||||||
value = op.lamport_timestamp;
|
|
||||||
}
|
|
||||||
proto::operation::Variant::UpdateSelections(op) => {
|
|
||||||
replica_id = op.replica_id;
|
|
||||||
value = op.lamport_timestamp;
|
|
||||||
}
|
|
||||||
proto::operation::Variant::UpdateCompletionTriggers(op) => {
|
|
||||||
replica_id = op.replica_id;
|
|
||||||
value = op.lamport_timestamp;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(clock::Lamport {
|
|
||||||
replica_id: replica_id as ReplicaId,
|
|
||||||
value,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn serialize_completion(completion: &Completion) -> proto::Completion {
|
|
||||||
proto::Completion {
|
|
||||||
old_start: Some(serialize_anchor(&completion.old_range.start)),
|
|
||||||
old_end: Some(serialize_anchor(&completion.old_range.end)),
|
|
||||||
new_text: completion.new_text.clone(),
|
|
||||||
server_id: completion.server_id.0 as u64,
|
|
||||||
lsp_completion: serde_json::to_vec(&completion.lsp_completion).unwrap(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn deserialize_completion(
|
|
||||||
completion: proto::Completion,
|
|
||||||
language: Option<Arc<Language>>,
|
|
||||||
) -> Result<Completion> {
|
|
||||||
let old_start = completion
|
|
||||||
.old_start
|
|
||||||
.and_then(deserialize_anchor)
|
|
||||||
.ok_or_else(|| anyhow!("invalid old start"))?;
|
|
||||||
let old_end = completion
|
|
||||||
.old_end
|
|
||||||
.and_then(deserialize_anchor)
|
|
||||||
.ok_or_else(|| anyhow!("invalid old end"))?;
|
|
||||||
let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
|
|
||||||
|
|
||||||
let mut label = None;
|
|
||||||
if let Some(language) = language {
|
|
||||||
label = language.label_for_completion(&lsp_completion).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Completion {
|
|
||||||
old_range: old_start..old_end,
|
|
||||||
new_text: completion.new_text,
|
|
||||||
label: label.unwrap_or_else(|| {
|
|
||||||
CodeLabel::plain(
|
|
||||||
lsp_completion.label.clone(),
|
|
||||||
lsp_completion.filter_text.as_deref(),
|
|
||||||
)
|
|
||||||
}),
|
|
||||||
documentation: None,
|
|
||||||
server_id: LanguageServerId(completion.server_id as usize),
|
|
||||||
lsp_completion,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
|
|
||||||
proto::CodeAction {
|
|
||||||
server_id: action.server_id.0 as u64,
|
|
||||||
start: Some(serialize_anchor(&action.range.start)),
|
|
||||||
end: Some(serialize_anchor(&action.range.end)),
|
|
||||||
lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_code_action(action: proto::CodeAction) -> Result<CodeAction> {
|
|
||||||
let start = action
|
|
||||||
.start
|
|
||||||
.and_then(deserialize_anchor)
|
|
||||||
.ok_or_else(|| anyhow!("invalid start"))?;
|
|
||||||
let end = action
|
|
||||||
.end
|
|
||||||
.and_then(deserialize_anchor)
|
|
||||||
.ok_or_else(|| anyhow!("invalid end"))?;
|
|
||||||
let lsp_action = serde_json::from_slice(&action.lsp_action)?;
|
|
||||||
Ok(CodeAction {
|
|
||||||
server_id: LanguageServerId(action.server_id as usize),
|
|
||||||
range: start..end,
|
|
||||||
lsp_action,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction {
|
|
||||||
proto::Transaction {
|
|
||||||
id: Some(serialize_timestamp(transaction.id)),
|
|
||||||
edit_ids: transaction
|
|
||||||
.edit_ids
|
|
||||||
.iter()
|
|
||||||
.copied()
|
|
||||||
.map(serialize_timestamp)
|
|
||||||
.collect(),
|
|
||||||
start: serialize_version(&transaction.start),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transaction> {
|
|
||||||
Ok(Transaction {
|
|
||||||
id: deserialize_timestamp(
|
|
||||||
transaction
|
|
||||||
.id
|
|
||||||
.ok_or_else(|| anyhow!("missing transaction id"))?,
|
|
||||||
),
|
|
||||||
edit_ids: transaction
|
|
||||||
.edit_ids
|
|
||||||
.into_iter()
|
|
||||||
.map(deserialize_timestamp)
|
|
||||||
.collect(),
|
|
||||||
start: deserialize_version(&transaction.start),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts a Lamport timestamp into its protobuf representation.
pub fn serialize_timestamp(timestamp: clock::Lamport) -> proto::LamportTimestamp {
    let clock::Lamport { replica_id, value } = timestamp;
    proto::LamportTimestamp {
        replica_id: replica_id as u32,
        value,
    }
}
|
|
||||||
|
|
||||||
pub fn deserialize_timestamp(timestamp: proto::LamportTimestamp) -> clock::Lamport {
|
|
||||||
clock::Lamport {
|
|
||||||
replica_id: timestamp.replica_id as ReplicaId,
|
|
||||||
value: timestamp.value,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts a [`FullOffset`] range into its protobuf representation.
pub fn serialize_range(range: &Range<FullOffset>) -> proto::Range {
    let start = range.start.0 as u64;
    let end = range.end.0 as u64;
    proto::Range { start, end }
}
|
|
||||||
|
|
||||||
pub fn deserialize_range(range: proto::Range) -> Range<FullOffset> {
|
|
||||||
FullOffset(range.start as usize)..FullOffset(range.end as usize)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_version(message: &[proto::VectorClockEntry]) -> clock::Global {
|
|
||||||
let mut version = clock::Global::new();
|
|
||||||
for entry in message {
|
|
||||||
version.observe(clock::Lamport {
|
|
||||||
replica_id: entry.replica_id as ReplicaId,
|
|
||||||
value: entry.timestamp,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
version
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts a vector clock into its protobuf entry list.
pub fn serialize_version(version: &clock::Global) -> Vec<proto::VectorClockEntry> {
    let mut entries = Vec::new();
    for entry in version.iter() {
        entries.push(proto::VectorClockEntry {
            replica_id: entry.replica_id as u32,
            timestamp: entry.value,
        });
    }
    entries
}
|
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -10,8 +10,8 @@ doctest = false
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
picker = { path = "../picker" }
|
picker = { path = "../picker" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
|
@ -13,13 +13,13 @@ collections = { path = "../collections" }
|
|||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
theme = { package = "theme2", path = "../theme2" }
|
theme = { package = "theme2", path = "../theme2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
workspace = { path = "../workspace" }
|
workspace = { path = "../workspace" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
ui = { package = "ui2", path = "../ui2" }
|
ui = { package = "ui2", path = "../ui2" }
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
lsp = { path = "../lsp" }
|
||||||
futures.workspace = true
|
futures.workspace = true
|
||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
anyhow.workspace = true
|
anyhow.workspace = true
|
||||||
|
@ -13,7 +13,7 @@ test-support = ["async-pipe"]
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
gpui = { path = "../gpui" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
|
|
||||||
anyhow.workspace = true
|
anyhow.workspace = true
|
||||||
@ -29,7 +29,7 @@ serde_json.workspace = true
|
|||||||
smol.workspace = true
|
smol.workspace = true
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
gpui = { path = "../gpui", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
util = { path = "../util", features = ["test-support"] }
|
util = { path = "../util", features = ["test-support"] }
|
||||||
|
|
||||||
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
|
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
|
||||||
|
@ -5,7 +5,7 @@ pub use lsp_types::*;
|
|||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use collections::HashMap;
|
use collections::HashMap;
|
||||||
use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite, FutureExt};
|
use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite, FutureExt};
|
||||||
use gpui::{executor, AsyncAppContext, Task};
|
use gpui::{AsyncAppContext, BackgroundExecutor, Task};
|
||||||
use parking_lot::Mutex;
|
use parking_lot::Mutex;
|
||||||
use postage::{barrier, prelude::Stream};
|
use postage::{barrier, prelude::Stream};
|
||||||
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
||||||
@ -62,7 +62,7 @@ pub struct LanguageServer {
|
|||||||
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
|
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
|
||||||
response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
|
response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
|
||||||
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
|
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
|
||||||
executor: Arc<executor::Background>,
|
executor: BackgroundExecutor,
|
||||||
#[allow(clippy::type_complexity)]
|
#[allow(clippy::type_complexity)]
|
||||||
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
||||||
output_done_rx: Mutex<Option<barrier::Receiver>>,
|
output_done_rx: Mutex<Option<barrier::Receiver>>,
|
||||||
@ -210,7 +210,7 @@ impl LanguageServer {
|
|||||||
Stdin: AsyncWrite + Unpin + Send + 'static,
|
Stdin: AsyncWrite + Unpin + Send + 'static,
|
||||||
Stdout: AsyncRead + Unpin + Send + 'static,
|
Stdout: AsyncRead + Unpin + Send + 'static,
|
||||||
Stderr: AsyncRead + Unpin + Send + 'static,
|
Stderr: AsyncRead + Unpin + Send + 'static,
|
||||||
F: FnMut(AnyNotification) + 'static + Send + Clone,
|
F: FnMut(AnyNotification) + 'static + Send + Sync + Clone,
|
||||||
{
|
{
|
||||||
let (outbound_tx, outbound_rx) = channel::unbounded::<String>();
|
let (outbound_tx, outbound_rx) = channel::unbounded::<String>();
|
||||||
let (output_done_tx, output_done_rx) = barrier::channel();
|
let (output_done_tx, output_done_rx) = barrier::channel();
|
||||||
@ -220,30 +220,35 @@ impl LanguageServer {
|
|||||||
Arc::new(Mutex::new(Some(HashMap::<_, ResponseHandler>::default())));
|
Arc::new(Mutex::new(Some(HashMap::<_, ResponseHandler>::default())));
|
||||||
let io_handlers = Arc::new(Mutex::new(HashMap::default()));
|
let io_handlers = Arc::new(Mutex::new(HashMap::default()));
|
||||||
|
|
||||||
let stdout_input_task = cx.spawn(|cx| {
|
let stdout_input_task = cx.spawn({
|
||||||
Self::handle_input(
|
let on_unhandled_notification = on_unhandled_notification.clone();
|
||||||
stdout,
|
let notification_handlers = notification_handlers.clone();
|
||||||
on_unhandled_notification.clone(),
|
let response_handlers = response_handlers.clone();
|
||||||
notification_handlers.clone(),
|
let io_handlers = io_handlers.clone();
|
||||||
response_handlers.clone(),
|
move |cx| {
|
||||||
io_handlers.clone(),
|
Self::handle_input(
|
||||||
cx,
|
stdout,
|
||||||
)
|
on_unhandled_notification,
|
||||||
.log_err()
|
notification_handlers,
|
||||||
|
response_handlers,
|
||||||
|
io_handlers,
|
||||||
|
cx,
|
||||||
|
)
|
||||||
|
.log_err()
|
||||||
|
}
|
||||||
});
|
});
|
||||||
let stderr_input_task = stderr
|
let stderr_input_task = stderr
|
||||||
.map(|stderr| {
|
.map(|stderr| {
|
||||||
cx.spawn(|_| {
|
let io_handlers = io_handlers.clone();
|
||||||
Self::handle_stderr(stderr, io_handlers.clone(), stderr_capture.clone())
|
let stderr_captures = stderr_capture.clone();
|
||||||
.log_err()
|
cx.spawn(|_| Self::handle_stderr(stderr, io_handlers, stderr_captures).log_err())
|
||||||
})
|
|
||||||
})
|
})
|
||||||
.unwrap_or_else(|| Task::Ready(Some(None)));
|
.unwrap_or_else(|| Task::Ready(Some(None)));
|
||||||
let input_task = cx.spawn(|_| async move {
|
let input_task = cx.spawn(|_| async move {
|
||||||
let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
|
let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
|
||||||
stdout.or(stderr)
|
stdout.or(stderr)
|
||||||
});
|
});
|
||||||
let output_task = cx.background().spawn({
|
let output_task = cx.background_executor().spawn({
|
||||||
Self::handle_output(
|
Self::handle_output(
|
||||||
stdin,
|
stdin,
|
||||||
outbound_rx,
|
outbound_rx,
|
||||||
@ -264,7 +269,7 @@ impl LanguageServer {
|
|||||||
code_action_kinds,
|
code_action_kinds,
|
||||||
next_id: Default::default(),
|
next_id: Default::default(),
|
||||||
outbound_tx,
|
outbound_tx,
|
||||||
executor: cx.background(),
|
executor: cx.background_executor().clone(),
|
||||||
io_tasks: Mutex::new(Some((input_task, output_task))),
|
io_tasks: Mutex::new(Some((input_task, output_task))),
|
||||||
output_done_rx: Mutex::new(Some(output_done_rx)),
|
output_done_rx: Mutex::new(Some(output_done_rx)),
|
||||||
root_path: root_path.to_path_buf(),
|
root_path: root_path.to_path_buf(),
|
||||||
@ -481,10 +486,7 @@ impl LanguageServer {
|
|||||||
completion_item: Some(CompletionItemCapability {
|
completion_item: Some(CompletionItemCapability {
|
||||||
snippet_support: Some(true),
|
snippet_support: Some(true),
|
||||||
resolve_support: Some(CompletionItemCapabilityResolveSupport {
|
resolve_support: Some(CompletionItemCapabilityResolveSupport {
|
||||||
properties: vec![
|
properties: vec!["additionalTextEdits".to_string()],
|
||||||
"documentation".to_string(),
|
|
||||||
"additionalTextEdits".to_string(),
|
|
||||||
],
|
|
||||||
}),
|
}),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
}),
|
}),
|
||||||
@ -610,7 +612,7 @@ impl LanguageServer {
|
|||||||
where
|
where
|
||||||
T: request::Request,
|
T: request::Request,
|
||||||
T::Params: 'static + Send,
|
T::Params: 'static + Send,
|
||||||
F: 'static + Send + FnMut(T::Params, AsyncAppContext) -> Fut,
|
F: 'static + FnMut(T::Params, AsyncAppContext) -> Fut + Send,
|
||||||
Fut: 'static + Future<Output = Result<T::Result>>,
|
Fut: 'static + Future<Output = Result<T::Result>>,
|
||||||
{
|
{
|
||||||
self.on_custom_request(T::METHOD, f)
|
self.on_custom_request(T::METHOD, f)
|
||||||
@ -644,7 +646,7 @@ impl LanguageServer {
|
|||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn on_custom_notification<Params, F>(&self, method: &'static str, mut f: F) -> Subscription
|
pub fn on_custom_notification<Params, F>(&self, method: &'static str, mut f: F) -> Subscription
|
||||||
where
|
where
|
||||||
F: 'static + Send + FnMut(Params, AsyncAppContext),
|
F: 'static + FnMut(Params, AsyncAppContext) + Send,
|
||||||
Params: DeserializeOwned,
|
Params: DeserializeOwned,
|
||||||
{
|
{
|
||||||
let prev_handler = self.notification_handlers.lock().insert(
|
let prev_handler = self.notification_handlers.lock().insert(
|
||||||
@ -672,7 +674,7 @@ impl LanguageServer {
|
|||||||
mut f: F,
|
mut f: F,
|
||||||
) -> Subscription
|
) -> Subscription
|
||||||
where
|
where
|
||||||
F: 'static + Send + FnMut(Params, AsyncAppContext) -> Fut,
|
F: 'static + FnMut(Params, AsyncAppContext) -> Fut + Send,
|
||||||
Fut: 'static + Future<Output = Result<Res>>,
|
Fut: 'static + Future<Output = Result<Res>>,
|
||||||
Params: DeserializeOwned + Send + 'static,
|
Params: DeserializeOwned + Send + 'static,
|
||||||
Res: Serialize,
|
Res: Serialize,
|
||||||
@ -685,7 +687,7 @@ impl LanguageServer {
|
|||||||
match serde_json::from_str(params) {
|
match serde_json::from_str(params) {
|
||||||
Ok(params) => {
|
Ok(params) => {
|
||||||
let response = f(params, cx.clone());
|
let response = f(params, cx.clone());
|
||||||
cx.foreground()
|
cx.foreground_executor()
|
||||||
.spawn({
|
.spawn({
|
||||||
let outbound_tx = outbound_tx.clone();
|
let outbound_tx = outbound_tx.clone();
|
||||||
async move {
|
async move {
|
||||||
@ -780,20 +782,11 @@ impl LanguageServer {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// some child of string literal (be it "" or ``) which is the child of an attribute
|
|
||||||
|
|
||||||
// <Foo className="bar" />
|
|
||||||
// <Foo className={`bar`} />
|
|
||||||
// <Foo className={something + "bar"} />
|
|
||||||
// <Foo className={something + "bar"} />
|
|
||||||
// const classes = "awesome ";
|
|
||||||
// <Foo className={classes} />
|
|
||||||
|
|
||||||
fn request_internal<T: request::Request>(
|
fn request_internal<T: request::Request>(
|
||||||
next_id: &AtomicUsize,
|
next_id: &AtomicUsize,
|
||||||
response_handlers: &Mutex<Option<HashMap<usize, ResponseHandler>>>,
|
response_handlers: &Mutex<Option<HashMap<usize, ResponseHandler>>>,
|
||||||
outbound_tx: &channel::Sender<String>,
|
outbound_tx: &channel::Sender<String>,
|
||||||
executor: &Arc<executor::Background>,
|
executor: &BackgroundExecutor,
|
||||||
params: T::Params,
|
params: T::Params,
|
||||||
) -> impl 'static + Future<Output = anyhow::Result<T::Result>>
|
) -> impl 'static + Future<Output = anyhow::Result<T::Result>>
|
||||||
where
|
where
|
||||||
@ -1071,8 +1064,9 @@ impl FakeLanguageServer {
|
|||||||
.on_request::<T, _, _>(move |params, cx| {
|
.on_request::<T, _, _>(move |params, cx| {
|
||||||
let result = handler(params, cx.clone());
|
let result = handler(params, cx.clone());
|
||||||
let responded_tx = responded_tx.clone();
|
let responded_tx = responded_tx.clone();
|
||||||
|
let executor = cx.background_executor().clone();
|
||||||
async move {
|
async move {
|
||||||
cx.background().simulate_random_delay().await;
|
executor.simulate_random_delay().await;
|
||||||
let result = result.await;
|
let result = result.await;
|
||||||
responded_tx.unbounded_send(()).ok();
|
responded_tx.unbounded_send(()).ok();
|
||||||
result
|
result
|
||||||
|
@ -1,38 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "lsp2"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
publish = false
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
path = "src/lsp2.rs"
|
|
||||||
doctest = false
|
|
||||||
|
|
||||||
[features]
|
|
||||||
test-support = ["async-pipe"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
collections = { path = "../collections" }
|
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
|
||||||
util = { path = "../util" }
|
|
||||||
|
|
||||||
anyhow.workspace = true
|
|
||||||
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553", optional = true }
|
|
||||||
futures.workspace = true
|
|
||||||
log.workspace = true
|
|
||||||
lsp-types = { git = "https://github.com/zed-industries/lsp-types", branch = "updated-completion-list-item-defaults" }
|
|
||||||
parking_lot.workspace = true
|
|
||||||
postage.workspace = true
|
|
||||||
serde.workspace = true
|
|
||||||
serde_derive.workspace = true
|
|
||||||
serde_json.workspace = true
|
|
||||||
smol.workspace = true
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
|
||||||
util = { path = "../util", features = ["test-support"] }
|
|
||||||
|
|
||||||
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
|
|
||||||
ctor.workspace = true
|
|
||||||
env_logger.workspace = true
|
|
||||||
unindent.workspace = true
|
|
File diff suppressed because it is too large
Load Diff
@ -25,8 +25,8 @@ clock = { path = "../clock" }
|
|||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
git = { package = "git3", path = "../git3" }
|
git = { package = "git3", path = "../git3" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
lsp = { path = "../lsp" }
|
||||||
rich_text = { path = "../rich_text" }
|
rich_text = { path = "../rich_text" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
snippet = { path = "../snippet" }
|
snippet = { path = "../snippet" }
|
||||||
@ -61,8 +61,8 @@ tree-sitter-typescript = { workspace = true, optional = true }
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
copilot = { path = "../copilot", features = ["test-support"] }
|
copilot = { path = "../copilot", features = ["test-support"] }
|
||||||
text = { package = "text2", path = "../text2", features = ["test-support"] }
|
text = { package = "text2", path = "../text2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
util = { path = "../util", features = ["test-support"] }
|
util = { path = "../util", features = ["test-support"] }
|
||||||
project = { path = "../project", features = ["test-support"] }
|
project = { path = "../project", features = ["test-support"] }
|
||||||
|
@ -10,10 +10,10 @@ doctest = false
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
ui = { package = "ui2", path = "../ui2" }
|
ui = { package = "ui2", path = "../ui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
picker = { path = "../picker" }
|
picker = { path = "../picker" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
text = { package = "text2", path = "../text2" }
|
text = { package = "text2", path = "../text2" }
|
||||||
|
@ -14,10 +14,10 @@ test-support = []
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
client = { package = "client2", path = "../client2" }
|
client = { package = "client2", path = "../client2" }
|
||||||
collections = { path = "../collections"}
|
collections = { path = "../collections"}
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
fs = { package = "fs2", path = "../fs2" }
|
fs = { package = "fs2", path = "../fs2" }
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
lsp = { path = "../lsp" }
|
||||||
node_runtime = { path = "../node_runtime"}
|
node_runtime = { path = "../node_runtime"}
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
|
|
||||||
@ -30,6 +30,6 @@ futures.workspace = true
|
|||||||
parking_lot.workspace = true
|
parking_lot.workspace = true
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
|
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
|
||||||
|
@ -28,11 +28,11 @@ collections = { path = "../collections" }
|
|||||||
db = { package = "db2", path = "../db2" }
|
db = { package = "db2", path = "../db2" }
|
||||||
fs = { package = "fs2", path = "../fs2" }
|
fs = { package = "fs2", path = "../fs2" }
|
||||||
fsevent = { path = "../fsevent" }
|
fsevent = { path = "../fsevent" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
git = { package = "git3", path = "../git3" }
|
git = { package = "git3", path = "../git3" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
lsp = { path = "../lsp" }
|
||||||
node_runtime = { path = "../node_runtime" }
|
node_runtime = { path = "../node_runtime" }
|
||||||
prettier = { package = "prettier2", path = "../prettier2" }
|
prettier = { package = "prettier2", path = "../prettier2" }
|
||||||
rpc = { package = "rpc2", path = "../rpc2" }
|
rpc = { package = "rpc2", path = "../rpc2" }
|
||||||
@ -74,8 +74,8 @@ collections = { path = "../collections", features = ["test-support"] }
|
|||||||
db = { package = "db2", path = "../db2", features = ["test-support"] }
|
db = { package = "db2", path = "../db2", features = ["test-support"] }
|
||||||
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
|
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
|
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
|
||||||
prettier = { package = "prettier2", path = "../prettier2", features = ["test-support"] }
|
prettier = { package = "prettier2", path = "../prettier2", features = ["test-support"] }
|
||||||
util = { path = "../util", features = ["test-support"] }
|
util = { path = "../util", features = ["test-support"] }
|
||||||
|
@ -34,7 +34,7 @@ unicase = "2.6"
|
|||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
client = { path = "../client2", package = "client2", features = ["test-support"] }
|
client = { path = "../client2", package = "client2", features = ["test-support"] }
|
||||||
language = { path = "../language2", package = "language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
editor = { path = "../editor", features = ["test-support"] }
|
editor = { path = "../editor", features = ["test-support"] }
|
||||||
gpui = { path = "../gpui2", package = "gpui2", features = ["test-support"] }
|
gpui = { path = "../gpui2", package = "gpui2", features = ["test-support"] }
|
||||||
workspace = { path = "../workspace", features = ["test-support"] }
|
workspace = { path = "../workspace", features = ["test-support"] }
|
||||||
|
@ -10,7 +10,7 @@ doctest = false
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
picker = { path = "../picker" }
|
picker = { path = "../picker" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
@ -30,8 +30,8 @@ futures.workspace = true
|
|||||||
editor = { path = "../editor", features = ["test-support"] }
|
editor = { path = "../editor", features = ["test-support"] }
|
||||||
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
|
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
project = { path = "../project", features = ["test-support"] }
|
project = { path = "../project", features = ["test-support"] }
|
||||||
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
|
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
|
||||||
workspace = { path = "../workspace", features = ["test-support"] }
|
workspace = { path = "../workspace", features = ["test-support"] }
|
||||||
|
@ -10,9 +10,9 @@ doctest = false
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
picker = { path = "../picker" }
|
picker = { path = "../picker" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
text = { package = "text2", path = "../text2" }
|
text = { package = "text2", path = "../text2" }
|
||||||
|
@ -19,7 +19,7 @@ collections = { path = "../collections" }
|
|||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
sum_tree = { path = "../sum_tree" }
|
sum_tree = { path = "../sum_tree" }
|
||||||
theme = { package = "theme2", path = "../theme2" }
|
theme = { package = "theme2", path = "../theme2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
anyhow.workspace = true
|
anyhow.workspace = true
|
||||||
futures.workspace = true
|
futures.workspace = true
|
||||||
|
@ -13,7 +13,7 @@ bitflags = "1"
|
|||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
menu = { package = "menu2", path = "../menu2" }
|
menu = { package = "menu2", path = "../menu2" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
|
@ -12,7 +12,7 @@ doctest = false
|
|||||||
ai = { path = "../ai" }
|
ai = { path = "../ai" }
|
||||||
collections = { path = "../collections" }
|
collections = { path = "../collections" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
workspace = { path = "../workspace" }
|
workspace = { path = "../workspace" }
|
||||||
util = { path = "../util" }
|
util = { path = "../util" }
|
||||||
@ -42,7 +42,7 @@ ndarray = { version = "0.15.0" }
|
|||||||
ai = { path = "../ai", features = ["test-support"] }
|
ai = { path = "../ai", features = ["test-support"] }
|
||||||
collections = { path = "../collections", features = ["test-support"] }
|
collections = { path = "../collections", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
project = { path = "../project", features = ["test-support"] }
|
project = { path = "../project", features = ["test-support"] }
|
||||||
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
|
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
|
||||||
workspace = { path = "../workspace", features = ["test-support"] }
|
workspace = { path = "../workspace", features = ["test-support"] }
|
||||||
|
@ -16,11 +16,11 @@ chrono = "0.4"
|
|||||||
clap = { version = "4.4", features = ["derive", "string"] }
|
clap = { version = "4.4", features = ["derive", "string"] }
|
||||||
dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
|
dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
indoc.workspace = true
|
indoc.workspace = true
|
||||||
itertools = "0.11.0"
|
itertools = "0.11.0"
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
log.workspace = true
|
log.workspace = true
|
||||||
rust-embed.workspace = true
|
rust-embed.workspace = true
|
||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
|
@ -10,7 +10,7 @@ doctest = false
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
# search = { path = "../search" }
|
# search = { path = "../search" }
|
||||||
|
@ -13,7 +13,7 @@ client = { package = "client2", path = "../client2" }
|
|||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
feature_flags = { path = "../feature_flags" }
|
feature_flags = { path = "../feature_flags" }
|
||||||
fs = { package = "fs2", path = "../fs2" }
|
fs = { package = "fs2", path = "../fs2" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
picker = { path = "../picker" }
|
picker = { path = "../picker" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
|
@ -6,7 +6,7 @@ publish = false
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
fuzzy = {package = "fuzzy2", path = "../fuzzy2"}
|
fuzzy = { path = "../fuzzy"}
|
||||||
fs = {package = "fs2", path = "../fs2"}
|
fs = {package = "fs2", path = "../fs2"}
|
||||||
gpui = {package = "gpui2", path = "../gpui2"}
|
gpui = {package = "gpui2", path = "../gpui2"}
|
||||||
picker = {path = "../picker"}
|
picker = {path = "../picker"}
|
||||||
|
@ -28,7 +28,7 @@ collections = { path = "../collections" }
|
|||||||
command_palette = { path = "../command_palette" }
|
command_palette = { path = "../command_palette" }
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
search = { path = "../search" }
|
search = { path = "../search" }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
workspace = { path = "../workspace" }
|
workspace = { path = "../workspace" }
|
||||||
@ -44,10 +44,10 @@ futures.workspace = true
|
|||||||
|
|
||||||
editor = { path = "../editor", features = ["test-support"] }
|
editor = { path = "../editor", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
project = { path = "../project", features = ["test-support"] }
|
project = { path = "../project", features = ["test-support"] }
|
||||||
util = { path = "../util", features = ["test-support"] }
|
util = { path = "../util", features = ["test-support"] }
|
||||||
settings = { package = "settings2", path = "../settings2" }
|
settings = { package = "settings2", path = "../settings2" }
|
||||||
workspace = { path = "../workspace", features = ["test-support"] }
|
workspace = { path = "../workspace", features = ["test-support"] }
|
||||||
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
|
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
|
||||||
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
|
@ -14,7 +14,7 @@ test-support = []
|
|||||||
client = { package = "client2", path = "../client2" }
|
client = { package = "client2", path = "../client2" }
|
||||||
editor = { path = "../editor" }
|
editor = { path = "../editor" }
|
||||||
fs = { package = "fs2", path = "../fs2" }
|
fs = { package = "fs2", path = "../fs2" }
|
||||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
fuzzy = { path = "../fuzzy" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
ui = { package = "ui2", path = "../ui2" }
|
ui = { package = "ui2", path = "../ui2" }
|
||||||
db = { package = "db2", path = "../db2" }
|
db = { package = "db2", path = "../db2" }
|
||||||
|
@ -27,7 +27,7 @@ collections = { path = "../collections" }
|
|||||||
fs = { path = "../fs2", package = "fs2" }
|
fs = { path = "../fs2", package = "fs2" }
|
||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
install_cli = { path = "../install_cli" }
|
install_cli = { path = "../install_cli" }
|
||||||
language = { path = "../language2", package = "language2" }
|
language = { path = "../language" }
|
||||||
#menu = { path = "../menu" }
|
#menu = { path = "../menu" }
|
||||||
node_runtime = { path = "../node_runtime" }
|
node_runtime = { path = "../node_runtime" }
|
||||||
project = { path = "../project" }
|
project = { path = "../project" }
|
||||||
|
@ -43,9 +43,9 @@ go_to_line = { path = "../go_to_line" }
|
|||||||
gpui = { package = "gpui2", path = "../gpui2" }
|
gpui = { package = "gpui2", path = "../gpui2" }
|
||||||
install_cli = { path = "../install_cli" }
|
install_cli = { path = "../install_cli" }
|
||||||
journal = { path = "../journal" }
|
journal = { path = "../journal" }
|
||||||
language = { package = "language2", path = "../language2" }
|
language = { path = "../language" }
|
||||||
language_selector = { path = "../language_selector" }
|
language_selector = { path = "../language_selector" }
|
||||||
lsp = { package = "lsp2", path = "../lsp2" }
|
lsp = { path = "../lsp" }
|
||||||
menu = { package = "menu2", path = "../menu2" }
|
menu = { package = "menu2", path = "../menu2" }
|
||||||
language_tools = { path = "../language_tools" }
|
language_tools = { path = "../language_tools" }
|
||||||
node_runtime = { path = "../node_runtime" }
|
node_runtime = { path = "../node_runtime" }
|
||||||
@ -149,7 +149,7 @@ call = { package = "call2", path = "../call2", features = ["test-support"] }
|
|||||||
# editor = { path = "../editor", features = ["test-support"] }
|
# editor = { path = "../editor", features = ["test-support"] }
|
||||||
# gpui = { path = "../gpui", features = ["test-support"] }
|
# gpui = { path = "../gpui", features = ["test-support"] }
|
||||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
# lsp = { path = "../lsp", features = ["test-support"] }
|
# lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
project = { path = "../project", features = ["test-support"] }
|
project = { path = "../project", features = ["test-support"] }
|
||||||
# rpc = { path = "../rpc", features = ["test-support"] }
|
# rpc = { path = "../rpc", features = ["test-support"] }
|
||||||
|
@ -3,7 +3,7 @@ use async_trait::async_trait;
|
|||||||
use collections::HashMap;
|
use collections::HashMap;
|
||||||
use futures::lock::Mutex;
|
use futures::lock::Mutex;
|
||||||
use gpui::executor::Background;
|
use gpui::executor::Background;
|
||||||
use language2::{LanguageServerName, LspAdapter, LspAdapterDelegate};
|
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||||
use lsp2::LanguageServerBinary;
|
use lsp2::LanguageServerBinary;
|
||||||
use plugin_runtime::{Plugin, PluginBinary, PluginBuilder, WasiFn};
|
use plugin_runtime::{Plugin, PluginBinary, PluginBuilder, WasiFn};
|
||||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
use std::{any::Any, path::PathBuf, sync::Arc};
|
||||||
|
Loading…
Reference in New Issue
Block a user