Mirror of https://github.com/zed-industries/zed.git (synced 2024-11-07 20:39:04 +03:00)

Merge pull request #1252 from zed-industries/plugin
Language Server WebAssembly Plugin Integration (Part 2)
Commit afc8e9050c

22  .github/workflows/ci.yml (vendored)
@@ -28,7 +28,15 @@ jobs:
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: x86_64-apple-darwin
target: aarch64-apple-darwin
profile: minimal
default: true

- name: Install Rust wasm32-wasi target
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: wasm32-wasi
profile: minimal

- name: Install Node
@@ -58,6 +66,14 @@ jobs:
APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
steps:
- name: Install Rust aarch64-apple-darwin target
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: aarch64-apple-darwin
profile: minimal
default: true

- name: Install Rust x86_64-apple-darwin target
uses: actions-rs/toolchain@v1
with:
@@ -65,11 +81,11 @@ jobs:
target: x86_64-apple-darwin
profile: minimal

- name: Install Rust aarch64-apple-darwin target
- name: Install Rust wasm32-wasi target
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: aarch64-apple-darwin
target: wasm32-wasi
profile: minimal

- name: Install Node
3  .gitignore (vendored)
@@ -1,6 +1,7 @@
/target
**/target
/zed.xcworkspace
.DS_Store
/plugins/bin
/script/node_modules
/styles/node_modules
/crates/collab/.env.toml
1146  Cargo.lock (generated)
File diff suppressed because it is too large.
@@ -13,6 +13,8 @@ cocoa-foundation = { git = "https://github.com/servo/core-foundation-rs", rev =
core-foundation = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
core-foundation-sys = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
core-graphics = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
# TODO - Remove when a new version of RustRocksDB is released
rocksdb = { git = "https://github.com/rust-rocksdb/rust-rocksdb", rev = "39dc822dde743b2a26eb160b660e8fbdab079d49" }

[profile.dev]
split-debuginfo = "unpacked"
@ -1471,7 +1471,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
// Share a project as client A
|
||||
@ -1706,7 +1706,8 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_language_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(vec![".".to_string()]),
|
||||
@ -1715,7 +1716,8 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
client_a
|
||||
@ -1959,7 +1961,7 @@ async fn test_formatting_buffer(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
// Here we insert a fake tree with a directory that exists on disk. This is needed
|
||||
@ -2045,7 +2047,7 @@ async fn test_definition(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
client_a
|
||||
@ -2154,7 +2156,7 @@ async fn test_references(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
client_a
|
||||
@ -2334,7 +2336,7 @@ async fn test_document_highlights(cx_a: &mut TestAppContext, cx_b: &mut TestAppC
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
|
||||
@ -2431,7 +2433,7 @@ async fn test_lsp_hover(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
|
||||
@ -2519,7 +2521,7 @@ async fn test_project_symbols(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
client_a
|
||||
@ -2622,7 +2624,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
client_a
|
||||
@ -2693,7 +2695,7 @@ async fn test_collaborating_with_code_actions(
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
client_a
|
||||
@ -2898,7 +2900,8 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_language_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
|
||||
prepare_provider: Some(true),
|
||||
@ -2907,7 +2910,8 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
client_a
|
||||
@ -3082,10 +3086,12 @@ async fn test_language_server_statuses(
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_language_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
name: "the-language-server",
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
client_a.language_registry.add(Arc::new(language));
|
||||
|
||||
client_a
|
||||
@ -4608,7 +4614,8 @@ async fn test_random_collaboration(
|
||||
},
|
||||
None,
|
||||
);
|
||||
let _fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let _fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
name: "the-fake-language-server",
|
||||
capabilities: lsp::LanguageServer::full_capabilities(),
|
||||
initializer: Some(Box::new({
|
||||
@ -4616,7 +4623,8 @@ async fn test_random_collaboration(
|
||||
let fs = fs.clone();
|
||||
let project = host_project.downgrade();
|
||||
move |fake_server: &mut FakeLanguageServer| {
|
||||
fake_server.handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
|
||||
fake_server.handle_request::<lsp::request::Completion, _, _>(
|
||||
|_, _| async move {
|
||||
Ok(Some(lsp::CompletionResponse::Array(vec![
|
||||
lsp::CompletionItem {
|
||||
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
|
||||
@ -4629,7 +4637,8 @@ async fn test_random_collaboration(
|
||||
..Default::default()
|
||||
},
|
||||
])))
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
|
||||
|_, _| async move {
|
||||
@ -4694,7 +4703,8 @@ async fn test_random_collaboration(
|
||||
project.find_local_worktree(&path, cx)?;
|
||||
let project_path =
|
||||
ProjectPath::from((worktree.read(cx).id(), relative_path));
|
||||
let buffer = project.get_open_buffer(&project_path, cx)?.read(cx);
|
||||
let buffer =
|
||||
project.get_open_buffer(&project_path, cx)?.read(cx);
|
||||
|
||||
let mut highlights = Vec::new();
|
||||
let highlight_count = rng.lock().gen_range(1..=5);
|
||||
@ -4720,7 +4730,8 @@ async fn test_random_collaboration(
|
||||
}
|
||||
})),
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
host_language_registry.add(Arc::new(language));
|
||||
|
||||
let op_start_signal = futures::channel::mpsc::unbounded();
|
||||
|
@ -9302,13 +9302,15 @@ mod tests {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
document_formatting_provider: Some(lsp::OneOf::Left(true)),
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background().clone());
|
||||
fs.insert_file("/file.rs", Default::default()).await;
|
||||
@ -9414,13 +9416,15 @@ mod tests {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
document_range_formatting_provider: Some(lsp::OneOf::Left(true)),
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background().clone());
|
||||
fs.insert_file("/file.rs", Default::default()).await;
|
||||
@ -9526,7 +9530,8 @@ mod tests {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
|
||||
@ -9535,7 +9540,8 @@ mod tests {
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let text = "
|
||||
one
|
||||
|
@ -449,10 +449,12 @@ impl<'a> EditorLspTestContext<'a> {
|
||||
.unwrap_or(&"txt".to_string())
|
||||
);
|
||||
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
capabilities,
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let project = Project::test(params.fs.clone(), [], cx).await;
|
||||
project.update(cx, |project, _| project.languages().add(Arc::new(language)));
|
||||
|
@@ -7,6 +7,7 @@ pub mod proto;
mod tests;

use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use client::http::HttpClient;
use collections::HashMap;
use futures::{
@@ -17,6 +18,7 @@ use gpui::{MutableAppContext, Task};
use highlight_map::HighlightMap;
use lazy_static::lazy_static;
use parking_lot::{Mutex, RwLock};
use postage::watch;
use regex::Regex;
use serde::{de, Deserialize, Deserializer};
use serde_json::Value;
@@ -29,7 +31,7 @@ use std::{
str,
sync::Arc,
};
use theme::SyntaxTheme;
use theme::{SyntaxTheme, Theme};
use tree_sitter::{self, Query};
use util::ResultExt;

@@ -63,48 +65,141 @@ pub trait ToLspPosition {
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct LanguageServerName(pub Arc<str>);

pub trait LspAdapter: 'static + Send + Sync {
fn name(&self) -> LanguageServerName;
fn fetch_latest_server_version(
/// Represents a Language Server, with certain cached sync properties.
/// Uses [`LspAdapter`] under the hood, but calls all 'static' methods
/// once at startup, and caches the results.
pub struct CachedLspAdapter {
pub name: LanguageServerName,
pub server_args: Vec<String>,
pub initialization_options: Option<Value>,
pub disk_based_diagnostic_sources: Vec<String>,
pub disk_based_diagnostics_progress_token: Option<String>,
pub id_for_language: Option<String>,
pub adapter: Box<dyn LspAdapter>,
}

impl CachedLspAdapter {
pub async fn new<T: LspAdapter>(adapter: T) -> Arc<Self> {
let adapter = Box::new(adapter);
let name = adapter.name().await;
let server_args = adapter.server_args().await;
let initialization_options = adapter.initialization_options().await;
let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
let disk_based_diagnostics_progress_token =
adapter.disk_based_diagnostics_progress_token().await;
let id_for_language = adapter.id_for_language(name.0.as_ref()).await;

Arc::new(CachedLspAdapter {
name,
server_args,
initialization_options,
disk_based_diagnostic_sources,
disk_based_diagnostics_progress_token,
id_for_language,
adapter,
})
}

pub async fn fetch_latest_server_version(
&self,
http: Arc<dyn HttpClient>,
) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>>;
fn fetch_server_binary(
) -> Result<Box<dyn 'static + Send + Any>> {
self.adapter.fetch_latest_server_version(http).await
}

pub async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
container_dir: Arc<Path>,
) -> BoxFuture<'static, Result<PathBuf>>;
fn cached_server_binary(&self, container_dir: Arc<Path>)
-> BoxFuture<'static, Option<PathBuf>>;
container_dir: PathBuf,
) -> Result<PathBuf> {
self.adapter
.fetch_server_binary(version, http, container_dir)
.await
}

fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
pub async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
self.adapter.cached_server_binary(container_dir).await
}

fn label_for_completion(&self, _: &lsp::CompletionItem, _: &Language) -> Option<CodeLabel> {
pub async fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
self.adapter.process_diagnostics(params).await
}

pub async fn label_for_completion(
&self,
completion_item: &lsp::CompletionItem,
language: &Language,
) -> Option<CodeLabel> {
self.adapter
.label_for_completion(completion_item, language)
.await
}

pub async fn label_for_symbol(
&self,
name: &str,
kind: lsp::SymbolKind,
language: &Language,
) -> Option<CodeLabel> {
self.adapter.label_for_symbol(name, kind, language).await
}
}

#[async_trait]
pub trait LspAdapter: 'static + Send + Sync {
async fn name(&self) -> LanguageServerName;

async fn fetch_latest_server_version(
&self,
http: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Send + Any>>;

async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
container_dir: PathBuf,
) -> Result<PathBuf>;

async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf>;

async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}

async fn label_for_completion(
&self,
_: &lsp::CompletionItem,
_: &Language,
) -> Option<CodeLabel> {
None
}

fn label_for_symbol(&self, _: &str, _: lsp::SymbolKind, _: &Language) -> Option<CodeLabel> {
async fn label_for_symbol(
&self,
_: &str,
_: lsp::SymbolKind,
_: &Language,
) -> Option<CodeLabel> {
None
}

fn server_args(&self) -> &[&str] {
&[]
async fn server_args(&self) -> Vec<String> {
Vec::new()
}

fn initialization_options(&self) -> Option<Value> {
async fn initialization_options(&self) -> Option<Value> {
None
}

fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
Default::default()
}

fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
None
}

fn id_for_language(&self, _name: &str) -> Option<String> {
async fn id_for_language(&self, _name: &str) -> Option<String> {
None
}
}
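To illustrate the new shape of the trait, here is a minimal sketch of an adapter written against the async `LspAdapter` trait above and wrapped in `CachedLspAdapter`. The `ExampleLspAdapter` type, the binary name, and the fixed version string are hypothetical and only for illustration; the defaulted methods (`server_args`, `initialization_options`, and so on) fall back to the trait's defaults and are resolved once by `CachedLspAdapter::new`.

```rust
use std::{any::Any, path::PathBuf, sync::Arc};

use anyhow::Result;
use async_trait::async_trait;
use client::http::HttpClient;

// Hypothetical adapter for a fictional "example-language-server".
struct ExampleLspAdapter;

#[async_trait]
impl LspAdapter for ExampleLspAdapter {
    async fn name(&self) -> LanguageServerName {
        LanguageServerName("example-language-server".into())
    }

    async fn fetch_latest_server_version(
        &self,
        _http: Arc<dyn HttpClient>,
    ) -> Result<Box<dyn 'static + Send + Any>> {
        // A real adapter would query a release feed here; this sketch pins a fixed version.
        Ok(Box::new("1.0.0".to_string()))
    }

    async fn fetch_server_binary(
        &self,
        _version: Box<dyn 'static + Send + Any>,
        _http: Arc<dyn HttpClient>,
        container_dir: PathBuf,
    ) -> Result<PathBuf> {
        // A real adapter would download and unpack the server into `container_dir`.
        Ok(container_dir.join("example-language-server"))
    }

    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
        let path = container_dir.join("example-language-server");
        path.exists().then(|| path)
    }
}

// The async methods are resolved once and their results cached:
// let adapter = CachedLspAdapter::new(ExampleLspAdapter).await;
```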
@ -165,8 +260,8 @@ pub struct FakeLspAdapter {
|
||||
pub name: &'static str,
|
||||
pub capabilities: lsp::ServerCapabilities,
|
||||
pub initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
|
||||
pub disk_based_diagnostics_progress_token: Option<&'static str>,
|
||||
pub disk_based_diagnostics_sources: &'static [&'static str],
|
||||
pub disk_based_diagnostics_progress_token: Option<String>,
|
||||
pub disk_based_diagnostics_sources: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
@ -180,7 +275,7 @@ pub struct BracketPair {
|
||||
pub struct Language {
|
||||
pub(crate) config: LanguageConfig,
|
||||
pub(crate) grammar: Option<Arc<Grammar>>,
|
||||
pub(crate) adapter: Option<Arc<dyn LspAdapter>>,
|
||||
pub(crate) adapter: Option<Arc<CachedLspAdapter>>,
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
fake_adapter: Option<(
|
||||
@ -219,6 +314,8 @@ pub struct LanguageRegistry {
|
||||
Shared<BoxFuture<'static, Result<PathBuf, Arc<anyhow::Error>>>>,
|
||||
>,
|
||||
>,
|
||||
subscription: RwLock<(watch::Sender<()>, watch::Receiver<()>)>,
|
||||
theme: RwLock<Option<Arc<Theme>>>,
|
||||
}
|
||||
|
||||
impl LanguageRegistry {
|
||||
@ -231,6 +328,8 @@ impl LanguageRegistry {
|
||||
lsp_binary_statuses_rx,
|
||||
login_shell_env_loaded: login_shell_env_loaded.shared(),
|
||||
lsp_binary_paths: Default::default(),
|
||||
subscription: RwLock::new(watch::channel()),
|
||||
theme: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -240,12 +339,21 @@ impl LanguageRegistry {
|
||||
}
|
||||
|
||||
pub fn add(&self, language: Arc<Language>) {
|
||||
if let Some(theme) = self.theme.read().clone() {
|
||||
language.set_theme(&theme.editor.syntax);
|
||||
}
|
||||
self.languages.write().push(language.clone());
|
||||
*self.subscription.write().0.borrow_mut() = ();
|
||||
}
|
||||
|
||||
pub fn set_theme(&self, theme: &SyntaxTheme) {
|
||||
pub fn subscribe(&self) -> watch::Receiver<()> {
|
||||
self.subscription.read().1.clone()
|
||||
}
|
||||
|
||||
pub fn set_theme(&self, theme: Arc<Theme>) {
|
||||
*self.theme.write() = Some(theme.clone());
|
||||
for language in self.languages.read().iter() {
|
||||
language.set_theme(theme);
|
||||
language.set_theme(&theme.editor.syntax);
|
||||
}
|
||||
}
|
||||
|
||||
@ -345,7 +453,7 @@ impl LanguageRegistry {
|
||||
let server_binary_path = this
|
||||
.lsp_binary_paths
|
||||
.lock()
|
||||
.entry(adapter.name())
|
||||
.entry(adapter.name.clone())
|
||||
.or_insert_with(|| {
|
||||
get_server_binary_path(
|
||||
adapter.clone(),
|
||||
@ -362,11 +470,11 @@ impl LanguageRegistry {
|
||||
.map_err(|e| anyhow!(e));
|
||||
|
||||
let server_binary_path = server_binary_path.await?;
|
||||
let server_args = adapter.server_args();
|
||||
let server_args = &adapter.server_args;
|
||||
let server = lsp::LanguageServer::new(
|
||||
server_id,
|
||||
&server_binary_path,
|
||||
server_args,
|
||||
&server_args,
|
||||
&root_path,
|
||||
cx,
|
||||
)?;
|
||||
@ -382,13 +490,13 @@ impl LanguageRegistry {
|
||||
}
|
||||
|
||||
async fn get_server_binary_path(
|
||||
adapter: Arc<dyn LspAdapter>,
|
||||
adapter: Arc<CachedLspAdapter>,
|
||||
language: Arc<Language>,
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
download_dir: Arc<Path>,
|
||||
statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
|
||||
) -> Result<PathBuf> {
|
||||
let container_dir: Arc<Path> = download_dir.join(adapter.name().0.as_ref()).into();
|
||||
let container_dir = download_dir.join(adapter.name.0.as_ref());
|
||||
if !container_dir.exists() {
|
||||
smol::fs::create_dir_all(&container_dir)
|
||||
.await
|
||||
@ -424,7 +532,7 @@ async fn get_server_binary_path(
|
||||
}
|
||||
|
||||
async fn fetch_latest_server_binary_path(
|
||||
adapter: Arc<dyn LspAdapter>,
|
||||
adapter: Arc<CachedLspAdapter>,
|
||||
language: Arc<Language>,
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
container_dir: &Path,
|
||||
@ -444,7 +552,7 @@ async fn fetch_latest_server_binary_path(
|
||||
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloading))
|
||||
.await?;
|
||||
let path = adapter
|
||||
.fetch_server_binary(version_info, http_client, container_dir.clone())
|
||||
.fetch_server_binary(version_info, http_client, container_dir.to_path_buf())
|
||||
.await?;
|
||||
lsp_binary_statuses_tx
|
||||
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloaded))
|
||||
@ -473,7 +581,7 @@ impl Language {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lsp_adapter(&self) -> Option<Arc<dyn LspAdapter>> {
|
||||
pub fn lsp_adapter(&self) -> Option<Arc<CachedLspAdapter>> {
|
||||
self.adapter.clone()
|
||||
}
|
||||
|
||||
@ -505,19 +613,19 @@ impl Language {
|
||||
Arc::get_mut(self.grammar.as_mut().unwrap()).unwrap()
|
||||
}
|
||||
|
||||
pub fn with_lsp_adapter(mut self, lsp_adapter: Arc<dyn LspAdapter>) -> Self {
|
||||
pub fn with_lsp_adapter(mut self, lsp_adapter: Arc<CachedLspAdapter>) -> Self {
|
||||
self.adapter = Some(lsp_adapter);
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn set_fake_lsp_adapter(
|
||||
pub async fn set_fake_lsp_adapter(
|
||||
&mut self,
|
||||
fake_lsp_adapter: FakeLspAdapter,
|
||||
fake_lsp_adapter: Arc<FakeLspAdapter>,
|
||||
) -> mpsc::UnboundedReceiver<lsp::FakeLanguageServer> {
|
||||
let (servers_tx, servers_rx) = mpsc::unbounded();
|
||||
let adapter = Arc::new(fake_lsp_adapter);
|
||||
self.fake_adapter = Some((servers_tx, adapter.clone()));
|
||||
self.fake_adapter = Some((servers_tx, fake_lsp_adapter.clone()));
|
||||
let adapter = CachedLspAdapter::new(fake_lsp_adapter).await;
|
||||
self.adapter = Some(adapter);
|
||||
servers_rx
|
||||
}
|
||||
@ -530,32 +638,42 @@ impl Language {
|
||||
self.config.line_comment.as_deref()
|
||||
}
|
||||
|
||||
pub fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
|
||||
self.adapter.as_ref().map_or(&[] as &[_], |adapter| {
|
||||
adapter.disk_based_diagnostic_sources()
|
||||
})
|
||||
pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
|
||||
match self.adapter.as_ref() {
|
||||
Some(adapter) => &adapter.disk_based_diagnostic_sources,
|
||||
None => &[],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
|
||||
self.adapter
|
||||
.as_ref()
|
||||
.and_then(|adapter| adapter.disk_based_diagnostics_progress_token())
|
||||
pub async fn disk_based_diagnostics_progress_token(&self) -> Option<&str> {
|
||||
if let Some(adapter) = self.adapter.as_ref() {
|
||||
adapter.disk_based_diagnostics_progress_token.as_deref()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) {
|
||||
pub async fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) {
|
||||
if let Some(processor) = self.adapter.as_ref() {
|
||||
processor.process_diagnostics(diagnostics);
|
||||
processor.process_diagnostics(diagnostics).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn label_for_completion(&self, completion: &lsp::CompletionItem) -> Option<CodeLabel> {
|
||||
pub async fn label_for_completion(
|
||||
&self,
|
||||
completion: &lsp::CompletionItem,
|
||||
) -> Option<CodeLabel> {
|
||||
self.adapter
|
||||
.as_ref()?
|
||||
.label_for_completion(completion, self)
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn label_for_symbol(&self, name: &str, kind: lsp::SymbolKind) -> Option<CodeLabel> {
|
||||
self.adapter.as_ref()?.label_for_symbol(name, kind, self)
|
||||
pub async fn label_for_symbol(&self, name: &str, kind: lsp::SymbolKind) -> Option<CodeLabel> {
|
||||
self.adapter
|
||||
.as_ref()?
|
||||
.label_for_symbol(name, kind, self)
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn highlight_text<'a>(
|
||||
@ -664,45 +782,46 @@ impl Default for FakeLspAdapter {
|
||||
capabilities: lsp::LanguageServer::full_capabilities(),
|
||||
initializer: None,
|
||||
disk_based_diagnostics_progress_token: None,
|
||||
disk_based_diagnostics_sources: &[],
|
||||
disk_based_diagnostics_sources: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl LspAdapter for FakeLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
#[async_trait]
|
||||
impl LspAdapter for Arc<FakeLspAdapter> {
|
||||
async fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName(self.name.into())
|
||||
}
|
||||
|
||||
fn fetch_latest_server_version(
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
_: Arc<dyn HttpClient>,
|
||||
) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
|
||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn fetch_server_binary(
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
_: Box<dyn 'static + Send + Any>,
|
||||
_: Arc<dyn HttpClient>,
|
||||
_: Arc<Path>,
|
||||
) -> BoxFuture<'static, Result<PathBuf>> {
|
||||
_: PathBuf,
|
||||
) -> Result<PathBuf> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn cached_server_binary(&self, _: Arc<Path>) -> BoxFuture<'static, Option<PathBuf>> {
|
||||
async fn cached_server_binary(&self, _: PathBuf) -> Option<PathBuf> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
|
||||
async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
|
||||
|
||||
fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
|
||||
self.disk_based_diagnostics_sources
|
||||
async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
|
||||
self.disk_based_diagnostics_sources.clone()
|
||||
}
|
||||
|
||||
fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
|
||||
self.disk_based_diagnostics_progress_token
|
||||
async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
|
||||
self.disk_based_diagnostics_progress_token.clone()
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -397,9 +397,9 @@ pub fn serialize_completion(completion: &Completion) -> proto::Completion {
}
}

pub fn deserialize_completion(
pub async fn deserialize_completion(
completion: proto::Completion,
language: Option<&Arc<Language>>,
language: Option<Arc<Language>>,
) -> Result<Completion> {
let old_start = completion
.old_start
@@ -410,12 +410,15 @@ pub fn deserialize_completion(
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid old end"))?;
let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
let label = match language {
Some(l) => l.label_for_completion(&lsp_completion).await,
None => None,
};

Ok(Completion {
old_range: old_start..old_end,
new_text: completion.new_text,
label: language
.and_then(|l| l.label_for_completion(&lsp_completion))
.unwrap_or(CodeLabel::plain(
label: label.unwrap_or(CodeLabel::plain(
lsp_completion.label.clone(),
lsp_completion.filter_text.as_deref(),
)),
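Because `label_for_completion` is now async, `deserialize_completion` itself becomes async and takes an owned `Arc<Language>` instead of a borrow. A sketch of the updated call shape; the helper name and variables are illustrative, not part of the PR:

```rust
// Illustrative wrapper showing the new await-based call shape.
async fn completion_from_proto(
    msg: proto::Completion,
    language: Option<Arc<Language>>,
) -> Result<Completion> {
    // The language is passed by value (an Arc clone) so the async label lookup can await it.
    deserialize_completion(msg, language).await
}
```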
@@ -101,10 +101,10 @@ struct Error {
}

impl LanguageServer {
pub fn new(
pub fn new<T: AsRef<std::ffi::OsStr>>(
server_id: usize,
binary_path: &Path,
args: &[&str],
args: &[T],
root_path: &Path,
cx: AsyncAppContext,
) -> Result<Self> {
@@ -258,6 +258,9 @@ impl LanguageServer {
}
}

/// Initializes a language server.
/// Note that `options` is used directly to construct [`InitializeParams`],
/// which is why it is owned.
pub async fn initialize(mut self, options: Option<Value>) -> Result<Arc<Self>> {
let root_uri = Url::from_file_path(&self.root_path).unwrap();
#[allow(deprecated)]
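Making `args` generic over `AsRef<OsStr>` lets callers pass either string literals or the owned `Vec<String>` that is now cached on `CachedLspAdapter`. A sketch of the call shape, with illustrative placeholder names for everything except the argument slice (a `&Vec<String>` deref-coerces to `&[String]`):

```rust
// `server_id`, `server_binary_path`, `root_path`, and `cx` are placeholders for illustration.
let server = lsp::LanguageServer::new(
    server_id,
    &server_binary_path,
    &adapter.server_args, // Vec<String> cached by CachedLspAdapter
    &root_path,
    cx,
)?;
```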
9  crates/plugin/Cargo.toml (new file)
@@ -0,0 +1,9 @@
[package]
name = "plugin"
version = "0.1.0"
edition = "2021"

[dependencies]
serde = "1.0"
bincode = "1.3"
plugin_macros = { path = "../plugin_macros" }

61  crates/plugin/src/lib.rs (new file)
@@ -0,0 +1,61 @@
pub use bincode;
pub use serde;

/// This is the buffer that is used Wasm side.
/// Note that it mirrors the functionality of
/// the `WasiBuffer` found in `plugin_runtime/src/plugin.rs`,
/// but has a few different methods.
pub struct __Buffer {
    pub ptr: u32, // *const u8,
    pub len: u32, // usize,
}

impl __Buffer {
    pub fn into_u64(self) -> u64 {
        ((self.ptr as u64) << 32) | (self.len as u64)
    }

    pub fn from_u64(packed: u64) -> Self {
        __Buffer {
            ptr: (packed >> 32) as u32,
            len: packed as u32,
        }
    }
}

/// Allocates a buffer with an exact size.
/// We don't return the size because it has to be passed in anyway.
#[no_mangle]
pub extern "C" fn __alloc_buffer(len: u32) -> u32 {
    let vec = vec![0; len as usize];
    let buffer = unsafe { __Buffer::from_vec(vec) };
    return buffer.ptr;
}

/// Frees a given buffer, requires the size.
#[no_mangle]
pub extern "C" fn __free_buffer(buffer: u64) {
    let vec = unsafe { __Buffer::from_u64(buffer).to_vec() };
    std::mem::drop(vec);
}

impl __Buffer {
    #[inline(always)]
    pub unsafe fn to_vec(&self) -> Vec<u8> {
        core::slice::from_raw_parts(self.ptr as *const u8, self.len as usize).to_vec()
    }

    #[inline(always)]
    pub unsafe fn from_vec(mut vec: Vec<u8>) -> __Buffer {
        vec.shrink_to(0);
        let ptr = vec.as_ptr() as u32;
        let len = vec.len() as u32;
        std::mem::forget(vec);
        __Buffer { ptr, len }
    }
}

pub mod prelude {
    pub use super::{__Buffer, __alloc_buffer};
    pub use plugin_macros::{export, import};
}
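The `ptr`/`len` packing above is the whole of the guest-side ABI surface: a buffer is identified by a single `u64` whose high 32 bits are the pointer and low 32 bits the length. A quick standalone round-trip check of that encoding (a sketch, not part of the crate):

```rust
fn main() {
    // Pack a pointer/length pair the same way __Buffer::into_u64 does.
    let (ptr, len): (u32, u32) = (0x0001_0000, 64);
    let packed: u64 = ((ptr as u64) << 32) | (len as u64);

    // ...and unpack it the same way __Buffer::from_u64 does.
    assert_eq!((packed >> 32) as u32, ptr);
    assert_eq!(packed as u32, len);
}
```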
14
crates/plugin_macros/Cargo.toml
Normal file
14
crates/plugin_macros/Cargo.toml
Normal file
@ -0,0 +1,14 @@
|
||||
[package]
|
||||
name = "plugin_macros"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
syn = { version = "1.0", features = ["full", "extra-traits"] }
|
||||
quote = "1.0"
|
||||
proc-macro2 = "1.0"
|
||||
serde = "1.0"
|
||||
bincode = "1.3"
|
168
crates/plugin_macros/src/lib.rs
Normal file
168
crates/plugin_macros/src/lib.rs
Normal file
@ -0,0 +1,168 @@
|
||||
use core::panic;
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use quote::{format_ident, quote};
|
||||
use syn::{parse_macro_input, Block, FnArg, ForeignItemFn, Ident, ItemFn, Pat, Type, Visibility};
|
||||
|
||||
/// Attribute macro to be used guest-side within a plugin.
|
||||
/// ```ignore
|
||||
/// #[export]
|
||||
/// pub fn say_hello() -> String {
|
||||
/// "Hello from Wasm".into()
|
||||
/// }
|
||||
/// ```
|
||||
/// This macro makes a function defined guest-side avaliable host-side.
|
||||
/// Note that all arguments and return types must be `serde`.
|
||||
#[proc_macro_attribute]
|
||||
pub fn export(args: TokenStream, function: TokenStream) -> TokenStream {
|
||||
if !args.is_empty() {
|
||||
panic!("The export attribute does not take any arguments");
|
||||
}
|
||||
|
||||
let inner_fn = parse_macro_input!(function as ItemFn);
|
||||
|
||||
if !inner_fn.sig.generics.params.is_empty() {
|
||||
panic!("Exported functions can not take generic parameters");
|
||||
}
|
||||
|
||||
if let Visibility::Public(_) = inner_fn.vis {
|
||||
} else {
|
||||
panic!("The export attribute only works for public functions");
|
||||
}
|
||||
|
||||
let inner_fn_name = format_ident!("{}", inner_fn.sig.ident);
|
||||
let outer_fn_name = format_ident!("__{}", inner_fn_name);
|
||||
|
||||
let variadic = inner_fn.sig.inputs.len();
|
||||
let i = (0..variadic).map(syn::Index::from);
|
||||
let t: Vec<Type> = inner_fn
|
||||
.sig
|
||||
.inputs
|
||||
.iter()
|
||||
.map(|x| match x {
|
||||
FnArg::Receiver(_) => {
|
||||
panic!("All arguments must have specified types, no `self` allowed")
|
||||
}
|
||||
FnArg::Typed(item) => *item.ty.clone(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
// this is cursed...
|
||||
let (args, ty) = if variadic != 1 {
|
||||
(
|
||||
quote! {
|
||||
#( data.#i ),*
|
||||
},
|
||||
quote! {
|
||||
( #( #t ),* )
|
||||
},
|
||||
)
|
||||
} else {
|
||||
let ty = &t[0];
|
||||
(quote! { data }, quote! { #ty })
|
||||
};
|
||||
|
||||
TokenStream::from(quote! {
|
||||
#[no_mangle]
|
||||
#inner_fn
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn #outer_fn_name(packed_buffer: u64) -> u64 {
|
||||
// setup
|
||||
let data = unsafe { ::plugin::__Buffer::from_u64(packed_buffer).to_vec() };
|
||||
|
||||
// operation
|
||||
let data: #ty = match ::plugin::bincode::deserialize(&data) {
|
||||
Ok(d) => d,
|
||||
Err(e) => panic!("Data passed to function not deserializable."),
|
||||
};
|
||||
let result = #inner_fn_name(#args);
|
||||
let new_data: Result<Vec<u8>, _> = ::plugin::bincode::serialize(&result);
|
||||
let new_data = new_data.unwrap();
|
||||
|
||||
// teardown
|
||||
let new_buffer = unsafe { ::plugin::__Buffer::from_vec(new_data) }.into_u64();
|
||||
return new_buffer;
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Attribute macro to be used guest-side within a plugin.
|
||||
/// ```ignore
|
||||
/// #[import]
|
||||
/// pub fn operating_system_name() -> String;
|
||||
/// ```
|
||||
/// This macro makes a function defined host-side avaliable guest-side.
|
||||
/// Note that all arguments and return types must be `serde`.
|
||||
/// All that's provided is a signature, as the function is implemented host-side.
|
||||
#[proc_macro_attribute]
|
||||
pub fn import(args: TokenStream, function: TokenStream) -> TokenStream {
|
||||
if !args.is_empty() {
|
||||
panic!("The import attribute does not take any arguments");
|
||||
}
|
||||
|
||||
let fn_declare = parse_macro_input!(function as ForeignItemFn);
|
||||
|
||||
if !fn_declare.sig.generics.params.is_empty() {
|
||||
panic!("Exported functions can not take generic parameters");
|
||||
}
|
||||
|
||||
// let inner_fn_name = format_ident!("{}", fn_declare.sig.ident);
|
||||
let extern_fn_name = format_ident!("__{}", fn_declare.sig.ident);
|
||||
|
||||
let (args, tys): (Vec<Ident>, Vec<Type>) = fn_declare
|
||||
.sig
|
||||
.inputs
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|x| match x {
|
||||
FnArg::Receiver(_) => {
|
||||
panic!("All arguments must have specified types, no `self` allowed")
|
||||
}
|
||||
FnArg::Typed(t) => {
|
||||
if let Pat::Ident(i) = *t.pat {
|
||||
(i.ident, *t.ty)
|
||||
} else {
|
||||
panic!("All function arguments must be identifiers");
|
||||
}
|
||||
}
|
||||
})
|
||||
.unzip();
|
||||
|
||||
let body = TokenStream::from(quote! {
|
||||
{
|
||||
// setup
|
||||
let data: (#( #tys ),*) = (#( #args ),*);
|
||||
let data = ::plugin::bincode::serialize(&data).unwrap();
|
||||
let buffer = unsafe { ::plugin::__Buffer::from_vec(data) };
|
||||
|
||||
// operation
|
||||
let new_buffer = unsafe { #extern_fn_name(buffer.into_u64()) };
|
||||
let new_data = unsafe { ::plugin::__Buffer::from_u64(new_buffer).to_vec() };
|
||||
|
||||
// teardown
|
||||
match ::plugin::bincode::deserialize(&new_data) {
|
||||
Ok(d) => d,
|
||||
Err(e) => panic!("Data returned from function not deserializable."),
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let block = parse_macro_input!(body as Block);
|
||||
|
||||
let inner_fn = ItemFn {
|
||||
attrs: fn_declare.attrs,
|
||||
vis: fn_declare.vis,
|
||||
sig: fn_declare.sig,
|
||||
block: Box::new(block),
|
||||
};
|
||||
|
||||
TokenStream::from(quote! {
|
||||
extern "C" {
|
||||
fn #extern_fn_name(buffer: u64) -> u64;
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
#inner_fn
|
||||
})
|
||||
}
|
18  crates/plugin_runtime/Cargo.toml (new file)
@@ -0,0 +1,18 @@
[package]
name = "plugin_runtime"
version = "0.1.0"
edition = "2021"

[dependencies]
wasmtime = "0.38"
wasmtime-wasi = "0.38"
wasi-common = "0.38"
anyhow = { version = "1.0", features = ["std"] }
serde = "1.0"
serde_json = "1.0"
bincode = "1.3"
pollster = "0.2.5"
smol = "1.2.5"

[build-dependencies]
wasmtime = "0.38"
58  crates/plugin_runtime/README.md (new file)
@@ -0,0 +1,58 @@
# Zed's Plugin Runner
Wasm plugins can be run through `wasmtime`, with support for sandboxed system integration through WASI. There are three `plugin` crates that implement different things:

1. `plugin_runtime` loads and runs compiled `Wasm` plugins, and handles setting up system bindings.

2. `plugin` is the crate that Rust Wasm plugins should depend on. It re-exports some required crates (e.g. `serde`, `bincode`) and provides some necessary macros for generating bindings that `plugin_runtime` can hook into.

3. `plugin_macros` implements the proc macros required by `plugin`, like the `#[bind]` attribute macro.

## ABI
The interface between the host Rust runtime ('Runtime') and plugins implemented in Wasm ('Plugin') is pretty simple.

`Buffer` is a pair of two 4-byte (`u32`) fields, encoded as a single `u64`.

```
struct Buffer {
    ptr: u32,
    len: u32,
}
```

All functions that Plugin exports must have the following properties:

- Have the signature `fn(ptr: u64) -> u64`, where both the argument and return types are a `Buffer`:

  - The input `Buffer` will contain the input arguments serialized to `bincode`.
  - The output `Buffer` will contain the output arguments serialized to `bincode`.

- Have a name starting with two underscores.

Additionally, Plugin must export:

- an `__alloc_buffer` function that, given a `u32` length, returns a `u32` pointer to a buffer of that length.
- a `__free_buffer` function that, given a buffer encoded as a `u64`, frees the buffer at the given location, and does not return anything.

Note that all of these requirements are automatically fulfilled for any Rust Wasm plugin that uses the `plugin` crate and imports the `prelude`.

Here's an example Rust Wasm plugin that doubles the value of every float in a `Vec<f64>` passed into it:

```rust
use plugin::prelude::*;

#[export]
pub fn double(mut x: Vec<f64>) -> Vec<f64> {
    x.into_iter().map(|x| x * 2.0).collect()
}
```

All the serialization code is automatically generated by `#[export]`.

You can specify functions that must be defined host-side by using the `#[import]` attribute. This attribute must be attached to a function signature:

```rust
#[import]
fn run(command: String) -> Vec<u8>;
```

The `#[import]` macro will generate a function body that performs the proper serialization/deserialization needed to call out to the host Rust runtime. Note that the same ABI is used for both `#[import]` and `#[export]`.
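For completeness, here is a sketch of how the host side could satisfy that `run` import when loading the plugin, using the `PluginBuilder::host_function_async` API added in this PR. The command-splitting logic mirrors the `command_async` host function in `plugin_runtime`'s tests; the `load_plugin` helper and its parameters are illustrative, not part of the change:

```rust
use plugin_runtime::{Plugin, PluginBuilder};

async fn load_plugin(wasm_bytes: &[u8]) -> anyhow::Result<Plugin> {
    PluginBuilder::new_with_default_ctx()?
        // Provides the body for the guest's `#[import] fn run(command: String) -> Vec<u8>;`.
        .host_function_async("run", |command: String| async move {
            let mut args = command.split(' ');
            let program = args.next().unwrap_or("");
            smol::process::Command::new(program)
                .args(args)
                .output()
                .await
                .map(|output| output.stdout)
                .unwrap_or_default()
        })?
        // `false` here means the module is plain Wasm rather than a precompiled artifact.
        .init(false, wasm_bytes)
        .await
}
```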
79
crates/plugin_runtime/build.rs
Normal file
79
crates/plugin_runtime/build.rs
Normal file
@ -0,0 +1,79 @@
|
||||
use std::{io::Write, path::Path};
|
||||
use wasmtime::{Config, Engine};
|
||||
|
||||
fn main() {
|
||||
let base = Path::new("../../plugins");
|
||||
|
||||
println!("cargo:rerun-if-changed={}", base.display());
|
||||
|
||||
let _ = std::fs::remove_dir_all(base.join("bin"));
|
||||
let _ =
|
||||
std::fs::create_dir_all(base.join("bin")).expect("Could not make plugins bin directory");
|
||||
|
||||
let (profile_flags, profile_target) = match std::env::var("PROFILE").unwrap().as_str() {
|
||||
"debug" => (&[][..], "debug"),
|
||||
"release" => (&["--release"][..], "release"),
|
||||
unknown => panic!("unknown profile `{}`", unknown),
|
||||
};
|
||||
|
||||
let build_successful = std::process::Command::new("cargo")
|
||||
.args([
|
||||
"build",
|
||||
"--target",
|
||||
"wasm32-wasi",
|
||||
"--manifest-path",
|
||||
base.join("Cargo.toml").to_str().unwrap(),
|
||||
])
|
||||
.args(profile_flags)
|
||||
.status()
|
||||
.expect("Could not build plugins")
|
||||
.success();
|
||||
assert!(build_successful);
|
||||
|
||||
let binaries = std::fs::read_dir(base.join("target/wasm32-wasi").join(profile_target))
|
||||
.expect("Could not find compiled plugins in target");
|
||||
|
||||
let engine = create_default_engine();
|
||||
|
||||
for file in binaries {
|
||||
let is_wasm = || {
|
||||
let path = file.ok()?.path();
|
||||
if path.extension()? == "wasm" {
|
||||
Some(path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(path) = is_wasm() {
|
||||
let out_path = base.join("bin").join(path.file_name().unwrap());
|
||||
std::fs::copy(&path, &out_path).expect("Could not copy compiled plugin to bin");
|
||||
precompile(&out_path, &engine);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a default engine for compiling Wasm.
|
||||
/// N.B.: this must create the same `Engine` as
|
||||
/// the `create_default_engine` function
|
||||
/// in `plugin_runtime/src/plugin.rs`.
|
||||
fn create_default_engine() -> Engine {
|
||||
let mut config = Config::default();
|
||||
config.async_support(true);
|
||||
// config.epoch_interruption(true);
|
||||
Engine::new(&config).expect("Could not create engine")
|
||||
}
|
||||
|
||||
fn precompile(path: &Path, engine: &Engine) {
|
||||
let bytes = std::fs::read(path).expect("Could not read wasm module");
|
||||
let compiled = engine
|
||||
.precompile_module(&bytes)
|
||||
.expect("Could not precompile module");
|
||||
let out_path = path.parent().unwrap().join(&format!(
|
||||
"{}.pre",
|
||||
path.file_name().unwrap().to_string_lossy()
|
||||
));
|
||||
let mut out_file = std::fs::File::create(out_path)
|
||||
.expect("Could not create output file for precompiled module");
|
||||
out_file.write_all(&compiled).unwrap();
|
||||
}
|
93
crates/plugin_runtime/src/lib.rs
Normal file
93
crates/plugin_runtime/src/lib.rs
Normal file
@ -0,0 +1,93 @@
|
||||
pub mod plugin;
|
||||
pub use plugin::*;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pollster::FutureExt as _;
|
||||
|
||||
#[test]
|
||||
pub fn test_plugin() {
|
||||
pub struct TestPlugin {
|
||||
noop: WasiFn<(), ()>,
|
||||
constant: WasiFn<(), u32>,
|
||||
identity: WasiFn<u32, u32>,
|
||||
add: WasiFn<(u32, u32), u32>,
|
||||
swap: WasiFn<(u32, u32), (u32, u32)>,
|
||||
sort: WasiFn<Vec<u32>, Vec<u32>>,
|
||||
print: WasiFn<String, ()>,
|
||||
and_back: WasiFn<u32, u32>,
|
||||
imports: WasiFn<u32, u32>,
|
||||
half_async: WasiFn<u32, u32>,
|
||||
echo_async: WasiFn<String, String>,
|
||||
}
|
||||
|
||||
async {
|
||||
let mut runtime = PluginBuilder::new_with_default_ctx()
|
||||
.unwrap()
|
||||
.host_function("mystery_number", |input: u32| input + 7)
|
||||
.unwrap()
|
||||
.host_function("import_noop", |_: ()| ())
|
||||
.unwrap()
|
||||
.host_function("import_identity", |input: u32| input)
|
||||
.unwrap()
|
||||
.host_function("import_swap", |(a, b): (u32, u32)| (b, a))
|
||||
.unwrap()
|
||||
.host_function_async("import_half", |a: u32| async move { a / 2 })
|
||||
.unwrap()
|
||||
.host_function_async("command_async", |command: String| async move {
|
||||
let mut args = command.split(' ');
|
||||
let command = args.next().unwrap();
|
||||
smol::process::Command::new(command)
|
||||
.args(args)
|
||||
.output()
|
||||
.await
|
||||
.ok()
|
||||
.map(|output| output.stdout)
|
||||
})
|
||||
.unwrap()
|
||||
.init(
|
||||
false,
|
||||
include_bytes!("../../../plugins/bin/test_plugin.wasm"),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let plugin = TestPlugin {
|
||||
noop: runtime.function("noop").unwrap(),
|
||||
constant: runtime.function("constant").unwrap(),
|
||||
identity: runtime.function("identity").unwrap(),
|
||||
add: runtime.function("add").unwrap(),
|
||||
swap: runtime.function("swap").unwrap(),
|
||||
sort: runtime.function("sort").unwrap(),
|
||||
print: runtime.function("print").unwrap(),
|
||||
and_back: runtime.function("and_back").unwrap(),
|
||||
imports: runtime.function("imports").unwrap(),
|
||||
half_async: runtime.function("half_async").unwrap(),
|
||||
echo_async: runtime.function("echo_async").unwrap(),
|
||||
};
|
||||
|
||||
let unsorted = vec![1, 3, 4, 2, 5];
|
||||
let sorted = vec![1, 2, 3, 4, 5];
|
||||
|
||||
assert_eq!(runtime.call(&plugin.noop, ()).await.unwrap(), ());
|
||||
assert_eq!(runtime.call(&plugin.constant, ()).await.unwrap(), 27);
|
||||
assert_eq!(runtime.call(&plugin.identity, 58).await.unwrap(), 58);
|
||||
assert_eq!(runtime.call(&plugin.add, (3, 4)).await.unwrap(), 7);
|
||||
assert_eq!(runtime.call(&plugin.swap, (1, 2)).await.unwrap(), (2, 1));
|
||||
assert_eq!(runtime.call(&plugin.sort, unsorted).await.unwrap(), sorted);
|
||||
assert_eq!(runtime.call(&plugin.print, "Hi!".into()).await.unwrap(), ());
|
||||
assert_eq!(runtime.call(&plugin.and_back, 1).await.unwrap(), 8);
|
||||
assert_eq!(runtime.call(&plugin.imports, 1).await.unwrap(), 8);
|
||||
assert_eq!(runtime.call(&plugin.half_async, 4).await.unwrap(), 2);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.call(&plugin.echo_async, "eko".into())
|
||||
.await
|
||||
.unwrap(),
|
||||
"eko\n"
|
||||
);
|
||||
}
|
||||
.block_on()
|
||||
}
|
||||
}
|
564
crates/plugin_runtime/src/plugin.rs
Normal file
564
crates/plugin_runtime/src/plugin.rs
Normal file
@ -0,0 +1,564 @@
|
||||
use std::future::Future;
|
||||
|
||||
use std::{fs::File, marker::PhantomData, path::Path};
|
||||
|
||||
use anyhow::{anyhow, Error};
|
||||
use serde::{de::DeserializeOwned, Serialize};
|
||||
|
||||
use wasi_common::{dir, file};
|
||||
use wasmtime::Memory;
|
||||
use wasmtime::{
|
||||
AsContext, AsContextMut, Caller, Config, Engine, Extern, Instance, Linker, Module, Store, Trap,
|
||||
TypedFunc,
|
||||
};
|
||||
use wasmtime_wasi::{Dir, WasiCtx, WasiCtxBuilder};
|
||||
|
||||
/// Represents a resource currently managed by the plugin, like a file descriptor.
|
||||
pub struct PluginResource(u32);
|
||||
|
||||
/// This is the buffer that is used Host side.
|
||||
/// Note that it mirrors the functionality of
|
||||
/// the `__Buffer` found in the `plugin/src/lib.rs` prelude.
|
||||
struct WasiBuffer {
|
||||
ptr: u32,
|
||||
len: u32,
|
||||
}
|
||||
|
||||
impl WasiBuffer {
|
||||
pub fn into_u64(self) -> u64 {
|
||||
((self.ptr as u64) << 32) | (self.len as u64)
|
||||
}
|
||||
|
||||
pub fn from_u64(packed: u64) -> Self {
|
||||
WasiBuffer {
|
||||
ptr: (packed >> 32) as u32,
|
||||
len: packed as u32,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a typed WebAssembly function.
|
||||
pub struct WasiFn<A: Serialize, R: DeserializeOwned> {
|
||||
function: TypedFunc<u64, u64>,
|
||||
_function_type: PhantomData<fn(A) -> R>,
|
||||
}
|
||||
|
||||
impl<A: Serialize, R: DeserializeOwned> Copy for WasiFn<A, R> {}
|
||||
|
||||
impl<A: Serialize, R: DeserializeOwned> Clone for WasiFn<A, R> {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
function: self.function,
|
||||
_function_type: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This struct is used to build a new [`Plugin`], using the builder pattern.
|
||||
/// Create a new default plugin with `PluginBuilder::new_with_default_ctx`,
|
||||
/// and add host-side exported functions using `host_function` and `host_function_async`.
|
||||
/// Finalize the plugin by calling [`init`].
|
||||
pub struct PluginBuilder {
|
||||
wasi_ctx: WasiCtx,
|
||||
engine: Engine,
|
||||
linker: Linker<WasiCtxAlloc>,
|
||||
}
|
||||
|
||||
/// Creates a default engine for compiling Wasm.
|
||||
/// N.B.: this must create the same `Engine` as
|
||||
/// the `create_default_engine` function
|
||||
/// in `plugin_runtime/build.rs`.
|
||||
pub fn create_default_engine() -> Result<Engine, Error> {
|
||||
let mut config = Config::default();
|
||||
config.async_support(true);
|
||||
// config.epoch_interruption(true);
|
||||
Engine::new(&config)
|
||||
}
|
||||
|
||||
impl PluginBuilder {
|
||||
/// Create a new [`PluginBuilder`] with the given WASI context.
|
||||
/// Using the default context is a safe bet, see [`new_with_default_context`].
|
||||
pub fn new(wasi_ctx: WasiCtx) -> Result<Self, Error> {
|
||||
let engine = create_default_engine()?;
|
||||
let linker = Linker::new(&engine);
|
||||
|
||||
Ok(PluginBuilder {
|
||||
// host_functions: HashMap::new(),
|
||||
wasi_ctx,
|
||||
engine,
|
||||
linker,
|
||||
})
|
||||
}
|
||||
|
||||
/// Create a new `PluginBuilder` that inherits the
|
||||
/// host processes' access to `stdout` and `stderr`.
|
||||
pub fn new_with_default_ctx() -> Result<Self, Error> {
|
||||
let wasi_ctx = WasiCtxBuilder::new()
|
||||
.inherit_stdout()
|
||||
.inherit_stderr()
|
||||
.build();
|
||||
Self::new(wasi_ctx)
|
||||
}
|
||||
|
||||
/// Add an `async` host function. See [`host_function`] for details.
|
||||
pub fn host_function_async<F, A, R, Fut>(
|
||||
mut self,
|
||||
name: &str,
|
||||
function: F,
|
||||
) -> Result<Self, Error>
|
||||
where
|
||||
F: Fn(A) -> Fut + Send + Sync + 'static,
|
||||
Fut: Future<Output = R> + Send + 'static,
|
||||
A: DeserializeOwned + Send + 'static,
|
||||
R: Serialize + Send + Sync + 'static,
|
||||
{
|
||||
self.linker.func_wrap1_async(
|
||||
"env",
|
||||
&format!("__{}", name),
|
||||
move |mut caller: Caller<'_, WasiCtxAlloc>, packed_buffer: u64| {
|
||||
// TODO: use try block once avaliable
|
||||
let result: Result<(WasiBuffer, Memory, _), Trap> = (|| {
|
||||
// grab a handle to the memory
|
||||
let mut plugin_memory = match caller.get_export("memory") {
|
||||
Some(Extern::Memory(mem)) => mem,
|
||||
_ => return Err(Trap::new("Could not grab slice of plugin memory"))?,
|
||||
};
|
||||
|
||||
let buffer = WasiBuffer::from_u64(packed_buffer);
|
||||
|
||||
// get the args passed from Guest
|
||||
let args =
|
||||
Plugin::buffer_to_bytes(&mut plugin_memory, caller.as_context(), &buffer)?;
|
||||
|
||||
let args: A = Plugin::deserialize_to_type(&args)?;
|
||||
|
||||
// Call the Host-side function
|
||||
let result = function(args);
|
||||
|
||||
Ok((buffer, plugin_memory, result))
|
||||
})();
|
||||
|
||||
Box::new(async move {
|
||||
let (buffer, mut plugin_memory, future) = result?;
|
||||
|
||||
let result: R = future.await;
|
||||
let result: Result<Vec<u8>, Error> = Plugin::serialize_to_bytes(result)
|
||||
.map_err(|_| {
|
||||
Trap::new("Could not serialize value returned from function").into()
|
||||
});
|
||||
let result = result?;
|
||||
|
||||
Plugin::buffer_to_free(caller.data().free_buffer(), &mut caller, buffer)
|
||||
.await?;
|
||||
|
||||
let buffer = Plugin::bytes_to_buffer(
|
||||
caller.data().alloc_buffer(),
|
||||
&mut plugin_memory,
|
||||
&mut caller,
|
||||
result,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(buffer.into_u64())
|
||||
})
|
||||
},
|
||||
)?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
    /// Add a new host function to the given `PluginBuilder`.
    /// A host function is a function defined host-side, in Rust,
    /// that is accessible guest-side, in WebAssembly.
    /// You can specify host-side functions to import using
    /// the `#[input]` macro attribute:
    /// ```ignore
    /// #[input]
    /// fn total(counts: Vec<f64>) -> f64;
    /// ```
    /// When loading a plugin, you need to provide all host functions the plugin imports:
    /// ```ignore
    /// let plugin = PluginBuilder::new_with_default_context()
    ///     .host_function("total", |counts| counts.iter().fold(0.0, |tot, n| tot + n))
    ///     // and so on...
    /// ```
    /// And that's a wrap!
    pub fn host_function<A, R>(
        mut self,
        name: &str,
        function: impl Fn(A) -> R + Send + Sync + 'static,
    ) -> Result<Self, Error>
    where
        A: DeserializeOwned + Send,
        R: Serialize + Send + Sync,
    {
        self.linker.func_wrap1_async(
            "env",
            &format!("__{}", name),
            move |mut caller: Caller<'_, WasiCtxAlloc>, packed_buffer: u64| {
                // TODO: use try block once available
                let result: Result<(WasiBuffer, Memory, Vec<u8>), Trap> = (|| {
                    // grab a handle to the memory
                    let mut plugin_memory = match caller.get_export("memory") {
                        Some(Extern::Memory(mem)) => mem,
                        _ => return Err(Trap::new("Could not grab slice of plugin memory"))?,
                    };

                    let buffer = WasiBuffer::from_u64(packed_buffer);

                    // get the args passed from the Guest
                    let args = Plugin::buffer_to_type(&mut plugin_memory, &mut caller, &buffer)?;

                    // call the Host-side function
                    let result: R = function(args);

                    // serialize the result back to the Guest
                    let result = Plugin::serialize_to_bytes(result).map_err(|_| {
                        Trap::new("Could not serialize value returned from function")
                    })?;

                    Ok((buffer, plugin_memory, result))
                })();

                Box::new(async move {
                    let (buffer, mut plugin_memory, result) = result?;

                    Plugin::buffer_to_free(caller.data().free_buffer(), &mut caller, buffer)
                        .await?;

                    let buffer = Plugin::bytes_to_buffer(
                        caller.data().alloc_buffer(),
                        &mut plugin_memory,
                        &mut caller,
                        result,
                    )
                    .await?;

                    Ok(buffer.into_u64())
                })
            },
        )?;
        Ok(self)
    }

    /// Initializes a [`Plugin`] from a given compiled Wasm module.
    /// Both binary (`.wasm`) and text (`.wat`) module formats are supported.
    pub async fn init<T: AsRef<[u8]>>(self, precompiled: bool, module: T) -> Result<Plugin, Error> {
        Plugin::init(precompiled, module.as_ref().to_vec(), self).await
    }
}
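
// A minimal usage sketch of the builder above (not part of this change, and
// following the doc example rather than a verified API): the module path and
// the `total` host function are hypothetical, and error handling is elided
// where the builder's exact signatures are not shown here.
//
// let plugin = PluginBuilder::new_with_default_context()
//     .host_function("total", |counts: Vec<f64>| counts.iter().sum::<f64>())?
//     .init(false, include_bytes!("../../plugins/bin/example.wasm"))
//     .await?;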

#[derive(Copy, Clone)]
struct WasiAlloc {
    alloc_buffer: TypedFunc<u32, u32>,
    free_buffer: TypedFunc<u64, ()>,
}

struct WasiCtxAlloc {
    wasi_ctx: WasiCtx,
    alloc: Option<WasiAlloc>,
}

impl WasiCtxAlloc {
    fn alloc_buffer(&self) -> TypedFunc<u32, u32> {
        self.alloc
            .expect("allocator has not been initialized, cannot allocate buffer!")
            .alloc_buffer
    }

    fn free_buffer(&self) -> TypedFunc<u64, ()> {
        self.alloc
            .expect("allocator has not been initialized, cannot free buffer!")
            .free_buffer
    }

    fn init_alloc(&mut self, alloc: WasiAlloc) {
        self.alloc = Some(alloc)
    }
}

/// Represents a WebAssembly plugin, with access to the WebAssembly System Interface.
/// Build a new plugin using [`PluginBuilder`].
pub struct Plugin {
    store: Store<WasiCtxAlloc>,
    instance: Instance,
}

impl Plugin {
    /// Dumps the *entirety* of Wasm linear memory to `stdout`.
    /// Don't call this unless you're debugging a memory issue!
    pub fn dump_memory(data: &[u8]) {
        for (i, byte) in data.iter().enumerate() {
            if i % 32 == 0 {
                println!();
            }
            if i % 4 == 0 {
                print!("|");
            }
            if *byte == 0 {
                print!("__")
            } else {
                print!("{:02x}", byte);
            }
        }
        println!();
    }

    async fn init(
        precompiled: bool,
        module: Vec<u8>,
        plugin: PluginBuilder,
    ) -> Result<Self, Error> {
        // initialize the WebAssembly System Interface context
        let engine = plugin.engine;
        let mut linker = plugin.linker;
        wasmtime_wasi::add_to_linker(&mut linker, |s| &mut s.wasi_ctx)?;

        // create a store; note that we can't initialize the allocator yet,
        // because we can't grab the allocation functions until the instance is initialized.
        let mut store: Store<WasiCtxAlloc> = Store::new(
            &engine,
            WasiCtxAlloc {
                wasi_ctx: plugin.wasi_ctx,
                alloc: None,
            },
        );
        // store.epoch_deadline_async_yield_and_update(todo!());
        let module = if precompiled {
            unsafe { Module::deserialize(&engine, module)? }
        } else {
            Module::new(&engine, module)?
        };

        // load the provided module into the asynchronous runtime
        linker.module_async(&mut store, "", &module).await?;
        let instance = linker.instantiate_async(&mut store, &module).await?;

        // now that the module is initialized,
        // we can initialize the store's allocator
        let alloc_buffer = instance.get_typed_func(&mut store, "__alloc_buffer")?;
        let free_buffer = instance.get_typed_func(&mut store, "__free_buffer")?;
        store.data_mut().init_alloc(WasiAlloc {
            alloc_buffer,
            free_buffer,
        });

        Ok(Plugin { store, instance })
    }
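
    // Hedged sketch (an assumption, not shown in this change): bytes for the
    // `precompiled == true` branch above could be produced ahead of time with
    // wasmtime's `Engine::precompile_module`, then handed back through the
    // `Module::deserialize` call in `init`.
    //
    // let engine = Engine::new(Config::new().async_support(true))?;
    // let precompiled_bytes = engine.precompile_module(&std::fs::read("plugin.wasm")?)?;
    // std::fs::write("plugin.wasm.pre", &precompiled_bytes)?;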

    /// Attaches a file or directory at the given system path to the runtime.
    /// Note that the resource must be freed by calling `remove_resource` afterwards.
    pub fn attach_path<T: AsRef<Path>>(&mut self, path: T) -> Result<PluginResource, Error> {
        // grab the WASI context
        let ctx = self.store.data_mut();

        // open the file we want, and convert it into the right type
        // this is a footgun and a half
        let file = File::open(&path).unwrap();
        let dir = Dir::from_std_file(file);
        let dir = Box::new(wasmtime_wasi::dir::Dir::from_cap_std(dir));

        // grab an empty file descriptor, specify capabilities
        let fd = ctx.wasi_ctx.table().push(Box::new(()))?;
        let caps = dir::DirCaps::all();
        let file_caps = file::FileCaps::all();

        // insert the directory at the given fd,
        // return a handle to the resource
        ctx.wasi_ctx
            .insert_dir(fd, dir, caps, file_caps, path.as_ref().to_path_buf());
        Ok(PluginResource(fd))
    }

    /// Removes a previously attached resource, erroring if it did not exist.
    /// Currently the only resource we support is adding scoped paths (e.g. folders and files)
    /// to plugins using [`attach_path`].
    pub fn remove_resource(&mut self, resource: PluginResource) -> Result<(), Error> {
        self.store
            .data_mut()
            .wasi_ctx
            .table()
            .delete(resource.0)
            .ok_or_else(|| anyhow!("Resource did not exist, but a valid handle was passed in"))?;
        Ok(())
    }
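
    // Hedged usage sketch for the two resource methods above; the path is
    // hypothetical and error handling is elided:
    //
    // let resource = plugin.attach_path("/path/to/worktree")?;
    // // ... run plugin functions that need to read from that directory ...
    // plugin.remove_resource(resource)?;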

    // So this call function is kind of a dance, so I figured it'd be a good idea to document it.
    // The high level is: we take a serde type, serialize it to a byte array
    // (we're doing this using bincode for now),
    // then toss that byte array into WebAssembly.
    // WebAssembly grabs that byte array, does some magic,
    // and serializes the result into yet another byte array.
    // We then grab *that* result byte array and deserialize it into a result.
    //
    // phew...
    //
    // Now the problem is, WebAssembly doesn't support buffers.
    // Only really like i32s, that's it (yeah, it's sad. Not even unsigned!)
    // (ok, I'm exaggerating a bit).
    //
    // The Wasm function that this calls must have a very specific signature:
    //
    // fn(pointer to byte array: i32, length of byte array: i32)
    //     -> pointer to (
    //            pointer to byte_array: i32,
    //            length of byte array: i32,
    //        ): i32
    //
    // This pair `(pointer to byte array, length of byte array)` is called a `Buffer`
    // and can be found in the cargo_test plugin.
    //
    // So on the Wasm side, we grab the two parameters to the function,
    // stuff them into a `Buffer`,
    // and then pray to the `unsafe` Rust gods above that a valid byte array pops out.
    //
    // On the flip side, when returning from a Wasm function,
    // we convert whatever serialized result we get into a byte array,
    // which we stuff into a Buffer and allocate on the heap,
    // a pointer to which we then return.
    // Note the double indirection!
    //
    // So when returning from a function, we actually leak memory *twice*:
    //
    // 1) once when we leak the byte array
    // 2) again when we leak the allocated `Buffer`
    //
    // This isn't a problem because Wasm stops executing after the function returns,
    // so the heap is still valid for our inspection when we want to pull things out.
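
    // Hedged guest-side sketch of the convention described above. The real
    // definitions live in the guest plugin crates (e.g. the cargo_test plugin),
    // not in this file, and the exact field layout here is illustrative only:
    //
    // #[repr(C)]
    // struct Buffer { ptr: u32, len: u32 }
    //
    // #[no_mangle]
    // pub extern "C" fn __alloc_buffer(len: u32) -> u32 {
    //     let buffer = vec![0u8; len as usize];
    //     let ptr = buffer.as_ptr() as u32;
    //     std::mem::forget(buffer); // leaked on purpose, per the notes above
    //     ptr
    // }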

    /// Serializes a given type to bytes.
    fn serialize_to_bytes<A: Serialize>(item: A) -> Result<Vec<u8>, Error> {
        // serialize the argument using bincode
        let bytes = bincode::serialize(&item)?;
        Ok(bytes)
    }

    /// Deserializes a given type from bytes.
    fn deserialize_to_type<R: DeserializeOwned>(bytes: &[u8]) -> Result<R, Error> {
        // deserialize the argument using bincode
        let bytes = bincode::deserialize(bytes)?;
        Ok(bytes)
    }

    // fn deserialize<R: DeserializeOwned>(
    //     plugin_memory: &mut Memory,
    //     mut store: impl AsContextMut<Data = WasiCtxAlloc>,
    //     buffer: WasiBuffer,
    // ) -> Result<R, Error> {
    //     let buffer_start = buffer.ptr as usize;
    //     let buffer_end = buffer_start + buffer.len as usize;

    //     // read the buffer at this point into a byte array
    //     // deserialize the byte array into the provided serde type
    //     let item = &plugin_memory.data(store.as_context())[buffer_start..buffer_end];
    //     let item = bincode::deserialize(bytes)?;
    //     Ok(item)
    // }

    /// Takes an item, allocates a buffer, serializes the argument to that buffer,
    /// and returns a (ptr, len) pair to that buffer.
    async fn bytes_to_buffer(
        alloc_buffer: TypedFunc<u32, u32>,
        plugin_memory: &mut Memory,
        mut store: impl AsContextMut<Data = WasiCtxAlloc>,
        item: Vec<u8>,
    ) -> Result<WasiBuffer, Error> {
        // allocate a buffer and write the argument to that buffer
        let len = item.len() as u32;
        let ptr = alloc_buffer.call_async(&mut store, len).await?;
        plugin_memory.write(&mut store, ptr as usize, &item)?;
        Ok(WasiBuffer { ptr, len })
    }

    /// Takes a `(ptr, len)` pair and returns the corresponding deserialized value.
    fn buffer_to_type<R: DeserializeOwned>(
        plugin_memory: &Memory,
        store: impl AsContext<Data = WasiCtxAlloc>,
        buffer: &WasiBuffer,
    ) -> Result<R, Error> {
        let buffer_start = buffer.ptr as usize;
        let buffer_end = buffer_start + buffer.len as usize;

        // read the buffer at this point into a byte array
        // deserialize the byte array into the provided serde type
        let result = &plugin_memory.data(store.as_context())[buffer_start..buffer_end];
        let result = bincode::deserialize(result)?;

        Ok(result)
    }

    /// Takes a `(ptr, len)` pair and returns the corresponding slice of raw bytes.
    fn buffer_to_bytes<'a>(
        plugin_memory: &'a Memory,
        store: wasmtime::StoreContext<'a, WasiCtxAlloc>,
        buffer: &'a WasiBuffer,
    ) -> Result<&'a [u8], Error> {
        let buffer_start = buffer.ptr as usize;
        let buffer_end = buffer_start + buffer.len as usize;

        // read the buffer at this point into a byte array
        let result = &plugin_memory.data(store)[buffer_start..buffer_end];
        Ok(result)
    }

    async fn buffer_to_free(
        free_buffer: TypedFunc<u64, ()>,
        mut store: impl AsContextMut<Data = WasiCtxAlloc>,
        buffer: WasiBuffer,
    ) -> Result<(), Error> {
        // deallocate the argument buffer
        Ok(free_buffer
            .call_async(&mut store, buffer.into_u64())
            .await?)
    }

    /// Retrieves the handle to a function of a given type.
    pub fn function<A: Serialize, R: DeserializeOwned, T: AsRef<str>>(
        &mut self,
        name: T,
    ) -> Result<WasiFn<A, R>, Error> {
        let fun_name = format!("__{}", name.as_ref());
        let fun = self
            .instance
            .get_typed_func::<u64, u64, _>(&mut self.store, &fun_name)?;
        Ok(WasiFn {
            function: fun,
            _function_type: PhantomData,
        })
    }

    /// Asynchronously calls a function defined Guest-side.
    pub async fn call<A: Serialize, R: DeserializeOwned>(
        &mut self,
        handle: &WasiFn<A, R>,
        arg: A,
    ) -> Result<R, Error> {
        let mut plugin_memory = self
            .instance
            .get_memory(&mut self.store, "memory")
            .ok_or_else(|| anyhow!("Could not grab slice of plugin memory"))?;

        // write the argument to linear memory
        // this returns a (ptr, length) pair
        let arg_buffer = Self::bytes_to_buffer(
            self.store.data().alloc_buffer(),
            &mut plugin_memory,
            &mut self.store,
            Self::serialize_to_bytes(arg)?,
        )
        .await?;

        // call the function, passing in the buffer and its length
        // this returns a ptr to a (ptr, length) pair
        let result_buffer = handle
            .function
            .call_async(&mut self.store, arg_buffer.into_u64())
            .await?;

        Self::buffer_to_type(
            &mut plugin_memory,
            &mut self.store,
            &WasiBuffer::from_u64(result_buffer),
        )
    }
}
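
// Hedged end-to-end sketch of driving a plugin with the two methods above; the
// `sum` export and its argument/return types are hypothetical:
//
// let sum: WasiFn<Vec<f64>, f64> = plugin.function("sum")?;
// let total: f64 = plugin.call(&sum, vec![1.0, 2.0, 3.0]).await?;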
|
@ -389,7 +389,7 @@ impl LspCommand for GetDefinition {
|
||||
this.open_local_buffer_via_lsp(
|
||||
target_uri,
|
||||
language_server.server_id(),
|
||||
lsp_adapter.name(),
|
||||
lsp_adapter.name.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@ -610,7 +610,7 @@ impl LspCommand for GetReferences {
|
||||
this.open_local_buffer_via_lsp(
|
||||
lsp_location.uri,
|
||||
language_server.server_id(),
|
||||
lsp_adapter.name(),
|
||||
lsp_adapter.name.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
|
@ -23,9 +23,9 @@ use language::{
|
||||
deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
|
||||
serialize_version,
|
||||
},
|
||||
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
|
||||
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
|
||||
Language, LanguageRegistry, LanguageServerName, LineEnding, LocalFile, LspAdapter,
|
||||
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
|
||||
CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent,
|
||||
File as _, Language, LanguageRegistry, LanguageServerName, LineEnding, LocalFile,
|
||||
OffsetRangeExt, Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16,
|
||||
Transaction,
|
||||
};
|
||||
@ -124,6 +124,7 @@ pub struct Project {
|
||||
buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
|
||||
nonce: u128,
|
||||
initialized_persistent_state: bool,
|
||||
_maintain_buffer_languages: Task<()>,
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
@ -199,7 +200,7 @@ pub enum Event {
|
||||
pub enum LanguageServerState {
|
||||
Starting(Task<Option<Arc<LanguageServer>>>),
|
||||
Running {
|
||||
adapter: Arc<dyn LspAdapter>,
|
||||
adapter: Arc<CachedLspAdapter>,
|
||||
server: Arc<LanguageServer>,
|
||||
},
|
||||
}
|
||||
@ -472,6 +473,7 @@ impl Project {
|
||||
opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
|
||||
client_subscriptions: Vec::new(),
|
||||
_subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
|
||||
_maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
|
||||
active_entry: None,
|
||||
languages,
|
||||
client,
|
||||
@ -549,6 +551,7 @@ impl Project {
|
||||
loading_local_worktrees: Default::default(),
|
||||
active_entry: None,
|
||||
collaborators: Default::default(),
|
||||
_maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
|
||||
languages,
|
||||
user_store: user_store.clone(),
|
||||
project_store,
|
||||
@ -733,9 +736,9 @@ impl Project {
|
||||
for language in self.languages.to_vec() {
|
||||
if let Some(lsp_adapter) = language.lsp_adapter() {
|
||||
if !settings.enable_language_server(Some(&language.name())) {
|
||||
let lsp_name = lsp_adapter.name();
|
||||
let lsp_name = &lsp_adapter.name;
|
||||
for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
|
||||
if lsp_name == *started_lsp_name {
|
||||
if lsp_name == started_lsp_name {
|
||||
language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
|
||||
}
|
||||
}
|
||||
@ -1628,6 +1631,7 @@ impl Project {
|
||||
})
|
||||
}
|
||||
|
||||
/// LanguageServerName is owned, because it is inserted into a map
|
||||
fn open_local_buffer_via_lsp(
|
||||
&mut self,
|
||||
abs_path: lsp::Url,
|
||||
@ -1817,10 +1821,10 @@ impl Project {
|
||||
if let Some(language) = buffer.language() {
|
||||
let worktree_id = file.worktree_id(cx);
|
||||
if let Some(adapter) = language.lsp_adapter() {
|
||||
language_id = adapter.id_for_language(language.name().as_ref());
|
||||
language_id = adapter.id_for_language.clone();
|
||||
language_server = self
|
||||
.language_server_ids
|
||||
.get(&(worktree_id, adapter.name()))
|
||||
.get(&(worktree_id, adapter.name.clone()))
|
||||
.and_then(|id| self.language_servers.get(&id))
|
||||
.and_then(|server_state| {
|
||||
if let LanguageServerState::Running { server, .. } = server_state {
|
||||
@ -1984,10 +1988,7 @@ impl Project {
|
||||
// that don't support a disk-based progress token.
|
||||
let (lsp_adapter, language_server) =
|
||||
self.language_server_for_buffer(buffer.read(cx), cx)?;
|
||||
if lsp_adapter
|
||||
.disk_based_diagnostics_progress_token()
|
||||
.is_none()
|
||||
{
|
||||
if lsp_adapter.disk_based_diagnostics_progress_token.is_none() {
|
||||
let server_id = language_server.server_id();
|
||||
self.disk_based_diagnostics_finished(server_id, cx);
|
||||
self.broadcast_language_server_update(
|
||||
@ -2007,7 +2008,7 @@ impl Project {
|
||||
fn language_servers_for_worktree(
|
||||
&self,
|
||||
worktree_id: WorktreeId,
|
||||
) -> impl Iterator<Item = (&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
|
||||
) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
|
||||
self.language_server_ids
|
||||
.iter()
|
||||
.filter_map(move |((language_server_worktree_id, _), id)| {
|
||||
@ -2022,6 +2023,34 @@ impl Project {
|
||||
})
|
||||
}
|
||||
|
||||
fn maintain_buffer_languages(
|
||||
languages: &LanguageRegistry,
|
||||
cx: &mut ModelContext<Project>,
|
||||
) -> Task<()> {
|
||||
let mut subscription = languages.subscribe();
|
||||
cx.spawn_weak(|project, mut cx| async move {
|
||||
while let Some(()) = subscription.next().await {
|
||||
if let Some(project) = project.upgrade(&cx) {
|
||||
project.update(&mut cx, |project, cx| {
|
||||
let mut buffers_without_language = Vec::new();
|
||||
for buffer in project.opened_buffers.values() {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
if buffer.read(cx).language().is_none() {
|
||||
buffers_without_language.push(buffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for buffer in buffers_without_language {
|
||||
project.assign_language_to_buffer(&buffer, cx);
|
||||
project.register_buffer_with_language_server(&buffer, cx);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn assign_language_to_buffer(
|
||||
&mut self,
|
||||
buffer: &ModelHandle<Buffer>,
|
||||
@ -2062,7 +2091,7 @@ impl Project {
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
let key = (worktree_id, adapter.name());
|
||||
let key = (worktree_id, adapter.name.clone());
|
||||
|
||||
self.language_server_ids
|
||||
.entry(key.clone())
|
||||
@ -2080,25 +2109,33 @@ impl Project {
|
||||
LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
|
||||
let language_server = language_server?.await.log_err()?;
|
||||
let language_server = language_server
|
||||
.initialize(adapter.initialization_options())
|
||||
.initialize(adapter.initialization_options.clone())
|
||||
.await
|
||||
.log_err()?;
|
||||
let this = this.upgrade(&cx)?;
|
||||
let disk_based_diagnostics_progress_token =
|
||||
adapter.disk_based_diagnostics_progress_token();
|
||||
|
||||
language_server
|
||||
.on_notification::<lsp::notification::PublishDiagnostics, _>({
|
||||
let this = this.downgrade();
|
||||
let adapter = adapter.clone();
|
||||
move |params, mut cx| {
|
||||
move |mut params, cx| {
|
||||
let this = this.clone();
|
||||
let adapter = adapter.clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
adapter.process_diagnostics(&mut params).await;
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.on_lsp_diagnostics_published(
|
||||
server_id, params, &adapter, cx,
|
||||
);
|
||||
this.update_diagnostics(
|
||||
server_id,
|
||||
params,
|
||||
&adapter.disk_based_diagnostic_sources,
|
||||
cx,
|
||||
)
|
||||
.log_err();
|
||||
});
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
@ -2178,6 +2215,9 @@ impl Project {
|
||||
})
|
||||
.detach();
|
||||
|
||||
let disk_based_diagnostics_progress_token =
|
||||
adapter.disk_based_diagnostics_progress_token.clone();
|
||||
|
||||
language_server
|
||||
.on_notification::<lsp::notification::Progress, _>({
|
||||
let this = this.downgrade();
|
||||
@ -2187,7 +2227,7 @@ impl Project {
|
||||
this.on_lsp_progress(
|
||||
params,
|
||||
server_id,
|
||||
disk_based_diagnostics_progress_token,
|
||||
disk_based_diagnostics_progress_token.clone(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@ -2261,7 +2301,7 @@ impl Project {
|
||||
continue;
|
||||
};
|
||||
if file.worktree.read(cx).id() != key.0
|
||||
|| language.lsp_adapter().map(|a| a.name())
|
||||
|| language.lsp_adapter().map(|a| a.name.clone())
|
||||
!= Some(key.1.clone())
|
||||
{
|
||||
continue;
|
||||
@ -2274,14 +2314,15 @@ impl Project {
|
||||
.or_insert_with(|| vec![(0, buffer.text_snapshot())]);
|
||||
let (version, initial_snapshot) = versions.last().unwrap();
|
||||
let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
|
||||
let language_id =
|
||||
adapter.id_for_language(language.name().as_ref());
|
||||
language_server
|
||||
.notify::<lsp::notification::DidOpenTextDocument>(
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
uri,
|
||||
language_id.unwrap_or_default(),
|
||||
adapter
|
||||
.id_for_language
|
||||
.clone()
|
||||
.unwrap_or_default(),
|
||||
*version,
|
||||
initial_snapshot.text(),
|
||||
),
|
||||
@ -2407,7 +2448,7 @@ impl Project {
|
||||
return;
|
||||
};
|
||||
|
||||
let server_name = adapter.name();
|
||||
let server_name = adapter.name.clone();
|
||||
let stop = self.stop_language_server(worktree_id, server_name.clone(), cx);
|
||||
cx.spawn_weak(|this, mut cx| async move {
|
||||
let (original_root_path, orphaned_worktrees) = stop.await;
|
||||
@ -2440,28 +2481,11 @@ impl Project {
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn on_lsp_diagnostics_published(
|
||||
&mut self,
|
||||
server_id: usize,
|
||||
mut params: lsp::PublishDiagnosticsParams,
|
||||
adapter: &Arc<dyn LspAdapter>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
adapter.process_diagnostics(&mut params);
|
||||
self.update_diagnostics(
|
||||
server_id,
|
||||
params,
|
||||
adapter.disk_based_diagnostic_sources(),
|
||||
cx,
|
||||
)
|
||||
.log_err();
|
||||
}
|
||||
|
||||
fn on_lsp_progress(
|
||||
&mut self,
|
||||
progress: lsp::ProgressParams,
|
||||
server_id: usize,
|
||||
disk_based_diagnostics_progress_token: Option<&str>,
|
||||
disk_based_diagnostics_progress_token: Option<String>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
let token = match progress.token {
|
||||
@ -2485,9 +2509,12 @@ impl Project {
|
||||
return;
|
||||
}
|
||||
|
||||
let is_disk_based_diagnostics_progress =
|
||||
Some(token.as_ref()) == disk_based_diagnostics_progress_token.as_ref().map(|x| &**x);
|
||||
|
||||
match progress {
|
||||
lsp::WorkDoneProgress::Begin(report) => {
|
||||
if Some(token.as_str()) == disk_based_diagnostics_progress_token {
|
||||
if is_disk_based_diagnostics_progress {
|
||||
language_server_status.has_pending_diagnostic_updates = true;
|
||||
self.disk_based_diagnostics_started(server_id, cx);
|
||||
self.broadcast_language_server_update(
|
||||
@ -2518,7 +2545,7 @@ impl Project {
|
||||
}
|
||||
}
|
||||
lsp::WorkDoneProgress::Report(report) => {
|
||||
if Some(token.as_str()) != disk_based_diagnostics_progress_token {
|
||||
if !is_disk_based_diagnostics_progress {
|
||||
self.on_lsp_work_progress(
|
||||
server_id,
|
||||
token.clone(),
|
||||
@ -2544,7 +2571,7 @@ impl Project {
|
||||
lsp::WorkDoneProgress::End(_) => {
|
||||
language_server_status.progress_tokens.remove(&token);
|
||||
|
||||
if Some(token.as_str()) == disk_based_diagnostics_progress_token {
|
||||
if is_disk_based_diagnostics_progress {
|
||||
language_server_status.has_pending_diagnostic_updates = false;
|
||||
self.disk_based_diagnostics_finished(server_id, cx);
|
||||
self.broadcast_language_server_update(
|
||||
@ -2622,7 +2649,7 @@ impl Project {
|
||||
this: WeakModelHandle<Self>,
|
||||
params: lsp::ApplyWorkspaceEditParams,
|
||||
server_id: usize,
|
||||
adapter: Arc<dyn LspAdapter>,
|
||||
adapter: Arc<CachedLspAdapter>,
|
||||
language_server: Arc<LanguageServer>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<lsp::ApplyWorkspaceEditResponse> {
|
||||
@ -2693,7 +2720,7 @@ impl Project {
|
||||
&mut self,
|
||||
language_server_id: usize,
|
||||
params: lsp::PublishDiagnosticsParams,
|
||||
disk_based_sources: &[&str],
|
||||
disk_based_sources: &[String],
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<()> {
|
||||
let abs_path = params
|
||||
@ -2735,9 +2762,8 @@ impl Project {
|
||||
);
|
||||
} else {
|
||||
let group_id = post_inc(&mut self.next_diagnostic_group_id);
|
||||
let is_disk_based = source.map_or(false, |source| {
|
||||
disk_based_sources.contains(&source.as_str())
|
||||
});
|
||||
let is_disk_based =
|
||||
source.map_or(false, |source| disk_based_sources.contains(&source));
|
||||
|
||||
sources_by_group_id.insert(group_id, source);
|
||||
primary_diagnostic_group_ids
|
||||
@ -3241,7 +3267,6 @@ impl Project {
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<Vec<DocumentHighlight>>> {
|
||||
let position = position.to_point_utf16(buffer.read(cx));
|
||||
|
||||
self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
|
||||
}
|
||||
|
||||
@ -3288,7 +3313,7 @@ impl Project {
|
||||
} else {
|
||||
return Ok(Default::default());
|
||||
};
|
||||
this.read_with(&cx, |this, cx| {
|
||||
let symbols = this.read_with(&cx, |this, cx| {
|
||||
let mut symbols = Vec::new();
|
||||
for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
|
||||
symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
|
||||
@ -3304,30 +3329,38 @@ impl Project {
|
||||
path = relativize_path(&worktree_abs_path, &abs_path);
|
||||
}
|
||||
|
||||
let label = this
|
||||
.languages
|
||||
.select_language(&path)
|
||||
.and_then(|language| {
|
||||
language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
|
||||
})
|
||||
.unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
|
||||
let signature = this.symbol_signature(worktree_id, &path);
|
||||
let language = this.languages.select_language(&path);
|
||||
let language_server_name = adapter.name.clone();
|
||||
|
||||
Some(Symbol {
|
||||
Some(async move {
|
||||
let label = if let Some(language) = language {
|
||||
language
|
||||
.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
|
||||
.await
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Symbol {
|
||||
source_worktree_id,
|
||||
worktree_id,
|
||||
language_server_name: adapter.name(),
|
||||
name: lsp_symbol.name,
|
||||
language_server_name,
|
||||
label: label.unwrap_or_else(|| {
|
||||
CodeLabel::plain(lsp_symbol.name.clone(), None)
|
||||
}),
|
||||
kind: lsp_symbol.kind,
|
||||
label,
|
||||
name: lsp_symbol.name,
|
||||
path,
|
||||
range: range_from_lsp(lsp_symbol.location.range),
|
||||
signature,
|
||||
}
|
||||
})
|
||||
}));
|
||||
}
|
||||
Ok(symbols)
|
||||
})
|
||||
symbols
|
||||
});
|
||||
Ok(futures::future::join_all(symbols).await)
|
||||
})
|
||||
} else if let Some(project_id) = self.remote_id() {
|
||||
let request = self.client.request(proto::GetProjectSymbols {
|
||||
@ -3338,14 +3371,18 @@ impl Project {
|
||||
let response = request.await?;
|
||||
let mut symbols = Vec::new();
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
this.read_with(&cx, |this, _| {
|
||||
symbols.extend(
|
||||
let new_symbols = this.read_with(&cx, |this, _| {
|
||||
response
|
||||
.symbols
|
||||
.into_iter()
|
||||
.filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
|
||||
);
|
||||
})
|
||||
.map(|symbol| this.deserialize_symbol(symbol))
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
symbols = futures::future::join_all(new_symbols)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(|symbol| symbol.log_err())
|
||||
.collect::<Vec<_>>();
|
||||
}
|
||||
Ok(symbols)
|
||||
})
|
||||
@ -3475,13 +3512,11 @@ impl Project {
|
||||
Default::default()
|
||||
};
|
||||
|
||||
source_buffer_handle.read_with(&cx, |this, _| {
|
||||
let completions = source_buffer_handle.read_with(&cx, |this, _| {
|
||||
let snapshot = this.snapshot();
|
||||
let clipped_position = this.clip_point_utf16(position, Bias::Left);
|
||||
let mut range_for_token = None;
|
||||
Ok(completions
|
||||
.into_iter()
|
||||
.filter_map(|lsp_completion| {
|
||||
completions.into_iter().filter_map(move |lsp_completion| {
|
||||
// For now, we can only handle additional edits if they are returned
|
||||
// when resolving the completion, not if they are present initially.
|
||||
if lsp_completion
|
||||
@ -3492,8 +3527,7 @@ impl Project {
|
||||
return None;
|
||||
}
|
||||
|
||||
let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref()
|
||||
{
|
||||
let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
|
||||
// If the language server provides a range to overwrite, then
|
||||
// check that the range is valid.
|
||||
Some(lsp::CompletionTextEdit::Edit(edit)) => {
|
||||
@ -3544,23 +3578,29 @@ impl Project {
|
||||
};
|
||||
|
||||
LineEnding::normalize(&mut new_text);
|
||||
Some(Completion {
|
||||
let language = language.clone();
|
||||
Some(async move {
|
||||
let label = if let Some(language) = language {
|
||||
language.label_for_completion(&lsp_completion).await
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Completion {
|
||||
old_range,
|
||||
new_text,
|
||||
label: language
|
||||
.as_ref()
|
||||
.and_then(|l| l.label_for_completion(&lsp_completion))
|
||||
.unwrap_or_else(|| {
|
||||
label: label.unwrap_or_else(|| {
|
||||
CodeLabel::plain(
|
||||
lsp_completion.label.clone(),
|
||||
lsp_completion.filter_text.as_deref(),
|
||||
)
|
||||
}),
|
||||
lsp_completion,
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect())
|
||||
})
|
||||
});
|
||||
|
||||
Ok(futures::future::join_all(completions).await)
|
||||
})
|
||||
} else if let Some(project_id) = self.remote_id() {
|
||||
let rpc = self.client.clone();
|
||||
@ -3579,13 +3619,10 @@ impl Project {
|
||||
})
|
||||
.await;
|
||||
|
||||
response
|
||||
.completions
|
||||
.into_iter()
|
||||
.map(|completion| {
|
||||
language::proto::deserialize_completion(completion, language.as_ref())
|
||||
})
|
||||
.collect()
|
||||
let completions = response.completions.into_iter().map(|completion| {
|
||||
language::proto::deserialize_completion(completion, language.clone())
|
||||
});
|
||||
futures::future::try_join_all(completions).await
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(Default::default()))
|
||||
@ -3881,7 +3918,7 @@ impl Project {
|
||||
this: ModelHandle<Self>,
|
||||
edit: lsp::WorkspaceEdit,
|
||||
push_to_history: bool,
|
||||
lsp_adapter: Arc<dyn LspAdapter>,
|
||||
lsp_adapter: Arc<CachedLspAdapter>,
|
||||
language_server: Arc<LanguageServer>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<ProjectTransaction> {
|
||||
@ -3959,7 +3996,7 @@ impl Project {
|
||||
this.open_local_buffer_via_lsp(
|
||||
op.text_document.uri,
|
||||
language_server.server_id(),
|
||||
lsp_adapter.name(),
|
||||
lsp_adapter.name.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@ -5190,7 +5227,7 @@ impl Project {
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
|
||||
let apply_additional_edits = this.update(&mut cx, |this, cx| {
|
||||
let (buffer, completion) = this.update(&mut cx, |this, cx| {
|
||||
let buffer = this
|
||||
.opened_buffers
|
||||
.get(&envelope.payload.buffer_id)
|
||||
@ -5202,13 +5239,17 @@ impl Project {
|
||||
.payload
|
||||
.completion
|
||||
.ok_or_else(|| anyhow!("invalid completion"))?,
|
||||
language,
|
||||
)?;
|
||||
Ok::<_, anyhow::Error>(
|
||||
this.apply_additional_edits_for_completion(buffer, completion, false, cx),
|
||||
)
|
||||
language.cloned(),
|
||||
);
|
||||
Ok::<_, anyhow::Error>((buffer, completion))
|
||||
})?;
|
||||
|
||||
let completion = completion.await?;
|
||||
|
||||
let apply_additional_edits = this.update(&mut cx, |this, cx| {
|
||||
this.apply_additional_edits_for_completion(buffer, completion, false, cx)
|
||||
});
|
||||
|
||||
Ok(proto::ApplyCompletionAdditionalEditsResponse {
|
||||
transaction: apply_additional_edits
|
||||
.await?
|
||||
@ -5390,8 +5431,10 @@ impl Project {
|
||||
.payload
|
||||
.symbol
|
||||
.ok_or_else(|| anyhow!("invalid symbol"))?;
|
||||
let symbol = this
|
||||
.read_with(&cx, |this, _| this.deserialize_symbol(symbol))
|
||||
.await?;
|
||||
let symbol = this.read_with(&cx, |this, _| {
|
||||
let symbol = this.deserialize_symbol(symbol)?;
|
||||
let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
|
||||
if signature == symbol.signature {
|
||||
Ok(symbol)
|
||||
@ -5596,7 +5639,12 @@ impl Project {
|
||||
})
|
||||
}
|
||||
|
||||
fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
|
||||
fn deserialize_symbol(
|
||||
&self,
|
||||
serialized_symbol: proto::Symbol,
|
||||
) -> impl Future<Output = Result<Symbol>> {
|
||||
let languages = self.languages.clone();
|
||||
async move {
|
||||
let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
|
||||
let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
|
||||
let start = serialized_symbol
|
||||
@ -5607,17 +5655,29 @@ impl Project {
|
||||
.ok_or_else(|| anyhow!("invalid end"))?;
|
||||
let kind = unsafe { mem::transmute(serialized_symbol.kind) };
|
||||
let path = PathBuf::from(serialized_symbol.path);
|
||||
let language = self.languages.select_language(&path);
|
||||
let language = languages.select_language(&path);
|
||||
Ok(Symbol {
|
||||
source_worktree_id,
|
||||
worktree_id,
|
||||
language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
|
||||
label: language
|
||||
.and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
|
||||
.unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
|
||||
language_server_name: LanguageServerName(
|
||||
serialized_symbol.language_server_name.into(),
|
||||
),
|
||||
label: {
|
||||
match language {
|
||||
Some(language) => {
|
||||
language
|
||||
.label_for_symbol(&serialized_symbol.name, kind)
|
||||
.await
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
.unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
|
||||
},
|
||||
|
||||
name: serialized_symbol.name,
|
||||
path,
|
||||
range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
|
||||
range: PointUtf16::new(start.row, start.column)
|
||||
..PointUtf16::new(end.row, end.column),
|
||||
kind,
|
||||
signature: serialized_symbol
|
||||
.signature
|
||||
@ -5625,6 +5685,7 @@ impl Project {
|
||||
.map_err(|_| anyhow!("invalid signature"))?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_buffer_saved(
|
||||
this: ModelHandle<Self>,
|
||||
@ -5830,10 +5891,11 @@ impl Project {
|
||||
&self,
|
||||
buffer: &Buffer,
|
||||
cx: &AppContext,
|
||||
) -> Option<(&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
|
||||
) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
|
||||
if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
|
||||
let name = language.lsp_adapter()?.name.clone();
|
||||
let worktree_id = file.worktree_id(cx);
|
||||
let key = (worktree_id, language.lsp_adapter()?.name());
|
||||
let key = (worktree_id, name);
|
||||
|
||||
if let Some(server_id) = self.language_server_ids.get(&key) {
|
||||
if let Some(LanguageServerState::Running { adapter, server }) =
|
||||
|
@ -49,7 +49,10 @@ async fn test_symlinks(cx: &mut gpui::TestAppContext) {
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
|
||||
async fn test_managing_language_servers(
|
||||
deterministic: Arc<Deterministic>,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) {
|
||||
cx.foreground().forbid_parking();
|
||||
|
||||
let mut rust_language = Language::new(
|
||||
@ -68,7 +71,8 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
None,
|
||||
);
|
||||
let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_rust_servers = rust_language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
name: "the-rust-language-server",
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
@ -78,8 +82,10 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
});
|
||||
let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
}))
|
||||
.await;
|
||||
let mut fake_json_servers = json_language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
name: "the-json-language-server",
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
@ -89,7 +95,8 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
@ -104,10 +111,6 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
|
||||
project.update(cx, |project, _| {
|
||||
project.languages.add(Arc::new(rust_language));
|
||||
project.languages.add(Arc::new(json_language));
|
||||
});
|
||||
|
||||
// Open a buffer without an associated language server.
|
||||
let toml_buffer = project
|
||||
@ -117,13 +120,27 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Open a buffer with an associated language server.
|
||||
// Open a buffer with an associated language server before the language for it has been loaded.
|
||||
let rust_buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer("/the-root/test.rs", cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
rust_buffer.read_with(cx, |buffer, _| {
|
||||
assert_eq!(buffer.language().map(|l| l.name()), None);
|
||||
});
|
||||
|
||||
// Now we add the languages to the project, and ensure they get assigned to all
|
||||
// the relevant open buffers.
|
||||
project.update(cx, |project, _| {
|
||||
project.languages.add(Arc::new(json_language));
|
||||
project.languages.add(Arc::new(rust_language));
|
||||
});
|
||||
deterministic.run_until_parked();
|
||||
rust_buffer.read_with(cx, |buffer, _| {
|
||||
assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
|
||||
});
|
||||
|
||||
// A server is started up, and it is notified about Rust files.
|
||||
let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
|
||||
@ -593,11 +610,13 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
disk_based_diagnostics_progress_token: Some(progress_token),
|
||||
disk_based_diagnostics_sources: &["disk"],
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
disk_based_diagnostics_progress_token: Some(progress_token.into()),
|
||||
disk_based_diagnostics_sources: vec!["disk".into()],
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
@ -716,11 +735,13 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
|
||||
},
|
||||
None,
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
disk_based_diagnostics_sources: &["disk"],
|
||||
disk_based_diagnostics_progress_token: Some(progress_token),
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
disk_based_diagnostics_sources: vec!["disk".into()],
|
||||
disk_based_diagnostics_progress_token: Some(progress_token.into()),
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
|
||||
@ -795,10 +816,12 @@ async fn test_toggling_enable_language_server(
|
||||
},
|
||||
None,
|
||||
);
|
||||
let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_rust_servers = rust
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
name: "rust-lsp",
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
let mut js = Language::new(
|
||||
LanguageConfig {
|
||||
name: Arc::from("JavaScript"),
|
||||
@ -807,10 +830,12 @@ async fn test_toggling_enable_language_server(
|
||||
},
|
||||
None,
|
||||
);
|
||||
let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_js_servers = js
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
name: "js-lsp",
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
|
||||
@ -916,10 +941,12 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
disk_based_diagnostics_sources: &["disk"],
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
disk_based_diagnostics_sources: vec!["disk".into()],
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let text = "
|
||||
fn a() { A }
|
||||
@ -1258,7 +1285,7 @@ async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
|
||||
let text = "
|
||||
fn a() {
|
||||
@ -1637,7 +1664,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
@ -1736,7 +1763,7 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_typescript::language_typescript()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
@ -1820,7 +1847,7 @@ async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_typescript::language_typescript()),
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
@ -1873,7 +1900,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
None,
|
||||
);
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
|
||||
let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
@ -2801,7 +2828,8 @@ async fn test_rename(cx: &mut gpui::TestAppContext) {
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
|
||||
prepare_provider: Some(true),
|
||||
@ -2810,7 +2838,8 @@ async fn test_rename(cx: &mut gpui::TestAppContext) {
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
});
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
|
@ -290,7 +290,9 @@ mod tests {
|
||||
},
|
||||
None,
|
||||
);
|
||||
let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter::default());
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::<FakeLspAdapter>::default())
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree("/dir", json!({ "test.rs": "" })).await;
|
||||
|
@ -28,7 +28,7 @@ rsa = "0.4"
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
smol-timeout = "0.6"
|
||||
tracing = { version = "0.1.34", features = ["log"] }
|
||||
zstd = "0.9"
|
||||
zstd = "0.11"
|
||||
|
||||
[build-dependencies]
|
||||
prost-build = "0.9"
|
||||
|
@ -39,6 +39,7 @@ journal = { path = "../journal" }
|
||||
language = { path = "../language" }
|
||||
lsp = { path = "../lsp" }
|
||||
outline = { path = "../outline" }
|
||||
plugin_runtime = { path = "../plugin_runtime" }
|
||||
project = { path = "../project" }
|
||||
project_panel = { path = "../project_panel" }
|
||||
project_symbols = { path = "../project_symbols" }
|
||||
|
@ -1,12 +1,13 @@
|
||||
use gpui::Task;
|
||||
use gpui::executor::Background;
|
||||
pub use language::*;
|
||||
use rust_embed::RustEmbed;
|
||||
use std::{borrow::Cow, str, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
mod c;
|
||||
mod go;
|
||||
mod installation;
|
||||
mod json;
|
||||
mod language_plugin;
|
||||
mod python;
|
||||
mod rust;
|
||||
mod typescript;
|
||||
@ -16,28 +17,30 @@ mod typescript;
|
||||
#[exclude = "*.rs"]
|
||||
struct LanguageDir;
|
||||
|
||||
pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegistry {
|
||||
let languages = LanguageRegistry::new(login_shell_env_loaded);
|
||||
pub async fn init(languages: Arc<LanguageRegistry>, executor: Arc<Background>) {
|
||||
for (name, grammar, lsp_adapter) in [
|
||||
(
|
||||
"c",
|
||||
tree_sitter_c::language(),
|
||||
Some(Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>),
|
||||
Some(CachedLspAdapter::new(c::CLspAdapter).await),
|
||||
),
|
||||
(
|
||||
"cpp",
|
||||
tree_sitter_cpp::language(),
|
||||
Some(Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>),
|
||||
Some(CachedLspAdapter::new(c::CLspAdapter).await),
|
||||
),
|
||||
(
|
||||
"go",
|
||||
tree_sitter_go::language(),
|
||||
Some(Arc::new(go::GoLspAdapter) as Arc<dyn LspAdapter>),
|
||||
Some(CachedLspAdapter::new(go::GoLspAdapter).await),
|
||||
),
|
||||
(
|
||||
"json",
|
||||
tree_sitter_json::language(),
|
||||
Some(Arc::new(json::JsonLspAdapter)),
|
||||
match language_plugin::new_json(executor).await.log_err() {
|
||||
Some(lang) => Some(CachedLspAdapter::new(lang).await),
|
||||
None => None,
|
||||
},
|
||||
),
|
||||
(
|
||||
"markdown",
|
||||
@ -47,12 +50,12 @@ pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegi
|
||||
(
|
||||
"python",
|
||||
tree_sitter_python::language(),
|
||||
Some(Arc::new(python::PythonLspAdapter)),
|
||||
Some(CachedLspAdapter::new(python::PythonLspAdapter).await),
|
||||
),
|
||||
(
|
||||
"rust",
|
||||
tree_sitter_rust::language(),
|
||||
Some(Arc::new(rust::RustLspAdapter)),
|
||||
Some(CachedLspAdapter::new(rust::RustLspAdapter).await),
|
||||
),
|
||||
(
|
||||
"toml",
|
||||
@ -62,28 +65,27 @@ pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegi
|
||||
(
|
||||
"tsx",
|
||||
tree_sitter_typescript::language_tsx(),
|
||||
Some(Arc::new(typescript::TypeScriptLspAdapter)),
|
||||
Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await),
|
||||
),
|
||||
(
|
||||
"typescript",
|
||||
tree_sitter_typescript::language_typescript(),
|
||||
Some(Arc::new(typescript::TypeScriptLspAdapter)),
|
||||
Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await),
|
||||
),
|
||||
(
|
||||
"javascript",
|
||||
tree_sitter_typescript::language_tsx(),
|
||||
Some(Arc::new(typescript::TypeScriptLspAdapter)),
|
||||
Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await),
|
||||
),
|
||||
] {
|
||||
languages.add(Arc::new(language(name, grammar, lsp_adapter)));
|
||||
}
|
||||
languages
|
||||
}
|
||||
|
||||
pub(crate) fn language(
|
||||
name: &str,
|
||||
grammar: tree_sitter::Language,
|
||||
lsp_adapter: Option<Arc<dyn LspAdapter>>,
|
||||
lsp_adapter: Option<Arc<CachedLspAdapter>>,
|
||||
) -> Language {
|
||||
let config = toml::from_slice(
|
||||
&LanguageDir::get(&format!("{}/config.toml", name))
|
||||
|
@ -1,28 +1,25 @@
|
||||
use super::installation::{latest_github_release, GitHubLspBinaryVersion};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use async_trait::async_trait;
|
||||
use client::http::HttpClient;
|
||||
use futures::{future::BoxFuture, FutureExt, StreamExt};
|
||||
use futures::StreamExt;
|
||||
pub use language::*;
|
||||
use smol::fs::{self, File};
|
||||
use std::{
|
||||
any::Any,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
pub struct CLspAdapter;
|
||||
|
||||
#[async_trait]
|
||||
impl super::LspAdapter for CLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
async fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName("clangd".into())
|
||||
}
|
||||
|
||||
fn fetch_latest_server_version(
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
http: Arc<dyn HttpClient>,
|
||||
) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
|
||||
async move {
|
||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||
let release = latest_github_release("clangd/clangd", http).await?;
|
||||
let asset_name = format!("clangd-mac-{}.zip", release.name);
|
||||
let asset = release
|
||||
@ -36,17 +33,14 @@ impl super::LspAdapter for CLspAdapter {
|
||||
};
|
||||
Ok(Box::new(version) as Box<_>)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn fetch_server_binary(
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
version: Box<dyn 'static + Send + Any>,
|
||||
http: Arc<dyn HttpClient>,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Result<PathBuf>> {
|
||||
container_dir: PathBuf,
|
||||
) -> Result<PathBuf> {
|
||||
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
||||
async move {
|
||||
let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
|
||||
let version_dir = container_dir.join(format!("clangd_{}", version.name));
|
||||
let binary_path = version_dir.join("bin/clangd");
|
||||
@ -89,14 +83,9 @@ impl super::LspAdapter for CLspAdapter {
|
||||
|
||||
Ok(binary_path)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn cached_server_binary(
|
||||
&self,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Option<PathBuf>> {
|
||||
async move {
|
||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
|
||||
(|| async move {
|
||||
let mut last_clangd_dir = None;
|
||||
let mut entries = fs::read_dir(&container_dir).await?;
|
||||
while let Some(entry) = entries.next().await {
|
||||
@ -115,12 +104,12 @@ impl super::LspAdapter for CLspAdapter {
|
||||
clangd_dir
|
||||
))
|
||||
}
|
||||
}
|
||||
})()
|
||||
.await
|
||||
.log_err()
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn label_for_completion(
|
||||
async fn label_for_completion(
|
||||
&self,
|
||||
completion: &lsp::CompletionItem,
|
||||
language: &Language,
|
||||
@ -197,7 +186,7 @@ impl super::LspAdapter for CLspAdapter {
|
||||
Some(CodeLabel::plain(label.to_string(), None))
|
||||
}
|
||||
|
||||
fn label_for_symbol(
|
||||
async fn label_for_symbol(
|
||||
&self,
|
||||
name: &str,
|
||||
kind: lsp::SymbolKind,
|
||||
|
@ -1,19 +1,14 @@
|
||||
use super::installation::latest_github_release;
|
||||
use anyhow::{anyhow, Result};
|
||||
use async_trait::async_trait;
|
||||
use client::http::HttpClient;
|
||||
use futures::{future::BoxFuture, FutureExt, StreamExt};
|
||||
use futures::StreamExt;
|
||||
pub use language::*;
|
||||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use smol::{fs, process};
|
||||
use std::{
|
||||
any::Any,
|
||||
ops::Range,
|
||||
path::{Path, PathBuf},
|
||||
str,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use std::{any::Any, ops::Range, path::PathBuf, str, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct GoLspAdapter;
|
||||
@ -22,20 +17,20 @@ lazy_static! {
|
||||
static ref GOPLS_VERSION_REGEX: Regex = Regex::new(r"\d+\.\d+\.\d+").unwrap();
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl super::LspAdapter for GoLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
async fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName("gopls".into())
|
||||
}
|
||||
|
||||
fn server_args(&self) -> &[&str] {
|
||||
&["-mode=stdio"]
|
||||
async fn server_args(&self) -> Vec<String> {
|
||||
vec!["-mode=stdio".into()]
|
||||
}
|
||||
|
||||
fn fetch_latest_server_version(
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
http: Arc<dyn HttpClient>,
|
||||
) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
|
||||
async move {
|
||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||
let release = latest_github_release("golang/tools", http).await?;
|
||||
let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
|
||||
if version.is_none() {
|
||||
@ -46,19 +41,16 @@ impl super::LspAdapter for GoLspAdapter {
|
||||
}
|
||||
Ok(Box::new(version) as Box<_>)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn fetch_server_binary(
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
version: Box<dyn 'static + Send + Any>,
|
||||
_: Arc<dyn HttpClient>,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Result<PathBuf>> {
|
||||
container_dir: PathBuf,
|
||||
) -> Result<PathBuf> {
|
||||
let version = version.downcast::<Option<String>>().unwrap();
|
||||
let this = *self;
|
||||
|
||||
async move {
|
||||
if let Some(version) = *version {
|
||||
let binary_path = container_dir.join(&format!("gopls_{version}"));
|
||||
if let Ok(metadata) = fs::metadata(&binary_path).await {
|
||||
@ -112,14 +104,9 @@ impl super::LspAdapter for GoLspAdapter {
|
||||
|
||||
Ok(binary_path.to_path_buf())
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn cached_server_binary(
|
||||
&self,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Option<PathBuf>> {
|
||||
async move {
|
||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
|
||||
(|| async move {
|
||||
let mut last_binary_path = None;
|
||||
let mut entries = fs::read_dir(&container_dir).await?;
|
||||
while let Some(entry) = entries.next().await {
|
||||
@ -139,12 +126,12 @@ impl super::LspAdapter for GoLspAdapter {
|
||||
} else {
|
||||
Err(anyhow!("no cached binary"))
|
||||
}
|
||||
}
|
||||
})()
|
||||
.await
|
||||
.log_err()
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn label_for_completion(
|
||||
async fn label_for_completion(
|
||||
&self,
|
||||
completion: &lsp::CompletionItem,
|
||||
language: &Language,
|
||||
@ -244,7 +231,7 @@ impl super::LspAdapter for GoLspAdapter {
|
||||
None
|
||||
}
|
||||
|
||||
fn label_for_symbol(
|
||||
async fn label_for_symbol(
|
||||
&self,
|
||||
name: &str,
|
||||
kind: lsp::SymbolKind,
|
||||
@ -322,12 +309,12 @@ mod tests {
|
||||
use gpui::color::Color;
|
||||
use theme::SyntaxTheme;
|
||||
|
||||
#[test]
|
||||
fn test_go_label_for_completion() {
|
||||
#[gpui::test]
|
||||
async fn test_go_label_for_completion() {
|
||||
let language = language(
|
||||
"go",
|
||||
tree_sitter_go::language(),
|
||||
Some(Arc::new(GoLspAdapter)),
|
||||
Some(CachedLspAdapter::new(GoLspAdapter).await),
|
||||
);
|
||||
|
||||
let theme = SyntaxTheme::new(vec![
|
||||
@ -347,12 +334,14 @@ mod tests {
|
||||
let highlight_field = grammar.highlight_id_for_name("property").unwrap();
|
||||
|
||||
assert_eq!(
|
||||
language.label_for_completion(&lsp::CompletionItem {
|
||||
language
|
||||
.label_for_completion(&lsp::CompletionItem {
|
||||
kind: Some(lsp::CompletionItemKind::FUNCTION),
|
||||
label: "Hello".to_string(),
|
||||
detail: Some("func(a B) c.D".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await,
|
||||
Some(CodeLabel {
|
||||
text: "Hello(a B) c.D".to_string(),
|
||||
filter_range: 0..5,
|
||||
@ -366,12 +355,14 @@ mod tests {
|
||||
|
||||
// Nested methods
|
||||
assert_eq!(
|
||||
language.label_for_completion(&lsp::CompletionItem {
|
||||
language
|
||||
.label_for_completion(&lsp::CompletionItem {
|
||||
kind: Some(lsp::CompletionItemKind::METHOD),
|
||||
label: "one.two.Three".to_string(),
|
||||
detail: Some("func() [3]interface{}".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await,
|
||||
Some(CodeLabel {
|
||||
text: "one.two.Three() [3]interface{}".to_string(),
|
||||
filter_range: 0..13,
|
||||
@ -385,12 +376,14 @@ mod tests {
|
||||
|
||||
// Nested fields
|
||||
assert_eq!(
|
||||
language.label_for_completion(&lsp::CompletionItem {
|
||||
language
|
||||
.label_for_completion(&lsp::CompletionItem {
|
||||
kind: Some(lsp::CompletionItemKind::FIELD),
|
||||
label: "two.Three".to_string(),
|
||||
detail: Some("a.Bcd".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await,
|
||||
Some(CodeLabel {
|
||||
text: "two.Three a.Bcd".to_string(),
|
||||
filter_range: 0..9,
|
||||
|
@ -1,120 +0,0 @@
|
||||
use super::installation::{npm_install_packages, npm_package_latest_version};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use client::http::HttpClient;
|
||||
use futures::{future::BoxFuture, FutureExt, StreamExt};
|
||||
use language::{LanguageServerName, LspAdapter};
|
||||
use serde_json::json;
|
||||
use smol::fs;
|
||||
use std::{
|
||||
any::Any,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
|
||||
pub struct JsonLspAdapter;
|
||||
|
||||
impl JsonLspAdapter {
|
||||
const BIN_PATH: &'static str =
|
||||
"node_modules/vscode-json-languageserver/bin/vscode-json-languageserver";
|
||||
}
|
||||
|
||||
impl LspAdapter for JsonLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName("vscode-json-languageserver".into())
|
||||
}
|
||||
|
||||
fn server_args(&self) -> &[&str] {
|
||||
&["--stdio"]
|
||||
}
|
||||
|
||||
fn fetch_latest_server_version(
|
||||
&self,
|
||||
_: Arc<dyn HttpClient>,
|
||||
) -> BoxFuture<'static, Result<Box<dyn 'static + Any + Send>>> {
|
||||
async move {
|
||||
Ok(Box::new(npm_package_latest_version("vscode-json-languageserver").await?) as Box<_>)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn fetch_server_binary(
|
||||
&self,
|
||||
version: Box<dyn 'static + Send + Any>,
|
||||
_: Arc<dyn HttpClient>,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Result<PathBuf>> {
|
||||
let version = version.downcast::<String>().unwrap();
|
||||
async move {
|
||||
let version_dir = container_dir.join(version.as_str());
|
||||
fs::create_dir_all(&version_dir)
|
||||
.await
|
||||
.context("failed to create version directory")?;
|
||||
let binary_path = version_dir.join(Self::BIN_PATH);
|
||||
|
||||
if fs::metadata(&binary_path).await.is_err() {
|
||||
npm_install_packages(
|
||||
[("vscode-json-languageserver", version.as_str())],
|
||||
&version_dir,
|
||||
)
|
||||
.await?;
|
||||
|
||||
if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
|
||||
while let Some(entry) = entries.next().await {
|
||||
if let Some(entry) = entry.log_err() {
|
||||
let entry_path = entry.path();
|
||||
if entry_path.as_path() != version_dir {
|
||||
fs::remove_dir_all(&entry_path).await.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(binary_path)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn cached_server_binary(
|
||||
&self,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Option<PathBuf>> {
|
||||
async move {
|
||||
let mut last_version_dir = None;
|
||||
let mut entries = fs::read_dir(&container_dir).await?;
|
||||
while let Some(entry) = entries.next().await {
|
||||
let entry = entry?;
|
||||
if entry.file_type().await?.is_dir() {
|
||||
last_version_dir = Some(entry.path());
|
||||
}
|
||||
}
|
||||
let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
|
||||
let bin_path = last_version_dir.join(Self::BIN_PATH);
|
||||
if bin_path.exists() {
|
||||
Ok(bin_path)
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"missing executable in directory {:?}",
|
||||
last_version_dir
|
||||
))
|
||||
}
|
||||
}
|
||||
.log_err()
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn initialization_options(&self) -> Option<serde_json::Value> {
|
||||
Some(json!({
|
||||
"provideFormatter": true
|
||||
}))
|
||||
}
|
||||
|
||||
fn id_for_language(&self, name: &str) -> Option<String> {
|
||||
if name == "JSON" {
|
||||
Some("jsonc".into())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
145
crates/zed/src/languages/language_plugin.rs
Normal file
@ -0,0 +1,145 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use client::http::HttpClient;
use futures::lock::Mutex;
use gpui::executor::Background;
use language::{LanguageServerName, LspAdapter};
use plugin_runtime::{Plugin, PluginBuilder, WasiFn};
use std::{any::Any, path::PathBuf, sync::Arc};
use util::ResultExt;

pub async fn new_json(executor: Arc<Background>) -> Result<PluginLspAdapter> {
    let plugin = PluginBuilder::new_with_default_ctx()?
        .host_function_async("command", |command: String| async move {
            let mut args = command.split(' ');
            let command = args.next().unwrap();
            smol::process::Command::new(command)
                .args(args)
                .output()
                .await
                .log_err()
                .map(|output| output.stdout)
        })?
        .init(
            true,
            include_bytes!("../../../../plugins/bin/json_language.wasm.pre"),
        )
        .await?;
    PluginLspAdapter::new(plugin, executor).await
}

pub struct PluginLspAdapter {
    name: WasiFn<(), String>,
    server_args: WasiFn<(), Vec<String>>,
    fetch_latest_server_version: WasiFn<(), Option<String>>,
    fetch_server_binary: WasiFn<(PathBuf, String), Result<PathBuf, String>>,
    cached_server_binary: WasiFn<PathBuf, Option<PathBuf>>,
    initialization_options: WasiFn<(), String>,
    executor: Arc<Background>,
    runtime: Arc<Mutex<Plugin>>,
}

impl PluginLspAdapter {
    pub async fn new(mut plugin: Plugin, executor: Arc<Background>) -> Result<Self> {
        Ok(Self {
            name: plugin.function("name")?,
            server_args: plugin.function("server_args")?,
            fetch_latest_server_version: plugin.function("fetch_latest_server_version")?,
            fetch_server_binary: plugin.function("fetch_server_binary")?,
            cached_server_binary: plugin.function("cached_server_binary")?,
            initialization_options: plugin.function("initialization_options")?,
            executor,
            runtime: Arc::new(Mutex::new(plugin)),
        })
    }
}

#[async_trait]
impl LspAdapter for PluginLspAdapter {
    async fn name(&self) -> LanguageServerName {
        let name: String = self
            .runtime
            .lock()
            .await
            .call(&self.name, ())
            .await
            .unwrap();
        LanguageServerName(name.into())
    }

    async fn server_args<'a>(&'a self) -> Vec<String> {
        self.runtime
            .lock()
            .await
            .call(&self.server_args, ())
            .await
            .unwrap()
    }

    async fn fetch_latest_server_version(
        &self,
        _: Arc<dyn HttpClient>,
    ) -> Result<Box<dyn 'static + Send + Any>> {
        let runtime = self.runtime.clone();
        let function = self.fetch_latest_server_version;
        self.executor
            .spawn(async move {
                let mut runtime = runtime.lock().await;
                let versions: Result<Option<String>> =
                    runtime.call::<_, Option<String>>(&function, ()).await;
                versions
                    .map_err(|e| anyhow!("{}", e))?
                    .ok_or_else(|| anyhow!("Could not fetch latest server version"))
                    .map(|v| Box::new(v) as Box<_>)
            })
            .await
    }

    async fn fetch_server_binary(
        &self,
        version: Box<dyn 'static + Send + Any>,
        _: Arc<dyn HttpClient>,
        container_dir: PathBuf,
    ) -> Result<PathBuf> {
        let version = *version.downcast::<String>().unwrap();
        let runtime = self.runtime.clone();
        let function = self.fetch_server_binary;
        self.executor
            .spawn(async move {
                let mut runtime = runtime.lock().await;
                let handle = runtime.attach_path(&container_dir)?;
                let result: Result<PathBuf, String> =
                    runtime.call(&function, (container_dir, version)).await?;
                runtime.remove_resource(handle)?;
                result.map_err(|e| anyhow!("{}", e))
            })
            .await
    }

    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
        let runtime = self.runtime.clone();
        let function = self.cached_server_binary;

        self.executor
            .spawn(async move {
                let mut runtime = runtime.lock().await;
                let handle = runtime.attach_path(&container_dir).ok()?;
                let result: Option<PathBuf> = runtime.call(&function, container_dir).await.ok()?;
                runtime.remove_resource(handle).ok()?;
                result
            })
            .await
    }

    async fn initialization_options(&self) -> Option<serde_json::Value> {
        let string: String = self
            .runtime
            .lock()
            .await
            .call(&self.initialization_options, ())
            .await
            .log_err()?;

        serde_json::from_str(&string).ok()
    }
}
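The adapter above crosses the Wasm boundary the same way in every method: each guest export is resolved once as a typed WasiFn handle, and each call locks the shared Plugin runtime and invokes the handle with serializable arguments; the guest's `#[import] fn command` in plugins/json_language/src/lib.rs is satisfied by the `host_function_async("command", ...)` registration in `new_json`. A minimal sketch of that host-side calling pattern, using only the plugin_runtime names that appear in this commit (PluginBuilder, init, function, call); the "echo" export is a hypothetical stand-in:

// Sketch of the host-side call pattern used by PluginLspAdapter (assumptions noted inline).
use anyhow::Result;
use plugin_runtime::{Plugin, PluginBuilder, WasiFn};

async fn call_guest_echo() -> Result<String> {
    // Instantiate a precompiled plugin, as new_json does above.
    let mut plugin: Plugin = PluginBuilder::new_with_default_ctx()?
        .init(
            true,
            include_bytes!("../../../../plugins/bin/json_language.wasm.pre"),
        )
        .await?;

    // Resolve a guest export once; the WasiFn type parameters are its (args, return) signature.
    // "echo" is a hypothetical export used only for illustration.
    let echo: WasiFn<String, String> = plugin.function("echo")?;

    // Each call serializes the argument into the guest and deserializes the result back out.
    let reply = plugin.call(&echo, "hello".to_string()).await?;
    Ok(reply)
}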
@ -1,15 +1,12 @@
|
||||
use super::installation::{npm_install_packages, npm_package_latest_version};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use async_trait::async_trait;
|
||||
use client::http::HttpClient;
|
||||
use futures::{future::BoxFuture, FutureExt, StreamExt};
|
||||
use futures::StreamExt;
|
||||
use language::{LanguageServerName, LspAdapter};
|
||||
use smol::fs;
|
||||
use std::{
|
||||
any::Any,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
pub struct PythonLspAdapter;
|
||||
|
||||
@ -17,30 +14,30 @@ impl PythonLspAdapter {
|
||||
const BIN_PATH: &'static str = "node_modules/pyright/langserver.index.js";
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl LspAdapter for PythonLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
async fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName("pyright".into())
|
||||
}
|
||||
|
||||
fn server_args(&self) -> &[&str] {
|
||||
&["--stdio"]
|
||||
async fn server_args(&self) -> Vec<String> {
|
||||
vec!["--stdio".into()]
|
||||
}
|
||||
|
||||
fn fetch_latest_server_version(
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
_: Arc<dyn HttpClient>,
|
||||
) -> BoxFuture<'static, Result<Box<dyn 'static + Any + Send>>> {
|
||||
async move { Ok(Box::new(npm_package_latest_version("pyright").await?) as Box<_>) }.boxed()
|
||||
) -> Result<Box<dyn 'static + Any + Send>> {
|
||||
Ok(Box::new(npm_package_latest_version("pyright").await?) as Box<_>)
|
||||
}
|
||||
|
||||
fn fetch_server_binary(
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
version: Box<dyn 'static + Send + Any>,
|
||||
_: Arc<dyn HttpClient>,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Result<PathBuf>> {
|
||||
container_dir: PathBuf,
|
||||
) -> Result<PathBuf> {
|
||||
let version = version.downcast::<String>().unwrap();
|
||||
async move {
|
||||
let version_dir = container_dir.join(version.as_str());
|
||||
fs::create_dir_all(&version_dir)
|
||||
.await
|
||||
@ -64,14 +61,9 @@ impl LspAdapter for PythonLspAdapter {
|
||||
|
||||
Ok(binary_path)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn cached_server_binary(
|
||||
&self,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Option<PathBuf>> {
|
||||
async move {
|
||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
|
||||
(|| async move {
|
||||
let mut last_version_dir = None;
|
||||
let mut entries = fs::read_dir(&container_dir).await?;
|
||||
while let Some(entry) = entries.next().await {
|
||||
@ -90,12 +82,12 @@ impl LspAdapter for PythonLspAdapter {
|
||||
last_version_dir
|
||||
))
|
||||
}
|
||||
}
|
||||
})()
|
||||
.await
|
||||
.log_err()
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn label_for_completion(
|
||||
async fn label_for_completion(
|
||||
&self,
|
||||
item: &lsp::CompletionItem,
|
||||
language: &language::Language,
|
||||
@ -116,7 +108,7 @@ impl LspAdapter for PythonLspAdapter {
|
||||
})
|
||||
}
|
||||
|
||||
fn label_for_symbol(
|
||||
async fn label_for_symbol(
|
||||
&self,
|
||||
name: &str,
|
||||
kind: lsp::SymbolKind,
|
||||
|
@ -1,34 +1,28 @@
|
||||
use super::installation::{latest_github_release, GitHubLspBinaryVersion};
|
||||
use anyhow::{anyhow, Result};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_trait::async_trait;
|
||||
use client::http::HttpClient;
|
||||
use futures::{future::BoxFuture, io::BufReader, FutureExt, StreamExt};
|
||||
use futures::{io::BufReader, StreamExt};
|
||||
pub use language::*;
|
||||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use smol::fs::{self, File};
|
||||
use std::{
|
||||
any::Any,
|
||||
borrow::Cow,
|
||||
env::consts,
|
||||
path::{Path, PathBuf},
|
||||
str,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
pub struct RustLspAdapter;
|
||||
|
||||
#[async_trait]
|
||||
impl LspAdapter for RustLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
async fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName("rust-analyzer".into())
|
||||
}
|
||||
|
||||
fn fetch_latest_server_version(
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
http: Arc<dyn HttpClient>,
|
||||
) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
|
||||
async move {
|
||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||
let release = latest_github_release("rust-analyzer/rust-analyzer", http).await?;
|
||||
let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
|
||||
let asset = release
|
||||
@ -42,16 +36,13 @@ impl LspAdapter for RustLspAdapter {
|
||||
};
|
||||
Ok(Box::new(version) as Box<_>)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn fetch_server_binary(
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
version: Box<dyn 'static + Send + Any>,
|
||||
http: Arc<dyn HttpClient>,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Result<PathBuf>> {
|
||||
async move {
|
||||
container_dir: PathBuf,
|
||||
) -> Result<PathBuf> {
|
||||
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
||||
let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
|
||||
|
||||
@ -83,34 +74,29 @@ impl LspAdapter for RustLspAdapter {
|
||||
|
||||
Ok(destination_path)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn cached_server_binary(
|
||||
&self,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Option<PathBuf>> {
|
||||
async move {
|
||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
|
||||
(|| async move {
|
||||
let mut last = None;
|
||||
let mut entries = fs::read_dir(&container_dir).await?;
|
||||
while let Some(entry) = entries.next().await {
|
||||
last = Some(entry?.path());
|
||||
}
|
||||
last.ok_or_else(|| anyhow!("no cached binary"))
|
||||
}
|
||||
})()
|
||||
.await
|
||||
.log_err()
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
|
||||
&["rustc"]
|
||||
async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
|
||||
vec!["rustc".into()]
|
||||
}
|
||||
|
||||
fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
|
||||
Some("rustAnalyzer/cargo check")
|
||||
async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
|
||||
Some("rustAnalyzer/cargo check".into())
|
||||
}
|
||||
|
||||
fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
|
||||
async fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
|
||||
lazy_static! {
|
||||
static ref REGEX: Regex = Regex::new("(?m)`([^`]+)\n`$").unwrap();
|
||||
}
|
||||
@ -130,7 +116,7 @@ impl LspAdapter for RustLspAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
fn label_for_completion(
|
||||
async fn label_for_completion(
|
||||
&self,
|
||||
completion: &lsp::CompletionItem,
|
||||
language: &Language,
|
||||
@ -206,7 +192,7 @@ impl LspAdapter for RustLspAdapter {
|
||||
None
|
||||
}
|
||||
|
||||
fn label_for_symbol(
|
||||
async fn label_for_symbol(
|
||||
&self,
|
||||
name: &str,
|
||||
kind: lsp::SymbolKind,
|
||||
@ -269,12 +255,12 @@ impl LspAdapter for RustLspAdapter {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::languages::{language, LspAdapter};
|
||||
use crate::languages::{language, CachedLspAdapter};
|
||||
use gpui::{color::Color, MutableAppContext};
|
||||
use theme::SyntaxTheme;
|
||||
|
||||
#[test]
|
||||
fn test_process_rust_diagnostics() {
|
||||
#[gpui::test]
|
||||
async fn test_process_rust_diagnostics() {
|
||||
let mut params = lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/a").unwrap(),
|
||||
version: None,
|
||||
@ -297,7 +283,7 @@ mod tests {
|
||||
},
|
||||
],
|
||||
};
|
||||
RustLspAdapter.process_diagnostics(&mut params);
|
||||
RustLspAdapter.process_diagnostics(&mut params).await;
|
||||
|
||||
assert_eq!(params.diagnostics[0].message, "use of moved value `a`");
|
||||
|
||||
@ -314,12 +300,12 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rust_label_for_completion() {
|
||||
#[gpui::test]
|
||||
async fn test_rust_label_for_completion() {
|
||||
let language = language(
|
||||
"rust",
|
||||
tree_sitter_rust::language(),
|
||||
Some(Arc::new(RustLspAdapter)),
|
||||
Some(CachedLspAdapter::new(RustLspAdapter).await),
|
||||
);
|
||||
let grammar = language.grammar().unwrap();
|
||||
let theme = SyntaxTheme::new(vec![
|
||||
@ -337,12 +323,14 @@ mod tests {
|
||||
let highlight_field = grammar.highlight_id_for_name("property").unwrap();
|
||||
|
||||
assert_eq!(
|
||||
language.label_for_completion(&lsp::CompletionItem {
|
||||
language
|
||||
.label_for_completion(&lsp::CompletionItem {
|
||||
kind: Some(lsp::CompletionItemKind::FUNCTION),
|
||||
label: "hello(…)".to_string(),
|
||||
detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await,
|
||||
Some(CodeLabel {
|
||||
text: "hello(&mut Option<T>) -> Vec<T>".to_string(),
|
||||
filter_range: 0..5,
|
||||
@ -358,12 +346,14 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
language.label_for_completion(&lsp::CompletionItem {
|
||||
language
|
||||
.label_for_completion(&lsp::CompletionItem {
|
||||
kind: Some(lsp::CompletionItemKind::FIELD),
|
||||
label: "len".to_string(),
|
||||
detail: Some("usize".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await,
|
||||
Some(CodeLabel {
|
||||
text: "len: usize".to_string(),
|
||||
filter_range: 0..3,
|
||||
@ -372,12 +362,14 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
language.label_for_completion(&lsp::CompletionItem {
|
||||
language
|
||||
.label_for_completion(&lsp::CompletionItem {
|
||||
kind: Some(lsp::CompletionItemKind::FUNCTION),
|
||||
label: "hello(…)".to_string(),
|
||||
detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
|
||||
..Default::default()
|
||||
}),
|
||||
})
|
||||
.await,
|
||||
Some(CodeLabel {
|
||||
text: "hello(&mut Option<T>) -> Vec<T>".to_string(),
|
||||
filter_range: 0..5,
|
||||
@ -393,12 +385,12 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rust_label_for_symbol() {
|
||||
#[gpui::test]
|
||||
async fn test_rust_label_for_symbol() {
|
||||
let language = language(
|
||||
"rust",
|
||||
tree_sitter_rust::language(),
|
||||
Some(Arc::new(RustLspAdapter)),
|
||||
Some(CachedLspAdapter::new(RustLspAdapter).await),
|
||||
);
|
||||
let grammar = language.grammar().unwrap();
|
||||
let theme = SyntaxTheme::new(vec![
|
||||
@ -415,7 +407,9 @@ mod tests {
|
||||
let highlight_keyword = grammar.highlight_id_for_name("keyword").unwrap();
|
||||
|
||||
assert_eq!(
|
||||
language.label_for_symbol("hello", lsp::SymbolKind::FUNCTION),
|
||||
language
|
||||
.label_for_symbol("hello", lsp::SymbolKind::FUNCTION)
|
||||
.await,
|
||||
Some(CodeLabel {
|
||||
text: "fn hello".to_string(),
|
||||
filter_range: 3..8,
|
||||
@ -424,7 +418,9 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
language.label_for_symbol("World", lsp::SymbolKind::TYPE_PARAMETER),
|
||||
language
|
||||
.label_for_symbol("World", lsp::SymbolKind::TYPE_PARAMETER)
|
||||
.await,
|
||||
Some(CodeLabel {
|
||||
text: "type World".to_string(),
|
||||
filter_range: 5..10,
|
||||
|
@ -1,16 +1,13 @@
|
||||
use super::installation::{npm_install_packages, npm_package_latest_version};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use async_trait::async_trait;
|
||||
use client::http::HttpClient;
|
||||
use futures::{future::BoxFuture, FutureExt, StreamExt};
|
||||
use futures::StreamExt;
|
||||
use language::{LanguageServerName, LspAdapter};
|
||||
use serde_json::json;
|
||||
use smol::fs;
|
||||
use std::{
|
||||
any::Any,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
pub struct TypeScriptLspAdapter;
|
||||
|
||||
@ -23,36 +20,36 @@ struct Versions {
|
||||
server_version: String,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl LspAdapter for TypeScriptLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
async fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName("typescript-language-server".into())
|
||||
}
|
||||
|
||||
fn server_args(&self) -> &[&str] {
|
||||
&["--stdio", "--tsserver-path", "node_modules/typescript/lib"]
|
||||
async fn server_args(&self) -> Vec<String> {
|
||||
["--stdio", "--tsserver-path", "node_modules/typescript/lib"]
|
||||
.into_iter()
|
||||
.map(str::to_string)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn fetch_latest_server_version(
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
_: Arc<dyn HttpClient>,
|
||||
) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
|
||||
async move {
|
||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||
Ok(Box::new(Versions {
|
||||
typescript_version: npm_package_latest_version("typescript").await?,
|
||||
server_version: npm_package_latest_version("typescript-language-server").await?,
|
||||
}) as Box<_>)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn fetch_server_binary(
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
versions: Box<dyn 'static + Send + Any>,
|
||||
_: Arc<dyn HttpClient>,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Result<PathBuf>> {
|
||||
container_dir: PathBuf,
|
||||
) -> Result<PathBuf> {
|
||||
let versions = versions.downcast::<Versions>().unwrap();
|
||||
async move {
|
||||
let version_dir = container_dir.join(&format!(
|
||||
"typescript-{}:server-{}",
|
||||
versions.typescript_version, versions.server_version
|
||||
@ -89,14 +86,9 @@ impl LspAdapter for TypeScriptLspAdapter {
|
||||
|
||||
Ok(binary_path)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn cached_server_binary(
|
||||
&self,
|
||||
container_dir: Arc<Path>,
|
||||
) -> BoxFuture<'static, Option<PathBuf>> {
|
||||
async move {
|
||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
|
||||
(|| async move {
|
||||
let mut last_version_dir = None;
|
||||
let mut entries = fs::read_dir(&container_dir).await?;
|
||||
while let Some(entry) = entries.next().await {
|
||||
@ -115,12 +107,12 @@ impl LspAdapter for TypeScriptLspAdapter {
|
||||
last_version_dir
|
||||
))
|
||||
}
|
||||
}
|
||||
})()
|
||||
.await
|
||||
.log_err()
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn label_for_completion(
|
||||
async fn label_for_completion(
|
||||
&self,
|
||||
item: &lsp::CompletionItem,
|
||||
language: &language::Language,
|
||||
@ -143,7 +135,7 @@ impl LspAdapter for TypeScriptLspAdapter {
|
||||
})
|
||||
}
|
||||
|
||||
fn initialization_options(&self) -> Option<serde_json::Value> {
|
||||
async fn initialization_options(&self) -> Option<serde_json::Value> {
|
||||
Some(json!({
|
||||
"provideFormatter": true
|
||||
}))
|
||||
|
@ -21,6 +21,7 @@ use futures::{
};
use gpui::{executor::Background, App, AssetSource, AsyncAppContext, Task};
use isahc::{config::Configurable, AsyncBody, Request};
use language::LanguageRegistry;
use log::LevelFilter;
use parking_lot::Mutex;
use project::{Fs, ProjectStore};
@ -163,7 +164,12 @@ fn main() {

    app.run(move |cx| {
        let client = client::Client::new(http.clone());
        let mut languages = languages::build_language_registry(login_shell_env_loaded);
        let mut languages = LanguageRegistry::new(login_shell_env_loaded);
        languages.set_language_server_download_dir(zed::ROOT_PATH.clone());
        let languages = Arc::new(languages);
        let init_languages = cx
            .background()
            .spawn(languages::init(languages.clone(), cx.background().clone()));
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));

        context_menu::init(cx);
@ -208,17 +214,22 @@ fn main() {
        })
        .detach();

        languages.set_language_server_download_dir(zed::ROOT_PATH.clone());
        let languages = Arc::new(languages);

        cx.observe_global::<Settings, _>({
            let languages = languages.clone();
            move |cx| {
                languages.set_theme(&cx.global::<Settings>().theme.editor.syntax);
                languages.set_theme(cx.global::<Settings>().theme.clone());
            }
        })
        .detach();
        cx.set_global(settings);
        cx.spawn({
            let languages = languages.clone();
            |cx| async move {
                cx.read(|cx| languages.set_theme(cx.global::<Settings>().theme.clone()));
                init_languages.await;
            }
        })
        .detach();

        let project_store = cx.add_model(|_| ProjectStore::new(db.clone()));
        let app_state = Arc::new(AppState {
126
plugins/Cargo.lock
generated
Normal file
@ -0,0 +1,126 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "bincode"
|
||||
version = "1.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
|
||||
|
||||
[[package]]
|
||||
name = "json_language"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"plugin",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "plugin"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bincode",
|
||||
"plugin_macros",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "plugin_macros"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bincode",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.137"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.137"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.82"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"ryu",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.96"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "test_plugin"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"plugin",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee"
|
2
plugins/Cargo.toml
Normal file
@ -0,0 +1,2 @@
[workspace]
members = ["./json_language", "./test_plugin"]
12
plugins/json_language/Cargo.toml
Normal file
@ -0,0 +1,12 @@
[package]
name = "json_language"
version = "0.1.0"
edition = "2021"

[dependencies]
plugin = { path = "../../crates/plugin" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

[lib]
crate-type = ["cdylib"]
103
plugins/json_language/src/lib.rs
Normal file
@ -0,0 +1,103 @@
use plugin::prelude::*;
use serde::Deserialize;
use std::fs;
use std::path::PathBuf;

#[import]
fn command(string: &str) -> Option<Vec<u8>>;

const BIN_PATH: &'static str =
    "node_modules/vscode-json-languageserver/bin/vscode-json-languageserver";

#[export]
pub fn name() -> &'static str {
    "vscode-json-languageserver"
}

#[export]
pub fn server_args() -> Vec<String> {
    vec!["--stdio".into()]
}

#[export]
pub fn fetch_latest_server_version() -> Option<String> {
    #[derive(Deserialize)]
    struct NpmInfo {
        versions: Vec<String>,
    }

    let output =
        command("npm info vscode-json-languageserver --json").expect("could not run command");
    let output = String::from_utf8(output).unwrap();

    let mut info: NpmInfo = serde_json::from_str(&output).ok()?;
    info.versions.pop()
}

#[export]
pub fn fetch_server_binary(container_dir: PathBuf, version: String) -> Result<PathBuf, String> {
    let version_dir = container_dir.join(version.as_str());
    fs::create_dir_all(&version_dir)
        .map_err(|_| "failed to create version directory".to_string())?;
    let binary_path = version_dir.join(BIN_PATH);

    if fs::metadata(&binary_path).is_err() {
        let output = command(&format!(
            "npm install vscode-json-languageserver@{}",
            version
        ));
        let output = output.map(String::from_utf8);
        if output.is_none() {
            return Err("failed to install vscode-json-languageserver".to_string());
        }

        if let Some(mut entries) = fs::read_dir(&container_dir).ok() {
            while let Some(entry) = entries.next() {
                if let Some(entry) = entry.ok() {
                    let entry_path = entry.path();
                    if entry_path.as_path() != version_dir {
                        fs::remove_dir_all(&entry_path).ok();
                    }
                }
            }
        }
    }

    Ok(binary_path)
}

#[export]
pub fn cached_server_binary(container_dir: PathBuf) -> Option<PathBuf> {
    let mut last_version_dir = None;
    let mut entries = fs::read_dir(&container_dir).ok()?;

    while let Some(entry) = entries.next() {
        let entry = entry.ok()?;
        if entry.file_type().ok()?.is_dir() {
            last_version_dir = Some(entry.path());
        }
    }

    let last_version_dir = last_version_dir?;
    let bin_path = last_version_dir.join(BIN_PATH);
    if bin_path.exists() {
        Some(bin_path)
    } else {
        println!("no binary found");
        None
    }
}

#[export]
pub fn initialization_options() -> Option<String> {
    Some("{ \"provideFormatter\": true }".to_string())
}

#[export]
pub fn id_for_language(name: String) -> Option<String> {
    if name == "JSON" {
        Some("jsonc".into())
    } else {
        None
    }
}
10
plugins/test_plugin/Cargo.toml
Normal file
@ -0,0 +1,10 @@
[package]
name = "test_plugin"
version = "0.1.0"
edition = "2021"

[dependencies]
plugin = { path = "../../crates/plugin" }

[lib]
crate-type = ["cdylib"]
82
plugins/test_plugin/src/lib.rs
Normal file
@ -0,0 +1,82 @@
use plugin::prelude::*;

#[export]
pub fn noop() {}

#[export]
pub fn constant() -> u32 {
    27
}

#[export]
pub fn identity(i: u32) -> u32 {
    i
}

#[export]
pub fn add(a: u32, b: u32) -> u32 {
    a + b
}

#[export]
pub fn swap(a: u32, b: u32) -> (u32, u32) {
    (b, a)
}

#[export]
pub fn sort(mut list: Vec<u32>) -> Vec<u32> {
    list.sort();
    list
}

#[export]
pub fn print(string: String) {
    println!("to stdout: {}", string);
    eprintln!("to stderr: {}", string);
}

#[import]
fn mystery_number(input: u32) -> u32;

#[export]
pub fn and_back(secret: u32) -> u32 {
    mystery_number(secret)
}

#[import]
fn import_noop() -> ();

#[import]
fn import_identity(i: u32) -> u32;

#[import]
fn import_swap(a: u32, b: u32) -> (u32, u32);

#[export]
pub fn imports(x: u32) -> u32 {
    let a = import_identity(7);
    import_noop();
    let (b, c) = import_swap(a, x);
    assert_eq!(a, c);
    assert_eq!(x, b);
    a + b // should be 7 + x
}

#[import]
fn import_half(a: u32) -> u32;

#[export]
pub fn half_async(a: u32) -> u32 {
    import_half(a)
}

#[import]
fn command_async(command: String) -> Option<Vec<u8>>;

#[export]
pub fn echo_async(message: String) -> String {
    let command = format!("echo {}", message);
    let result = command_async(command);
    let result = result.expect("Could not run command");
    String::from_utf8_lossy(&result).to_string()
}
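Each `#[export]` above becomes a function the host can resolve by name, and each `#[import]` must be backed by a host closure registered on the PluginBuilder before the plugin is instantiated. A rough sketch of how the host side might exercise `and_back`, reusing only the plugin_runtime calls shown elsewhere in this commit; the wasm path, the closure body, and the omission of the other imports are illustrative assumptions:

// Sketch: back the guest's `mystery_number` import with a host closure, then call `and_back`.
use anyhow::Result;
use plugin_runtime::{PluginBuilder, WasiFn};

async fn exercise_test_plugin() -> Result<()> {
    let mut plugin = PluginBuilder::new_with_default_ctx()?
        // Hypothetical host implementation of the `mystery_number` import.
        .host_function_async("mystery_number", |input: u32| async move { input * 7 })?
        // In practice every #[import] above (import_noop, import_swap, ...) would need a
        // matching registration before init succeeds; they are omitted here for brevity.
        .init(true, include_bytes!("../../plugins/bin/test_plugin.wasm.pre"))
        .await?;

    // `and_back(6)` round-trips through the host closure: 6 -> mystery_number -> 42.
    let and_back: WasiFn<u32, u32> = plugin.function("and_back")?;
    assert_eq!(plugin.call(&and_back, 6).await?, 42);
    Ok(())
}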
@ -6,6 +6,7 @@ export ZED_BUNDLE=true

echo "Installing cargo bundle"
cargo install cargo-bundle --version 0.5.0
rustup target add wasm32-wasi

# Deal with versions of macOS that don't include libstdc++ headers
export CXXFLAGS="-stdlib=libc++"