Allow wasm extensions to do arbitrary file I/O in their own directory to install language servers (#9043)

This PR provides WASM extensions with write access to their own dedicated
working directories under the Zed `extensions` dir. Each extension's directory
is set as its `current_dir` when it runs. Extensions can return relative paths
from the `Extension::language_server_command` method, and those paths will be
interpreted relative to this working dir.

With this functionality, most of the language servers that we currently build
into Zed can be installed using extensions.
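
As a rough sketch (not part of this diff), an extension relying on this behavior might look like the following. It is modeled on the Gleam extension updated at the bottom of this PR; the names `MyExtension` and `my-language-server/bin/server` are hypothetical.

```rust
use std::fs;
use zed_extension_api::{self as zed, Result};

struct MyExtension;

impl zed::Extension for MyExtension {
    fn new() -> Self {
        Self
    }

    fn language_server_command(
        &mut self,
        _config: zed::LanguageServerConfig,
        _worktree: &zed::Worktree,
    ) -> Result<zed::Command> {
        // The extension runs with its work dir as the current directory, so
        // ordinary `std::fs` calls and relative paths stay inside it.
        let binary_path = "my-language-server/bin/server";
        if !fs::metadata(binary_path).map_or(false, |stat| stat.is_file()) {
            // A real extension would call `zed::download_file` here to install
            // the server into the work dir, as the Gleam extension below does.
            return Err("language server is not installed".to_string());
        }

        // Zed resolves this relative path against the extension's work dir.
        Ok(zed::Command {
            command: binary_path.to_string(),
            args: vec!["lsp".to_string()],
            env: Default::default(),
        })
    }
}

zed_extension_api::register_extension!(MyExtension);
```

The Gleam extension changes at the end of this diff show the full install flow, including `zed::download_file` and language-server installation status updates.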

Release Notes:

- N/A
Max Brunsfeld 2024-03-08 08:49:27 -08:00 committed by GitHub
parent a550b9cecf
commit 51ebe0eb01
13 changed files with 421 additions and 215 deletions


@ -86,12 +86,6 @@ jobs:
clean: false
submodules: "recursive"
- name: Install cargo-component
run: |
if ! which cargo-component > /dev/null; then
cargo install cargo-component
fi
- name: cargo clippy
run: cargo xtask clippy

Cargo.lock generated

@ -3536,7 +3536,10 @@ dependencies = [
"async-compression",
"async-tar",
"async-trait",
"cap-std",
"collections",
"ctor",
"env_logger",
"fs",
"futures 0.3.28",
"gpui",
@ -3544,6 +3547,7 @@ dependencies = [
"log",
"lsp",
"node_runtime",
"parking_lot 0.11.2",
"project",
"schemars",
"serde",


@ -201,6 +201,7 @@ bitflags = "2.4.2"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "43721bf42d298b7cbee2195ee66f73a5f1c7b2fc" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "43721bf42d298b7cbee2195ee66f73a5f1c7b2fc" }
blade-rwh = { package = "raw-window-handle", version = "0.5" }
cap-std = "2.0"
chrono = { version = "0.4", features = ["serde"] }
clap = "4.4"
clickhouse = { version = "0.11.6" }


@ -20,6 +20,7 @@ anyhow.workspace = true
async-compression.workspace = true
async-tar.workspace = true
async-trait.workspace = true
cap-std.workspace = true
collections.workspace = true
fs.workspace = true
futures.workspace = true
@ -42,6 +43,10 @@ wasmparser.workspace = true
wit-component.workspace = true
[dev-dependencies]
ctor.workspace = true
env_logger.workspace = true
parking_lot.workspace = true
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }


@ -317,7 +317,10 @@ impl ExtensionBuilder {
fs::remove_file(&cache_path).ok();
log::info!("downloading wasi adapter module");
log::info!(
"downloading wasi adapter module to {}",
cache_path.display()
);
let mut response = self
.http
.get(WASI_ADAPTER_URL, AsyncBody::default(), true)
@ -357,6 +360,7 @@ impl ExtensionBuilder {
fs::remove_dir_all(&wasi_sdk_dir).ok();
fs::remove_dir_all(&tar_out_dir).ok();
log::info!("downloading wasi-sdk to {}", wasi_sdk_dir.display());
let mut response = self.http.get(&url, AsyncBody::default(), true).await?;
let body = BufReader::new(response.body_mut());
let body = GzipDecoder::new(body);


@ -1,4 +1,4 @@
use crate::wasm_host::{wit::LanguageServerConfig, WasmExtension};
use crate::wasm_host::{wit::LanguageServerConfig, WasmExtension, WasmHost};
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::{Future, FutureExt};
@ -16,7 +16,7 @@ use wasmtime_wasi::preview2::WasiView as _;
pub struct ExtensionLspAdapter {
pub(crate) extension: WasmExtension,
pub(crate) config: LanguageServerConfig,
pub(crate) work_dir: PathBuf,
pub(crate) host: Arc<WasmHost>,
}
#[async_trait]
@ -41,18 +41,23 @@ impl LspAdapter for ExtensionLspAdapter {
|extension, store| {
async move {
let resource = store.data_mut().table().push(delegate)?;
extension
let command = extension
.call_language_server_command(store, &this.config, resource)
.await
.await?
.map_err(|e| anyhow!("{}", e))?;
anyhow::Ok(command)
}
.boxed()
}
})
.await?
.map_err(|e| anyhow!("{}", e))?;
.await?;
let path = self
.host
.path_from_extension(&self.extension.manifest.id, command.command.as_ref());
Ok(LanguageServerBinary {
path: self.work_dir.join(&command.command),
path,
arguments: command.args.into_iter().map(|arg| arg.into()).collect(),
env: Some(command.env.into_iter().collect()),
})


@ -100,6 +100,7 @@ enum ExtensionOperation {
#[derive(Copy, Clone)]
pub enum Event {
ExtensionsUpdated,
StartedReloading,
}
impl EventEmitter<Event> for ExtensionStore {}
@ -148,6 +149,7 @@ pub fn init(
let store = cx.new_model(move |cx| {
ExtensionStore::new(
EXTENSIONS_DIR.clone(),
None,
fs,
http_client,
node_runtime,
@ -159,7 +161,7 @@ pub fn init(
cx.on_action(|_: &ReloadExtensions, cx| {
let store = cx.global::<GlobalExtensionStore>().0.clone();
store.update(cx, |store, _| drop(store.reload(None)));
store.update(cx, |store, cx| drop(store.reload(None, cx)));
});
cx.set_global(GlobalExtensionStore(store));
@ -170,8 +172,10 @@ impl ExtensionStore {
cx.global::<GlobalExtensionStore>().0.clone()
}
#[allow(clippy::too_many_arguments)]
pub fn new(
extensions_dir: PathBuf,
build_dir: Option<PathBuf>,
fs: Arc<dyn Fs>,
http_client: Arc<HttpClientWithUrl>,
node_runtime: Arc<dyn NodeRuntime>,
@ -180,7 +184,7 @@ impl ExtensionStore {
cx: &mut ModelContext<Self>,
) -> Self {
let work_dir = extensions_dir.join("work");
let build_dir = extensions_dir.join("build");
let build_dir = build_dir.unwrap_or_else(|| extensions_dir.join("build"));
let installed_dir = extensions_dir.join("installed");
let index_path = extensions_dir.join("index.json");
@ -226,7 +230,7 @@ impl ExtensionStore {
// it must be asynchronously rebuilt.
let mut extension_index = ExtensionIndex::default();
let mut extension_index_needs_rebuild = true;
if let Some(index_content) = index_content.log_err() {
if let Some(index_content) = index_content.ok() {
if let Some(index) = serde_json::from_str(&index_content).log_err() {
extension_index = index;
if let (Ok(Some(index_metadata)), Ok(Some(extensions_metadata))) =
@ -243,7 +247,7 @@ impl ExtensionStore {
// index needs to be rebuild, then enqueue
let load_initial_extensions = this.extensions_updated(extension_index, cx);
if extension_index_needs_rebuild {
let _ = this.reload(None);
let _ = this.reload(None, cx);
}
// Perform all extension loading in a single task to ensure that we
@ -255,7 +259,7 @@ impl ExtensionStore {
let mut debounce_timer = cx
.background_executor()
.timer(RELOAD_DEBOUNCE_DURATION)
.spawn(futures::future::pending())
.fuse();
loop {
select_biased! {
@ -271,7 +275,8 @@ impl ExtensionStore {
this.update(&mut cx, |this, _| {
this.modified_extensions.extend(extension_id);
})?;
debounce_timer = cx.background_executor()
debounce_timer = cx
.background_executor()
.timer(RELOAD_DEBOUNCE_DURATION)
.fuse();
}
@ -313,12 +318,17 @@ impl ExtensionStore {
this
}
fn reload(&mut self, modified_extension: Option<Arc<str>>) -> impl Future<Output = ()> {
fn reload(
&mut self,
modified_extension: Option<Arc<str>>,
cx: &mut ModelContext<Self>,
) -> impl Future<Output = ()> {
let (tx, rx) = oneshot::channel();
self.reload_complete_senders.push(tx);
self.reload_tx
.unbounded_send(modified_extension)
.expect("reload task exited");
cx.emit(Event::StartedReloading);
async move {
rx.await.ok();
}
@ -444,7 +454,7 @@ impl ExtensionStore {
archive
.unpack(extensions_dir.join(extension_id.as_ref()))
.await?;
this.update(&mut cx, |this, _| this.reload(Some(extension_id)))?
this.update(&mut cx, |this, cx| this.reload(Some(extension_id), cx))?
.await;
anyhow::Ok(())
})
@ -483,7 +493,8 @@ impl ExtensionStore {
)
.await?;
this.update(&mut cx, |this, _| this.reload(None))?.await;
this.update(&mut cx, |this, cx| this.reload(None, cx))?
.await;
anyhow::Ok(())
})
.detach_and_log_err(cx)
@ -493,7 +504,7 @@ impl ExtensionStore {
&mut self,
extension_source_path: PathBuf,
cx: &mut ModelContext<Self>,
) {
) -> Task<Result<()>> {
let extensions_dir = self.extensions_dir();
let fs = self.fs.clone();
let builder = self.builder.clone();
@ -560,11 +571,10 @@ impl ExtensionStore {
fs.create_symlink(output_path, extension_source_path)
.await?;
this.update(&mut cx, |this, _| this.reload(Some(extension_id)))?
this.update(&mut cx, |this, cx| this.reload(None, cx))?
.await;
Ok(())
})
.detach_and_log_err(cx)
}
pub fn rebuild_dev_extension(&mut self, extension_id: Arc<str>, cx: &mut ModelContext<Self>) {
@ -592,7 +602,7 @@ impl ExtensionStore {
})?;
if result.is_ok() {
this.update(&mut cx, |this, _| this.reload(Some(extension_id)))?
this.update(&mut cx, |this, cx| this.reload(Some(extension_id), cx))?
.await;
}
@ -664,9 +674,9 @@ impl ExtensionStore {
log::info!(
"extensions updated. loading {}, reloading {}, unloading {}",
extensions_to_unload.len() - reload_count,
extensions_to_load.len() - reload_count,
reload_count,
extensions_to_load.len() - reload_count
extensions_to_unload.len() - reload_count
);
let themes_to_remove = old_index
@ -839,7 +849,7 @@ impl ExtensionStore {
language_server_config.language.clone(),
Arc::new(ExtensionLspAdapter {
extension: wasm_extension.clone(),
work_dir: this.wasm_host.work_dir.join(manifest.id.as_ref()),
host: this.wasm_host.clone(),
config: wit::LanguageServerConfig {
name: language_server_name.0.to_string(),
language_name: language_server_config.language.to_string(),


@ -1,11 +1,10 @@
use crate::{
build_extension::{CompileExtensionOptions, ExtensionBuilder},
ExtensionIndex, ExtensionIndexEntry, ExtensionIndexLanguageEntry, ExtensionIndexThemeEntry,
ExtensionManifest, ExtensionStore, GrammarManifestEntry, RELOAD_DEBOUNCE_DURATION,
};
use async_compression::futures::bufread::GzipEncoder;
use collections::BTreeMap;
use fs::{FakeFs, Fs};
use fs::{FakeFs, Fs, RealFs};
use futures::{io::BufReader, AsyncReadExt, StreamExt};
use gpui::{Context, TestAppContext};
use language::{
@ -13,6 +12,7 @@ use language::{
LanguageServerName,
};
use node_runtime::FakeNodeRuntime;
use parking_lot::Mutex;
use project::Project;
use serde_json::json;
use settings::SettingsStore;
@ -22,7 +22,18 @@ use std::{
sync::Arc,
};
use theme::ThemeRegistry;
use util::http::{self, FakeHttpClient, Response};
use util::{
http::{FakeHttpClient, Response},
test::temp_tree,
};
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
if std::env::var("RUST_LOG").is_ok() {
env_logger::init();
}
}
#[gpui::test]
async fn test_extension_store(cx: &mut TestAppContext) {
@ -248,6 +259,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
let store = cx.new_model(|cx| {
ExtensionStore::new(
PathBuf::from("/the-extension-dir"),
None,
fs.clone(),
http_client.clone(),
node_runtime.clone(),
@ -335,7 +347,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
},
);
let _ = store.update(cx, |store, _| store.reload(None));
let _ = store.update(cx, |store, cx| store.reload(None, cx));
cx.executor().advance_clock(RELOAD_DEBOUNCE_DURATION);
store.read_with(cx, |store, _| {
@ -365,6 +377,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
let store = cx.new_model(|cx| {
ExtensionStore::new(
PathBuf::from("/the-extension-dir"),
None,
fs.clone(),
http_client.clone(),
node_runtime.clone(),
@ -422,6 +435,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
let root_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
.parent()
@ -431,32 +445,19 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
let cache_dir = root_dir.join("target");
let gleam_extension_dir = root_dir.join("extensions").join("gleam");
cx.executor().allow_parking();
ExtensionBuilder::new(cache_dir, http::client())
.compile_extension(
&gleam_extension_dir,
CompileExtensionOptions { release: false },
)
.await
.unwrap();
cx.executor().forbid_parking();
let fs = FakeFs::new(cx.executor());
fs.insert_tree("/the-extension-dir", json!({ "installed": {} }))
.await;
fs.insert_tree_from_real_fs("/the-extension-dir/installed/gleam", gleam_extension_dir)
.await;
fs.insert_tree(
"/the-project-dir",
json!({
".tool-versions": "rust 1.73.0",
let fs = Arc::new(RealFs);
let extensions_dir = temp_tree(json!({
"installed": {},
"work": {}
}));
let project_dir = temp_tree(json!({
"test.gleam": ""
}),
)
.await;
}));
let project = Project::test(fs.clone(), ["/the-project-dir".as_ref()], cx).await;
let extensions_dir = extensions_dir.path().canonicalize().unwrap();
let project_dir = project_dir.path().canonicalize().unwrap();
let project = Project::test(fs.clone(), [project_dir.as_path()], cx).await;
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
let theme_registry = Arc::new(ThemeRegistry::new(Box::new(())));
@ -464,55 +465,76 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
let mut status_updates = language_registry.language_server_binary_statuses();
struct FakeLanguageServerVersion {
version: String,
binary_contents: String,
http_request_count: usize,
}
let language_server_version = Arc::new(Mutex::new(FakeLanguageServerVersion {
version: "v1.2.3".into(),
binary_contents: "the-binary-contents".into(),
http_request_count: 0,
}));
let http_client = FakeHttpClient::create({
move |request| async move {
match request.uri().to_string().as_str() {
"https://api.github.com/repos/gleam-lang/gleam/releases" => Ok(Response::new(
let language_server_version = language_server_version.clone();
move |request| {
let language_server_version = language_server_version.clone();
async move {
language_server_version.lock().http_request_count += 1;
let version = language_server_version.lock().version.clone();
let binary_contents = language_server_version.lock().binary_contents.clone();
let github_releases_uri = "https://api.github.com/repos/gleam-lang/gleam/releases";
let asset_download_uri =
format!("https://fake-download.example.com/gleam-{version}");
let uri = request.uri().to_string();
if uri == github_releases_uri {
Ok(Response::new(
json!([
{
"tag_name": "v1.2.3",
"tag_name": version,
"prerelease": false,
"tarball_url": "",
"zipball_url": "",
"assets": [
{
"name": "gleam-v1.2.3-aarch64-apple-darwin.tar.gz",
"browser_download_url": "http://example.com/the-download"
"name": format!("gleam-{version}-aarch64-apple-darwin.tar.gz"),
"browser_download_url": asset_download_uri
}
]
}
])
.to_string()
.into(),
)),
"http://example.com/the-download" => {
))
} else if uri == asset_download_uri {
let mut bytes = Vec::<u8>::new();
let mut archive = async_tar::Builder::new(&mut bytes);
let mut header = async_tar::Header::new_gnu();
let content = "the-gleam-binary-contents".as_bytes();
header.set_size(content.len() as u64);
header.set_size(binary_contents.len() as u64);
archive
.append_data(&mut header, "gleam", content)
.append_data(&mut header, "gleam", binary_contents.as_bytes())
.await
.unwrap();
archive.into_inner().await.unwrap();
let mut gzipped_bytes = Vec::new();
let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice()));
encoder.read_to_end(&mut gzipped_bytes).await.unwrap();
Ok(Response::new(gzipped_bytes.into()))
} else {
Ok(Response::builder().status(404).body("not found".into())?)
}
_ => Ok(Response::builder().status(404).body("not found".into())?),
}
}
});
let _store = cx.new_model(|cx| {
let extension_store = cx.new_model(|cx| {
ExtensionStore::new(
PathBuf::from("/the-extension-dir"),
extensions_dir.clone(),
Some(cache_dir),
fs.clone(),
http_client.clone(),
node_runtime,
@ -522,17 +544,35 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
)
});
cx.executor().advance_clock(RELOAD_DEBOUNCE_DURATION);
// Ensure that debounces fire.
let mut events = cx.events(&extension_store);
let executor = cx.executor();
let _task = cx.executor().spawn(async move {
while let Some(event) = events.next().await {
match event {
crate::Event::StartedReloading => {
executor.advance_clock(RELOAD_DEBOUNCE_DURATION);
}
_ => (),
}
}
});
extension_store
.update(cx, |store, cx| {
store.install_dev_extension(gleam_extension_dir.clone(), cx)
})
.await
.unwrap();
let mut fake_servers = language_registry.fake_language_servers("Gleam");
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/the-project-dir/test.gleam", cx)
project.open_local_buffer(project_dir.join("test.gleam"), cx)
})
.await
.unwrap();
project.update(cx, |project, cx| {
project.set_language_for_buffer(
&buffer,
@ -548,20 +588,16 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
});
let fake_server = fake_servers.next().await.unwrap();
let expected_server_path = extensions_dir.join("work/gleam/gleam-v1.2.3/gleam");
let expected_binary_contents = language_server_version.lock().binary_contents.clone();
assert_eq!(
fs.load("/the-extension-dir/work/gleam/gleam-v1.2.3/gleam".as_ref())
.await
.unwrap(),
"the-gleam-binary-contents"
);
assert_eq!(
fake_server.binary.path,
PathBuf::from("/the-extension-dir/work/gleam/gleam-v1.2.3/gleam")
);
assert_eq!(fake_server.binary.path, expected_server_path);
assert_eq!(fake_server.binary.arguments, [OsString::from("lsp")]);
assert_eq!(
fs.load(&expected_server_path).await.unwrap(),
expected_binary_contents
);
assert_eq!(language_server_version.lock().http_request_count, 2);
assert_eq!(
[
status_updates.next().await.unwrap(),
@ -583,6 +619,51 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
)
]
);
// Simulate a new version of the language server being released
language_server_version.lock().version = "v2.0.0".into();
language_server_version.lock().binary_contents = "the-new-binary-contents".into();
language_server_version.lock().http_request_count = 0;
// Start a new instance of the language server.
project.update(cx, |project, cx| {
project.restart_language_servers_for_buffers([buffer.clone()], cx)
});
// The extension has cached the binary path, and does not attempt
// to reinstall it.
let fake_server = fake_servers.next().await.unwrap();
assert_eq!(fake_server.binary.path, expected_server_path);
assert_eq!(
fs.load(&expected_server_path).await.unwrap(),
expected_binary_contents
);
assert_eq!(language_server_version.lock().http_request_count, 0);
// Reload the extension, clearing its cache.
// Start a new instance of the language server.
extension_store
.update(cx, |store, cx| store.reload(Some("gleam".into()), cx))
.await;
cx.executor().run_until_parked();
project.update(cx, |project, cx| {
project.restart_language_servers_for_buffers([buffer.clone()], cx)
});
// The extension re-fetches the latest version of the language server.
let fake_server = fake_servers.next().await.unwrap();
let new_expected_server_path = extensions_dir.join("work/gleam/gleam-v2.0.0/gleam");
let expected_binary_contents = language_server_version.lock().binary_contents.clone();
assert_eq!(fake_server.binary.path, new_expected_server_path);
assert_eq!(fake_server.binary.arguments, [OsString::from("lsp")]);
assert_eq!(
fs.load(&new_expected_server_path).await.unwrap(),
expected_binary_contents
);
// The old language server directory has been cleaned up.
assert!(fs.metadata(&expected_server_path).await.unwrap().is_none());
}
fn init_test(cx: &mut TestAppContext) {


@ -5,7 +5,10 @@ use async_tar::Archive;
use async_trait::async_trait;
use fs::Fs;
use futures::{
channel::{mpsc::UnboundedSender, oneshot},
channel::{
mpsc::{self, UnboundedSender},
oneshot,
},
future::BoxFuture,
io::BufReader,
Future, FutureExt, StreamExt as _,
@ -14,7 +17,8 @@ use gpui::BackgroundExecutor;
use language::{LanguageRegistry, LanguageServerBinaryStatus, LspAdapterDelegate};
use node_runtime::NodeRuntime;
use std::{
path::PathBuf,
env,
path::{Path, PathBuf},
sync::{Arc, OnceLock},
};
use util::{http::HttpClient, SemanticVersion};
@ -22,7 +26,7 @@ use wasmtime::{
component::{Component, Linker, Resource, ResourceTable},
Engine, Store,
};
use wasmtime_wasi::preview2::{command as wasi_command, WasiCtx, WasiCtxBuilder, WasiView};
use wasmtime_wasi::preview2::{self as wasi, WasiCtx};
pub mod wit {
wasmtime::component::bindgen!({
@ -49,6 +53,7 @@ pub(crate) struct WasmHost {
#[derive(Clone)]
pub struct WasmExtension {
tx: UnboundedSender<ExtensionCall>,
pub(crate) manifest: Arc<ExtensionManifest>,
#[allow(unused)]
zed_api_version: SemanticVersion,
}
@ -56,7 +61,7 @@ pub struct WasmExtension {
pub(crate) struct WasmState {
manifest: Arc<ExtensionManifest>,
table: ResourceTable,
ctx: WasiCtx,
ctx: wasi::WasiCtx,
host: Arc<WasmHost>,
}
@ -67,6 +72,8 @@ type ExtensionCall = Box<
static WASM_ENGINE: OnceLock<wasmtime::Engine> = OnceLock::new();
const EXTENSION_WORK_DIR_PATH: &str = "/zed/work";
impl WasmHost {
pub fn new(
fs: Arc<dyn Fs>,
@ -84,8 +91,8 @@ impl WasmHost {
})
.clone();
let mut linker = Linker::new(&engine);
wasi_command::add_to_linker(&mut linker).unwrap();
wit::Extension::add_to_linker(&mut linker, |state: &mut WasmState| state).unwrap();
wasi::command::add_to_linker(&mut linker).unwrap();
wit::Extension::add_to_linker(&mut linker, wasi_view).unwrap();
Arc::new(Self {
engine,
linker: Arc::new(linker),
@ -112,22 +119,14 @@ impl WasmHost {
for part in wasmparser::Parser::new(0).parse_all(&wasm_bytes) {
if let wasmparser::Payload::CustomSection(s) = part? {
if s.name() == "zed:api-version" {
if s.data().len() != 6 {
zed_api_version = parse_extension_version(s.data());
if zed_api_version.is_none() {
bail!(
"extension {} has invalid zed:api-version section: {:?}",
manifest.id,
s.data()
);
}
let major = u16::from_be_bytes(s.data()[0..2].try_into().unwrap()) as _;
let minor = u16::from_be_bytes(s.data()[2..4].try_into().unwrap()) as _;
let patch = u16::from_be_bytes(s.data()[4..6].try_into().unwrap()) as _;
zed_api_version = Some(SemanticVersion {
major,
minor,
patch,
})
}
}
}
@ -139,36 +138,95 @@ impl WasmHost {
let mut store = wasmtime::Store::new(
&this.engine,
WasmState {
manifest,
ctx: this.build_wasi_ctx(&manifest).await?,
manifest: manifest.clone(),
table: ResourceTable::new(),
ctx: WasiCtxBuilder::new()
.inherit_stdio()
.env("RUST_BACKTRACE", "1")
.build(),
host: this.clone(),
},
);
let (mut extension, instance) =
wit::Extension::instantiate_async(&mut store, &component, &this.linker)
.await
.context("failed to instantiate wasm component")?;
let (tx, mut rx) = futures::channel::mpsc::unbounded::<ExtensionCall>();
.context("failed to instantiate wasm extension")?;
extension
.call_init_extension(&mut store)
.await
.context("failed to initialize wasm extension")?;
let (tx, mut rx) = mpsc::unbounded::<ExtensionCall>();
executor
.spawn(async move {
extension.call_init_extension(&mut store).await.unwrap();
let _instance = instance;
while let Some(call) = rx.next().await {
(call)(&mut extension, &mut store).await;
}
})
.detach();
Ok(WasmExtension {
manifest,
tx,
zed_api_version,
})
}
}
async fn build_wasi_ctx(&self, manifest: &Arc<ExtensionManifest>) -> Result<WasiCtx> {
use cap_std::{ambient_authority, fs::Dir};
let extension_work_dir = self.work_dir.join(manifest.id.as_ref());
self.fs
.create_dir(&extension_work_dir)
.await
.context("failed to create extension work dir")?;
let work_dir_preopen = Dir::open_ambient_dir(extension_work_dir, ambient_authority())
.context("failed to preopen extension work directory")?;
let current_dir_preopen = work_dir_preopen
.try_clone()
.context("failed to preopen extension current directory")?;
let perms = wasi::FilePerms::all();
let dir_perms = wasi::DirPerms::all();
Ok(wasi::WasiCtxBuilder::new()
.inherit_stdio()
.preopened_dir(current_dir_preopen, dir_perms, perms, ".")
.preopened_dir(work_dir_preopen, dir_perms, perms, EXTENSION_WORK_DIR_PATH)
.env("PWD", EXTENSION_WORK_DIR_PATH)
.env("RUST_BACKTRACE", "1")
.build())
}
pub fn path_from_extension(&self, id: &Arc<str>, path: &Path) -> PathBuf {
self.writeable_path_from_extension(id, path)
.unwrap_or_else(|| path.to_path_buf())
}
pub fn writeable_path_from_extension(&self, id: &Arc<str>, path: &Path) -> Option<PathBuf> {
let path = path.strip_prefix(EXTENSION_WORK_DIR_PATH).unwrap_or(path);
if path.is_relative() {
let mut result = self.work_dir.clone();
result.push(id.as_ref());
result.extend(path);
Some(result)
} else {
None
}
}
}
fn parse_extension_version(data: &[u8]) -> Option<SemanticVersion> {
if data.len() == 6 {
Some(SemanticVersion {
major: u16::from_be_bytes([data[0], data[1]]) as _,
minor: u16::from_be_bytes([data[2], data[3]]) as _,
patch: u16::from_be_bytes([data[4], data[5]]) as _,
})
} else {
None
}
}
impl WasmExtension {
@ -194,6 +252,13 @@ impl WasmExtension {
}
}
impl WasmState {
pub fn writeable_path_from_extension(&self, path: &Path) -> Option<PathBuf> {
self.host
.writeable_path_from_extension(&self.manifest.id, path)
}
}
#[async_trait]
impl wit::HostWorktree for WasmState {
async fn read_text_file(
@ -201,7 +266,7 @@ impl wit::HostWorktree for WasmState {
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
path: String,
) -> wasmtime::Result<Result<String, String>> {
let delegate = self.table().get(&delegate)?;
let delegate = self.table.get(&delegate)?;
Ok(delegate
.read_text_file(path.into())
.await
@ -269,13 +334,13 @@ impl wit::ExtensionImports for WasmState {
async fn current_platform(&mut self) -> Result<(wit::Os, wit::Architecture)> {
Ok((
match std::env::consts::OS {
match env::consts::OS {
"macos" => wit::Os::Mac,
"linux" => wit::Os::Linux,
"windows" => wit::Os::Windows,
_ => panic!("unsupported os"),
},
match std::env::consts::ARCH {
match env::consts::ARCH {
"aarch64" => wit::Architecture::Aarch64,
"x86" => wit::Architecture::X86,
"x86_64" => wit::Architecture::X8664,
@ -314,18 +379,24 @@ impl wit::ExtensionImports for WasmState {
async fn download_file(
&mut self,
url: String,
filename: String,
path: String,
file_type: wit::DownloadedFileType,
) -> wasmtime::Result<Result<(), String>> {
let path = PathBuf::from(path);
async fn inner(
this: &mut WasmState,
url: String,
filename: String,
path: PathBuf,
file_type: wit::DownloadedFileType,
) -> anyhow::Result<()> {
this.host.fs.create_dir(&this.host.work_dir).await?;
let container_dir = this.host.work_dir.join(this.manifest.id.as_ref());
let destination_path = container_dir.join(&filename);
let extension_work_dir = this.host.work_dir.join(this.manifest.id.as_ref());
this.host.fs.create_dir(&extension_work_dir).await?;
let destination_path = this
.writeable_path_from_extension(&path)
.ok_or_else(|| anyhow!("cannot write to path {:?}", path))?;
let mut response = this
.host
@ -367,19 +438,24 @@ impl wit::ExtensionImports for WasmState {
.await?;
}
wit::DownloadedFileType::Zip => {
let zip_filename = format!("{filename}.zip");
let file_name = destination_path
.file_name()
.ok_or_else(|| anyhow!("invalid download path"))?
.to_string_lossy();
let zip_filename = format!("{file_name}.zip");
let mut zip_path = destination_path.clone();
zip_path.set_file_name(zip_filename);
futures::pin_mut!(body);
this.host.fs.create_file_with(&zip_path, body).await?;
let unzip_status = std::process::Command::new("unzip")
.current_dir(&container_dir)
.current_dir(&extension_work_dir)
.arg(&zip_path)
.output()?
.status;
if !unzip_status.success() {
Err(anyhow!("failed to unzip {filename} archive"))?;
Err(anyhow!("failed to unzip {} archive", path.display()))?;
}
}
}
@ -387,19 +463,23 @@ impl wit::ExtensionImports for WasmState {
Ok(())
}
Ok(inner(self, url, filename, file_type)
Ok(inner(self, url, path, file_type)
.await
.map(|_| ())
.map_err(|err| err.to_string()))
}
}
impl WasiView for WasmState {
fn wasi_view(state: &mut WasmState) -> &mut WasmState {
state
}
impl wasi::WasiView for WasmState {
fn table(&mut self) -> &mut ResourceTable {
&mut self.table
}
fn ctx(&mut self) -> &mut WasiCtx {
fn ctx(&mut self) -> &mut wasi::WasiCtx {
&mut self.ctx
}
}


@ -20,6 +20,7 @@ macro_rules! register_extension {
($extension_type:ty) => {
#[export_name = "init-extension"]
pub extern "C" fn __init_extension() {
std::env::set_current_dir(std::env::var("PWD").unwrap()).unwrap();
zed_extension_api::register_extension(|| {
Box::new(<$extension_type as zed_extension_api::Extension>::new())
});


@ -45,7 +45,9 @@ pub fn init(cx: &mut AppContext) {
let extension_path = prompt.await.log_err()??.pop()?;
store
.update(&mut cx, |store, cx| {
store.install_dev_extension(extension_path, cx);
store
.install_dev_extension(extension_path, cx)
.detach_and_log_err(cx)
})
.ok()?;
Some(())
@ -93,9 +95,8 @@ impl ExtensionsPage {
let subscriptions = [
cx.observe(&store, |_, _, cx| cx.notify()),
cx.subscribe(&store, |this, _, event, cx| match event {
extension::Event::ExtensionsUpdated => {
this.fetch_extensions_debounced(cx);
}
extension::Event::ExtensionsUpdated => this.fetch_extensions_debounced(cx),
_ => {}
}),
];


@ -9385,7 +9385,7 @@ impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
}
struct ProjectLspAdapterDelegate {
project: Model<Project>,
project: WeakModel<Project>,
worktree: worktree::Snapshot,
fs: Arc<dyn Fs>,
http_client: Arc<dyn HttpClient>,
@ -9395,7 +9395,7 @@ struct ProjectLspAdapterDelegate {
impl ProjectLspAdapterDelegate {
fn new(project: &Project, worktree: &Model<Worktree>, cx: &ModelContext<Project>) -> Arc<Self> {
Arc::new(Self {
project: cx.handle(),
project: cx.weak_model(),
worktree: worktree.read(cx).snapshot(),
fs: project.fs.clone(),
http_client: project.client.http_client(),
@ -9408,7 +9408,8 @@ impl ProjectLspAdapterDelegate {
impl LspAdapterDelegate for ProjectLspAdapterDelegate {
fn show_notification(&self, message: &str, cx: &mut AppContext) {
self.project
.update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
.update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())))
.ok();
}
fn http_client(&self) -> Arc<dyn HttpClient> {


@ -1,29 +1,22 @@
use std::fs;
use zed_extension_api::{self as zed, Result};
struct GleamExtension {
cached_binary_path: Option<String>,
}
impl zed::Extension for GleamExtension {
fn new() -> Self {
Self {
cached_binary_path: None,
}
}
fn language_server_command(
&mut self,
config: zed::LanguageServerConfig,
_worktree: &zed::Worktree,
) -> Result<zed::Command> {
let binary_path = if let Some(path) = &self.cached_binary_path {
impl GleamExtension {
fn language_server_binary_path(&mut self, config: zed::LanguageServerConfig) -> Result<String> {
if let Some(path) = &self.cached_binary_path {
if fs::metadata(path).map_or(false, |stat| stat.is_file()) {
zed::set_language_server_installation_status(
&config.name,
&zed::LanguageServerInstallationStatus::Cached,
);
return Ok(path.clone());
}
}
path.clone()
} else {
zed::set_language_server_installation_status(
&config.name,
&zed::LanguageServerInstallationStatus::CheckingForUpdate,
@ -58,11 +51,15 @@ impl zed::Extension for GleamExtension {
.find(|asset| asset.name == asset_name)
.ok_or_else(|| format!("no asset found matching {:?}", asset_name))?;
let version_dir = format!("gleam-{}", release.version);
let binary_path = format!("{version_dir}/gleam");
if !fs::metadata(&binary_path).map_or(false, |stat| stat.is_file()) {
zed::set_language_server_installation_status(
&config.name,
&zed::LanguageServerInstallationStatus::Downloading,
);
let version_dir = format!("gleam-{}", release.version);
zed::download_file(
&asset.download_url,
&version_dir,
@ -70,18 +67,40 @@ impl zed::Extension for GleamExtension {
)
.map_err(|e| format!("failed to download file: {e}"))?;
let entries =
fs::read_dir(".").map_err(|e| format!("failed to list working directory {e}"))?;
for entry in entries {
let entry = entry.map_err(|e| format!("failed to load directory entry {e}"))?;
if entry.file_name().to_str() != Some(&version_dir) {
fs::remove_dir_all(&entry.path()).ok();
}
}
zed::set_language_server_installation_status(
&config.name,
&zed::LanguageServerInstallationStatus::Downloaded,
);
}
let binary_path = format!("{version_dir}/gleam");
self.cached_binary_path = Some(binary_path.clone());
binary_path
};
Ok(binary_path)
}
}
impl zed::Extension for GleamExtension {
fn new() -> Self {
Self {
cached_binary_path: None,
}
}
fn language_server_command(
&mut self,
config: zed::LanguageServerConfig,
_worktree: &zed::Worktree,
) -> Result<zed::Command> {
Ok(zed::Command {
command: binary_path,
command: self.language_server_binary_path(config)?,
args: vec!["lsp".to_string()],
env: Default::default(),
})