mirror of
https://github.com/zed-industries/zed.git
synced 2024-11-08 07:35:01 +03:00
lsp: Watch paths outside of worktrees at language server's request (#17173)
Context: https://x.com/fasterthanlime/status/1819120238228570598

Up to this PR:
- We were not watching paths outside of a worktree when a language server requested it.
- We expected the GlobPattern used for file watching to always be rooted at the worktree root.

Point 1 mattered for observing global files (e.g. a global RA config), and both points had an impact on "monorepos". Let's picture the following scenario: you're working on a Rust project that has two crates, a bin crate and a lib crate:

```
my-rust-project/
  bin-crate/
  lib-crate/
```

Up to this PR, changes such as changing a field's visibility in lib-crate **were not reflected** in bin-crate until RA was restarted, even though RA for bin-crate asked us to watch lib-crate. Depending on how you had this project open:
- as a project with one worktree rooted at my-rust-project: due to point 2, we never noticed that we had to notify the RA instance for bin-crate about changes in lib-crate;
- as a project with two worktrees (bin-crate and lib-crate): due to point 1 (lib-crate is not within bin-crate's worktree), we once again missed the fact that we had to watch for changes in lib-crate.

This PR solves this by introducing a side channel: we store fs watchers for absolute paths at the Project level. Worktree change handling is left relatively untouched; it serves other purposes besides LSP change notification, so I figured it was better to leave it as is. Right now we have one change watcher per worktree; if we changed that, we'd end up with `(language server) + 1` watchers per worktree, which seems pretty horrid.

What's the end effect? At the very least fasterthanlime should be a tad happier; in practice, I expect it to have some impact on language server reliability in monorepo setups.

TODO
- [x] Wire FileChangeType through the `fs::watch` interface.

Release Notes:
- Improved language server reliability in multi-worktree projects and monorepos. We now notify the language server more reliably about which files have changed.
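To make the new watch interface concrete, here is a minimal sketch of how a consumer could turn the `PathEvent` batches that `fs::watch` now yields into LSP `DidChangeWatchedFiles` events. The `PathEvent`/`PathEventKind` types and the kind-to-`FileChangeType` mapping are taken from the diff below; the free-standing `to_lsp_file_events` helper and the direct use of the `lsp_types` crate are illustrative assumptions, not code from this PR.

```rust
use std::path::PathBuf;

use lsp_types as lsp; // assumption: lsp-types used directly; Zed's `lsp` crate re-exports these types

// Mirrors the types this PR adds to the `fs` crate.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum PathEventKind {
    Removed,
    Created,
    Changed,
}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct PathEvent {
    pub path: PathBuf,
    pub kind: Option<PathEventKind>,
}

// Hypothetical helper showing the same mapping `lsp_notify_abs_paths_changed` performs below:
// events with an unknown kind or a path that is not a valid file URL are silently dropped.
fn to_lsp_file_events(changes: Vec<PathEvent>) -> Vec<lsp::FileEvent> {
    changes
        .into_iter()
        .filter_map(|event| {
            let typ = match event.kind? {
                PathEventKind::Created => lsp::FileChangeType::CREATED,
                PathEventKind::Removed => lsp::FileChangeType::DELETED,
                PathEventKind::Changed => lsp::FileChangeType::CHANGED,
            };
            Some(lsp::FileEvent {
                uri: lsp::Url::from_file_path(&event.path).ok()?,
                typ,
            })
        })
        .collect()
}
```

Since `fs::watch` debounces with the latency it is given, each batch can carry several changed files, and a single notification covers all of them.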
This commit is contained in:
parent d2cb45e9bb
commit a850731b0e
@@ -123,7 +123,7 @@ impl PromptBuilder {
if params.fs.is_dir(parent_dir).await {
let (mut changes, _watcher) = params.fs.watch(parent_dir, Duration::from_secs(1)).await;
while let Some(changed_paths) = changes.next().await {
if changed_paths.iter().any(|p| p == &templates_dir) {
if changed_paths.iter().any(|p| &p.path == &templates_dir) {
let mut log_message = format!("Prompt template overrides directory detected at {}", templates_dir.display());
if let Ok(target) = params.fs.read_link(&templates_dir).await {
log_message.push_str(" -> ");
@@ -162,18 +162,18 @@ impl PromptBuilder {
let mut combined_changes = futures::stream::select(changes, parent_changes);

while let Some(changed_paths) = combined_changes.next().await {
if changed_paths.iter().any(|p| p == &templates_dir) {
if changed_paths.iter().any(|p| &p.path == &templates_dir) {
if !params.fs.is_dir(&templates_dir).await {
log::info!("Prompt template overrides directory removed. Restoring built-in prompt templates.");
Self::register_built_in_templates(&mut handlebars.lock()).log_err();
break;
}
}
for changed_path in changed_paths {
if changed_path.starts_with(&templates_dir) && changed_path.extension().map_or(false, |ext| ext == "hbs") {
log::info!("Reloading prompt template override: {}", changed_path.display());
if let Some(content) = params.fs.load(&changed_path).await.log_err() {
let file_name = changed_path.file_stem().unwrap().to_string_lossy();
for event in changed_paths {
if event.path.starts_with(&templates_dir) && event.path.extension().map_or(false, |ext| ext == "hbs") {
log::info!("Reloading prompt template override: {}", event.path.display());
if let Some(content) = params.fs.load(&event.path).await.log_err() {
let file_name = event.path.file_stem().unwrap().to_string_lossy();
handlebars.lock().register_template_string(&file_name, content).log_err();
}
}
@@ -369,9 +369,9 @@ impl ExtensionStore {
let installed_dir = this.installed_dir.clone();
async move {
let (mut paths, _) = fs.watch(&installed_dir, FS_WATCH_LATENCY).await;
while let Some(paths) = paths.next().await {
for path in paths {
let Ok(event_path) = path.strip_prefix(&installed_dir) else {
while let Some(events) = paths.next().await {
for event in events {
let Ok(event_path) = event.path.strip_prefix(&installed_dir) else {
continue;
};
@@ -45,6 +45,25 @@ pub trait Watcher: Send + Sync {
fn remove(&self, path: &Path) -> Result<()>;
}

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum PathEventKind {
Removed,
Created,
Changed,
}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct PathEvent {
pub path: PathBuf,
pub kind: Option<PathEventKind>,
}

impl From<PathEvent> for PathBuf {
fn from(event: PathEvent) -> Self {
event.path
}
}

#[async_trait::async_trait]
pub trait Fs: Send + Sync {
async fn create_dir(&self, path: &Path) -> Result<()>;
@@ -92,7 +111,7 @@ pub trait Fs: Send + Sync {
path: &Path,
latency: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Arc<dyn Watcher>,
);
@@ -469,16 +488,37 @@ impl Fs for RealFs {
path: &Path,
latency: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Arc<dyn Watcher>,
) {
use fsevent::EventStream;
use fsevent::{EventStream, StreamFlags};

let (tx, rx) = smol::channel::unbounded();
let (stream, handle) = EventStream::new(&[path], latency);
std::thread::spawn(move || {
stream.run(move |events| {
smol::block_on(tx.send(events.into_iter().map(|event| event.path).collect()))
smol::block_on(
tx.send(
events
.into_iter()
.map(|event| {
let kind = if event.flags.contains(StreamFlags::ITEM_REMOVED) {
Some(PathEventKind::Removed)
} else if event.flags.contains(StreamFlags::ITEM_CREATED) {
Some(PathEventKind::Created)
} else if event.flags.contains(StreamFlags::ITEM_MODIFIED) {
Some(PathEventKind::Changed)
} else {
None
};
PathEvent {
path: event.path,
kind,
}
})
.collect(),
),
)
.is_ok()
});
});
@@ -498,32 +538,46 @@ impl Fs for RealFs {
path: &Path,
latency: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Arc<dyn Watcher>,
) {
use notify::EventKind;
use parking_lot::Mutex;

let (tx, rx) = smol::channel::unbounded();
let pending_paths: Arc<Mutex<Vec<PathBuf>>> = Default::default();
let pending_paths: Arc<Mutex<Vec<PathEvent>>> = Default::default();
let root_path = path.to_path_buf();

watcher::global(|g| {
let tx = tx.clone();
let pending_paths = pending_paths.clone();
g.add(move |event: &notify::Event| {
let kind = match event.kind {
EventKind::Create(_) => Some(PathEventKind::Created),
EventKind::Modify(_) => Some(PathEventKind::Changed),
EventKind::Remove(_) => Some(PathEventKind::Removed),
_ => None,
};
let mut paths = event
.paths
.iter()
.filter(|path| path.starts_with(&root_path))
.cloned()
.filter_map(|path| {
path.starts_with(&root_path).then(|| PathEvent {
path: path.clone(),
kind,
})
})
.collect::<Vec<_>>();

if !paths.is_empty() {
paths.sort();
let mut pending_paths = pending_paths.lock();
if pending_paths.is_empty() {
tx.try_send(()).ok();
}
util::extend_sorted(&mut *pending_paths, paths, usize::MAX, PathBuf::cmp);
util::extend_sorted(&mut *pending_paths, paths, usize::MAX, |a, b| {
a.path.cmp(&b.path)
});
}
})
})
@@ -561,10 +615,10 @@ impl Fs for RealFs {
path: &Path,
_latency: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Arc<dyn Watcher>,
) {
use notify::Watcher;
use notify::{EventKind, Watcher};

let (tx, rx) = smol::channel::unbounded();

@@ -572,7 +626,21 @@ impl Fs for RealFs {
let tx = tx.clone();
move |event: Result<notify::Event, _>| {
if let Some(event) = event.log_err() {
tx.try_send(event.paths).ok();
let kind = match event.kind {
EventKind::Create(_) => Some(PathEventKind::Created),
EventKind::Modify(_) => Some(PathEventKind::Changed),
EventKind::Remove(_) => Some(PathEventKind::Removed),
_ => None,
};

tx.try_send(
event
.paths
.into_iter()
.map(|path| PathEvent { path, kind })
.collect::<Vec<_>>(),
)
.ok();
}
}
})
@@ -682,9 +750,9 @@ struct FakeFsState {
root: Arc<Mutex<FakeFsEntry>>,
next_inode: u64,
next_mtime: SystemTime,
event_txs: Vec<smol::channel::Sender<Vec<PathBuf>>>,
event_txs: Vec<smol::channel::Sender<Vec<PathEvent>>>,
events_paused: bool,
buffered_events: Vec<PathBuf>,
buffered_events: Vec<PathEvent>,
metadata_call_count: usize,
read_dir_call_count: usize,
}
@@ -793,11 +861,14 @@ impl FakeFsState {

fn emit_event<I, T>(&mut self, paths: I)
where
I: IntoIterator<Item = T>,
I: IntoIterator<Item = (T, Option<PathEventKind>)>,
T: Into<PathBuf>,
{
self.buffered_events
.extend(paths.into_iter().map(Into::into));
.extend(paths.into_iter().map(|(path, kind)| PathEvent {
path: path.into(),
kind,
}));

if !self.events_paused {
self.flush_events(self.buffered_events.len());
@@ -872,7 +943,7 @@ impl FakeFs {
Ok(())
})
.unwrap();
state.emit_event([path.to_path_buf()]);
state.emit_event([(path.to_path_buf(), None)]);
}

pub async fn insert_file(&self, path: impl AsRef<Path>, content: Vec<u8>) {
@@ -895,7 +966,7 @@ impl FakeFs {
}
})
.unwrap();
state.emit_event([path]);
state.emit_event([(path, None)]);
}

fn write_file_internal(&self, path: impl AsRef<Path>, content: Vec<u8>) -> Result<()> {
@@ -910,18 +981,24 @@ impl FakeFs {
mtime,
content,
}));
state.write_path(path, move |entry| {
let mut kind = None;
state.write_path(path, {
let kind = &mut kind;
move |entry| {
match entry {
btree_map::Entry::Vacant(e) => {
*kind = Some(PathEventKind::Created);
e.insert(file);
}
btree_map::Entry::Occupied(mut e) => {
*kind = Some(PathEventKind::Changed);
*e.get_mut() = file;
}
}
Ok(())
}
})?;
state.emit_event([path]);
state.emit_event([(path, kind)]);
Ok(())
}
@@ -1030,7 +1107,7 @@ impl FakeFs {
f(&mut repo_state);

if emit_git_event {
state.emit_event([dot_git]);
state.emit_event([(dot_git, None)]);
}
} else {
panic!("not a directory");
@@ -1081,7 +1158,7 @@ impl FakeFs {
self.state.lock().emit_event(
statuses
.iter()
.map(|(path, _)| dot_git.parent().unwrap().join(path)),
.map(|(path, _)| (dot_git.parent().unwrap().join(path), None)),
);
}
@@ -1251,7 +1328,7 @@ impl Fs for FakeFs {
state.next_inode += 1;
state.write_path(&cur_path, |entry| {
entry.or_insert_with(|| {
created_dirs.push(cur_path.clone());
created_dirs.push((cur_path.clone(), Some(PathEventKind::Created)));
Arc::new(Mutex::new(FakeFsEntry::Dir {
inode,
mtime,
@@ -1263,7 +1340,7 @@ impl Fs for FakeFs {
})?
}

self.state.lock().emit_event(&created_dirs);
self.state.lock().emit_event(created_dirs);
Ok(())
}
@@ -1279,10 +1356,12 @@ impl Fs for FakeFs {
mtime,
content: Vec::new(),
}));
let mut kind = Some(PathEventKind::Created);
state.write_path(path, |entry| {
match entry {
btree_map::Entry::Occupied(mut e) => {
if options.overwrite {
kind = Some(PathEventKind::Changed);
*e.get_mut() = file;
} else if !options.ignore_if_exists {
return Err(anyhow!("path already exists: {}", path.display()));
@@ -1294,7 +1373,7 @@ impl Fs for FakeFs {
}
Ok(())
})?;
state.emit_event([path]);
state.emit_event([(path, kind)]);
Ok(())
}

@@ -1313,7 +1392,7 @@ impl Fs for FakeFs {
}
})
.unwrap();
state.emit_event(&[path]);
state.emit_event([(path, None)]);
Ok(())
}

@@ -1388,7 +1467,10 @@ impl Fs for FakeFs {
})
.unwrap();

state.emit_event(&[old_path, new_path]);
state.emit_event([
(old_path, Some(PathEventKind::Removed)),
(new_path, Some(PathEventKind::Created)),
]);
Ok(())
}
@@ -1403,9 +1485,11 @@ impl Fs for FakeFs {
state.next_mtime += Duration::from_nanos(1);
let source_entry = state.read_path(&source)?;
let content = source_entry.lock().file_content(&source)?.clone();
let mut kind = Some(PathEventKind::Created);
let entry = state.write_path(&target, |e| match e {
btree_map::Entry::Occupied(e) => {
if options.overwrite {
kind = Some(PathEventKind::Changed);
Ok(Some(e.get().clone()))
} else if !options.ignore_if_exists {
return Err(anyhow!("{target:?} already exists"));
@@ -1425,7 +1509,7 @@ impl Fs for FakeFs {
if let Some(entry) = entry {
entry.lock().set_file_content(&target, content)?;
}
state.emit_event(&[target]);
state.emit_event([(target, kind)]);
Ok(())
}

@@ -1462,7 +1546,7 @@ impl Fs for FakeFs {
e.remove();
}
}
state.emit_event(&[path]);
state.emit_event([(path, Some(PathEventKind::Removed))]);
Ok(())
}

@@ -1491,7 +1575,7 @@ impl Fs for FakeFs {
e.remove();
}
}
state.emit_event(&[path]);
state.emit_event([(path, Some(PathEventKind::Removed))]);
Ok(())
}
@@ -1632,7 +1716,7 @@ impl Fs for FakeFs {
path: &Path,
_: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Arc<dyn Watcher>,
) {
self.simulate_random_delay().await;
@@ -1642,7 +1726,9 @@ impl Fs for FakeFs {
let executor = self.executor.clone();
(
Box::pin(futures::StreamExt::filter(rx, move |events| {
let result = events.iter().any(|evt_path| evt_path.starts_with(&path));
let result = events
.iter()
.any(|evt_path| evt_path.path.starts_with(&path));
let executor = executor.clone();
async move {
executor.simulate_random_delay().await;
@@ -22,8 +22,8 @@ use futures::{
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
AppContext, AsyncAppContext, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task,
WeakModel,
AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel,
Task, WeakModel,
};
use http_client::HttpClient;
use itertools::Itertools;
@@ -57,7 +57,7 @@ use std::{
convert::TryInto,
ffi::OsStr,
iter, mem,
ops::Range,
ops::{ControlFlow, Range},
path::{self, Path, PathBuf},
process::Stdio,
str,
@@ -146,7 +146,7 @@ pub struct LspStore {
language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
language_server_watched_paths: HashMap<LanguageServerId, HashMap<WorktreeId, GlobSet>>,
language_server_watched_paths: HashMap<LanguageServerId, Model<LanguageServerWatchedPaths>>,
language_server_watcher_registrations:
HashMap<LanguageServerId, HashMap<String, Vec<FileSystemWatcher>>>,
active_entry: Option<ProjectEntryId>,
@@ -3022,6 +3022,38 @@ impl LspStore {
.map(|(key, value)| (*key, value))
}

fn lsp_notify_abs_paths_changed(
&mut self,
server_id: LanguageServerId,
changes: Vec<PathEvent>,
) {
maybe!({
let server = self.language_server_for_id(server_id)?;
let changes = changes
.into_iter()
.filter_map(|event| {
let typ = match event.kind? {
PathEventKind::Created => lsp::FileChangeType::CREATED,
PathEventKind::Removed => lsp::FileChangeType::DELETED,
PathEventKind::Changed => lsp::FileChangeType::CHANGED,
};
Some(lsp::FileEvent {
uri: lsp::Url::from_file_path(&event.path).ok()?,
typ,
})
})
.collect::<Vec<_>>();
if !changes.is_empty() {
server
.notify::<lsp::notification::DidChangeWatchedFiles>(
lsp::DidChangeWatchedFilesParams { changes },
)
.log_err();
}
Some(())
});
}

fn rebuild_watched_paths(
&mut self,
language_server_id: LanguageServerId,
@@ -3034,62 +3066,169 @@ impl LspStore {
return;
};

let watched_paths = self
.language_server_watched_paths
.entry(language_server_id)
.or_default();
let mut worktree_globs = HashMap::default();
let mut abs_globs = HashMap::default();
log::trace!(
"Processing new watcher paths for language server with id {}",
language_server_id
);

let mut builders = HashMap::default();
let worktrees = self
.worktree_store
.read(cx)
.worktrees()
.filter_map(|worktree| {
self.language_servers_for_worktree(worktree.read(cx).id())
.find(|(_, _, server)| server.server_id() == language_server_id)
.map(|_| worktree)
})
.collect::<Vec<_>>();

enum PathToWatch {
Worktree {
literal_prefix: Arc<Path>,
pattern: String,
},
Absolute {
path: Arc<Path>,
pattern: String,
},
}
for watcher in watchers.values().flatten() {
for worktree in self.worktree_store.read(cx).worktrees().collect::<Vec<_>>() {
let mut found_host = false;
for worktree in &worktrees {
let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
if let Some(abs_path) = tree.abs_path().to_str() {
let relative_glob_pattern = match &watcher.glob_pattern {
lsp::GlobPattern::String(s) => Some(
s.strip_prefix(abs_path)
.unwrap_or(s)
if let Some(worktree_root_path) = tree.abs_path().to_str() {
let path_to_watch = match &watcher.glob_pattern {
lsp::GlobPattern::String(s) => {
match s.strip_prefix(worktree_root_path) {
Some(relative) => {
let pattern = relative
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(s),
),
.unwrap_or(relative)
.to_owned();
let literal_prefix = glob_literal_prefix(&pattern);

let literal_prefix = Arc::from(PathBuf::from(
literal_prefix
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(literal_prefix),
));
PathToWatch::Worktree {
literal_prefix,
pattern,
}
}
None => {
let path = glob_literal_prefix(s);
let glob = &s[path.len()..];
let pattern = glob
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(glob)
.to_owned();
let path = PathBuf::from(path).into();

PathToWatch::Absolute { path, pattern }
}
}
}
lsp::GlobPattern::Relative(rp) => {
let base_uri = match &rp.base_uri {
let Ok(mut base_uri) = match &rp.base_uri {
lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri,
lsp::OneOf::Right(base_uri) => base_uri,
}
.to_file_path() else {
return false;
};
base_uri.to_file_path().ok().and_then(|file_path| {
(file_path.to_str() == Some(abs_path))
.then_some(rp.pattern.as_str())
})

match base_uri.strip_prefix(worktree_root_path) {
Ok(relative) => {
let mut literal_prefix = relative.to_owned();
literal_prefix.push(glob_literal_prefix(&rp.pattern));

PathToWatch::Worktree {
literal_prefix: literal_prefix.into(),
pattern: rp.pattern.clone(),
}
}
Err(_) => {
let path = glob_literal_prefix(&rp.pattern);
let glob = &rp.pattern[path.len()..];
let pattern = glob
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(glob)
.to_owned();
base_uri.push(path);

PathToWatch::Absolute {
path: base_uri.into(),
pattern,
}
}
}
}
};
if let Some(relative_glob_pattern) = relative_glob_pattern {
let literal_prefix = glob_literal_prefix(relative_glob_pattern);
tree.as_local_mut()
.unwrap()
.add_path_prefix_to_scan(Path::new(literal_prefix).into());
if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
builders
match path_to_watch {
PathToWatch::Worktree {
literal_prefix,
pattern,
} => {
if let Some((tree, glob)) =
tree.as_local_mut().zip(Glob::new(&pattern).log_err())
{
tree.add_path_prefix_to_scan(literal_prefix);
worktree_globs
.entry(tree.id())
.or_insert_with(|| GlobSetBuilder::new())
.or_insert_with(GlobSetBuilder::new)
.add(glob);
} else {
return false;
}
}
PathToWatch::Absolute { path, pattern } => {
if let Some(glob) = Glob::new(&pattern).log_err() {
abs_globs
.entry(path)
.or_insert_with(GlobSetBuilder::new)
.add(glob);
}
return true;
}
}
return true;
}
false
});
if glob_is_inside_worktree {
break;
log::trace!(
"Watcher pattern `{}` has been attached to the worktree at `{}`",
serde_json::to_string(&watcher.glob_pattern).unwrap(),
worktree.read(cx).abs_path().display()
);
found_host = true;
}
}
if !found_host {
log::error!(
"Watcher pattern `{}` has not been attached to any worktree or absolute path",
serde_json::to_string(&watcher.glob_pattern).unwrap()
)
}
}

watched_paths.clear();
for (worktree_id, builder) in builders {
let mut watch_builder = LanguageServerWatchedPathsBuilder::default();
for (worktree_id, builder) in worktree_globs {
if let Ok(globset) = builder.build() {
watched_paths.insert(worktree_id, globset);
watch_builder.watch_worktree(worktree_id, globset);
}
}
for (abs_path, builder) in abs_globs {
if let Ok(globset) = builder.build() {
watch_builder.watch_abs_path(abs_path, globset);
}
}
let watcher = watch_builder.build(self.fs.clone(), language_server_id, cx);
self.language_server_watched_paths
.insert(language_server_id, watcher);

cx.notify();
}
@@ -5542,7 +5681,7 @@ impl LspStore {
}
}

pub fn update_local_worktree_language_servers(
pub(super) fn update_local_worktree_language_servers(
&mut self,
worktree_handle: &Model<Worktree>,
changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
@@ -5571,7 +5710,7 @@ impl LspStore {
if let Some(watched_paths) = self
.language_server_watched_paths
.get(&server_id)
.and_then(|paths| paths.get(&worktree_id))
.and_then(|paths| paths.read(cx).worktree_paths.get(&worktree_id))
{
let params = lsp::DidChangeWatchedFilesParams {
changes: changes
@@ -5732,6 +5871,102 @@ pub enum LanguageServerToQuery {
Other(LanguageServerId),
}

#[derive(Default)]
struct LanguageServerWatchedPaths {
worktree_paths: HashMap<WorktreeId, GlobSet>,
abs_paths: HashMap<Arc<Path>, (GlobSet, Task<()>)>,
}

#[derive(Default)]
struct LanguageServerWatchedPathsBuilder {
worktree_paths: HashMap<WorktreeId, GlobSet>,
abs_paths: HashMap<Arc<Path>, GlobSet>,
}

impl LanguageServerWatchedPathsBuilder {
fn watch_worktree(&mut self, worktree_id: WorktreeId, glob_set: GlobSet) {
self.worktree_paths.insert(worktree_id, glob_set);
}
fn watch_abs_path(&mut self, path: Arc<Path>, glob_set: GlobSet) {
self.abs_paths.insert(path, glob_set);
}
fn build(
self,
fs: Arc<dyn Fs>,
language_server_id: LanguageServerId,
cx: &mut ModelContext<LspStore>,
) -> Model<LanguageServerWatchedPaths> {
let project = cx.weak_model();

cx.new_model(|cx| {
let this_id = cx.entity_id();
const LSP_ABS_PATH_OBSERVE: Duration = Duration::from_millis(100);
let abs_paths = self
.abs_paths
.into_iter()
.map(|(abs_path, globset)| {
let task = cx.spawn({
let abs_path = abs_path.clone();
let fs = fs.clone();

let lsp_store = project.clone();
|_, mut cx| async move {
maybe!(async move {
let mut push_updates =
fs.watch(&abs_path, LSP_ABS_PATH_OBSERVE).await;
while let Some(update) = push_updates.0.next().await {
let action = lsp_store
.update(&mut cx, |this, cx| {
let Some(watcher) = this
.language_server_watched_paths
.get(&language_server_id)
else {
return ControlFlow::Break(());
};
if watcher.entity_id() != this_id {
// This watcher is no longer registered on the project, which means that we should
// cease operations.
return ControlFlow::Break(());
}
let (globs, _) = watcher
.read(cx)
.abs_paths
.get(&abs_path)
.expect(
"Watched abs path is not registered with a watcher",
);
let matching_entries = update
.into_iter()
.filter(|event| globs.is_match(&event.path))
.collect::<Vec<_>>();
this.lsp_notify_abs_paths_changed(
language_server_id,
matching_entries,
);
ControlFlow::Continue(())
})
.ok()?;

if action.is_break() {
break;
}
}
Some(())
})
.await;
}
});
(abs_path, (globset, task))
})
.collect();
LanguageServerWatchedPaths {
worktree_paths: self.worktree_paths,
abs_paths,
}
})
}
}

struct LspBufferSnapshot {
version: i32,
snapshot: TextBufferSnapshot,
@@ -5843,7 +6078,9 @@ impl DiagnosticSummary {
}

fn glob_literal_prefix(glob: &str) -> &str {
let mut literal_end = 0;
let is_absolute = glob.starts_with(path::MAIN_SEPARATOR);

let mut literal_end = is_absolute as usize;
for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
if part.contains(&['*', '?', '{', '}']) {
break;
@@ -5855,6 +6092,7 @@ fn glob_literal_prefix(glob: &str) -> &str {
literal_end += part.len();
}
}
let literal_end = literal_end.min(glob.len());
&glob[..literal_end]
}
@@ -169,7 +169,12 @@ impl SnippetProvider {

let (mut entries, _) = watcher.await;
while let Some(entries) = entries.next().await {
process_updates(this.clone(), entries, cx.clone()).await?;
process_updates(
this.clone(),
entries.into_iter().map(|event| event.path).collect(),
cx.clone(),
)
.await?;
}
Ok(())
})
@@ -7,7 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context as _, Result};
use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
use fs::{copy_recursive, Fs, RemoveOptions, Watcher};
use fs::{copy_recursive, Fs, PathEvent, RemoveOptions, Watcher};
use futures::{
channel::{
mpsc::{self, UnboundedSender},
@@ -1051,7 +1051,11 @@ impl LocalWorktree {
watcher,
};

scanner.run(events).await;
scanner
.run(Box::pin(
events.map(|events| events.into_iter().map(Into::into).collect()),
))
.await;
}
});
let scan_state_updater = cx.spawn(|this, mut cx| async move {
@@ -3522,7 +3526,7 @@ enum BackgroundScannerPhase {
}

impl BackgroundScanner {
async fn run(&mut self, mut fs_events_rx: Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>) {
async fn run(&mut self, mut fs_events_rx: Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>) {
use futures::FutureExt as _;

// If the worktree root does not contain a git repository, then find
@@ -3603,7 +3607,8 @@ impl BackgroundScanner {
while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) {
paths.extend(more_paths);
}
self.process_events(paths).await;
self.process_events(paths.into_iter().map(Into::into).collect())
.await;
}

// Continue processing events until the worktree is dropped.
@@ -3644,7 +3649,7 @@ impl BackgroundScanner {
while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) {
paths.extend(more_paths);
}
self.process_events(paths.clone()).await;
self.process_events(paths.into_iter().map(Into::into).collect()).await;
}
}
}
@@ -1158,13 +1158,13 @@ fn watch_themes(fs: Arc<dyn fs::Fs>, cx: &mut AppContext) {
.await;

while let Some(paths) = events.next().await {
for path in paths {
if fs.metadata(&path).await.ok().flatten().is_some() {
for event in paths {
if fs.metadata(&event.path).await.ok().flatten().is_some() {
if let Some(theme_registry) =
cx.update(|cx| ThemeRegistry::global(cx).clone()).log_err()
{
if let Some(()) = theme_registry
.load_user_theme(&path, fs.clone())
.load_user_theme(&event.path, fs.clone())
.await
.log_err()
{
@@ -1194,8 +1194,10 @@ fn watch_languages(fs: Arc<dyn fs::Fs>, languages: Arc<LanguageRegistry>, cx: &m
cx.spawn(|_| async move {
let (mut events, _) = fs.watch(path.as_path(), Duration::from_millis(100)).await;
while let Some(event) = events.next().await {
let has_language_file = event.iter().any(|path| {
path.extension()
let has_language_file = event.iter().any(|event| {
event
.path
.extension()
.map(|ext| ext.to_string_lossy().as_ref() == "scm")
.unwrap_or(false)
});