Mirror of https://github.com/zed-industries/zed.git
Allow opening buffers without a project entry
Commit: 1f6c69c7dc
Parent: ae6ddceb67
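The gist of the change, as a minimal runnable sketch with stand-in types (not Zed's actual File, ProjectEntryId, or worktree code): a buffer's backing file may now lack a worktree entry, so the entry id becomes an Option and every consumer branches on it.

    use std::path::PathBuf;

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct ProjectEntryId(u64);

    struct FileSketch {
        path: PathBuf,
        // Previously a plain ProjectEntryId; excluded files now carry None.
        entry_id: Option<ProjectEntryId>,
    }

    fn main() {
        let tracked = FileSketch {
            path: "src/main.rs".into(),
            entry_id: Some(ProjectEntryId(1)),
        };
        let excluded = FileSketch {
            path: ".git/HEAD".into(),
            entry_id: None,
        };
        for file in [tracked, excluded] {
            match file.entry_id {
                Some(id) => println!("{:?} is backed by entry {:?}", file.path, id),
                None => println!("{:?} is open without a project entry", file.path),
            }
        }
    }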
@@ -1658,19 +1658,15 @@ impl Project {
     pub fn open_path(
         &mut self,
-        path: impl Into<ProjectPath>,
+        path: ProjectPath,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
-        let project_path = path.into();
-        let task = self.open_buffer(project_path.clone(), cx);
+    ) -> Task<Result<(Option<ProjectEntryId>, AnyModelHandle)>> {
+        let task = self.open_buffer(path.clone(), cx);
         cx.spawn_weak(|_, cx| async move {
             let buffer = task.await?;
-            let project_entry_id = buffer
-                .read_with(&cx, |buffer, cx| {
-                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
-                })
-                .with_context(|| format!("no project entry for {project_path:?}"))?;
-
+            let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
+                File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
+            });
             let buffer: &AnyModelHandle = &buffer;
             Ok((project_entry_id, buffer.clone()))
         })
@@ -1985,8 +1981,10 @@ impl Project {
                 remote_id,
             );

-            self.local_buffer_ids_by_entry_id
-                .insert(file.entry_id, remote_id);
+            if let Some(entry_id) = file.entry_id {
+                self.local_buffer_ids_by_entry_id
+                    .insert(entry_id, remote_id);
+            }
         }
     }
@@ -2441,24 +2439,25 @@ impl Project {
                 return None;
             };

-            match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
-                Some(_) => {
-                    return None;
-                }
-                None => {
-                    let remote_id = buffer.read(cx).remote_id();
-                    self.local_buffer_ids_by_entry_id
-                        .insert(file.entry_id, remote_id);
-
-                    self.local_buffer_ids_by_path.insert(
-                        ProjectPath {
-                            worktree_id: file.worktree_id(cx),
-                            path: file.path.clone(),
-                        },
-                        remote_id,
-                    );
-                }
-            }
+            let remote_id = buffer.read(cx).remote_id();
+            if let Some(entry_id) = file.entry_id {
+                match self.local_buffer_ids_by_entry_id.get(&entry_id) {
+                    Some(_) => {
+                        return None;
+                    }
+                    None => {
+                        self.local_buffer_ids_by_entry_id
+                            .insert(entry_id, remote_id);
+                    }
+                }
+            };
+            self.local_buffer_ids_by_path.insert(
+                ProjectPath {
+                    worktree_id: file.worktree_id(cx),
+                    path: file.path.clone(),
+                },
+                remote_id,
+            );
         }
         _ => {}
     }
@@ -5777,7 +5776,7 @@ impl Project {
                     ignored_paths_to_process.pop_front()
                 {
                     if !query.file_matches(Some(&ignored_abs_path))
-                        || snapshot.is_path_excluded(&ignored_abs_path)
+                        || snapshot.is_path_excluded(ignored_abs_path.clone())
                     {
                         continue;
                     }
@@ -6208,10 +6207,13 @@ impl Project {
                 return;
             }

-            let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
+            let new_file = if let Some(entry) = old_file
+                .entry_id
+                .and_then(|entry_id| snapshot.entry_for_id(entry_id))
+            {
                 File {
                     is_local: true,
-                    entry_id: entry.id,
+                    entry_id: Some(entry.id),
                     mtime: entry.mtime,
                     path: entry.path.clone(),
                     worktree: worktree_handle.clone(),
@@ -6220,7 +6222,7 @@ impl Project {
             } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
                 File {
                     is_local: true,
-                    entry_id: entry.id,
+                    entry_id: Some(entry.id),
                     mtime: entry.mtime,
                     path: entry.path.clone(),
                     worktree: worktree_handle.clone(),
@@ -6250,10 +6252,12 @@ impl Project {
                 );
             }

-            if new_file.entry_id != *entry_id {
+            if new_file.entry_id != Some(*entry_id) {
                 self.local_buffer_ids_by_entry_id.remove(entry_id);
-                self.local_buffer_ids_by_entry_id
-                    .insert(new_file.entry_id, buffer_id);
+                if let Some(entry_id) = new_file.entry_id {
+                    self.local_buffer_ids_by_entry_id
+                        .insert(entry_id, buffer_id);
+                }
             }

             if new_file != *old_file {
@@ -955,13 +955,16 @@ impl LocalWorktree {
     ) -> Task<Result<(File, String, Option<String>)>> {
         let path = Arc::from(path);
         let abs_path = self.absolutize(&path);
+        let is_excluded = self.is_path_excluded(abs_path.clone());
         let fs = self.fs.clone();
-        let entry = self.refresh_entry(path.clone(), None, cx);
+        let entry = if is_excluded {
+            None
+        } else {
+            Some(self.refresh_entry(path.clone(), None, cx))
+        };

         cx.spawn(|this, cx| async move {
             let text = fs.load(&abs_path).await?;
-            let entry = entry.await?;

             let mut index_task = None;
             let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot());
             if let Some(repo) = snapshot.repository_for_path(&path) {
@@ -981,18 +984,46 @@ impl LocalWorktree {
                 None
             };

-            Ok((
-                File {
-                    entry_id: entry.id,
-                    worktree: this,
-                    path: entry.path,
-                    mtime: entry.mtime,
-                    is_local: true,
-                    is_deleted: false,
-                },
-                text,
-                diff_base,
-            ))
+            match entry {
+                Some(entry) => {
+                    let entry = entry.await?;
+                    Ok((
+                        File {
+                            entry_id: Some(entry.id),
+                            worktree: this,
+                            path: entry.path,
+                            mtime: entry.mtime,
+                            is_local: true,
+                            is_deleted: false,
+                        },
+                        text,
+                        diff_base,
+                    ))
+                }
+                None => {
+                    let metadata = fs
+                        .metadata(&abs_path)
+                        .await
+                        .with_context(|| {
+                            format!("Loading metadata for excluded file {abs_path:?}")
+                        })?
+                        .with_context(|| {
+                            format!("Excluded file {abs_path:?} got removed during loading")
+                        })?;
+                    Ok((
+                        File {
+                            entry_id: None,
+                            worktree: this,
+                            path,
+                            mtime: metadata.mtime,
+                            is_local: true,
+                            is_deleted: false,
+                        },
+                        text,
+                        diff_base,
+                    ))
+                }
+            }
         })
     }
@@ -1020,7 +1051,7 @@ impl LocalWorktree {

         if has_changed_file {
             let new_file = Arc::new(File {
-                entry_id: entry.id,
+                entry_id: Some(entry.id),
                 worktree: handle,
                 path: entry.path,
                 mtime: entry.mtime,
@@ -2226,10 +2257,20 @@ impl LocalSnapshot {
         paths
     }

-    pub fn is_path_excluded(&self, abs_path: &Path) -> bool {
-        self.file_scan_exclusions
-            .iter()
-            .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+    pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
+        loop {
+            if self
+                .file_scan_exclusions
+                .iter()
+                .any(|exclude_matcher| exclude_matcher.is_match(&path))
+            {
+                return true;
+            }
+            if !path.pop() {
+                break;
+            }
+        }
+        false
     }
 }

@@ -2458,8 +2499,7 @@ impl BackgroundScannerState {
             ids_to_preserve.insert(work_directory_id);
         } else {
             let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
-            let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
-                || snapshot.is_path_excluded(&git_dir_abs_path);
+            let git_dir_excluded = snapshot.is_path_excluded(git_dir_abs_path.clone());
             if git_dir_excluded
                 && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
             {
@@ -2666,7 +2706,7 @@ pub struct File {
     pub worktree: ModelHandle<Worktree>,
     pub path: Arc<Path>,
     pub mtime: SystemTime,
-    pub(crate) entry_id: ProjectEntryId,
+    pub(crate) entry_id: Option<ProjectEntryId>,
     pub(crate) is_local: bool,
     pub(crate) is_deleted: bool,
 }
@@ -2735,7 +2775,7 @@ impl language::File for File {
     fn to_proto(&self) -> rpc::proto::File {
         rpc::proto::File {
             worktree_id: self.worktree.id() as u64,
-            entry_id: self.entry_id.to_proto(),
+            entry_id: self.entry_id.map(|id| id.to_proto()),
             path: self.path.to_string_lossy().into(),
             mtime: Some(self.mtime.into()),
             is_deleted: self.is_deleted,
@@ -2793,7 +2833,7 @@ impl File {
             worktree,
             path: entry.path.clone(),
             mtime: entry.mtime,
-            entry_id: entry.id,
+            entry_id: Some(entry.id),
             is_local: true,
             is_deleted: false,
         })
@@ -2818,7 +2858,7 @@ impl File {
             worktree,
             path: Path::new(&proto.path).into(),
             mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
-            entry_id: ProjectEntryId::from_proto(proto.entry_id),
+            entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
             is_local: false,
             is_deleted: proto.is_deleted,
         })
@@ -2836,7 +2876,7 @@ impl File {
         if self.is_deleted {
             None
         } else {
-            Some(self.entry_id)
+            self.entry_id
         }
     }
 }
@@ -3338,16 +3378,7 @@ impl BackgroundScanner {
             return false;
         }

-        // FS events may come for files which parent directory is excluded, need to check ignore those.
-        let mut path_to_test = abs_path.clone();
-        let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
-            || snapshot.is_path_excluded(&relative_path);
-        while !excluded_file_event && path_to_test.pop() {
-            if snapshot.is_path_excluded(&path_to_test) {
-                excluded_file_event = true;
-            }
-        }
-        if excluded_file_event {
+        if snapshot.is_path_excluded(abs_path.clone()) {
             if !is_git_related {
                 log::debug!("ignoring FS event for excluded path {relative_path:?}");
             }
@@ -3531,7 +3562,7 @@ impl BackgroundScanner {
             let state = self.state.lock();
             let snapshot = &state.snapshot;
             root_abs_path = snapshot.abs_path().clone();
-            if snapshot.is_path_excluded(&job.abs_path) {
+            if snapshot.is_path_excluded(job.abs_path.to_path_buf()) {
                 log::error!("skipping excluded directory {:?}", job.path);
                 return Ok(());
             }
@@ -3603,7 +3634,10 @@ impl BackgroundScanner {

             {
                 let mut state = self.state.lock();
-                if state.snapshot.is_path_excluded(&child_abs_path) {
+                if state
+                    .snapshot
+                    .is_path_excluded(child_abs_path.to_path_buf())
+                {
                     let relative_path = job.path.join(child_name);
                     log::debug!("skipping excluded child entry {relative_path:?}");
                     state.remove_path(&relative_path);
@@ -1357,7 +1357,7 @@ message User {

 message File {
     uint64 worktree_id = 1;
-    uint64 entry_id = 2;
+    optional uint64 entry_id = 2;
     string path = 3;
     Timestamp mtime = 4;
     bool is_deleted = 5;
@@ -9,4 +9,4 @@ pub use notification::*;
 pub use peer::*;
 mod macros;

-pub const PROTOCOL_VERSION: u32 = 66;
+pub const PROTOCOL_VERSION: u32 = 67;
@@ -218,10 +218,13 @@ impl PathMatcher {
         })
     }

+    // TODO kb tests
     pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
-        other.as_ref().starts_with(&self.maybe_path)
-            || self.glob.is_match(&other)
-            || self.check_with_end_separator(other.as_ref())
+        let other_path = other.as_ref();
+        other_path.starts_with(&self.maybe_path)
+            || other_path.file_name() == Some(self.maybe_path.as_os_str())
+            || self.glob.is_match(other_path)
+            || self.check_with_end_separator(other_path)
     }

     fn check_with_end_separator(&self, path: &Path) -> bool {
@@ -481,18 +481,21 @@ impl Pane {

     pub(crate) fn open_item(
         &mut self,
-        project_entry_id: ProjectEntryId,
+        project_entry_id: Option<ProjectEntryId>,
         focus_item: bool,
         cx: &mut ViewContext<Self>,
         build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
     ) -> Box<dyn ItemHandle> {
         let mut existing_item = None;
-        for (index, item) in self.items.iter().enumerate() {
-            if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [project_entry_id]
-            {
-                let item = item.boxed_clone();
-                existing_item = Some((index, item));
-                break;
+        if let Some(project_entry_id) = project_entry_id {
+            for (index, item) in self.items.iter().enumerate() {
+                if item.is_singleton(cx)
+                    && item.project_entry_ids(cx).as_slice() == [project_entry_id]
+                {
+                    let item = item.boxed_clone();
+                    existing_item = Some((index, item));
+                    break;
+                }
             }
         }

@@ -1549,6 +1549,7 @@ impl Workspace {
                     let abs_path = abs_path.clone();
                     async move {
                         let (worktree, project_path) = project_path?;
+                        // TODO kb consider excluded files here?
                         if fs.is_file(&abs_path).await {
                             Some(
                                 this.update(&mut cx, |this, cx| {
@@ -2129,13 +2130,13 @@ impl Workspace {
         })
     }

-    pub(crate) fn load_path(
+    fn load_path(
         &mut self,
         path: ProjectPath,
         cx: &mut ViewContext<Self>,
     ) -> Task<
         Result<(
-            ProjectEntryId,
+            Option<ProjectEntryId>,
             impl 'static + FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
         )>,
     > {
@@ -763,7 +763,7 @@ mod tests {
         AppContext, AssetSource, Element, Entity, TestAppContext, View, ViewHandle,
     };
     use language::LanguageRegistry;
-    use project::{Project, ProjectPath};
+    use project::{project_settings::ProjectSettings, Project, ProjectPath};
     use serde_json::json;
     use settings::{handle_settings_file_changes, watch_config_file, SettingsStore};
     use std::{
@@ -1308,6 +1308,99 @@ mod tests {
         });
     }

+    #[gpui::test]
+    async fn test_opening_ignored_and_excluded_paths(cx: &mut TestAppContext) {
+        let app_state = init_test(cx);
+        cx.update(|cx| {
+            cx.update_global::<SettingsStore, _, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions =
+                        Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]);
+                });
+            });
+        });
+        // TODO kb also test external excluded dirs opening
+        app_state
+            .fs
+            .as_fake()
+            .insert_tree(
+                "/root",
+                json!({
+                    ".gitignore": "ignored_dir\n",
+                    ".git": {
+                        "HEAD": "ref: refs/heads/main",
+                    },
+                    "regular_dir": {
+                        "file": "regular file contents",
+                    },
+                    "ignored_dir": {
+                        "file": "ignored file contents",
+                    },
+                    "excluded_dir": {
+                        "file": "excluded file contents",
+                    },
+                }),
+            )
+            .await;
+
+        let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+        let window = cx.add_window(|cx| Workspace::test_new(project, cx));
+        let workspace = window.root(cx);
+
+        let entries = cx.read(|cx| workspace.file_project_paths(cx));
+        // dbg!(&entries);
+
+        let (opened_workspace, new_items) = cx
+            .update(|cx| {
+                workspace::open_paths(
+                    &[Path::new("/root/excluded_dir/file").to_path_buf()],
+                    &app_state,
+                    None,
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+        // dbg!(
+        //     &workspace,
+        //     &opened_workspace,
+        //     new_items
+        //         .iter()
+        //         .map(|i| i
+        //             .as_ref()
+        //             .expect("should be present")
+        //             .as_ref()
+        //             .expect("should not error"))
+        //         .map(|i| cx.read(|cx| i.project_path(cx)))
+        //         .collect::<Vec<_>>()
+        // );
+
+        let entries = cx.read(|cx| workspace.file_project_paths(cx));
+        dbg!(&entries);
+        // #[rustfmt::skip]
+        // workspace.update(cx, |w, cx| {
+        //     dbg!(w.open_paths(vec!["/root/regular_dir/file".into()], true, cx));
+        //     dbg!(w.open_paths(vec!["/root/ignored_dir/file".into()], true, cx));
+        //     dbg!(w.open_paths(vec!["/root/excluded_dir/file".into()], true, cx));
+        //     dbg!(w.open_paths(vec!["/root/excluded_dir/file".into()], false, cx));
+        //
+        // });
+
+        // // Open the first entry
+        // let entry_1 = workspace
+        //     .update(cx, |w, cx| w.open_path(file1.clone(), None, true, cx))
+        //     .await
+        //     .unwrap();
+        // cx.read(|cx| {
+        //     let pane = workspace.read(cx).active_pane().read(cx);
+        //     assert_eq!(
+        //         pane.active_item().unwrap().project_path(cx),
+        //         Some(file1.clone())
+        //     );
+        //     assert_eq!(pane.items_len(), 1);
+        // });
+    }
+
     #[gpui::test]
     async fn test_save_conflicting_item(cx: &mut TestAppContext) {
         let app_state = init_test(cx);