Mirror of https://github.com/zed-industries/zed.git (synced 2024-11-07 20:39:04 +03:00)

Commit 0de4a93ec7 ("WIP"), parent da8919002f.
Cargo.lock (generated): 55 changed lines
@@ -5927,6 +5927,61 @@ dependencies = [
 "util",
]

[[package]]
name = "project2"
version = "0.1.0"
dependencies = [
 "aho-corasick",
 "anyhow",
 "async-trait",
 "backtrace",
 "client2",
 "clock",
 "collections",
 "copilot",
 "ctor",
 "db2",
 "env_logger 0.9.3",
 "fs",
 "fsevent",
 "futures 0.3.28",
 "fuzzy2",
 "git",
 "git2",
 "globset",
 "gpui2",
 "ignore",
 "itertools 0.10.5",
 "language2",
 "lazy_static",
 "log",
 "lsp2",
 "node_runtime",
 "parking_lot 0.11.2",
 "postage",
 "prettier",
 "pretty_assertions",
 "rand 0.8.5",
 "regex",
 "rpc",
 "schemars",
 "serde",
 "serde_derive",
 "serde_json",
 "settings2",
 "sha2 0.10.7",
 "similar",
 "smol",
 "sum_tree",
 "tempdir",
 "terminal",
 "text",
 "thiserror",
 "toml 0.5.11",
 "unindent",
 "util",
]

[[package]]
name = "project_panel"
version = "0.1.0"

@@ -61,6 +61,7 @@ members = [
    "crates/plugin_runtime",
    "crates/prettier",
    "crates/project",
    "crates/project2",
    "crates/project_panel",
    "crates/project_symbols",
    "crates/recent_projects",

@@ -17,13 +17,14 @@ use crate::{
};
use anyhow::{anyhow, Result};
use collections::{HashMap, HashSet, VecDeque};
use futures::Future;
use futures::{future::BoxFuture, Future};
use parking_lot::{Mutex, RwLock};
use slotmap::SlotMap;
use std::{
    any::{type_name, Any, TypeId},
    mem,
    sync::{atomic::Ordering::SeqCst, Arc, Weak},
    time::Duration,
};
use util::http::{self, HttpClient};

@@ -89,6 +90,7 @@ impl App {
            event_listeners: SubscriberSet::new(),
            release_listeners: SubscriberSet::new(),
            global_observers: SubscriberSet::new(),
            quit_observers: SubscriberSet::new(),
            layout_id_buffer: Default::default(),
            propagate_event: true,
        })

@@ -155,11 +157,12 @@ impl App {
    }
}

type ActionBuilder = fn(json: Option<serde_json::Value>) -> anyhow::Result<Box<dyn Action>>;
type FrameCallback = Box<dyn FnOnce(&mut WindowContext) + Send>;
type Handler = Box<dyn Fn(&mut AppContext) -> bool + Send + Sync + 'static>;
type Listener = Box<dyn Fn(&dyn Any, &mut AppContext) -> bool + Send + Sync + 'static>;
type QuitHandler = Box<dyn Fn(&mut AppContext) -> BoxFuture<'static, ()> + Send + Sync + 'static>;
type ReleaseListener = Box<dyn Fn(&mut dyn Any, &mut AppContext) + Send + Sync + 'static>;
type FrameCallback = Box<dyn FnOnce(&mut WindowContext) + Send>;
type ActionBuilder = fn(json: Option<serde_json::Value>) -> anyhow::Result<Box<dyn Action>>;

pub struct AppContext {
    this: Weak<Mutex<AppContext>>,

@@ -188,11 +191,33 @@ pub struct AppContext {
    pub(crate) event_listeners: SubscriberSet<EntityId, Listener>,
    pub(crate) release_listeners: SubscriberSet<EntityId, ReleaseListener>,
    pub(crate) global_observers: SubscriberSet<TypeId, Listener>,
    pub(crate) quit_observers: SubscriberSet<(), QuitHandler>,
    pub(crate) layout_id_buffer: Vec<LayoutId>, // We recycle this memory across layout requests.
    pub(crate) propagate_event: bool,
}

impl AppContext {
    pub fn quit(&mut self) {
        let mut futures = Vec::new();

        self.quit_observers.clone().retain(&(), |observer| {
            futures.push(observer(self));
            true
        });

        self.windows.clear();
        self.flush_effects();

        let futures = futures::future::join_all(futures);
        if self
            .executor
            .block_with_timeout(Duration::from_millis(100), futures)
            .is_err()
        {
            log::error!("timed out waiting on app_will_quit");
        }
    }

    pub fn app_metadata(&self) -> AppMetadata {
        self.app_metadata.clone()
    }

@@ -3,6 +3,7 @@ use crate::{
    Subscription, Task, WeakHandle,
};
use derive_more::{Deref, DerefMut};
use futures::FutureExt;
use std::{future::Future, marker::PhantomData};

#[derive(Deref, DerefMut)]

@@ -84,6 +85,28 @@ impl<'a, T: Send + Sync + 'static> ModelContext<'a, T> {
        )
    }

    pub fn on_app_quit<Fut>(
        &mut self,
        on_quit: impl Fn(&mut T, &mut AppContext) -> Fut + Send + Sync + 'static,
    ) -> Subscription
    where
        Fut: 'static + Future<Output = ()> + Send,
    {
        let handle = self.handle();
        self.app.quit_observers.insert(
            (),
            Box::new(move |cx| {
                let future = handle.update(cx, |entity, cx| on_quit(entity, cx)).ok();
                async move {
                    if let Some(future) = future {
                        future.await;
                    }
                }
                .boxed()
            }),
        )
    }

    pub fn observe_release<E: Send + Sync + 'static>(
        &mut self,
        handle: &Handle<E>,

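For illustration, a minimal sketch of how a model could register this new hook, assuming the gpui2 types shown in this diff; the Settings model, its dirty flag, and the persist step are hypothetical, not part of the change:

    // Hypothetical model; assumes `AppContext`, `ModelContext`, and `Subscription`
    // from gpui2 as introduced in the hunks above.
    struct Settings {
        dirty: bool,
    }

    impl Settings {
        fn register_quit_hook(&mut self, cx: &mut ModelContext<Self>) -> Subscription {
            // The returned future runs inside `AppContext::quit`, which waits up to
            // 100ms (see the earlier hunk) before quitting anyway.
            cx.on_app_quit(|this: &mut Settings, _cx: &mut AppContext| {
                let dirty = this.dirty;
                async move {
                    if dirty {
                        // hypothetical: persist state to disk here
                    }
                }
            })
        }
    }
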
@@ -152,14 +152,11 @@ impl Executor {
        }
    }

    pub fn block_with_timeout<F, R>(
    pub fn block_with_timeout<R>(
        &self,
        duration: Duration,
        future: F,
    ) -> Result<R, impl Future<Output = R>>
    where
        F: Future<Output = R> + Send + Sync + 'static,
    {
        future: impl Future<Output = R>,
    ) -> Result<R, impl Future<Output = R>> {
        let mut future = Box::pin(future);
        let timeout = {
            let future = &mut future;

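The hunk above relaxes `block_with_timeout` so it no longer requires a `Send + Sync + 'static` future and instead takes `impl Future<Output = R>` by value, which is what lets `AppContext::quit` block on the joined quit futures. A minimal usage sketch, assuming an `Executor` instance from gpui2 named `executor`:

    use std::time::Duration;

    // Blocks the current thread polling the future for up to 50ms; on timeout the
    // partially polled future is handed back so the caller can keep driving it.
    match executor.block_with_timeout(Duration::from_millis(50), async { 2 + 2 }) {
        Ok(value) => log::info!("finished in time: {value}"),
        Err(_unfinished_future) => log::warn!("timed out; future returned to caller"),
    }
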
@@ -832,7 +832,8 @@ impl Buffer {
                if parse_again {
                    this.reparse(cx);
                }
            });
            })
            .ok();
        })
        .detach();
    }

@@ -876,7 +877,8 @@ impl Buffer {
            let indent_sizes = indent_sizes.await;
            this.update(&mut cx, |this, cx| {
                this.apply_autoindents(indent_sizes, cx);
            });
            })
            .ok();
        }));
    }
}

@@ -234,6 +234,7 @@ impl SyntaxMap {
        self.snapshot.interpolate(text);
    }

    #[allow(dead_code)] // todo!()
    #[cfg(test)]
    pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
        self.snapshot

@@ -785,6 +786,7 @@ impl SyntaxSnapshot {
        )
    }

    #[allow(dead_code)] // todo!()
    #[cfg(test)]
    pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
        self.layers_for_range(0..buffer.len(), buffer).collect()

crates/project2/Cargo.toml (new file, 84 lines)
@@ -0,0 +1,84 @@
[package]
name = "project2"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/project2.rs"
doctest = false

[features]
test-support = [
    "client2/test-support",
    "db2/test-support",
    "language2/test-support",
    "settings2/test-support",
    "text/test-support",
    "prettier/test-support",
]

[dependencies]
text = { path = "../text" }
copilot = { path = "../copilot" }
client2 = { path = "../client2" }
clock = { path = "../clock" }
collections = { path = "../collections" }
db2 = { path = "../db2" }
fs = { path = "../fs" }
fsevent = { path = "../fsevent" }
fuzzy2 = { path = "../fuzzy2" }
git = { path = "../git" }
gpui2 = { path = "../gpui2" }
language2 = { path = "../language2" }
lsp2 = { path = "../lsp2" }
node_runtime = { path = "../node_runtime" }
prettier = { path = "../prettier" }
rpc = { path = "../rpc" }
settings2 = { path = "../settings2" }
sum_tree = { path = "../sum_tree" }
terminal = { path = "../terminal" }
util = { path = "../util" }

aho-corasick = "1.1"
anyhow.workspace = true
async-trait.workspace = true
backtrace = "0.3"
futures.workspace = true
globset.workspace = true
ignore = "0.4"
lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
postage.workspace = true
rand.workspace = true
regex.workspace = true
schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
sha2 = "0.10"
similar = "1.3"
smol.workspace = true
thiserror.workspace = true
toml.workspace = true
itertools = "0.10"

[dev-dependencies]
ctor.workspace = true
env_logger.workspace = true
pretty_assertions.workspace = true
client2 = { path = "../client2", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
db2 = { path = "../db2", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
gpui2 = { path = "../gpui2", features = ["test-support"] }
language2 = { path = "../language2", features = ["test-support"] }
lsp2 = { path = "../lsp2", features = ["test-support"] }
settings2 = { path = "../settings2", features = ["test-support"] }
prettier = { path = "../prettier", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
git2.workspace = true
tempdir.workspace = true
unindent.workspace = true

crates/project2/src/ignore.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
use ignore::gitignore::Gitignore;
use std::{ffi::OsStr, path::Path, sync::Arc};

pub enum IgnoreStack {
    None,
    Some {
        abs_base_path: Arc<Path>,
        ignore: Arc<Gitignore>,
        parent: Arc<IgnoreStack>,
    },
    All,
}

impl IgnoreStack {
    pub fn none() -> Arc<Self> {
        Arc::new(Self::None)
    }

    pub fn all() -> Arc<Self> {
        Arc::new(Self::All)
    }

    pub fn is_all(&self) -> bool {
        matches!(self, IgnoreStack::All)
    }

    pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
        match self.as_ref() {
            IgnoreStack::All => self,
            _ => Arc::new(Self::Some {
                abs_base_path,
                ignore,
                parent: self,
            }),
        }
    }

    pub fn is_abs_path_ignored(&self, abs_path: &Path, is_dir: bool) -> bool {
        if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) {
            return true;
        }

        match self {
            Self::None => false,
            Self::All => true,
            Self::Some {
                abs_base_path,
                ignore,
                parent: prev,
            } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) {
                ignore::Match::None => prev.is_abs_path_ignored(abs_path, is_dir),
                ignore::Match::Ignore(_) => true,
                ignore::Match::Whitelist(_) => false,
            },
        }
    }
}

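The stack above is built up as a worktree walk descends into directories: each directory with a .gitignore appends one frame, and queries consult the innermost frame first, falling back to the parent on `Match::None`. A small sketch of that composition, assuming the `ignore` crate API; the paths are illustrative:

    use ignore::gitignore::Gitignore;
    use std::{path::Path, sync::Arc};

    // Start with "nothing ignored", then push one Gitignore per directory level.
    let root: Arc<Path> = Arc::from(Path::new("/repo"));
    let (gitignore, _error) = Gitignore::new("/repo/.gitignore");
    let stack = IgnoreStack::none().append(root, Arc::new(gitignore));

    // `.git` directories are always reported as ignored, before any gitignore match.
    assert!(stack.is_abs_path_ignored(Path::new("/repo/.git"), true));
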
crates/project2/src/lsp_command.rs (new file, 2352 lines; diff suppressed because it is too large)
crates/project2/src/project2.rs (new file, 8846 lines; diff suppressed because it is too large)
crates/project2/src/project_settings.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
use collections::HashMap;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Setting;
use std::sync::Arc;

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ProjectSettings {
    #[serde(default)]
    pub lsp: HashMap<Arc<str>, LspSettings>,
    #[serde(default)]
    pub git: GitSettings,
}

#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
pub struct GitSettings {
    pub git_gutter: Option<GitGutterSetting>,
    pub gutter_debounce: Option<u64>,
}

#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum GitGutterSetting {
    #[default]
    TrackedFiles,
    Hide,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct LspSettings {
    pub initialization_options: Option<serde_json::Value>,
}

impl Setting for ProjectSettings {
    const KEY: Option<&'static str> = None;

    type FileContent = Self;

    fn load(
        default_value: &Self::FileContent,
        user_values: &[&Self::FileContent],
        _: &gpui::AppContext,
    ) -> anyhow::Result<Self> {
        Self::load_via_json_merge(default_value, user_values)
    }
}

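Because `KEY` is `None`, these fields sit at the top level of the settings JSON rather than under a named section. A sketch of the shape the struct deserializes, using serde_json directly; the language server name and values are examples, not defaults from this change:

    let json = r#"{
        "lsp": {
            "rust-analyzer": { "initialization_options": { "checkOnSave": false } }
        },
        "git": { "git_gutter": "tracked_files", "gutter_debounce": 300 }
    }"#;
    let settings: ProjectSettings = serde_json::from_str(json).unwrap();
    assert_eq!(settings.git.gutter_debounce, Some(300));
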
crates/project2/src/project_tests.rs (new file, 4077 lines; diff suppressed because it is too large)
crates/project2/src/search.rs (new file, 458 lines)
@@ -0,0 +1,458 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::{Context, Result};
use client2::proto;
use globset::{Glob, GlobMatcher};
use itertools::Itertools;
use language2::{char_kind, BufferSnapshot};
use regex::{Regex, RegexBuilder};
use smol::future::yield_now;
use std::{
    borrow::Cow,
    io::{BufRead, BufReader, Read},
    ops::Range,
    path::{Path, PathBuf},
    sync::Arc,
};

#[derive(Clone, Debug)]
pub struct SearchInputs {
    query: Arc<str>,
    files_to_include: Vec<PathMatcher>,
    files_to_exclude: Vec<PathMatcher>,
}

impl SearchInputs {
    pub fn as_str(&self) -> &str {
        self.query.as_ref()
    }
    pub fn files_to_include(&self) -> &[PathMatcher] {
        &self.files_to_include
    }
    pub fn files_to_exclude(&self) -> &[PathMatcher] {
        &self.files_to_exclude
    }
}
#[derive(Clone, Debug)]
pub enum SearchQuery {
    Text {
        search: Arc<AhoCorasick>,
        replacement: Option<String>,
        whole_word: bool,
        case_sensitive: bool,
        inner: SearchInputs,
    },

    Regex {
        regex: Regex,
        replacement: Option<String>,
        multiline: bool,
        whole_word: bool,
        case_sensitive: bool,
        inner: SearchInputs,
    },
}

#[derive(Clone, Debug)]
pub struct PathMatcher {
    maybe_path: PathBuf,
    glob: GlobMatcher,
}

impl std::fmt::Display for PathMatcher {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.maybe_path.to_string_lossy().fmt(f)
    }
}

impl PathMatcher {
    pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
        Ok(PathMatcher {
            glob: Glob::new(&maybe_glob)?.compile_matcher(),
            maybe_path: PathBuf::from(maybe_glob),
        })
    }

    pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
        other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
    }
}

impl SearchQuery {
    pub fn text(
        query: impl ToString,
        whole_word: bool,
        case_sensitive: bool,
        files_to_include: Vec<PathMatcher>,
        files_to_exclude: Vec<PathMatcher>,
    ) -> Result<Self> {
        let query = query.to_string();
        let search = AhoCorasickBuilder::new()
            .ascii_case_insensitive(!case_sensitive)
            .build(&[&query])?;
        let inner = SearchInputs {
            query: query.into(),
            files_to_exclude,
            files_to_include,
        };
        Ok(Self::Text {
            search: Arc::new(search),
            replacement: None,
            whole_word,
            case_sensitive,
            inner,
        })
    }

    pub fn regex(
        query: impl ToString,
        whole_word: bool,
        case_sensitive: bool,
        files_to_include: Vec<PathMatcher>,
        files_to_exclude: Vec<PathMatcher>,
    ) -> Result<Self> {
        let mut query = query.to_string();
        let initial_query = Arc::from(query.as_str());
        if whole_word {
            let mut word_query = String::new();
            word_query.push_str("\\b");
            word_query.push_str(&query);
            word_query.push_str("\\b");
            query = word_query
        }

        let multiline = query.contains('\n') || query.contains("\\n");
        let regex = RegexBuilder::new(&query)
            .case_insensitive(!case_sensitive)
            .multi_line(multiline)
            .build()?;
        let inner = SearchInputs {
            query: initial_query,
            files_to_exclude,
            files_to_include,
        };
        Ok(Self::Regex {
            regex,
            replacement: None,
            multiline,
            whole_word,
            case_sensitive,
            inner,
        })
    }

    pub fn from_proto(message: proto::SearchProject) -> Result<Self> {
        if message.regex {
            Self::regex(
                message.query,
                message.whole_word,
                message.case_sensitive,
                deserialize_path_matches(&message.files_to_include)?,
                deserialize_path_matches(&message.files_to_exclude)?,
            )
        } else {
            Self::text(
                message.query,
                message.whole_word,
                message.case_sensitive,
                deserialize_path_matches(&message.files_to_include)?,
                deserialize_path_matches(&message.files_to_exclude)?,
            )
        }
    }
    pub fn with_replacement(mut self, new_replacement: String) -> Self {
        match self {
            Self::Text {
                ref mut replacement,
                ..
            }
            | Self::Regex {
                ref mut replacement,
                ..
            } => {
                *replacement = Some(new_replacement);
                self
            }
        }
    }
    pub fn to_proto(&self, project_id: u64) -> proto::SearchProject {
        proto::SearchProject {
            project_id,
            query: self.as_str().to_string(),
            regex: self.is_regex(),
            whole_word: self.whole_word(),
            case_sensitive: self.case_sensitive(),
            files_to_include: self
                .files_to_include()
                .iter()
                .map(|matcher| matcher.to_string())
                .join(","),
            files_to_exclude: self
                .files_to_exclude()
                .iter()
                .map(|matcher| matcher.to_string())
                .join(","),
        }
    }

    pub fn detect<T: Read>(&self, stream: T) -> Result<bool> {
        if self.as_str().is_empty() {
            return Ok(false);
        }

        match self {
            Self::Text { search, .. } => {
                let mat = search.stream_find_iter(stream).next();
                match mat {
                    Some(Ok(_)) => Ok(true),
                    Some(Err(err)) => Err(err.into()),
                    None => Ok(false),
                }
            }
            Self::Regex {
                regex, multiline, ..
            } => {
                let mut reader = BufReader::new(stream);
                if *multiline {
                    let mut text = String::new();
                    if let Err(err) = reader.read_to_string(&mut text) {
                        Err(err.into())
                    } else {
                        Ok(regex.find(&text).is_some())
                    }
                } else {
                    for line in reader.lines() {
                        let line = line?;
                        if regex.find(&line).is_some() {
                            return Ok(true);
                        }
                    }
                    Ok(false)
                }
            }
        }
    }
    /// Returns the replacement text for this `SearchQuery`.
    pub fn replacement(&self) -> Option<&str> {
        match self {
            SearchQuery::Text { replacement, .. } | SearchQuery::Regex { replacement, .. } => {
                replacement.as_deref()
            }
        }
    }
    /// Replaces search hits if replacement is set. `text` is assumed to be a string that matches this `SearchQuery` exactly, without any leftovers on either side.
    pub fn replacement_for<'a>(&self, text: &'a str) -> Option<Cow<'a, str>> {
        match self {
            SearchQuery::Text { replacement, .. } => replacement.clone().map(Cow::from),
            SearchQuery::Regex {
                regex, replacement, ..
            } => {
                if let Some(replacement) = replacement {
                    Some(regex.replace(text, replacement))
                } else {
                    None
                }
            }
        }
    }
    pub async fn search(
        &self,
        buffer: &BufferSnapshot,
        subrange: Option<Range<usize>>,
    ) -> Vec<Range<usize>> {
        const YIELD_INTERVAL: usize = 20000;

        if self.as_str().is_empty() {
            return Default::default();
        }

        let range_offset = subrange.as_ref().map(|r| r.start).unwrap_or(0);
        let rope = if let Some(range) = subrange {
            buffer.as_rope().slice(range)
        } else {
            buffer.as_rope().clone()
        };

        let mut matches = Vec::new();
        match self {
            Self::Text {
                search, whole_word, ..
            } => {
                for (ix, mat) in search
                    .stream_find_iter(rope.bytes_in_range(0..rope.len()))
                    .enumerate()
                {
                    if (ix + 1) % YIELD_INTERVAL == 0 {
                        yield_now().await;
                    }

                    let mat = mat.unwrap();
                    if *whole_word {
                        let scope = buffer.language_scope_at(range_offset + mat.start());
                        let kind = |c| char_kind(&scope, c);

                        let prev_kind = rope.reversed_chars_at(mat.start()).next().map(kind);
                        let start_kind = kind(rope.chars_at(mat.start()).next().unwrap());
                        let end_kind = kind(rope.reversed_chars_at(mat.end()).next().unwrap());
                        let next_kind = rope.chars_at(mat.end()).next().map(kind);
                        if Some(start_kind) == prev_kind || Some(end_kind) == next_kind {
                            continue;
                        }
                    }
                    matches.push(mat.start()..mat.end())
                }
            }

            Self::Regex {
                regex, multiline, ..
            } => {
                if *multiline {
                    let text = rope.to_string();
                    for (ix, mat) in regex.find_iter(&text).enumerate() {
                        if (ix + 1) % YIELD_INTERVAL == 0 {
                            yield_now().await;
                        }

                        matches.push(mat.start()..mat.end());
                    }
                } else {
                    let mut line = String::new();
                    let mut line_offset = 0;
                    for (chunk_ix, chunk) in rope.chunks().chain(["\n"]).enumerate() {
                        if (chunk_ix + 1) % YIELD_INTERVAL == 0 {
                            yield_now().await;
                        }

                        for (newline_ix, text) in chunk.split('\n').enumerate() {
                            if newline_ix > 0 {
                                for mat in regex.find_iter(&line) {
                                    let start = line_offset + mat.start();
                                    let end = line_offset + mat.end();
                                    matches.push(start..end);
                                }

                                line_offset += line.len() + 1;
                                line.clear();
                            }
                            line.push_str(text);
                        }
                    }
                }
            }
        }

        matches
    }

    pub fn as_str(&self) -> &str {
        self.as_inner().as_str()
    }

    pub fn whole_word(&self) -> bool {
        match self {
            Self::Text { whole_word, .. } => *whole_word,
            Self::Regex { whole_word, .. } => *whole_word,
        }
    }

    pub fn case_sensitive(&self) -> bool {
        match self {
            Self::Text { case_sensitive, .. } => *case_sensitive,
            Self::Regex { case_sensitive, .. } => *case_sensitive,
        }
    }

    pub fn is_regex(&self) -> bool {
        matches!(self, Self::Regex { .. })
    }

    pub fn files_to_include(&self) -> &[PathMatcher] {
        self.as_inner().files_to_include()
    }

    pub fn files_to_exclude(&self) -> &[PathMatcher] {
        self.as_inner().files_to_exclude()
    }

    pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
        match file_path {
            Some(file_path) => {
                !self
                    .files_to_exclude()
                    .iter()
                    .any(|exclude_glob| exclude_glob.is_match(file_path))
                    && (self.files_to_include().is_empty()
                        || self
                            .files_to_include()
                            .iter()
                            .any(|include_glob| include_glob.is_match(file_path)))
            }
            None => self.files_to_include().is_empty(),
        }
    }
    pub fn as_inner(&self) -> &SearchInputs {
        match self {
            Self::Regex { inner, .. } | Self::Text { inner, .. } => inner,
        }
    }
}

fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
    glob_set
        .split(',')
        .map(str::trim)
        .filter(|glob_str| !glob_str.is_empty())
        .map(|glob_str| {
            PathMatcher::new(glob_str)
                .with_context(|| format!("deserializing path match glob {glob_str}"))
        })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn path_matcher_creation_for_valid_paths() {
        for valid_path in [
            "file",
            "Cargo.toml",
            ".DS_Store",
            "~/dir/another_dir/",
            "./dir/file",
            "dir/[a-z].txt",
            "../dir/filé",
        ] {
            let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
                panic!("Valid path {valid_path} should be accepted, but got: {e}")
            });
            assert!(
                path_matcher.is_match(valid_path),
                "Path matcher for valid path {valid_path} should match itself"
            )
        }
    }

    #[test]
    fn path_matcher_creation_for_globs() {
        for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
            match PathMatcher::new(invalid_glob) {
                Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
                Err(_expected) => {}
            }
        }

        for valid_glob in [
            "dir/?ile",
            "dir/*.txt",
            "dir/**/file",
            "dir/[a-z].txt",
            "{dir,file}",
        ] {
            match PathMatcher::new(valid_glob) {
                Ok(_expected) => {}
                Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
            }
        }
    }
}

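A minimal sketch of driving this API from the caller's side, constructing a plain text query and running the streaming `detect` pass over an in-memory reader; the query string and haystack are examples:

    // Whole-word, case-insensitive text search with no path filters.
    let query = SearchQuery::text("needle", true, false, Vec::new(), Vec::new()).unwrap();
    assert!(!query.is_regex());
    assert_eq!(query.as_str(), "needle");

    // `detect` scans any `Read` stream and reports whether a match exists at all.
    let found = query
        .detect(std::io::Cursor::new("a needle in a haystack"))
        .unwrap();
    assert!(found);
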
crates/project2/src/terminals.rs (new file, 124 lines)
@@ -0,0 +1,124 @@
use crate::Project;
use gpui::{AnyWindowHandle, ModelContext, ModelHandle, WeakModelHandle};
use std::path::{Path, PathBuf};
use terminal::{
    terminal_settings::{self, TerminalSettings, VenvSettingsContent},
    Terminal, TerminalBuilder,
};

#[cfg(target_os = "macos")]
use std::os::unix::ffi::OsStrExt;

pub struct Terminals {
    pub(crate) local_handles: Vec<WeakModelHandle<terminal::Terminal>>,
}

impl Project {
    pub fn create_terminal(
        &mut self,
        working_directory: Option<PathBuf>,
        window: AnyWindowHandle,
        cx: &mut ModelContext<Self>,
    ) -> anyhow::Result<ModelHandle<Terminal>> {
        if self.is_remote() {
            return Err(anyhow::anyhow!(
                "creating terminals as a guest is not supported yet"
            ));
        } else {
            let settings = settings::get::<TerminalSettings>(cx);
            let python_settings = settings.detect_venv.clone();
            let shell = settings.shell.clone();

            let terminal = TerminalBuilder::new(
                working_directory.clone(),
                shell.clone(),
                settings.env.clone(),
                Some(settings.blinking.clone()),
                settings.alternate_scroll,
                window,
            )
            .map(|builder| {
                let terminal_handle = cx.add_model(|cx| builder.subscribe(cx));

                self.terminals
                    .local_handles
                    .push(terminal_handle.downgrade());

                let id = terminal_handle.id();
                cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
                    let handles = &mut project.terminals.local_handles;

                    if let Some(index) = handles.iter().position(|terminal| terminal.id() == id) {
                        handles.remove(index);
                        cx.notify();
                    }
                })
                .detach();

                if let Some(python_settings) = &python_settings.as_option() {
                    let activate_script_path =
                        self.find_activate_script_path(&python_settings, working_directory);
                    self.activate_python_virtual_environment(
                        activate_script_path,
                        &terminal_handle,
                        cx,
                    );
                }
                terminal_handle
            });

            terminal
        }
    }

    pub fn find_activate_script_path(
        &mut self,
        settings: &VenvSettingsContent,
        working_directory: Option<PathBuf>,
    ) -> Option<PathBuf> {
        // When we are unable to resolve the working directory, the terminal builder
        // defaults to '/'. We should probably encode this directly somewhere, but for
        // now, let's just hard code it here.
        let working_directory = working_directory.unwrap_or_else(|| Path::new("/").to_path_buf());
        let activate_script_name = match settings.activate_script {
            terminal_settings::ActivateScript::Default => "activate",
            terminal_settings::ActivateScript::Csh => "activate.csh",
            terminal_settings::ActivateScript::Fish => "activate.fish",
            terminal_settings::ActivateScript::Nushell => "activate.nu",
        };

        for virtual_environment_name in settings.directories {
            let mut path = working_directory.join(virtual_environment_name);
            path.push("bin/");
            path.push(activate_script_name);

            if path.exists() {
                return Some(path);
            }
        }

        None
    }

    fn activate_python_virtual_environment(
        &mut self,
        activate_script: Option<PathBuf>,
        terminal_handle: &ModelHandle<Terminal>,
        cx: &mut ModelContext<Project>,
    ) {
        if let Some(activate_script) = activate_script {
            // Paths are not strings so we need to jump through some hoops to format the command without `format!`
            let mut command = Vec::from("source ".as_bytes());
            command.extend_from_slice(activate_script.as_os_str().as_bytes());
            command.push(b'\n');

            terminal_handle.update(cx, |this, _| this.input_bytes(command));
        }
    }

    pub fn local_terminal_handles(&self) -> &Vec<WeakModelHandle<terminal::Terminal>> {
        &self.terminals.local_handles
    }
}

// TODO: Add a few tests for adding and removing terminal tabs

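The comment in `find_activate_script_path` above covers the fallback to '/'; for the non-fallback case, a small sketch of the candidate path the loop probes for one venv directory, with all values illustrative:

    use std::path::{Path, PathBuf};

    // With `directories = [".venv"]`, a fish shell, and a resolved working
    // directory of /projects/app, the probe checks this path for existence:
    let mut candidate = PathBuf::from("/projects/app").join(".venv");
    candidate.push("bin/");
    candidate.push("activate.fish");
    assert_eq!(candidate, Path::new("/projects/app/.venv/bin/activate.fish"));
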
crates/project2/src/worktree.rs (new file, 4387 lines; diff suppressed because it is too large)
crates/project2/src/worktree_tests.rs (new file, 2141 lines; diff suppressed because it is too large)