catch up with main

KCaverly 2023-08-02 16:48:11 -04:00
commit a125e318fe
59 changed files with 2619 additions and 618 deletions

View File

@ -6,14 +6,23 @@ jobs:
discord_release:
runs-on: ubuntu-latest
steps:
- name: Get appropriate URL
id: get-appropriate-url
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
URL="https://zed.dev/releases/preview/latest"
else
URL="https://zed.dev/releases/stable/latest"
fi
echo "::set-output name=URL::$URL"
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0
if: ${{ ! github.event.release.prerelease }}
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: |
📣 Zed ${{ github.event.release.tag_name }} was just released!
Restart your Zed or head to https://zed.dev/releases/stable/latest to grab it.
Restart your Zed or head to ${{ steps.get-appropriate-url.outputs.URL }} to grab it.
${{ github.event.release.body }}

.zed/settings.json (new file, 5 lines)
View File

@ -0,0 +1,5 @@
{
"JSON": {
"tab_size": 4
}
}

Cargo.lock (generated, 569 lines changed)

File diff suppressed because it is too large.

View File

@ -109,9 +109,9 @@ pretty_assertions = "1.3.0"
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "1b0321ee85701d5036c334a6f04761cdc672e64c" }
tree-sitter-c = "0.20.1"
tree-sitter-cpp = "0.20.0"
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev="f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"}
tree-sitter-embedded-template = "0.20.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
@ -131,6 +131,7 @@ tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", r
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
tree-sitter-lua = "0.0.14"
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" }

View File

@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2
FROM rust:1.70-bullseye as builder
FROM rust:1.71-bullseye as builder
WORKDIR app
COPY . .

View File

@ -1,159 +1,179 @@
{
"suffixes": {
"aac": "audio",
"bash": "terminal",
"bmp": "image",
"c": "code",
"conf": "settings",
"cpp": "code",
"cc": "code",
"css": "code",
"doc": "document",
"docx": "document",
"eslintrc": "eslint",
"eslintrc.js": "eslint",
"eslintrc.json": "eslint",
"flac": "audio",
"fish": "terminal",
"gitattributes": "vcs",
"gitignore": "vcs",
"gitmodules": "vcs",
"gif": "image",
"go": "code",
"h": "code",
"handlebars": "code",
"hbs": "template",
"htm": "template",
"html": "template",
"svelte": "template",
"hpp": "code",
"ico": "image",
"ini": "settings",
"java": "code",
"jpeg": "image",
"jpg": "image",
"js": "code",
"json": "storage",
"lock": "lock",
"log": "log",
"md": "document",
"mdx": "document",
"mp3": "audio",
"mp4": "video",
"ods": "document",
"odp": "document",
"odt": "document",
"ogg": "video",
"pdf": "document",
"php": "code",
"png": "image",
"ppt": "document",
"pptx": "document",
"prettierrc": "prettier",
"prettierignore": "prettier",
"ps1": "terminal",
"psd": "image",
"py": "code",
"rb": "code",
"rkt": "code",
"rs": "rust",
"rtf": "document",
"scm": "code",
"sh": "terminal",
"bashrc": "terminal",
"bash_profile": "terminal",
"bash_aliases": "terminal",
"bash_logout": "terminal",
"profile": "terminal",
"zshrc": "terminal",
"zshenv": "terminal",
"zsh_profile": "terminal",
"zsh_aliases": "terminal",
"zsh_histfile": "terminal",
"zlogin": "terminal",
"sql": "code",
"svg": "image",
"swift": "code",
"tiff": "image",
"toml": "toml",
"ts": "typescript",
"tsx": "code",
"txt": "document",
"wav": "audio",
"webm": "video",
"xls": "document",
"xlsx": "document",
"xml": "template",
"yaml": "settings",
"yml": "settings",
"zsh": "terminal"
},
"types": {
"audio": {
"icon": "icons/file_icons/audio.svg"
"suffixes": {
"aac": "audio",
"accdb": "storage",
"bak": "backup",
"bash": "terminal",
"bash_aliases": "terminal",
"bash_logout": "terminal",
"bash_profile": "terminal",
"bashrc": "terminal",
"bmp": "image",
"c": "code",
"cc": "code",
"conf": "settings",
"cpp": "code",
"css": "code",
"csv": "storage",
"dat": "storage",
"db": "storage",
"dbf": "storage",
"dll": "storage",
"doc": "document",
"docx": "document",
"eslintrc": "eslint",
"eslintrc.js": "eslint",
"eslintrc.json": "eslint",
"fmp": "storage",
"fp7": "storage",
"flac": "audio",
"fish": "terminal",
"frm": "storage",
"gdb": "storage",
"gitattributes": "vcs",
"gitignore": "vcs",
"gitmodules": "vcs",
"gif": "image",
"go": "code",
"h": "code",
"handlebars": "code",
"hbs": "template",
"htm": "template",
"html": "template",
"ib": "storage",
"ico": "image",
"ini": "settings",
"java": "code",
"jpeg": "image",
"jpg": "image",
"js": "code",
"json": "storage",
"ldf": "storage",
"lock": "lock",
"log": "log",
"mdb": "storage",
"md": "document",
"mdf": "storage",
"mdx": "document",
"mp3": "audio",
"mp4": "video",
"myd": "storage",
"myi": "storage",
"ods": "document",
"odp": "document",
"odt": "document",
"ogg": "video",
"pdb": "storage",
"pdf": "document",
"php": "code",
"png": "image",
"ppt": "document",
"pptx": "document",
"prettierignore": "prettier",
"prettierrc": "prettier",
"profile": "terminal",
"ps1": "terminal",
"psd": "image",
"py": "code",
"rb": "code",
"rkt": "code",
"rs": "rust",
"rtf": "document",
"sav": "storage",
"scm": "code",
"sh": "terminal",
"sqlite": "storage",
"sdf": "storage",
"svelte": "template",
"svg": "image",
"swift": "code",
"ts": "typescript",
"tsx": "code",
"tiff": "image",
"toml": "toml",
"tsv": "storage",
"txt": "document",
"wav": "audio",
"webm": "video",
"xls": "document",
"xlsx": "document",
"xml": "template",
"yaml": "settings",
"yml": "settings",
"zlogin": "terminal",
"zsh": "terminal",
"zsh_aliases": "terminal",
"zshenv": "terminal",
"zsh_histfile": "terminal",
"zsh_profile": "terminal",
"zshrc": "terminal"
},
"code": {
"icon": "icons/file_icons/code.svg"
},
"collapsed_chevron": {
"icon": "icons/file_icons/chevron_right.svg"
},
"collapsed_folder": {
"icon": "icons/file_icons/folder.svg"
},
"default": {
"icon": "icons/file_icons/file.svg"
},
"document": {
"icon": "icons/file_icons/book.svg"
},
"eslint": {
"icon": "icons/file_icons/eslint.svg"
},
"expanded_chevron": {
"icon": "icons/file_icons/chevron_down.svg"
},
"expanded_folder": {
"icon": "icons/file_icons/folder_open.svg"
},
"image": {
"icon": "icons/file_icons/image.svg"
},
"lock": {
"icon": "icons/file_icons/lock.svg"
},
"log": {
"icon": "icons/file_icons/info.svg"
},
"prettier": {
"icon": "icons/file_icons/prettier.svg"
},
"rust": {
"icon": "icons/file_icons/rust.svg"
},
"settings": {
"icon": "icons/file_icons/settings.svg"
},
"storage": {
"icon": "icons/file_icons/database.svg"
},
"template": {
"icon": "icons/file_icons/html.svg"
},
"terminal": {
"icon": "icons/file_icons/terminal.svg"
},
"toml": {
"icon": "icons/file_icons/toml.svg"
},
"typescript": {
"icon": "icons/file_icons/typescript.svg"
},
"vcs": {
"icon": "icons/file_icons/git.svg"
},
"video": {
"icon": "icons/file_icons/video.svg"
"types": {
"audio": {
"icon": "icons/file_icons/audio.svg"
},
"code": {
"icon": "icons/file_icons/code.svg"
},
"collapsed_chevron": {
"icon": "icons/file_icons/chevron_right.svg"
},
"collapsed_folder": {
"icon": "icons/file_icons/folder.svg"
},
"default": {
"icon": "icons/file_icons/file.svg"
},
"document": {
"icon": "icons/file_icons/book.svg"
},
"eslint": {
"icon": "icons/file_icons/eslint.svg"
},
"expanded_chevron": {
"icon": "icons/file_icons/chevron_down.svg"
},
"expanded_folder": {
"icon": "icons/file_icons/folder_open.svg"
},
"image": {
"icon": "icons/file_icons/image.svg"
},
"lock": {
"icon": "icons/file_icons/lock.svg"
},
"log": {
"icon": "icons/file_icons/info.svg"
},
"prettier": {
"icon": "icons/file_icons/prettier.svg"
},
"rust": {
"icon": "icons/file_icons/rust.svg"
},
"settings": {
"icon": "icons/file_icons/settings.svg"
},
"storage": {
"icon": "icons/file_icons/database.svg"
},
"template": {
"icon": "icons/file_icons/html.svg"
},
"terminal": {
"icon": "icons/file_icons/terminal.svg"
},
"toml": {
"icon": "icons/file_icons/toml.svg"
},
"typescript": {
"icon": "icons/file_icons/typescript.svg"
},
"vcs": {
"icon": "icons/file_icons/git.svg"
},
"video": {
"icon": "icons/file_icons/video.svg"
}
}
}
}

View File

@ -22,6 +22,7 @@
"alt-cmd-right": "pane::ActivateNextItem",
"cmd-w": "pane::CloseActiveItem",
"alt-cmd-t": "pane::CloseInactiveItems",
"ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes",
"cmd-k u": "pane::CloseCleanItems",
"cmd-k cmd-w": "pane::CloseAllItems",
"cmd-shift-w": "workspace::CloseWindow",
@ -226,6 +227,13 @@
"alt-enter": "search::SelectAllMatches"
}
},
{
"context": "BufferSearchBar > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery"
}
},
{
"context": "ProjectSearchBar",
"bindings": {
@ -233,6 +241,13 @@
"alt-tab": "project_search::CycleMode",
}
},
{
"context": "ProjectSearchBar > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery"
}
},
{
"context": "ProjectSearchView",
"bindings": {

View File

@ -1637,6 +1637,7 @@ impl ConversationEditor {
let mut editor = Editor::for_buffer(conversation.read(cx).buffer.clone(), None, cx);
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx);
editor.set_show_wrap_guides(false, cx);
editor
});

View File

@ -183,7 +183,7 @@ async fn apply_server_operation(
let username;
{
let mut plan = plan.lock();
let mut user = plan.user(user_id);
let user = plan.user(user_id);
if user.online {
return false;
}

View File

@ -374,7 +374,7 @@ impl CollabTitlebarItem {
"Share Feedback",
feedback::feedback_editor::GiveFeedback,
),
ContextMenuItem::action("Sign out", SignOut),
ContextMenuItem::action("Sign Out", SignOut),
]
} else {
vec![

View File

@ -338,9 +338,9 @@ impl Copilot {
let (server, fake_server) =
LanguageServer::fake("copilot".into(), Default::default(), cx.to_async());
let http = util::http::FakeHttpClient::create(|_| async { unreachable!() });
let this = cx.add_model(|cx| Self {
let this = cx.add_model(|_| Self {
http: http.clone(),
node_runtime: NodeRuntime::instance(http, cx.background().clone()),
node_runtime: NodeRuntime::instance(http),
server: CopilotServer::Running(RunningCopilotServer {
lsp: Arc::new(server),
sign_in_status: SignInStatus::Authorized,

View File

@ -397,7 +397,7 @@ impl InlayMap {
buffer_snapshot: MultiBufferSnapshot,
mut buffer_edits: Vec<text::Edit<usize>>,
) -> (InlaySnapshot, Vec<InlayEdit>) {
let mut snapshot = &mut self.snapshot;
let snapshot = &mut self.snapshot;
if buffer_edits.is_empty() {
if snapshot.buffer.trailing_excerpt_update_count()
@ -572,7 +572,6 @@ impl InlayMap {
})
.collect();
let buffer_snapshot = snapshot.buffer.clone();
drop(snapshot);
let (snapshot, edits) = self.sync(buffer_snapshot, buffer_edits);
(snapshot, edits)
}
@ -635,7 +634,6 @@ impl InlayMap {
}
log::info!("removing inlays: {:?}", to_remove);
drop(snapshot);
let (snapshot, edits) = self.splice(to_remove, to_insert);
(snapshot, edits)
}

View File

@ -543,6 +543,7 @@ pub struct Editor {
show_local_selections: bool,
mode: EditorMode,
show_gutter: bool,
show_wrap_guides: Option<bool>,
placeholder_text: Option<Arc<str>>,
highlighted_rows: Option<Range<u32>>,
#[allow(clippy::type_complexity)]
@ -1375,6 +1376,7 @@ impl Editor {
show_local_selections: true,
mode,
show_gutter: mode == EditorMode::Full,
show_wrap_guides: None,
placeholder_text: None,
highlighted_rows: None,
background_highlights: Default::default(),
@ -1537,7 +1539,7 @@ impl Editor {
self.collapse_matches = collapse_matches;
}
fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
pub fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
if self.collapse_matches {
return range.start..range.start;
}
@ -4219,7 +4221,7 @@ impl Editor {
_: &SortLinesCaseSensitive,
cx: &mut ViewContext<Self>,
) {
self.manipulate_lines(cx, |text| text.sort())
self.manipulate_lines(cx, |lines| lines.sort())
}
pub fn sort_lines_case_insensitive(
@ -4227,7 +4229,7 @@ impl Editor {
_: &SortLinesCaseInsensitive,
cx: &mut ViewContext<Self>,
) {
self.manipulate_lines(cx, |text| text.sort_by_key(|line| line.to_lowercase()))
self.manipulate_lines(cx, |lines| lines.sort_by_key(|line| line.to_lowercase()))
}
pub fn reverse_lines(&mut self, _: &ReverseLines, cx: &mut ViewContext<Self>) {
@ -4265,19 +4267,19 @@ impl Editor {
let text = buffer
.text_for_range(start_point..end_point)
.collect::<String>();
let mut text = text.split("\n").collect_vec();
let mut lines = text.split("\n").collect_vec();
let text_len = text.len();
callback(&mut text);
let lines_len = lines.len();
callback(&mut lines);
// This is a current limitation with selections.
// If we wanted to support removing or adding lines, we'd need to fix the logic associated with selections.
debug_assert!(
text.len() == text_len,
lines.len() == lines_len,
"callback should not change the number of lines"
);
edits.push((start_point..end_point, text.join("\n")));
edits.push((start_point..end_point, lines.join("\n")));
let start_anchor = buffer.anchor_after(start_point);
let end_anchor = buffer.anchor_before(end_point);
@ -6374,8 +6376,8 @@ impl Editor {
.range
.to_offset(definition.target.buffer.read(cx));
let range = self.range_for_match(&range);
if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() {
let range = self.range_for_match(&range);
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
@ -6392,7 +6394,6 @@ impl Editor {
// When selecting a definition in a different buffer, disable the nav history
// to avoid creating a history entry at the previous cursor location.
pane.update(cx, |pane, _| pane.disable_history());
let range = target_editor.range_for_match(&range);
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
@ -7188,6 +7189,10 @@ impl Editor {
pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> {
let mut wrap_guides = smallvec::smallvec![];
if self.show_wrap_guides == Some(false) {
return wrap_guides;
}
let settings = self.buffer.read(cx).settings_at(0, cx);
if settings.show_wrap_guides {
if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) {
@ -7245,6 +7250,11 @@ impl Editor {
cx.notify();
}
pub fn set_show_wrap_guides(&mut self, show_gutter: bool, cx: &mut ViewContext<Self>) {
self.show_wrap_guides = Some(show_gutter);
cx.notify();
}
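For reference, a minimal usage sketch tying this setter back to the ConversationEditor hunk earlier in this commit: callers opt an individual editor out of wrap guides regardless of the `show_wrap_guides` setting (the buffer and cx names below are illustrative).
// Sketch mirroring the assistant change above; `cx` is a gpui ViewContext.
let mut editor = Editor::for_buffer(buffer.clone(), None, cx);
editor.set_show_gutter(false, cx);
editor.set_show_wrap_guides(false, cx); // wrap_guides() now returns an empty list for this editor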
pub fn reveal_in_finder(&mut self, _: &RevealInFinder, cx: &mut ViewContext<Self>) {
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {

View File

@ -546,8 +546,20 @@ impl EditorElement {
});
}
let scroll_left =
layout.position_map.snapshot.scroll_position().x() * layout.position_map.em_width;
for (wrap_position, active) in layout.wrap_guides.iter() {
let x = text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.;
let x =
(text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.)
- scroll_left;
if x < text_bounds.origin_x()
|| (layout.show_scrollbars && x > self.scrollbar_left(&bounds))
{
continue;
}
let color = if *active {
self.style.active_wrap_guide
} else {
@ -1036,6 +1048,10 @@ impl EditorElement {
scene.pop_layer();
}
fn scrollbar_left(&self, bounds: &RectF) -> f32 {
bounds.max_x() - self.style.theme.scrollbar.width
}
fn paint_scrollbar(
&mut self,
scene: &mut SceneBuilder,
@ -1054,7 +1070,7 @@ impl EditorElement {
let top = bounds.min_y();
let bottom = bounds.max_y();
let right = bounds.max_x();
let left = right - style.width;
let left = self.scrollbar_left(&bounds);
let row_range = &layout.scrollbar_row_range;
let max_row = layout.max_row as f32 + (row_range.end - row_range.start);

View File

@ -571,7 +571,6 @@ fn new_update_task(
if let Some(buffer) =
refresh_multi_buffer.buffer(pending_refresh_query.buffer_id)
{
drop(refresh_multi_buffer);
editor.inlay_hint_cache.update_tasks.insert(
pending_refresh_query.excerpt_id,
UpdateTask {

View File

@ -1128,6 +1128,12 @@ impl AppContext {
self.keystroke_matcher.clear_bindings();
}
pub fn binding_for_action(&self, action: &dyn Action) -> Option<&Binding> {
self.keystroke_matcher
.bindings_for_action(action.id())
.find(|binding| binding.action().eq(action))
}
pub fn default_global<T: 'static + Default>(&mut self) -> &T {
let type_id = TypeId::of::<T>();
self.update(|this| {
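A minimal sketch of how this new helper is consumed by the search bars later in this diff: look up the keystrokes bound to an action so they can be rendered in placeholder text (a render method with access to `cx` and the PreviousHistoryQuery action is assumed).
// Assumed to run inside a render method; joins the bound keystrokes into a display string.
let previous_query_keystrokes = cx
    .binding_for_action(&PreviousHistoryQuery {})
    .map(|binding| {
        binding
            .keystrokes()
            .iter()
            .map(|k| k.to_string())
            .collect::<Vec<_>>()
            .join(" ")
    });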

View File

@ -844,8 +844,8 @@ impl LanguageRegistry {
}
}
}
Err(err) => {
log::error!("failed to load language {name} - {err}");
Err(e) => {
log::error!("failed to load language {name}:\n{:?}", e);
let mut state = this.state.write();
state.mark_language_loaded(id);
if let Some(mut txs) = state.loading_languages.remove(&id) {
@ -853,7 +853,7 @@ impl LanguageRegistry {
let _ = tx.send(Err(anyhow!(
"failed to load language {}: {}",
name,
err
e
)));
}
}
@ -1188,25 +1188,39 @@ impl Language {
pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
if let Some(query) = queries.highlights {
self = self.with_highlights_query(query.as_ref())?;
self = self
.with_highlights_query(query.as_ref())
.context("Error loading highlights query")?;
}
if let Some(query) = queries.brackets {
self = self.with_brackets_query(query.as_ref())?;
self = self
.with_brackets_query(query.as_ref())
.context("Error loading brackets query")?;
}
if let Some(query) = queries.indents {
self = self.with_indents_query(query.as_ref())?;
self = self
.with_indents_query(query.as_ref())
.context("Error loading indents query")?;
}
if let Some(query) = queries.outline {
self = self.with_outline_query(query.as_ref())?;
self = self
.with_outline_query(query.as_ref())
.context("Error loading outline query")?;
}
if let Some(query) = queries.embedding {
self = self.with_embedding_query(query.as_ref())?;
self = self
.with_embedding_query(query.as_ref())
.context("Error loading embedding query")?;
}
if let Some(query) = queries.injections {
self = self.with_injection_query(query.as_ref())?;
self = self
.with_injection_query(query.as_ref())
.context("Error loading injection query")?;
}
if let Some(query) = queries.overrides {
self = self.with_override_query(query.as_ref())?;
self = self
.with_override_query(query.as_ref())
.context("Error loading override query")?;
}
Ok(self)
}

View File

@ -58,11 +58,14 @@ fn build_bridge(swift_target: &SwiftTarget) {
"cargo:rerun-if-changed={}/Package.resolved",
SWIFT_PACKAGE_NAME
);
let swift_package_root = swift_package_root();
let swift_target_folder = swift_target_folder();
if !Command::new("swift")
.arg("build")
.args(["--configuration", &env::var("PROFILE").unwrap()])
.args(["--triple", &swift_target.target.triple])
.args(["--build-path".into(), swift_target_folder])
.current_dir(&swift_package_root)
.status()
.unwrap()
@ -128,6 +131,12 @@ fn swift_package_root() -> PathBuf {
env::current_dir().unwrap().join(SWIFT_PACKAGE_NAME)
}
fn swift_target_folder() -> PathBuf {
env::current_dir()
.unwrap()
.join(format!("../../target/{SWIFT_PACKAGE_NAME}"))
}
fn copy_dir(source: &Path, destination: &Path) {
assert!(
Command::new("rm")
@ -155,8 +164,7 @@ fn copy_dir(source: &Path, destination: &Path) {
impl SwiftTarget {
fn out_dir_path(&self) -> PathBuf {
swift_package_root()
.join(".build")
swift_target_folder()
.join(&self.target.unversioned_triple)
.join(env::var("PROFILE").unwrap())
}

View File

@ -1,9 +1,6 @@
use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use futures::lock::Mutex;
use futures::{future::Shared, FutureExt};
use gpui::{executor::Background, Task};
use serde::Deserialize;
use smol::{fs, io::BufReader, process::Command};
use std::process::{Output, Stdio};
@ -33,20 +30,12 @@ pub struct NpmInfoDistTags {
pub struct NodeRuntime {
http: Arc<dyn HttpClient>,
background: Arc<Background>,
installation_path: Mutex<Option<Shared<Task<Result<PathBuf, Arc<anyhow::Error>>>>>>,
}
impl NodeRuntime {
pub fn instance(http: Arc<dyn HttpClient>, background: Arc<Background>) -> Arc<NodeRuntime> {
pub fn instance(http: Arc<dyn HttpClient>) -> Arc<NodeRuntime> {
RUNTIME_INSTANCE
.get_or_init(|| {
Arc::new(NodeRuntime {
http,
background,
installation_path: Mutex::new(None),
})
})
.get_or_init(|| Arc::new(NodeRuntime { http }))
.clone()
}
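The simplified singleton above presumably sits behind a process-wide static; a rough sketch of that pattern follows (the RUNTIME_INSTANCE static itself is outside this hunk, so its exact type is an assumption).
// Hypothetical shape of the static backing NodeRuntime::instance.
use once_cell::sync::OnceCell;
static RUNTIME_INSTANCE: OnceCell<Arc<NodeRuntime>> = OnceCell::new();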
@ -61,7 +50,9 @@ impl NodeRuntime {
subcommand: &str,
args: &[&str],
) -> Result<Output> {
let attempt = |installation_path: PathBuf| async move {
let attempt = || async move {
let installation_path = self.install_if_needed().await?;
let mut env_path = installation_path.join("bin").into_os_string();
if let Some(existing_path) = std::env::var_os("PATH") {
if !existing_path.is_empty() {
@ -92,10 +83,9 @@ impl NodeRuntime {
command.output().await.map_err(|e| anyhow!("{e}"))
};
let installation_path = self.install_if_needed().await?;
let mut output = attempt(installation_path.clone()).await;
let mut output = attempt().await;
if output.is_err() {
output = attempt(installation_path).await;
output = attempt().await;
if output.is_err() {
return Err(anyhow!(
"failed to launch npm subcommand {subcommand} subcommand"
@ -167,23 +157,8 @@ impl NodeRuntime {
}
async fn install_if_needed(&self) -> Result<PathBuf> {
let task = self
.installation_path
.lock()
.await
.get_or_insert_with(|| {
let http = self.http.clone();
self.background
.spawn(async move { Self::install(http).await.map_err(Arc::new) })
.shared()
})
.clone();
log::info!("Node runtime install_if_needed");
task.await.map_err(|e| anyhow!("{}", e))
}
async fn install(http: Arc<dyn HttpClient>) -> Result<PathBuf> {
log::info!("installing Node runtime");
let arch = match consts::ARCH {
"x86_64" => "x64",
"aarch64" => "arm64",
@ -214,7 +189,8 @@ impl NodeRuntime {
let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz");
let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}");
let mut response = http
let mut response = self
.http
.get(&url, Default::default(), true)
.await
.context("error downloading Node binary tarball")?;

View File

@ -1,7 +1,6 @@
use crate::{worktree::WorktreeHandle, Event, *};
use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt};
use globset::Glob;
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
language_settings::{AllLanguageSettings, LanguageSettingsContent},
@ -3641,7 +3640,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
vec![Glob::new("*.odd").unwrap().compile_matcher()],
vec![PathMatcher::new("*.odd").unwrap()],
Vec::new()
),
cx
@ -3659,7 +3658,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
vec![Glob::new("*.rs").unwrap().compile_matcher()],
vec![PathMatcher::new("*.rs").unwrap()],
Vec::new()
),
cx
@ -3681,8 +3680,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false,
true,
vec![
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher(),
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
],
Vec::new()
),
@ -3705,9 +3704,9 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false,
true,
vec![
Glob::new("*.rs").unwrap().compile_matcher(),
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher(),
PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
],
Vec::new()
),
@ -3752,7 +3751,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false,
true,
Vec::new(),
vec![Glob::new("*.odd").unwrap().compile_matcher()],
vec![PathMatcher::new("*.odd").unwrap()],
),
cx
)
@ -3775,7 +3774,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false,
true,
Vec::new(),
vec![Glob::new("*.rs").unwrap().compile_matcher()],
vec![PathMatcher::new("*.rs").unwrap()],
),
cx
)
@ -3797,8 +3796,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true,
Vec::new(),
vec![
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher(),
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
],
),
cx
@ -3821,9 +3820,9 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true,
Vec::new(),
vec![
Glob::new("*.rs").unwrap().compile_matcher(),
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher(),
PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
],
),
cx
@ -3860,8 +3859,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
vec![Glob::new("*.odd").unwrap().compile_matcher()],
vec![Glob::new("*.odd").unwrap().compile_matcher()],
vec![PathMatcher::new("*.odd").unwrap()],
vec![PathMatcher::new("*.odd").unwrap()],
),
cx
)
@ -3878,8 +3877,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
vec![Glob::new("*.ts").unwrap().compile_matcher()],
vec![Glob::new("*.ts").unwrap().compile_matcher()],
vec![PathMatcher::new("*.ts").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()],
),
cx
)
@ -3897,12 +3896,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
true,
vec![
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher()
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
],
vec![
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher()
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
],
),
cx
@ -3921,12 +3920,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
true,
vec![
Glob::new("*.ts").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher()
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
],
vec![
Glob::new("*.rs").unwrap().compile_matcher(),
Glob::new("*.odd").unwrap().compile_matcher()
PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.odd").unwrap()
],
),
cx

View File

@ -1,5 +1,5 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::Result;
use anyhow::{Context, Result};
use client::proto;
use globset::{Glob, GlobMatcher};
use itertools::Itertools;
@ -9,7 +9,7 @@ use smol::future::yield_now;
use std::{
io::{BufRead, BufReader, Read},
ops::Range,
path::Path,
path::{Path, PathBuf},
sync::Arc,
};
@ -20,8 +20,8 @@ pub enum SearchQuery {
query: Arc<str>,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<GlobMatcher>,
files_to_exclude: Vec<GlobMatcher>,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
},
Regex {
regex: Regex,
@ -29,18 +29,43 @@ pub enum SearchQuery {
multiline: bool,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<GlobMatcher>,
files_to_exclude: Vec<GlobMatcher>,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
},
}
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
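A short usage sketch of the new PathMatcher, following the semantics of `is_match` above: a plain path matches by path prefix, while glob syntax falls back to the compiled globset matcher (the paths below are illustrative).
// Literal paths match any entry under that prefix.
let by_path = PathMatcher::new("src/search").unwrap();
assert!(by_path.is_match("src/search/mod.rs"));
// Glob patterns go through globset, as in the project search inclusions/exclusions.
let by_glob = PathMatcher::new("*.odd").unwrap();
assert!(by_glob.is_match("file.odd"));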
impl SearchQuery {
pub fn text(
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<GlobMatcher>,
files_to_exclude: Vec<GlobMatcher>,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
) -> Self {
let query = query.to_string();
let search = AhoCorasickBuilder::new()
@ -61,8 +86,8 @@ impl SearchQuery {
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<GlobMatcher>,
files_to_exclude: Vec<GlobMatcher>,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> {
let mut query = query.to_string();
let initial_query = Arc::from(query.as_str());
@ -96,16 +121,16 @@ impl SearchQuery {
message.query,
message.whole_word,
message.case_sensitive,
deserialize_globs(&message.files_to_include)?,
deserialize_globs(&message.files_to_exclude)?,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
)
} else {
Ok(Self::text(
message.query,
message.whole_word,
message.case_sensitive,
deserialize_globs(&message.files_to_include)?,
deserialize_globs(&message.files_to_exclude)?,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
))
}
}
@ -120,12 +145,12 @@ impl SearchQuery {
files_to_include: self
.files_to_include()
.iter()
.map(|g| g.glob().to_string())
.map(|matcher| matcher.to_string())
.join(","),
files_to_exclude: self
.files_to_exclude()
.iter()
.map(|g| g.glob().to_string())
.map(|matcher| matcher.to_string())
.join(","),
}
}
@ -266,7 +291,7 @@ impl SearchQuery {
matches!(self, Self::Regex { .. })
}
pub fn files_to_include(&self) -> &[GlobMatcher] {
pub fn files_to_include(&self) -> &[PathMatcher] {
match self {
Self::Text {
files_to_include, ..
@ -277,7 +302,7 @@ impl SearchQuery {
}
}
pub fn files_to_exclude(&self) -> &[GlobMatcher] {
pub fn files_to_exclude(&self) -> &[PathMatcher] {
match self {
Self::Text {
files_to_exclude, ..
@ -306,11 +331,63 @@ impl SearchQuery {
}
}
fn deserialize_globs(glob_set: &str) -> Result<Vec<GlobMatcher>> {
fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
glob_set
.split(',')
.map(str::trim)
.filter(|glob_str| !glob_str.is_empty())
.map(|glob_str| Ok(Glob::new(glob_str)?.compile_matcher()))
.map(|glob_str| {
PathMatcher::new(glob_str)
.with_context(|| format!("deserializing path match glob {glob_str}"))
})
.collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn path_matcher_creation_for_valid_paths() {
for valid_path in [
"file",
"Cargo.toml",
".DS_Store",
"~/dir/another_dir/",
"./dir/file",
"dir/[a-z].txt",
"../dir/filé",
] {
let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
panic!("Valid path {valid_path} should be accepted, but got: {e}")
});
assert!(
path_matcher.is_match(valid_path),
"Path matcher for valid path {valid_path} should match itself"
)
}
}
#[test]
fn path_matcher_creation_for_globs() {
for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
match PathMatcher::new(invalid_glob) {
Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
Err(_expected) => {}
}
}
for valid_glob in [
"dir/?ile",
"dir/*.txt",
"dir/**/file",
"dir/[a-z].txt",
"{dir,file}",
] {
match PathMatcher::new(valid_glob) {
Ok(_expected) => {}
Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
}
}
}
}

View File

@ -2369,7 +2369,7 @@ impl BackgroundScannerState {
}
// Remove any git repositories whose .git entry no longer exists.
let mut snapshot = &mut self.snapshot;
let snapshot = &mut self.snapshot;
let mut repositories = mem::take(&mut snapshot.git_repositories);
let mut repository_entries = mem::take(&mut snapshot.repository_entries);
repositories.retain(|work_directory_id, _| {

View File

@ -115,6 +115,7 @@ actions!(
[
ExpandSelectedEntry,
CollapseSelectedEntry,
CollapseAllEntries,
NewDirectory,
NewFile,
Copy,
@ -140,6 +141,7 @@ pub fn init(assets: impl AssetSource, cx: &mut AppContext) {
file_associations::init(assets, cx);
cx.add_action(ProjectPanel::expand_selected_entry);
cx.add_action(ProjectPanel::collapse_selected_entry);
cx.add_action(ProjectPanel::collapse_all_entries);
cx.add_action(ProjectPanel::select_prev);
cx.add_action(ProjectPanel::select_next);
cx.add_action(ProjectPanel::new_file);
@ -514,6 +516,12 @@ impl ProjectPanel {
}
}
pub fn collapse_all_entries(&mut self, _: &CollapseAllEntries, cx: &mut ViewContext<Self>) {
self.expanded_dir_ids.clear();
self.update_visible_entries(None, cx);
cx.notify();
}
fn toggle_expanded(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) {
if let Some(worktree_id) = self.project.read(cx).worktree_id_for_entry(entry_id, cx) {
if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) {
@ -2678,6 +2686,63 @@ mod tests {
);
}
#[gpui::test]
async fn test_collapse_all_entries(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/project_root",
json!({
"dir_1": {
"nested_dir": {
"file_a.py": "# File contents",
"file_b.py": "# File contents",
"file_c.py": "# File contents",
},
"file_1.py": "# File contents",
"file_2.py": "# File contents",
"file_3.py": "# File contents",
},
"dir_2": {
"file_1.py": "# File contents",
"file_2.py": "# File contents",
"file_3.py": "# File contents",
}
}),
)
.await;
let project = Project::test(fs.clone(), ["/project_root".as_ref()], cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
panel.update(cx, |panel, cx| {
panel.collapse_all_entries(&CollapseAllEntries, cx)
});
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&["v project_root", " > dir_1", " > dir_2",]
);
// Open dir_1 and make sure nested_dir was collapsed when running collapse_all_entries
toggle_expand_dir(&panel, "project_root/dir_1", cx);
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v project_root",
" v dir_1 <== selected",
" > nested_dir",
" file_1.py",
" file_2.py",
" file_3.py",
" > dir_2",
]
);
}
fn toggle_expand_dir(
panel: &ViewHandle<ProjectPanel>,
path: impl AsRef<Path>,
@ -2878,3 +2943,4 @@ mod tests {
});
}
}
// TODO - a workspace command?

View File

@ -1,6 +1,6 @@
use crate::{
SearchOptions, SelectAllMatches, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive,
ToggleRegex, ToggleWholeWord,
NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectAllMatches,
SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, ToggleWholeWord,
};
use collections::HashMap;
use editor::Editor;
@ -46,6 +46,8 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(BufferSearchBar::select_prev_match_on_pane);
cx.add_action(BufferSearchBar::select_all_matches_on_pane);
cx.add_action(BufferSearchBar::handle_editor_cancel);
cx.add_action(BufferSearchBar::next_history_query);
cx.add_action(BufferSearchBar::previous_history_query);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
add_toggle_option_action::<ToggleRegex>(SearchOptions::REGEX, cx);
@ -65,7 +67,7 @@ fn add_toggle_option_action<A: Action>(option: SearchOptions, cx: &mut AppContex
}
pub struct BufferSearchBar {
pub query_editor: ViewHandle<Editor>,
query_editor: ViewHandle<Editor>,
active_searchable_item: Option<Box<dyn SearchableItemHandle>>,
active_match_index: Option<usize>,
active_searchable_item_subscription: Option<Subscription>,
@ -76,6 +78,7 @@ pub struct BufferSearchBar {
default_options: SearchOptions,
query_contains_error: bool,
dismissed: bool,
search_history: SearchHistory,
}
impl Entity for BufferSearchBar {
@ -106,6 +109,48 @@ impl View for BufferSearchBar {
.map(|active_searchable_item| active_searchable_item.supported_options())
.unwrap_or_default();
let previous_query_keystrokes =
cx.binding_for_action(&PreviousHistoryQuery {})
.map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let next_query_keystrokes = cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
(Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
format!(
"Search ({}/{} for previous/next query)",
previous_query_keystrokes.join(" "),
next_query_keystrokes.join(" ")
)
}
(None, Some(next_query_keystrokes)) => {
format!(
"Search ({} for next query)",
next_query_keystrokes.join(" ")
)
}
(Some(previous_query_keystrokes), None) => {
format!(
"Search ({} for previous query)",
previous_query_keystrokes.join(" ")
)
}
(None, None) => String::new(),
};
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
Flex::row()
.with_child(
Flex::row()
@ -258,6 +303,7 @@ impl BufferSearchBar {
pending_search: None,
query_contains_error: false,
dismissed: true,
search_history: SearchHistory::default(),
}
}
@ -341,7 +387,7 @@ impl BufferSearchBar {
cx: &mut ViewContext<Self>,
) -> oneshot::Receiver<()> {
let options = options.unwrap_or(self.default_options);
if query != self.query_editor.read(cx).text(cx) || self.search_options != options {
if query != self.query(cx) || self.search_options != options {
self.query_editor.update(cx, |query_editor, cx| {
query_editor.buffer().update(cx, |query_buffer, cx| {
let len = query_buffer.len(cx);
@ -674,7 +720,7 @@ impl BufferSearchBar {
fn update_matches(&mut self, cx: &mut ViewContext<Self>) -> oneshot::Receiver<()> {
let (done_tx, done_rx) = oneshot::channel();
let query = self.query_editor.read(cx).text(cx);
let query = self.query(cx);
self.pending_search.take();
if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
if query.is_empty() {
@ -707,6 +753,7 @@ impl BufferSearchBar {
)
};
let query_text = query.as_str().to_string();
let matches = active_searchable_item.find_matches(query, cx);
let active_searchable_item = active_searchable_item.downgrade();
@ -720,6 +767,7 @@ impl BufferSearchBar {
.insert(active_searchable_item.downgrade(), matches);
this.update_match_index(cx);
this.search_history.add(query_text);
if !this.dismissed {
let matches = this
.searchable_items_with_matches
@ -753,6 +801,28 @@ impl BufferSearchBar {
cx.notify();
}
}
fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(new_query) = self.search_history.next().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
} else {
self.search_history.reset_selection();
let _ = self.search("", Some(self.search_options), cx);
}
}
fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
if self.query(cx).is_empty() {
if let Some(new_query) = self.search_history.current().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
return;
}
}
if let Some(new_query) = self.search_history.previous().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
}
}
}
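Both search bars drive a SearchHistory value whose definition is not part of this diff; below is a hypothetical sketch of the interface its call sites here appear to assume (the field layout and exact semantics are guesses, not Zed's implementation).
// Hypothetical SearchHistory inferred from its call sites in this commit.
#[derive(Clone, Default)]
pub struct SearchHistory {
    history: Vec<String>,
    selected: Option<usize>,
}
impl SearchHistory {
    pub fn add(&mut self, query: String) {
        self.history.push(query);
        self.selected = Some(self.history.len() - 1); // newest entry becomes the cursor
    }
    pub fn current(&self) -> Option<&str> {
        self.selected.and_then(|i| self.history.get(i)).map(String::as_str)
    }
    pub fn previous(&mut self) -> Option<&str> {
        if self.history.is_empty() {
            return None;
        }
        let i = match self.selected {
            None => self.history.len() - 1,
            Some(0) => 0, // never step behind the first entry
            Some(i) => i - 1,
        };
        self.selected = Some(i);
        self.history.get(i).map(String::as_str)
    }
    pub fn next(&mut self) -> Option<&str> {
        let i = self.selected? + 1;
        if i >= self.history.len() {
            return None; // callers reset the selection and clear the query
        }
        self.selected = Some(i);
        self.history.get(i).map(String::as_str)
    }
    pub fn reset_selection(&mut self) {
        self.selected = None;
    }
}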
#[cfg(test)]
@ -1333,4 +1403,154 @@ mod tests {
);
});
}
#[gpui::test]
async fn test_search_query_history(cx: &mut TestAppContext) {
crate::project_search::tests::init_test(cx);
let buffer_text = r#"
A regular expression (shortened as regex or regexp;[1] also referred to as
rational expression[2][3]) is a sequence of characters that specifies a search
pattern in text. Usually such patterns are used by string-searching algorithms
for "find" or "find and replace" operations on strings, or for input validation.
"#
.unindent();
let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
let (window_id, _root_view) = cx.add_window(|_| EmptyView);
let editor = cx.add_view(window_id, |cx| Editor::for_buffer(buffer.clone(), None, cx));
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = BufferSearchBar::new(cx);
search_bar.set_active_pane_item(Some(&editor), cx);
search_bar.show(cx);
search_bar
});
// Add 3 search items into the history.
search_bar
.update(cx, |search_bar, cx| search_bar.search("a", None, cx))
.await
.unwrap();
search_bar
.update(cx, |search_bar, cx| search_bar.search("b", None, cx))
.await
.unwrap();
search_bar
.update(cx, |search_bar, cx| {
search_bar.search("c", Some(SearchOptions::CASE_SENSITIVE), cx)
})
.await
.unwrap();
// Ensure that the latest search is active.
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next history query after the latest should set the query to the empty string.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// First previous query for empty current query should set the query to the latest.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Further previous items should go over the history in reverse order.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Previous items should never go behind the first history item.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next items should go over the history in the original order.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar
.update(cx, |search_bar, cx| search_bar.search("ba", None, cx))
.await
.unwrap();
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
// New search input should add another entry to history and move the selection to the end of the history.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
}
}

View File

@ -1,14 +1,14 @@
use crate::{
SearchOptions, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleWholeWord,
NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectNextMatch,
SelectPrevMatch, ToggleCaseSensitive, ToggleWholeWord,
};
use anyhow::Result;
use anyhow::Context;
use collections::HashMap;
use editor::{
items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
SelectAll, MAX_TAB_TITLE_LEN,
};
use futures::StreamExt;
use globset::{Glob, GlobMatcher};
use gpui::color::Color;
use gpui::geometry::rect::RectF;
use gpui::geometry::vector::IntoVector2F;
@ -24,7 +24,10 @@ use gpui::{
use gpui::{scene::Path, LayoutContext};
use menu::Confirm;
use postage::stream::Stream;
use project::{search::SearchQuery, Entry, Project};
use project::{
search::{PathMatcher, SearchQuery},
Entry, Project,
};
use semantic_index::SemanticIndex;
use smallvec::SmallVec;
use std::{
@ -69,6 +72,8 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(ProjectSearchBar::select_next_match);
cx.add_action(ProjectSearchBar::select_prev_match);
cx.add_action(ProjectSearchBar::cycle_mode);
cx.add_action(ProjectSearchBar::next_history_query);
cx.add_action(ProjectSearchBar::previous_history_query);
cx.capture_action(ProjectSearchBar::tab);
cx.capture_action(ProjectSearchBar::tab_previous);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
@ -107,6 +112,7 @@ struct ProjectSearch {
match_ranges: Vec<Range<Anchor>>,
active_query: Option<SearchQuery>,
search_id: usize,
search_history: SearchHistory,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -228,6 +234,7 @@ impl ProjectSearch {
match_ranges: Default::default(),
active_query: None,
search_id: 0,
search_history: SearchHistory::default(),
}
}
@ -241,6 +248,7 @@ impl ProjectSearch {
match_ranges: self.match_ranges.clone(),
active_query: self.active_query.clone(),
search_id: self.search_id,
search_history: self.search_history.clone(),
})
}
@ -255,6 +263,7 @@ impl ProjectSearch {
.project
.update(cx, |project, cx| project.search(query.clone(), cx));
self.search_id += 1;
self.search_history.add(query.as_str().to_string());
self.active_query = Some(query);
self.match_ranges.clear();
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
@ -290,27 +299,22 @@ impl ProjectSearch {
cx.notify();
}
fn semantic_search(
&mut self,
query: String,
include_files: Vec<GlobMatcher>,
exclude_files: Vec<GlobMatcher>,
cx: &mut ModelContext<Self>,
) {
fn semantic_search(&mut self, query: SearchQuery, cx: &mut ModelContext<Self>) {
let search = SemanticIndex::global(cx).map(|index| {
index.update(cx, |semantic_index, cx| {
semantic_index.search_project(
self.project.clone(),
query.clone(),
query.as_str().to_owned(),
10,
include_files,
exclude_files,
query.files_to_include().to_vec(),
query.files_to_exclude().to_vec(),
cx,
)
})
});
self.search_id += 1;
self.match_ranges.clear();
self.search_history.add(query.as_str().to_string());
self.pending_search = Some(cx.spawn(|this, mut cx| async move {
let results = search?.await.log_err()?;
@ -415,6 +419,49 @@ impl View for ProjectSearchView {
],
};
let previous_query_keystrokes =
cx.binding_for_action(&PreviousHistoryQuery {})
.map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let next_query_keystrokes =
cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
(Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
format!(
"Search ({}/{} for previous/next query)",
previous_query_keystrokes.join(" "),
next_query_keystrokes.join(" ")
)
}
(None, Some(next_query_keystrokes)) => {
format!(
"Search ({} for next query)",
next_query_keystrokes.join(" ")
)
}
(Some(previous_query_keystrokes), None) => {
format!(
"Search ({} for previous query)",
previous_query_keystrokes.join(" ")
)
}
(None, None) => String::new(),
};
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
MouseEventHandler::<Status, _>::new(0, cx, |_, _| {
Flex::column()
.with_child(Flex::column().contained().flex(1., true))
@ -641,6 +688,9 @@ impl Item for ProjectSearchView {
}
impl ProjectSearchView {
fn toggle_search_option(&mut self, option: SearchOptions) {
self.search_options.toggle(option);
}
fn activate_search_mode(&mut self, mode: SearchMode, cx: &mut ViewContext<Self>) {
self.model.update(cx, |model, _| model.kill_search());
self.current_mode = mode;
@ -815,8 +865,7 @@ impl ProjectSearchView {
if !dir_entry.is_dir() {
return;
}
let filter_path = dir_entry.path.join("**");
let Some(filter_str) = filter_path.to_str() else { return; };
let Some(filter_str) = dir_entry.path.to_str() else { return; };
let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx));
let search = cx.add_view(|cx| ProjectSearchView::new(model, cx));
@ -891,16 +940,13 @@ impl ProjectSearchView {
return;
}
let query = self.query_editor.read(cx).text(cx);
if let Some((included_files, exclude_files)) =
self.get_included_and_excluded_globsets(cx)
{
self.model.update(cx, |model, cx| {
model.semantic_search(query, included_files, exclude_files, cx)
});
if let Some(query) = self.build_search_query(cx) {
self.model
.update(cx, |model, cx| model.semantic_search(query, cx));
}
}
}
_ => {
if let Some(query) = self.build_search_query(cx) {
self.model.update(cx, |model, cx| model.search(query, cx));
@ -909,45 +955,10 @@ impl ProjectSearchView {
}
}
fn get_included_and_excluded_globsets(
&mut self,
cx: &mut ViewContext<Self>,
) -> Option<(Vec<GlobMatcher>, Vec<GlobMatcher>)> {
let included_files =
match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) {
Ok(included_files) => {
self.panels_with_errors.remove(&InputPanel::Include);
included_files
}
Err(_e) => {
self.panels_with_errors.insert(InputPanel::Include);
cx.notify();
return None;
}
};
let excluded_files =
match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) {
Ok(excluded_files) => {
self.panels_with_errors.remove(&InputPanel::Exclude);
excluded_files
}
Err(_e) => {
self.panels_with_errors.insert(InputPanel::Exclude);
cx.notify();
return None;
}
};
Some((included_files, excluded_files))
}
fn toggle_search_option(&mut self, option: SearchOptions) {
self.search_options.toggle(option);
self.semantic = None;
}
fn build_search_query(&mut self, cx: &mut ViewContext<Self>) -> Option<SearchQuery> {
let text = self.query_editor.read(cx).text(cx);
let included_files =
match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) {
match Self::parse_path_matches(&self.included_files_editor.read(cx).text(cx)) {
Ok(included_files) => {
self.panels_with_errors.remove(&InputPanel::Include);
included_files
@ -959,7 +970,7 @@ impl ProjectSearchView {
}
};
let excluded_files =
match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) {
match Self::parse_path_matches(&self.excluded_files_editor.read(cx).text(cx)) {
Ok(excluded_files) => {
self.panels_with_errors.remove(&InputPanel::Exclude);
excluded_files
@ -999,11 +1010,14 @@ impl ProjectSearchView {
}
}
fn load_glob_set(text: &str) -> Result<Vec<GlobMatcher>> {
fn parse_path_matches(text: &str) -> anyhow::Result<Vec<PathMatcher>> {
text.split(',')
.map(str::trim)
.filter(|glob_str| !glob_str.is_empty())
.map(|glob_str| anyhow::Ok(Glob::new(glob_str)?.compile_matcher()))
.filter(|maybe_glob_str| !maybe_glob_str.is_empty())
.map(|maybe_glob_str| {
PathMatcher::new(maybe_glob_str)
.with_context(|| format!("parsing {maybe_glob_str} as path matcher"))
})
.collect()
}
@ -1016,6 +1030,7 @@ impl ProjectSearchView {
let range_to_select = match_ranges[new_index].clone();
self.results_editor.update(cx, |editor, cx| {
let range_to_select = editor.range_for_match(&range_to_select);
editor.unfold_ranges([range_to_select.clone()], false, true, cx);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range_to_select])
@ -1057,8 +1072,12 @@ impl ProjectSearchView {
let is_new_search = self.search_id != prev_search_id;
self.results_editor.update(cx, |editor, cx| {
if is_new_search {
let range_to_select = match_ranges
.first()
.clone()
.map(|range| editor.range_for_match(range));
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges(match_ranges.first().cloned())
s.select_ranges(range_to_select)
});
}
editor.highlight_background::<Self>(
@ -1597,6 +1616,47 @@ impl ProjectSearchBar {
false
}
}
fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
let new_query = search_view.model.update(cx, |model, _| {
if let Some(new_query) = model.search_history.next().map(str::to_string) {
new_query
} else {
model.search_history.reset_selection();
String::new()
}
});
search_view.set_query(&new_query, cx);
});
}
}
fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
if search_view.query_editor.read(cx).text(cx).is_empty() {
if let Some(new_query) = search_view
.model
.read(cx)
.search_history
.current()
.map(str::to_string)
{
search_view.set_query(&new_query, cx);
return;
}
}
if let Some(new_query) = search_view.model.update(cx, |model, _| {
model.search_history.previous().map(str::to_string)
}) {
search_view.set_query(&new_query, cx);
}
});
}
}
}
impl Entity for ProjectSearchBar {
@ -1869,6 +1929,7 @@ pub mod tests {
use editor::DisplayPoint;
use gpui::{color::Color, executor::Deterministic, TestAppContext};
use project::FakeFs;
use semantic_index::semantic_index_settings::SemanticIndexSettings;
use serde_json::json;
use settings::SettingsStore;
use std::sync::Arc;
@ -2270,7 +2331,7 @@ pub mod tests {
search_view.included_files_editor.update(cx, |editor, cx| {
assert_eq!(
editor.display_text(cx),
a_dir_entry.path.join("**").display().to_string(),
a_dir_entry.path.to_str().unwrap(),
"New search in directory should have included dir entry path"
);
});
@ -2294,6 +2355,192 @@ pub mod tests {
});
}
#[gpui::test]
async fn test_search_query_history(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
"three.rs": "const THREE: usize = one::ONE + two::TWO;",
"four.rs": "const FOUR: usize = one::ONE + three::THREE;",
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
workspace.update(cx, |workspace, cx| {
ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
});
let search_view = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
.expect("Search view expected to appear after new search event trigger")
});
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = ProjectSearchBar::new();
search_bar.set_active_pane_item(Some(&search_view), cx);
// search_bar.show(cx);
search_bar
});
// Add 3 search items into the history + another unsubmitted one.
search_view.update(cx, |search_view, cx| {
search_view.search_options = SearchOptions::CASE_SENSITIVE;
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("ONE", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("THREE", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view.query_editor.update(cx, |query_editor, cx| {
query_editor.set_text("JUST_TEXT_INPUT", cx)
});
});
cx.foreground().run_until_parked();
// Ensure that the latest input with search settings is active.
search_view.update(cx, |search_view, cx| {
assert_eq!(
search_view.query_editor.read(cx).text(cx),
"JUST_TEXT_INPUT"
);
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next history query after the latest should set the query to the empty string.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// First previous query for empty current query should set the query to the latest submitted one.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Further previous-query requests should walk the history in reverse order.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Previous-query requests should never go past the first history item.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next-query requests should walk the history forward in its original order.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO_NEW", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// A new search input should add another entry to the history and move the selection to its end.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
}
pub fn init_test(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let fonts = cx.font_cache();
@ -2303,6 +2550,7 @@ pub mod tests {
cx.update(|cx| {
cx.set_global(SettingsStore::test(cx));
cx.set_global(ActiveSearches::default());
settings::register::<SemanticIndexSettings>(cx);
theme::init((), cx);
cx.update_global::<SettingsStore, _, _>(|store, _| {

View File

@ -3,6 +3,7 @@ pub use buffer_search::BufferSearchBar;
use gpui::{actions, Action, AppContext};
use project::search::SearchQuery;
pub use project_search::{ProjectSearchBar, ProjectSearchView};
use smallvec::SmallVec;
pub mod buffer_search;
pub mod project_search;
@ -21,6 +22,8 @@ actions!(
SelectNextMatch,
SelectPrevMatch,
SelectAllMatches,
NextHistoryQuery,
PreviousHistoryQuery,
]
);
@ -65,3 +68,187 @@ impl SearchOptions {
options
}
}
const SEARCH_HISTORY_LIMIT: usize = 20;
#[derive(Default, Debug, Clone)]
pub struct SearchHistory {
history: SmallVec<[String; SEARCH_HISTORY_LIMIT]>,
selected: Option<usize>,
}
impl SearchHistory {
pub fn add(&mut self, search_string: String) {
if let Some(i) = self.selected {
if search_string == self.history[i] {
return;
}
}
if let Some(previously_searched) = self.history.last_mut() {
if search_string.find(previously_searched.as_str()).is_some() {
*previously_searched = search_string;
self.selected = Some(self.history.len() - 1);
return;
}
}
self.history.push(search_string);
if self.history.len() > SEARCH_HISTORY_LIMIT {
self.history.remove(0);
}
self.selected = Some(self.history.len() - 1);
}
pub fn next(&mut self) -> Option<&str> {
let history_size = self.history.len();
if history_size == 0 {
return None;
}
let selected = self.selected?;
if selected == history_size - 1 {
return None;
}
let next_index = selected + 1;
self.selected = Some(next_index);
Some(&self.history[next_index])
}
pub fn current(&self) -> Option<&str> {
Some(&self.history[self.selected?])
}
pub fn previous(&mut self) -> Option<&str> {
let history_size = self.history.len();
if history_size == 0 {
return None;
}
let prev_index = match self.selected {
Some(selected_index) => {
if selected_index == 0 {
return None;
} else {
selected_index - 1
}
}
None => history_size - 1,
};
self.selected = Some(prev_index);
Some(&self.history[prev_index])
}
pub fn reset_selection(&mut self) {
self.selected = None;
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_add() {
let mut search_history = SearchHistory::default();
assert_eq!(
search_history.current(),
None,
"No current selection should be set fo the default search history"
);
search_history.add("rust".to_string());
assert_eq!(
search_history.current(),
Some("rust"),
"Newly added item should be selected"
);
// check if duplicates are not added
search_history.add("rust".to_string());
assert_eq!(
search_history.history.len(),
1,
"Should not add a duplicate"
);
assert_eq!(search_history.current(), Some("rust"));
// check if new string containing the previous string replaces it
search_history.add("rustlang".to_string());
assert_eq!(
search_history.history.len(),
1,
"Should replace previous item if it's a substring"
);
assert_eq!(search_history.current(), Some("rustlang"));
// push enough items to test SEARCH_HISTORY_LIMIT
for i in 0..SEARCH_HISTORY_LIMIT * 2 {
search_history.add(format!("item{i}"));
}
assert!(search_history.history.len() <= SEARCH_HISTORY_LIMIT);
}
#[test]
fn test_next_and_previous() {
let mut search_history = SearchHistory::default();
assert_eq!(
search_history.next(),
None,
"Default search history should not have a next item"
);
search_history.add("Rust".to_string());
assert_eq!(search_history.next(), None);
search_history.add("JavaScript".to_string());
assert_eq!(search_history.next(), None);
search_history.add("TypeScript".to_string());
assert_eq!(search_history.next(), None);
assert_eq!(search_history.current(), Some("TypeScript"));
assert_eq!(search_history.previous(), Some("JavaScript"));
assert_eq!(search_history.current(), Some("JavaScript"));
assert_eq!(search_history.previous(), Some("Rust"));
assert_eq!(search_history.current(), Some("Rust"));
assert_eq!(search_history.previous(), None);
assert_eq!(search_history.current(), Some("Rust"));
assert_eq!(search_history.next(), Some("JavaScript"));
assert_eq!(search_history.current(), Some("JavaScript"));
assert_eq!(search_history.next(), Some("TypeScript"));
assert_eq!(search_history.current(), Some("TypeScript"));
assert_eq!(search_history.next(), None);
assert_eq!(search_history.current(), Some("TypeScript"));
}
#[test]
fn test_reset_selection() {
let mut search_history = SearchHistory::default();
search_history.add("Rust".to_string());
search_history.add("JavaScript".to_string());
search_history.add("TypeScript".to_string());
assert_eq!(search_history.current(), Some("TypeScript"));
search_history.reset_selection();
assert_eq!(search_history.current(), None);
assert_eq!(
search_history.previous(),
Some("TypeScript"),
"Should start from the end after reset on previous item query"
);
search_history.previous();
assert_eq!(search_history.current(), Some("JavaScript"));
search_history.previous();
assert_eq!(search_history.current(), Some("Rust"));
search_history.reset_selection();
assert_eq!(search_history.current(), None);
}
}

View File

@ -54,9 +54,12 @@ tempdir.workspace = true
ctor.workspace = true
env_logger.workspace = true
tree-sitter-typescript = "*"
tree-sitter-json = "*"
tree-sitter-rust = "*"
tree-sitter-toml = "*"
tree-sitter-cpp = "*"
tree-sitter-elixir = "*"
tree-sitter-typescript.workspace = true
tree-sitter-json.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-toml.workspace = true
tree-sitter-cpp.workspace = true
tree-sitter-elixir.workspace = true
tree-sitter-lua.workspace = true
tree-sitter-ruby.workspace = true
tree-sitter-php.workspace = true

View File

@ -1,7 +1,6 @@
use crate::{parsing::Document, SEMANTIC_INDEX_VERSION};
use anyhow::{anyhow, Context, Result};
use globset::GlobMatcher;
use project::Fs;
use project::{search::PathMatcher, Fs};
use rpc::proto::Timestamp;
use rusqlite::{
params,
@ -290,8 +289,8 @@ impl VectorDatabase {
pub fn retrieve_included_file_ids(
&self,
worktree_ids: &[i64],
include_globs: Vec<GlobMatcher>,
exclude_globs: Vec<GlobMatcher>,
includes: &[PathMatcher],
excludes: &[PathMatcher],
) -> Result<Vec<i64>> {
let mut file_query = self.db.prepare(
"
@ -310,13 +309,9 @@ impl VectorDatabase {
while let Some(row) = rows.next()? {
let file_id = row.get(0)?;
let relative_path = row.get_ref(1)?.as_str()?;
let included = include_globs.is_empty()
|| include_globs
.iter()
.any(|glob| glob.is_match(relative_path));
let excluded = exclude_globs
.iter()
.any(|glob| glob.is_match(relative_path));
let included =
includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
if included && !excluded {
file_ids.push(file_id);
}

View File

@ -21,7 +21,9 @@ const CODE_CONTEXT_TEMPLATE: &str =
"The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";
const ENTIRE_FILE_TEMPLATE: &str =
"The below snippet is from file '<path>'\n\n```<language>\n<item>\n```";
pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] = &["TOML", "YAML", "CSS"];
const MARKDOWN_CONTEXT_TEMPLATE: &str = "The below file contents is from file '<path>'\n\n<item>";
pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] =
&["TOML", "YAML", "CSS", "HEEX", "ERB", "SVELTE", "HTML"];
pub struct CodeContextRetriever {
pub parser: Parser,
@ -59,7 +61,7 @@ impl CodeContextRetriever {
let document_span = ENTIRE_FILE_TEMPLATE
.replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<language>", language_name.as_ref())
.replace("item", &content);
.replace("<item>", &content);
Ok(vec![Document {
range: 0..content.len(),
@ -69,6 +71,19 @@ impl CodeContextRetriever {
}])
}
fn parse_markdown_file(&self, relative_path: &Path, content: &str) -> Result<Vec<Document>> {
let document_span = MARKDOWN_CONTEXT_TEMPLATE
.replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<item>", &content);
Ok(vec![Document {
range: 0..content.len(),
content: document_span,
embedding: Vec::new(),
name: "Markdown".to_string(),
}])
}
fn get_matches_in_file(
&mut self,
content: &str,
@ -135,6 +150,8 @@ impl CodeContextRetriever {
if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) {
return self.parse_entire_file(relative_path, language_name, &content);
} else if &language_name.to_string() == &"Markdown".to_string() {
return self.parse_markdown_file(relative_path, &content);
}
let mut documents = self.parse_file(content, language)?;
@ -200,7 +217,12 @@ impl CodeContextRetriever {
let mut document_content = String::new();
for context_range in &context_match.context_ranges {
document_content.push_str(&content[context_range.clone()]);
add_content_from_range(
&mut document_content,
content,
context_range.clone(),
context_match.start_col,
);
document_content.push_str("\n");
}

View File

@ -11,13 +11,12 @@ use anyhow::{anyhow, Result};
use db::VectorDatabase;
use embedding::{EmbeddingProvider, OpenAIEmbeddings};
use futures::{channel::oneshot, Future};
use globset::GlobMatcher;
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use language::{Anchor, Buffer, Language, LanguageRegistry};
use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Document, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch;
use project::{Fs, Project, WorktreeId};
use project::{search::PathMatcher, Fs, Project, WorktreeId};
use smol::channel;
use std::{
cmp::Ordering,
@ -613,6 +612,7 @@ impl SemanticIndex {
.await
{
if !PARSEABLE_ENTIRE_FILE_TYPES.contains(&language.name().as_ref())
&& &language.name().as_ref() != &"Markdown"
&& language
.grammar()
.and_then(|grammar| grammar.embedding_config.as_ref())
@ -682,8 +682,8 @@ impl SemanticIndex {
project: ModelHandle<Project>,
phrase: String,
limit: usize,
include_globs: Vec<GlobMatcher>,
exclude_globs: Vec<GlobMatcher>,
includes: Vec<PathMatcher>,
excludes: Vec<PathMatcher>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<SearchResult>>> {
let project_state = if let Some(state) = self.projects.get(&project.downgrade()) {
@ -714,11 +714,8 @@ impl SemanticIndex {
.next()
.unwrap();
let file_ids = database.retrieve_included_file_ids(
&worktree_db_ids,
include_globs,
exclude_globs,
)?;
let file_ids =
database.retrieve_included_file_ids(&worktree_db_ids, &includes, &excludes)?;
let batch_n = cx.background().num_cpus();
let ids_len = file_ids.clone().len();

View File

@ -7,11 +7,10 @@ use crate::{
};
use anyhow::Result;
use async_trait::async_trait;
use globset::Glob;
use gpui::{Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
use pretty_assertions::assert_eq;
use project::{project_settings::ProjectSettings, FakeFs, Fs, Project};
use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project};
use rand::{rngs::StdRng, Rng};
use serde_json::json;
use settings::SettingsStore;
@ -121,8 +120,8 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
);
// Test Include Files Functionality
let include_files = vec![Glob::new("*.rs").unwrap().compile_matcher()];
let exclude_files = vec![Glob::new("*.rs").unwrap().compile_matcher()];
let include_files = vec![PathMatcher::new("*.rs").unwrap()];
let exclude_files = vec![PathMatcher::new("*.rs").unwrap()];
let rust_only_search_results = store
.update(cx, |store, cx| {
store.search_project(
@ -486,6 +485,79 @@ async fn test_code_context_retrieval_javascript() {
)
}
#[gpui::test]
async fn test_code_context_retrieval_lua() {
let language = lua_lang();
let mut retriever = CodeContextRetriever::new();
let text = r#"
-- Creates a new class
-- @param baseclass The Baseclass of this class, or nil.
-- @return A new class reference.
function classes.class(baseclass)
-- Create the class definition and metatable.
local classdef = {}
-- Find the super class, either Object or user-defined.
baseclass = baseclass or classes.Object
-- If this class definition does not know of a function, it will 'look up' to the Baseclass via the __index of the metatable.
setmetatable(classdef, { __index = baseclass })
-- All class instances have a reference to the class object.
classdef.class = classdef
--- Recursively allocates the inheritance tree of the instance.
-- @param mastertable The 'root' of the inheritance tree.
-- @return Returns the instance with the allocated inheritance tree.
function classdef.alloc(mastertable)
-- All class instances have a reference to a superclass object.
local instance = { super = baseclass.alloc(mastertable) }
-- Any functions this instance does not know of will 'look up' to the superclass definition.
setmetatable(instance, { __index = classdef, __newindex = mastertable })
return instance
end
end
"#.unindent();
let documents = retriever.parse_file(&text, language.clone()).unwrap();
assert_documents_eq(
&documents,
&[
(r#"
-- Creates a new class
-- @param baseclass The Baseclass of this class, or nil.
-- @return A new class reference.
function classes.class(baseclass)
-- Create the class definition and metatable.
local classdef = {}
-- Find the super class, either Object or user-defined.
baseclass = baseclass or classes.Object
-- If this class definition does not know of a function, it will 'look up' to the Baseclass via the __index of the metatable.
setmetatable(classdef, { __index = baseclass })
-- All class instances have a reference to the class object.
classdef.class = classdef
--- Recursively allocates the inheritance tree of the instance.
-- @param mastertable The 'root' of the inheritance tree.
-- @return Returns the instance with the allocated inheritance tree.
function classdef.alloc(mastertable)
--[ ... ]--
--[ ... ]--
end
end"#.unindent(),
114),
(r#"
--- Recursively allocates the inheritance tree of the instance.
-- @param mastertable The 'root' of the inheritance tree.
-- @return Returns the instance with the allocated inheritance tree.
function classdef.alloc(mastertable)
-- All class instances have a reference to a superclass object.
local instance = { super = baseclass.alloc(mastertable) }
-- Any functions this instance does not know of will 'look up' to the superclass definition.
setmetatable(instance, { __index = classdef, __newindex = mastertable })
return instance
end"#.unindent(), 809),
]
);
}
#[gpui::test]
async fn test_code_context_retrieval_elixir() {
let language = elixir_lang();
@ -754,6 +826,346 @@ async fn test_code_context_retrieval_cpp() {
);
}
#[gpui::test]
async fn test_code_context_retrieval_ruby() {
let language = ruby_lang();
let mut retriever = CodeContextRetriever::new();
let text = r#"
# This concern is inspired by "sudo mode" on GitHub. It
# is a way to re-authenticate a user before allowing them
# to see or perform an action.
#
# Add `before_action :require_challenge!` to actions you
# want to protect.
#
# The user will be shown a page to enter the challenge (which
# is either the password, or just the username when no
# password exists). Upon passing, there is a grace period
# during which no challenge will be asked from the user.
#
# Accessing challenge-protected resources during the grace
# period will refresh the grace period.
module ChallengableConcern
extend ActiveSupport::Concern
CHALLENGE_TIMEOUT = 1.hour.freeze
def require_challenge!
return if skip_challenge?
if challenge_passed_recently?
session[:challenge_passed_at] = Time.now.utc
return
end
@challenge = Form::Challenge.new(return_to: request.url)
if params.key?(:form_challenge)
if challenge_passed?
session[:challenge_passed_at] = Time.now.utc
else
flash.now[:alert] = I18n.t('challenge.invalid_password')
render_challenge
end
else
render_challenge
end
end
def challenge_passed?
current_user.valid_password?(challenge_params[:current_password])
end
end
class Animal
include Comparable
attr_reader :legs
def initialize(name, legs)
@name, @legs = name, legs
end
def <=>(other)
legs <=> other.legs
end
end
# Singleton method for car object
def car.wheels
puts "There are four wheels"
end"#
.unindent();
let documents = retriever.parse_file(&text, language.clone()).unwrap();
assert_documents_eq(
&documents,
&[
(
r#"
# This concern is inspired by "sudo mode" on GitHub. It
# is a way to re-authenticate a user before allowing them
# to see or perform an action.
#
# Add `before_action :require_challenge!` to actions you
# want to protect.
#
# The user will be shown a page to enter the challenge (which
# is either the password, or just the username when no
# password exists). Upon passing, there is a grace period
# during which no challenge will be asked from the user.
#
# Accessing challenge-protected resources during the grace
# period will refresh the grace period.
module ChallengableConcern
extend ActiveSupport::Concern
CHALLENGE_TIMEOUT = 1.hour.freeze
def require_challenge!
# ...
end
def challenge_passed?
# ...
end
end"#
.unindent(),
558,
),
(
r#"
def require_challenge!
return if skip_challenge?
if challenge_passed_recently?
session[:challenge_passed_at] = Time.now.utc
return
end
@challenge = Form::Challenge.new(return_to: request.url)
if params.key?(:form_challenge)
if challenge_passed?
session[:challenge_passed_at] = Time.now.utc
else
flash.now[:alert] = I18n.t('challenge.invalid_password')
render_challenge
end
else
render_challenge
end
end"#
.unindent(),
663,
),
(
r#"
def challenge_passed?
current_user.valid_password?(challenge_params[:current_password])
end"#
.unindent(),
1254,
),
(
r#"
class Animal
include Comparable
attr_reader :legs
def initialize(name, legs)
# ...
end
def <=>(other)
# ...
end
end"#
.unindent(),
1363,
),
(
r#"
def initialize(name, legs)
@name, @legs = name, legs
end"#
.unindent(),
1427,
),
(
r#"
def <=>(other)
legs <=> other.legs
end"#
.unindent(),
1501,
),
(
r#"
# Singleton method for car object
def car.wheels
puts "There are four wheels"
end"#
.unindent(),
1591,
),
],
);
}
#[gpui::test]
async fn test_code_context_retrieval_php() {
let language = php_lang();
let mut retriever = CodeContextRetriever::new();
let text = r#"
<?php
namespace LevelUp\Experience\Concerns;
/*
This is a multiple-lines comment block
that spans over multiple
lines
*/
function functionName() {
echo "Hello world!";
}
trait HasAchievements
{
/**
* @throws \Exception
*/
public function grantAchievement(Achievement $achievement, $progress = null): void
{
if ($progress > 100) {
throw new Exception(message: 'Progress cannot be greater than 100');
}
if ($this->achievements()->find($achievement->id)) {
throw new Exception(message: 'User already has this Achievement');
}
$this->achievements()->attach($achievement, [
'progress' => $progress ?? null,
]);
$this->when(value: ($progress === null) || ($progress === 100), callback: fn (): ?array => event(new AchievementAwarded(achievement: $achievement, user: $this)));
}
public function achievements(): BelongsToMany
{
return $this->belongsToMany(related: Achievement::class)
->withPivot(columns: 'progress')
->where('is_secret', false)
->using(AchievementUser::class);
}
}
interface Multiplier
{
public function qualifies(array $data): bool;
public function setMultiplier(): int;
}
enum AuditType: string
{
case Add = 'add';
case Remove = 'remove';
case Reset = 'reset';
case LevelUp = 'level_up';
}
?>"#
.unindent();
let documents = retriever.parse_file(&text, language.clone()).unwrap();
assert_documents_eq(
&documents,
&[
(
r#"
/*
This is a multiple-lines comment block
that spans over multiple
lines
*/
function functionName() {
echo "Hello world!";
}"#
.unindent(),
123,
),
(
r#"
trait HasAchievements
{
/**
* @throws \Exception
*/
public function grantAchievement(Achievement $achievement, $progress = null): void
{/* ... */}
public function achievements(): BelongsToMany
{/* ... */}
}"#
.unindent(),
177,
),
(r#"
/**
* @throws \Exception
*/
public function grantAchievement(Achievement $achievement, $progress = null): void
{
if ($progress > 100) {
throw new Exception(message: 'Progress cannot be greater than 100');
}
if ($this->achievements()->find($achievement->id)) {
throw new Exception(message: 'User already has this Achievement');
}
$this->achievements()->attach($achievement, [
'progress' => $progress ?? null,
]);
$this->when(value: ($progress === null) || ($progress === 100), callback: fn (): ?array => event(new AchievementAwarded(achievement: $achievement, user: $this)));
}"#.unindent(), 245),
(r#"
public function achievements(): BelongsToMany
{
return $this->belongsToMany(related: Achievement::class)
->withPivot(columns: 'progress')
->where('is_secret', false)
->using(AchievementUser::class);
}"#.unindent(), 902),
(r#"
interface Multiplier
{
public function qualifies(array $data): bool;
public function setMultiplier(): int;
}"#.unindent(),
1146),
(r#"
enum AuditType: string
{
case Add = 'add';
case Remove = 'remove';
case Reset = 'reset';
case LevelUp = 'level_up';
}"#.unindent(), 1265)
],
);
}
#[gpui::test]
fn test_dot_product(mut rng: StdRng) {
assert_eq!(dot(&[1., 0., 0., 0., 0.], &[0., 1., 0., 0., 0.]), 0.);
@ -1084,6 +1496,131 @@ fn cpp_lang() -> Arc<Language> {
)
}
fn lua_lang() -> Arc<Language> {
Arc::new(
Language::new(
LanguageConfig {
name: "Lua".into(),
path_suffixes: vec!["lua".into()],
collapsed_placeholder: "--[ ... ]--".to_string(),
..Default::default()
},
Some(tree_sitter_lua::language()),
)
.with_embedding_query(
r#"
(
(comment)* @context
.
(function_declaration
"function" @name
name: (_) @name
(comment)* @collapse
body: (block) @collapse
) @item
)
"#,
)
.unwrap(),
)
}
fn php_lang() -> Arc<Language> {
Arc::new(
Language::new(
LanguageConfig {
name: "PHP".into(),
path_suffixes: vec!["php".into()],
collapsed_placeholder: "/* ... */".into(),
..Default::default()
},
Some(tree_sitter_php::language()),
)
.with_embedding_query(
r#"
(
(comment)* @context
.
[
(function_definition
"function" @name
name: (_) @name
body: (_
"{" @keep
"}" @keep) @collapse
)
(trait_declaration
"trait" @name
name: (_) @name)
(method_declaration
"function" @name
name: (_) @name
body: (_
"{" @keep
"}" @keep) @collapse
)
(interface_declaration
"interface" @name
name: (_) @name
)
(enum_declaration
"enum" @name
name: (_) @name
)
] @item
)
"#,
)
.unwrap(),
)
}
fn ruby_lang() -> Arc<Language> {
Arc::new(
Language::new(
LanguageConfig {
name: "Ruby".into(),
path_suffixes: vec!["rb".into()],
collapsed_placeholder: "# ...".to_string(),
..Default::default()
},
Some(tree_sitter_ruby::language()),
)
.with_embedding_query(
r#"
(
(comment)* @context
.
[
(module
"module" @name
name: (_) @name)
(method
"def" @name
name: (_) @name
body: (body_statement) @collapse)
(class
"class" @name
name: (_) @name)
(singleton_method
"def" @name
object: (_) @name
"." @name
name: (_) @name
body: (body_statement) @collapse)
] @item
)
"#,
)
.unwrap(),
)
}
fn elixir_lang() -> Arc<Language> {
Arc::new(
Language::new(

View File

@ -202,7 +202,7 @@ where
self.position = D::default();
}
let mut entry = self.stack.last_mut().unwrap();
let entry = self.stack.last_mut().unwrap();
if !descending {
if entry.index == 0 {
self.stack.pop();
@ -438,6 +438,7 @@ where
} => {
if ascending {
entry.index += 1;
entry.position = self.position.clone();
}
for (child_tree, child_summary) in child_trees[entry.index..]

View File

@ -738,7 +738,7 @@ mod tests {
for _ in 0..num_operations {
let splice_end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
let splice_start = rng.gen_range(0..splice_end + 1);
let count = rng.gen_range(0..3);
let count = rng.gen_range(0..10);
let tree_end = tree.extent::<Count>(&());
let new_items = rng
.sample_iter(distributions::Standard)
@ -805,10 +805,12 @@ mod tests {
}
assert_eq!(filter_cursor.item(), None);
let mut pos = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
let mut before_start = false;
let mut cursor = tree.cursor::<Count>();
cursor.seek(&Count(pos), Bias::Right, &());
let start_pos = rng.gen_range(0..=reference_items.len());
cursor.seek(&Count(start_pos), Bias::Right, &());
let mut pos = rng.gen_range(start_pos..=reference_items.len());
cursor.seek_forward(&Count(pos), Bias::Right, &());
for i in 0..10 {
assert_eq!(cursor.start().0, pos);

View File

@ -16,7 +16,7 @@ db = { path = "../db" }
theme = { path = "../theme" }
util = { path = "../util" }
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "a51dbe25d67e84d6ed4261e640d3954fbdd9be45" }
alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "7b9f32300ee0a249c0872302c97635b460e45ba5" }
procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
smallvec.workspace = true
smol.workspace = true

View File

@ -114,11 +114,7 @@ fn rgb_for_index(i: &u8) -> (u8, u8, u8) {
//Convenience method to convert from a GPUI color to an alacritty Rgb
pub fn to_alac_rgb(color: Color) -> AlacRgb {
AlacRgb {
r: color.r,
g: color.g,
b: color.g,
}
AlacRgb::new(color.r, color.g, color.b)
}
#[cfg(test)]

View File

@ -1,20 +1,64 @@
use gpui::{elements::Label, AnyElement, Element, Entity, View, ViewContext};
use gpui::{
elements::{Empty, Label},
AnyElement, Element, Entity, Subscription, View, ViewContext,
};
use settings::SettingsStore;
use workspace::{item::ItemHandle, StatusItemView};
use crate::state::Mode;
use crate::{state::Mode, Vim, VimEvent, VimModeSetting};
pub struct ModeIndicator {
pub mode: Mode,
pub mode: Option<Mode>,
_subscription: Subscription,
}
impl ModeIndicator {
pub fn new(mode: Mode) -> Self {
Self { mode }
pub fn new(cx: &mut ViewContext<Self>) -> Self {
let handle = cx.handle().downgrade();
let _subscription = cx.subscribe_global::<VimEvent, _>(move |&event, cx| {
if let Some(mode_indicator) = handle.upgrade(cx) {
match event {
VimEvent::ModeChanged { mode } => {
cx.update_window(mode_indicator.window_id(), |cx| {
mode_indicator.update(cx, move |mode_indicator, cx| {
mode_indicator.set_mode(mode, cx);
})
});
}
}
}
});
cx.observe_global::<SettingsStore, _>(move |mode_indicator, cx| {
if settings::get::<VimModeSetting>(cx).0 {
mode_indicator.mode = cx
.has_global::<Vim>()
.then(|| cx.global::<Vim>().state.mode);
} else {
mode_indicator.mode.take();
}
})
.detach();
// Vim doesn't exist in some tests
let mode = cx
.has_global::<Vim>()
.then(|| {
let vim = cx.global::<Vim>();
vim.enabled.then(|| vim.state.mode)
})
.flatten();
Self {
mode,
_subscription,
}
}
pub fn set_mode(&mut self, mode: Mode, cx: &mut ViewContext<Self>) {
if mode != self.mode {
self.mode = mode;
if self.mode != Some(mode) {
self.mode = Some(mode);
cx.notify();
}
}
@ -30,11 +74,16 @@ impl View for ModeIndicator {
}
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let Some(mode) = self.mode.as_ref() else {
return Empty::new().into_any();
};
let theme = &theme::current(cx).workspace.status_bar;
// we always choose text to be 12 monospace characters
// so that as the mode indicator changes, the rest of the
// UI stays still.
let text = match self.mode {
let text = match mode {
Mode::Normal => "-- NORMAL --",
Mode::Insert => "-- INSERT --",
Mode::Visual { line: false } => "-- VISUAL --",

View File

@ -93,7 +93,7 @@ fn search_submit(workspace: &mut Workspace, _: &SearchSubmit, cx: &mut ViewConte
pane.update(cx, |pane, cx| {
if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() {
search_bar.update(cx, |search_bar, cx| {
let mut state = &mut vim.state.search;
let state = &mut vim.state.search;
let mut count = state.count;
// in the case that the query has changed, the search bar
@ -222,7 +222,7 @@ mod test {
});
search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), "cc");
assert_eq!(bar.query(cx), "cc");
});
deterministic.run_until_parked();

View File

@ -99,7 +99,7 @@ async fn test_buffer_search(cx: &mut gpui::TestAppContext) {
});
search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), "");
assert_eq!(bar.query(cx), "");
})
}
@ -175,7 +175,7 @@ async fn test_selection_on_search(cx: &mut gpui::TestAppContext) {
});
search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), "cc");
assert_eq!(bar.query(cx), "cc");
});
// wait for the query editor change event to fire.
@ -215,7 +215,7 @@ async fn test_status_indicator(
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Mode::Normal
Some(Mode::Normal)
);
// shows the correct mode
@ -223,7 +223,7 @@ async fn test_status_indicator(
deterministic.run_until_parked();
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Mode::Insert
Some(Mode::Insert)
);
// shows even in search
@ -231,7 +231,7 @@ async fn test_status_indicator(
deterministic.run_until_parked();
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Mode::Visual { line: false }
Some(Mode::Visual { line: false })
);
// hides if vim mode is disabled
@ -239,15 +239,15 @@ async fn test_status_indicator(
deterministic.run_until_parked();
cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx);
let mode_indicator = status_bar.item_of_type::<ModeIndicator>();
assert!(mode_indicator.is_none());
let mode_indicator = status_bar.item_of_type::<ModeIndicator>().unwrap();
assert!(mode_indicator.read(cx).mode.is_none());
});
cx.enable_vim();
deterministic.run_until_parked();
cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx);
let mode_indicator = status_bar.item_of_type::<ModeIndicator>();
assert!(mode_indicator.is_some());
let mode_indicator = status_bar.item_of_type::<ModeIndicator>().unwrap();
assert!(mode_indicator.read(cx).mode.is_some());
});
}

View File

@ -43,6 +43,10 @@ impl<'a> VimTestContext<'a> {
toolbar.add_item(project_search_bar, cx);
})
});
workspace.status_bar().update(cx, |status_bar, cx| {
let vim_mode_indicator = cx.add_view(ModeIndicator::new);
status_bar.add_right_item(vim_mode_indicator, cx);
});
});
Self { cx }

View File

@ -43,6 +43,11 @@ struct Number(u8);
actions!(vim, [Tab, Enter]);
impl_actions!(vim, [Number, SwitchMode, PushOperator]);
#[derive(Copy, Clone, Debug)]
enum VimEvent {
ModeChanged { mode: Mode },
}
pub fn init(cx: &mut AppContext) {
settings::register::<VimModeSetting>(cx);
@ -121,8 +126,6 @@ pub fn observe_keystrokes(cx: &mut WindowContext) {
pub struct Vim {
active_editor: Option<WeakViewHandle<Editor>>,
editor_subscription: Option<Subscription>,
mode_indicator: Option<ViewHandle<ModeIndicator>>,
enabled: bool,
state: VimState,
}
@ -181,9 +184,7 @@ impl Vim {
self.state.mode = mode;
self.state.operator_stack.clear();
if let Some(mode_indicator) = &self.mode_indicator {
mode_indicator.update(cx, |mode_indicator, cx| mode_indicator.set_mode(mode, cx))
}
cx.emit_global(VimEvent::ModeChanged { mode });
// Sync editor settings like clip mode
self.sync_vim_settings(cx);
@ -271,44 +272,6 @@ impl Vim {
}
}
fn sync_mode_indicator(cx: &mut WindowContext) {
let Some(workspace) = cx.root_view()
.downcast_ref::<Workspace>()
.map(|workspace| workspace.downgrade()) else {
return;
};
cx.spawn(|mut cx| async move {
workspace.update(&mut cx, |workspace, cx| {
Vim::update(cx, |vim, cx| {
workspace.status_bar().update(cx, |status_bar, cx| {
let current_position = status_bar.position_of_item::<ModeIndicator>();
if vim.enabled && current_position.is_none() {
if vim.mode_indicator.is_none() {
vim.mode_indicator =
Some(cx.add_view(|_| ModeIndicator::new(vim.state.mode)));
};
let mode_indicator = vim.mode_indicator.as_ref().unwrap();
let position = status_bar
.position_of_item::<language_selector::ActiveBufferLanguage>();
if let Some(position) = position {
status_bar.insert_item_after(position, mode_indicator.clone(), cx)
} else {
status_bar.add_left_item(mode_indicator.clone(), cx)
}
} else if !vim.enabled {
if let Some(position) = current_position {
status_bar.remove_item_at(position, cx)
}
}
})
})
})
})
.detach_and_log_err(cx);
}
fn set_enabled(&mut self, enabled: bool, cx: &mut AppContext) {
if self.enabled != enabled {
self.enabled = enabled;
@ -359,8 +322,6 @@ impl Vim {
self.unhook_vim_settings(editor, cx);
}
});
Vim::sync_mode_indicator(cx);
}
fn unhook_vim_settings(&self, editor: &mut Editor, cx: &mut ViewContext<Editor>) {

View File

@ -746,6 +746,10 @@ impl Pane {
_: &CloseAllItems,
cx: &mut ViewContext<Self>,
) -> Option<Task<Result<()>>> {
if self.items.is_empty() {
return None;
}
Some(self.close_items(cx, move |_| true))
}

View File

@ -122,6 +122,7 @@ actions!(
NewFile,
NewWindow,
CloseWindow,
CloseInactiveTabsAndPanes,
AddFolderToProject,
Unfollow,
Save,
@ -240,6 +241,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
cx.add_async_action(Workspace::follow_next_collaborator);
cx.add_async_action(Workspace::close);
cx.add_async_action(Workspace::close_inactive_items_and_panes);
cx.add_global_action(Workspace::close_global);
cx.add_global_action(restart);
cx.add_async_action(Workspace::save_all);
@ -1671,6 +1673,45 @@ impl Workspace {
}
}
pub fn close_inactive_items_and_panes(
&mut self,
_: &CloseInactiveTabsAndPanes,
cx: &mut ViewContext<Self>,
) -> Option<Task<Result<()>>> {
let current_pane = self.active_pane();
let mut tasks = Vec::new();
if let Some(current_pane_close) = current_pane.update(cx, |pane, cx| {
pane.close_inactive_items(&CloseInactiveItems, cx)
}) {
tasks.push(current_pane_close);
};
for pane in self.panes() {
if pane.id() == current_pane.id() {
continue;
}
if let Some(close_pane_items) = pane.update(cx, |pane: &mut Pane, cx| {
pane.close_all_items(&CloseAllItems, cx)
}) {
tasks.push(close_pane_items)
}
}
if tasks.is_empty() {
None
} else {
Some(cx.spawn(|_, _| async move {
for task in tasks {
task.await?
}
Ok(())
}))
}
}
pub fn toggle_dock(&mut self, dock_side: DockPosition, cx: &mut ViewContext<Self>) {
let dock = match dock_side {
DockPosition::Left => &self.left_dock,

View File

@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
version = "0.98.0"
version = "0.99.0"
publish = false
[lib]
@ -128,6 +128,7 @@ tree-sitter-svelte.workspace = true
tree-sitter-racket.workspace = true
tree-sitter-yaml.workspace = true
tree-sitter-lua.workspace = true
tree-sitter-nix.workspace = true
url = "2.2"
urlencoding = "2.1.2"

View File

@ -152,8 +152,10 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<NodeRuntime>) {
tree_sitter_php::language(),
vec![Arc::new(php::IntelephenseLspAdapter::new(node_runtime))],
);
language("elm", tree_sitter_elm::language(), vec![]);
language("glsl", tree_sitter_glsl::language(), vec![]);
language("nix", tree_sitter_nix::language(), vec![]);
}
#[cfg(any(test, feature = "test-support"))]

View File

@ -7,3 +7,4 @@ brackets = [
{ start = "[", end = "]", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
]
collapsed_placeholder = "--[ ... ]--"

View File

@ -0,0 +1,10 @@
(
(comment)* @context
.
(function_declaration
"function" @name
name: (_) @name
(comment)* @collapse
body: (block) @collapse
) @item
)

View File

@ -0,0 +1,11 @@
name = "Nix"
path_suffixes = ["nix"]
line_comment = "# "
block_comment = ["/* ", " */"]
autoclose_before = ";:.,=}])>` \n\t\""
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "<", end = ">", close = true, newline = true },
]

View File

@ -0,0 +1,95 @@
(comment) @comment
[
"if"
"then"
"else"
"let"
"inherit"
"in"
"rec"
"with"
"assert"
"or"
] @keyword
[
(string_expression)
(indented_string_expression)
] @string
[
(path_expression)
(hpath_expression)
(spath_expression)
] @string.special.path
(uri_expression) @link_uri
[
(integer_expression)
(float_expression)
] @number
(interpolation
"${" @punctuation.special
"}" @punctuation.special) @embedded
(escape_sequence) @escape
(dollar_escape) @escape
(function_expression
universal: (identifier) @parameter
)
(formal
name: (identifier) @parameter
"?"? @punctuation.delimiter)
(select_expression
attrpath: (attrpath (identifier)) @property)
(apply_expression
function: [
(variable_expression (identifier)) @function
(select_expression
attrpath: (attrpath
attr: (identifier) @function .))])
(unary_expression
operator: _ @operator)
(binary_expression
operator: _ @operator)
(variable_expression (identifier) @variable)
(binding
attrpath: (attrpath (identifier)) @property)
"=" @operator
[
";"
"."
","
] @punctuation.delimiter
[
"("
")"
"["
"]"
"{"
"}"
] @punctuation.bracket
(identifier) @variable
((identifier) @function.builtin
(#match? @function.builtin "^(__add|__addErrorContext|__all|__any|__appendContext|__attrNames|__attrValues|__bitAnd|__bitOr|__bitXor|__catAttrs|__compareVersions|__concatLists|__concatMap|__concatStringsSep|__deepSeq|__div|__elem|__elemAt|__fetchurl|__filter|__filterSource|__findFile|__foldl'|__fromJSON|__functionArgs|__genList|__genericClosure|__getAttr|__getContext|__getEnv|__hasAttr|__hasContext|__hashFile|__hashString|__head|__intersectAttrs|__isAttrs|__isBool|__isFloat|__isFunction|__isInt|__isList|__isPath|__isString|__langVersion|__length|__lessThan|__listToAttrs|__mapAttrs|__match|__mul|__parseDrvName|__partition|__path|__pathExists|__readDir|__readFile|__replaceStrings|__seq|__sort|__split|__splitVersion|__storePath|__stringLength|__sub|__substring|__tail|__toFile|__toJSON|__toPath|__toXML|__trace|__tryEval|__typeOf|__unsafeDiscardOutputDependency|__unsafeDiscardStringContext|__unsafeGetAttrPos|__valueSize|abort|baseNameOf|derivation|derivationStrict|dirOf|fetchGit|fetchMercurial|fetchTarball|fromTOML|import|isNull|map|placeholder|removeAttrs|scopedImport|throw|toString)$")
(#is-not? local))
((identifier) @variable.builtin
(#match? @variable.builtin "^(__currentSystem|__currentTime|__nixPath|__nixVersion|__storeDir|builtins|false|null|true)$")
(#is-not? local))

View File

@ -9,3 +9,4 @@ brackets = [
{ start = "(", end = ")", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
]
collapsed_placeholder = "/* ... */"

View File

@ -0,0 +1,36 @@
(
(comment)* @context
.
[
(function_definition
"function" @name
name: (_) @name
body: (_
"{" @keep
"}" @keep) @collapse
)
(trait_declaration
"trait" @name
name: (_) @name)
(method_declaration
"function" @name
name: (_) @name
body: (_
"{" @keep
"}" @keep) @collapse
)
(interface_declaration
"interface" @name
name: (_) @name
)
(enum_declaration
"enum" @name
name: (_) @name
)
] @item
)

View File

@ -8,8 +8,6 @@
name: (_) @name
) @item
(method_declaration
"function" @context
name: (_) @name
@ -24,3 +22,8 @@
"enum" @context
name: (_) @name
) @item
(trait_declaration
"trait" @context
name: (_) @name
) @item

View File

@ -10,3 +10,4 @@ brackets = [
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["comment", "string"] },
{ start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] },
]
collapsed_placeholder = "# ..."

View File

@ -0,0 +1,22 @@
(
(comment)* @context
.
[
(module
"module" @name
name: (_) @name)
(method
"def" @name
name: (_) @name
body: (body_statement) @collapse)
(class
"class" @name
name: (_) @name)
(singleton_method
"def" @name
object: (_) @name
"." @name
name: (_) @name
body: (body_statement) @collapse)
] @item
)

View File

@ -45,6 +45,7 @@ use std::{
use sum_tree::Bias;
use terminal_view::{get_working_directory, TerminalSettings, TerminalView};
use util::{
channel::ReleaseChannel,
http::{self, HttpClient},
paths::PathLikeWithPosition,
};
@ -136,7 +137,7 @@ fn main() {
languages.set_executor(cx.background().clone());
languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
let languages = Arc::new(languages);
let node_runtime = NodeRuntime::instance(http.clone(), cx.background().to_owned());
let node_runtime = NodeRuntime::instance(http.clone());
languages::init(languages.clone(), node_runtime.clone());
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));
@ -415,22 +416,41 @@ fn init_panic_hook(app: &App, installation_id: Option<String>) {
panic::set_hook(Box::new(move |info| {
let prior_panic_count = PANIC_COUNT.fetch_add(1, Ordering::SeqCst);
if prior_panic_count > 0 {
std::panic::resume_unwind(Box::new(()));
// Give the panic-ing thread time to write the panic file
loop {
std::thread::yield_now();
}
}
let thread = thread::current();
let thread_name = thread.name().unwrap_or("<unnamed>");
let payload = info
.payload()
.downcast_ref::<&str>()
.map(|s| s.to_string())
.or_else(|| info.payload().downcast_ref::<String>().map(|s| s.clone()))
.unwrap_or_else(|| "Box<Any>".to_string());
if *util::channel::RELEASE_CHANNEL == ReleaseChannel::Dev {
let location = info.location().unwrap();
let backtrace = Backtrace::new();
eprintln!(
"Thread {:?} panicked with {:?} at {}:{}:{}\n{:?}",
thread_name,
payload,
location.file(),
location.line(),
location.column(),
backtrace,
);
std::process::exit(-1);
}
let app_version = ZED_APP_VERSION
.or_else(|| platform.app_version().ok())
.map_or("dev".to_string(), |v| v.to_string());
let thread = thread::current();
let thread = thread.name().unwrap_or("<unnamed>");
let payload = info.payload();
let payload = None
.or_else(|| payload.downcast_ref::<&str>().map(|s| s.to_string()))
.or_else(|| payload.downcast_ref::<String>().map(|s| s.clone()))
.unwrap_or_else(|| "Box<Any>".to_string());
let backtrace = Backtrace::new();
let mut backtrace = backtrace
.frames()
@ -447,7 +467,7 @@ fn init_panic_hook(app: &App, installation_id: Option<String>) {
}
let panic_data = Panic {
thread: thread.into(),
thread: thread_name.into(),
payload: payload.into(),
location_data: info.location().map(|location| LocationData {
file: location.file().into(),
@ -717,7 +737,7 @@ async fn watch_languages(_: Arc<dyn Fs>, _: Arc<LanguageRegistry>) -> Option<()>
}
#[cfg(not(debug_assertions))]
fn watch_file_types(fs: Arc<dyn Fs>, cx: &mut AppContext) {}
fn watch_file_types(_fs: Arc<dyn Fs>, _cx: &mut AppContext) {}
fn connect_to_cli(
server_name: &str,

View File

@ -308,6 +308,7 @@ pub fn initialize_workspace(
);
let active_buffer_language =
cx.add_view(|_| language_selector::ActiveBufferLanguage::new(workspace));
let vim_mode_indicator = cx.add_view(|cx| vim::ModeIndicator::new(cx));
let feedback_button = cx.add_view(|_| {
feedback::deploy_feedback_button::DeployFeedbackButton::new(workspace)
});
@ -319,6 +320,7 @@ pub fn initialize_workspace(
status_bar.add_right_item(feedback_button, cx);
status_bar.add_right_item(copilot, cx);
status_bar.add_right_item(active_buffer_language, cx);
status_bar.add_right_item(vim_mode_indicator, cx);
status_bar.add_right_item(cursor_position, cx);
});
@ -543,7 +545,6 @@ pub fn handle_keymap_file_changes(
reload_keymaps(cx, &keymap_content);
}
})
.detach();
}));
}
}
@ -2362,7 +2363,7 @@ mod tests {
languages.set_executor(cx.background().clone());
let languages = Arc::new(languages);
let http = FakeHttpClient::with_404_response();
let node_runtime = NodeRuntime::instance(http, cx.background().to_owned());
let node_runtime = NodeRuntime::instance(http);
languages::init(languages.clone(), node_runtime);
for name in languages.language_names() {
languages.language_for_name(&name);

29
docs/theme/generating-theme-types.md vendored Normal file
View File

@ -0,0 +1,29 @@
[⬅ Back to Index](../index.md)
# Generating Theme Types
## How to generate theme types:
Run a script
```bash
./script/build-theme-types
```
Types are generated in `styles/src/types/zed.ts`
## How it works:
1. Rust types
The `crates/theme` crate contains the theme types.
The `schemars` crate is used to generate a JSON schema from the theme structs.
Every struct that represents a theme type has a `#[derive(JsonSchema)]` attribute.
A task located at `crates/xtask/src/main.rs` generates the JSON schema from these structs.
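As a rough illustration — a minimal sketch using a hypothetical `StatusBarTheme` struct, not the actual types in `crates/theme` — deriving `JsonSchema` and emitting the schema looks roughly like this:
```rust
use schemars::{schema_for, JsonSchema};

// Hypothetical theme struct; the real theme types live in `crates/theme`.
#[derive(JsonSchema)]
pub struct StatusBarTheme {
    pub height: f32,
    pub icon_width: f32,
}

fn main() {
    // `schema_for!` produces a `RootSchema` that serializes to a JSON schema,
    // which the `styles` package later turns into TypeScript types.
    let schema = schema_for!(StatusBarTheme);
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}
```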
2. TypeScript types
The `npm run build-types` script in the `styles` package generates TypeScript types from the JSON schema and saves them to `styles/src/types/zed.ts`.

View File

@ -1,4 +1,4 @@
[toolchain]
channel = "1.70"
channel = "1.71"
components = [ "rustfmt" ]
targets = [ "x86_64-apple-darwin", "aarch64-apple-darwin", "wasm32-wasi" ]

View File

@ -170,8 +170,8 @@ export default function editor(): any {
line_number: with_opacity(foreground(layer), 0.35),
line_number_active: foreground(layer),
rename_fade: 0.6,
wrap_guide: with_opacity(foreground(layer), 0.1),
active_wrap_guide: with_opacity(foreground(layer), 0.2),
wrap_guide: with_opacity(foreground(layer), 0.05),
active_wrap_guide: with_opacity(foreground(layer), 0.1),
unnecessary_code_fade: 0.5,
selection: theme.players[0],
whitespace: theme.ramps.neutral(0.5).hex(),

View File

@ -44,7 +44,7 @@ export default function status_bar(): any {
icon_spacing: 4,
icon_width: 14,
height: 18,
message: text(layer, "sans"),
message: text(layer, "sans", { size: "xs" }),
icon_color: foreground(layer),
},
state: {