Remove 2 suffix for assistant, journal

Co-authored-by: Mikayla <mikayla@zed.dev>
Max Brunsfeld 2024-01-03 11:14:01 -08:00
parent 789ce8dd75
commit ecbd115542
23 changed files with 909 additions and 6713 deletions

Cargo.lock (generated, 64 changed lines)

@@ -328,46 +328,6 @@ checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16"
[[package]]
name = "assistant"
version = "0.1.0"
dependencies = [
"ai",
"anyhow",
"chrono",
"client",
"collections",
"ctor",
"editor",
"env_logger",
"fs",
"futures 0.3.28",
"gpui",
"indoc",
"isahc",
"language",
"log",
"menu",
"multi_buffer",
"ordered-float 2.10.0",
"parking_lot 0.11.2",
"project",
"rand 0.8.5",
"regex",
"schemars",
"search",
"semantic_index",
"serde",
"serde_json",
"settings",
"smol",
"theme",
"tiktoken-rs",
"util",
"uuid 1.4.1",
"workspace",
]
[[package]]
name = "assistant2"
version = "0.1.0"
dependencies = [
"ai2",
"anyhow",
@@ -4193,24 +4153,6 @@ checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"
[[package]]
name = "journal"
version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
"dirs 4.0.0",
"editor",
"gpui",
"log",
"schemars",
"serde",
"settings",
"shellexpand",
"util",
"workspace",
]
[[package]]
name = "journal2"
version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
@@ -6516,7 +6458,7 @@ dependencies = [
name = "quick_action_bar"
version = "0.1.0"
dependencies = [
"assistant2",
"assistant",
"editor",
"gpui2",
"search",
@@ -10862,7 +10804,7 @@ dependencies = [
"activity_indicator",
"ai2",
"anyhow",
"assistant2",
"assistant",
"async-compression",
"async-recursion 0.3.2",
"async-tar",
@@ -10899,7 +10841,7 @@ dependencies = [
"indexmap 1.9.3",
"install_cli2",
"isahc",
"journal2",
"journal",
"language2",
"language_selector",
"language_tools",


@@ -3,7 +3,6 @@ members = [
"crates/activity_indicator",
"crates/ai",
"crates/assistant",
"crates/assistant2",
"crates/audio",
"crates/audio2",
"crates/auto_update",
@@ -50,7 +49,7 @@ members = [
"crates/install_cli",
"crates/install_cli2",
"crates/journal",
"crates/journal2",
"crates/journal",
"crates/language",
"crates/language2",
"crates/language_selector",


@@ -9,22 +9,23 @@ path = "src/assistant.rs"
doctest = false
[dependencies]
ai = { path = "../ai" }
client = { path = "../client" }
ai = { package = "ai2", path = "../ai2" }
client = { package = "client2", path = "../client2" }
collections = { path = "../collections"}
editor = { path = "../editor" }
fs = { path = "../fs" }
gpui = { path = "../gpui" }
language = { path = "../language" }
menu = { path = "../menu" }
fs = { package = "fs2", path = "../fs2" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" }
menu = { package = "menu2", path = "../menu2" }
multi_buffer = { path = "../multi_buffer" }
project = { package = "project2", path = "../project2" }
search = { path = "../search" }
settings = { path = "../settings" }
theme = { path = "../theme" }
semantic_index = { package = "semantic_index2", path = "../semantic_index2" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
ui = { package = "ui2", path = "../ui2" }
util = { path = "../util" }
workspace = { path = "../workspace" }
semantic_index = { path = "../semantic_index" }
project = { path = "../project" }
uuid.workspace = true
log.workspace = true
@@ -43,9 +44,9 @@ smol.workspace = true
tiktoken-rs.workspace = true
[dev-dependencies]
ai = { package = "ai2", path = "../ai2", features = ["test-support"]}
editor = { path = "../editor", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
ai = { path = "../ai", features = ["test-support"]}
project = { package = "project2", path = "../project2", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
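The substance of this hunk is Cargo's dependency renaming: every dependency now points at a `*2` crate, while the `package` key keeps the old name, so the crate's Rust sources compile without any import changes. A minimal sketch of the effect (the `SharedString` import mirrors the assistant.rs hunk below):

```rust
// With `gpui = { package = "gpui2", path = "../gpui2" }` in Cargo.toml,
// the old import path still works, but it now resolves to the gpui2 crate.
use gpui::SharedString;

fn label() -> SharedString {
    SharedString::from("assistant") // a gpui2 type, reached through the renamed dependency
}
```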


@@ -12,12 +12,28 @@ use chrono::{DateTime, Local};
use collections::HashMap;
use fs::Fs;
use futures::StreamExt;
use gpui::AppContext;
use gpui::{actions, AppContext, SharedString};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::{cmp::Reverse, ffi::OsStr, path::PathBuf, sync::Arc};
use util::paths::CONVERSATIONS_DIR;
actions!(
assistant,
[
NewConversation,
Assist,
Split,
CycleMessageRole,
QuoteSelection,
ToggleFocus,
ResetKey,
InlineAssist,
ToggleIncludeConversation,
ToggleRetrieveContext,
]
);
#[derive(
Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)]
@@ -34,7 +50,7 @@ struct MessageMetadata {
enum MessageStatus {
Pending,
Done,
Error(Arc<str>),
Error(SharedString),
}
#[derive(Serialize, Deserialize)]
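The `actions!` macro above only declares gpui2 action types; handlers are registered elsewhere. As a hedged sketch of how one of these actions could be wired up under the new API, mirroring the `observe_new_views`/`register_action` pattern this same commit introduces in journal.rs below (the function name and handler body are hypothetical):

```rust
use gpui::{AppContext, ViewContext};
use workspace::Workspace;

pub fn register_actions(cx: &mut AppContext) {
    cx.observe_new_views(|workspace: &mut Workspace, _: &mut ViewContext<Workspace>| {
        workspace.register_action(|_workspace, _: &NewConversation, cx| {
            // Hypothetical handler: react to the action, e.g. by focusing the panel.
            cx.notify();
        });
    })
    .detach();
}
```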

File diff suppressed because it is too large


@@ -1,7 +1,8 @@
use anyhow;
use gpui::Pixels;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Setting;
use settings::Settings;
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub enum OpenAIModel {
@@ -51,8 +52,8 @@ pub enum AssistantDockPosition {
pub struct AssistantSettings {
pub button: bool,
pub dock: AssistantDockPosition,
pub default_width: f32,
pub default_height: f32,
pub default_width: Pixels,
pub default_height: Pixels,
pub default_open_ai_model: OpenAIModel,
}
@@ -65,7 +66,7 @@ pub struct AssistantSettingsContent {
pub default_open_ai_model: Option<OpenAIModel>,
}
impl Setting for AssistantSettings {
impl Settings for AssistantSettings {
const KEY: Option<&'static str> = Some("assistant");
type FileContent = AssistantSettingsContent;
@@ -73,7 +74,7 @@ impl Setting for AssistantSettings {
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &gpui::AppContext,
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
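Beyond renaming the trait from `Setting` to `Settings`, the settings2 API moves registration and lookup onto the trait itself. A minimal sketch of the new call sites, assuming the same methods this commit uses for `JournalSettings` further down:

```rust
use settings::Settings; // resolves to the settings2 crate via Cargo's package rename

pub fn init(cx: &mut gpui::AppContext) {
    // Replaces the old free function settings::register::<AssistantSettings>(cx).
    AssistantSettings::register(cx);
}

pub fn default_model(cx: &gpui::AppContext) -> OpenAIModel {
    // Replaces settings::get::<AssistantSettings>(cx).
    AssistantSettings::get_global(cx).default_open_ai_model.clone()
}
```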


@@ -3,7 +3,7 @@ use ai::completion::{CompletionProvider, CompletionRequest};
use anyhow::Result;
use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
use gpui::{Entity, ModelContext, ModelHandle, Task};
use gpui::{EventEmitter, Model, ModelContext, Task};
use language::{Rope, TransactionId};
use multi_buffer;
use std::{cmp, future, ops::Range, sync::Arc};
@@ -21,7 +21,7 @@ pub enum CodegenKind {
pub struct Codegen {
provider: Arc<dyn CompletionProvider>,
buffer: ModelHandle<MultiBuffer>,
buffer: Model<MultiBuffer>,
snapshot: MultiBufferSnapshot,
kind: CodegenKind,
last_equal_ranges: Vec<Range<Anchor>>,
@@ -32,13 +32,11 @@ pub struct Codegen {
_subscription: gpui::Subscription,
}
impl Entity for Codegen {
type Event = Event;
}
impl EventEmitter<Event> for Codegen {}
impl Codegen {
pub fn new(
buffer: ModelHandle<MultiBuffer>,
buffer: Model<MultiBuffer>,
kind: CodegenKind,
provider: Arc<dyn CompletionProvider>,
cx: &mut ModelContext<Self>,
@@ -60,7 +58,7 @@ impl Codegen {
fn handle_buffer_event(
&mut self,
_buffer: ModelHandle<MultiBuffer>,
_buffer: Model<MultiBuffer>,
event: &multi_buffer::Event,
cx: &mut ModelContext<Self>,
) {
@@ -111,13 +109,13 @@ impl Codegen {
.unwrap_or_else(|| snapshot.indent_size_for_line(selection_start.row));
let response = self.provider.complete(prompt);
self.generation = cx.spawn_weak(|this, mut cx| {
self.generation = cx.spawn(|this, mut cx| {
async move {
let generate = async {
let mut edit_start = range.start.to_offset(&snapshot);
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
let diff = cx.background().spawn(async move {
let diff = cx.background_executor().spawn(async move {
let chunks = strip_invalid_spans_from_codeblock(response.await?);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
@@ -183,12 +181,6 @@ impl Codegen {
});
while let Some(hunks) = hunks_rx.next().await {
let this = if let Some(this) = this.upgrade(&cx) {
this
} else {
break;
};
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
@@ -245,7 +237,7 @@
}
cx.notify();
});
})?;
}
diff.await?;
@@ -253,7 +245,6 @@
};
let result = generate.await;
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
this.idle = true;
@@ -262,8 +253,8 @@
}
cx.emit(Event::Finished);
cx.notify();
});
}
})
.ok();
}
});
self.error.take();
@@ -372,7 +363,7 @@ mod tests {
use super::*;
use ai::test::FakeCompletionProvider;
use futures::stream::{self};
use gpui::{executor::Deterministic, TestAppContext};
use gpui::{Context, TestAppContext};
use indoc::indoc;
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
use rand::prelude::*;
@@ -391,12 +382,8 @@ mod tests {
}
#[gpui::test(iterations = 10)]
async fn test_transform_autoindent(
cx: &mut TestAppContext,
mut rng: StdRng,
deterministic: Arc<Deterministic>,
) {
cx.set_global(cx.read(SettingsStore::test));
async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: StdRng) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = indoc! {"
@@ -408,14 +395,14 @@
}
"};
let buffer =
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let range = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.add_model(|cx| {
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Transform { range },
@@ -442,10 +429,10 @@
println!("CHUNK: {:?}", &chunk);
provider.send_completion(chunk);
new_text = suffix;
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
}
provider.finish_completion();
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
@@ -464,9 +451,8 @@
async fn test_autoindent_when_generating_past_indentation(
cx: &mut TestAppContext,
mut rng: StdRng,
deterministic: Arc<Deterministic>,
) {
cx.set_global(cx.read(SettingsStore::test));
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = indoc! {"
@@ -475,14 +461,14 @@
}
"};
let buffer =
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 6))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.add_model(|cx| {
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Generate { position },
@@ -508,10 +494,10 @@
let (chunk, suffix) = new_text.split_at(len);
provider.send_completion(chunk);
new_text = suffix;
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
}
provider.finish_completion();
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
@@ -530,9 +516,8 @@
async fn test_autoindent_when_generating_before_indentation(
cx: &mut TestAppContext,
mut rng: StdRng,
deterministic: Arc<Deterministic>,
) {
cx.set_global(cx.read(SettingsStore::test));
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = concat!(
@@ -541,14 +526,14 @@
"}\n" //
);
let buffer =
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 2))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.add_model(|cx| {
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Generate { position },
@@ -575,10 +560,10 @@
println!("{:?}", &chunk);
provider.send_completion(chunk);
new_text = suffix;
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
}
provider.finish_completion();
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
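The through-line of this file's changes is gpui2's weak-handle plumbing: `cx.spawn` now hands the task a `WeakModel<Self>`, and `update` on that handle returns a `Result` that errors once the model is dropped, so the old explicit `upgrade` checks collapse into `?` and `.ok()`. A condensed sketch of the pattern, assuming a hypothetical helper inside `impl Codegen`:

```rust
impl Codegen {
    // Distilled from the hunks above: the spawned task owns only a weak handle.
    fn finish_generation(&mut self, cx: &mut gpui::ModelContext<Self>) {
        self.generation = cx.spawn(|this, mut cx| async move {
            this.update(&mut cx, |this, cx| {
                this.idle = true;
                cx.emit(Event::Finished);
                cx.notify();
            })
            .ok(); // an Err here just means the Codegen model was dropped mid-task
        });
    }
}
```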


@@ -176,7 +176,7 @@ pub(crate) mod tests {
use super::*;
use std::sync::Arc;
use gpui::AppContext;
use gpui::{AppContext, Context};
use indoc::indoc;
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
use settings::SettingsStore;
@@ -227,7 +228,8 @@
#[gpui::test]
fn test_outline_for_prompt(cx: &mut AppContext) {
cx.set_global(SettingsStore::test(cx));
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language_settings::init(cx);
let text = indoc! {"
struct X {
@@ -253,7 +254,7 @@
}
"};
let buffer =
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let snapshot = buffer.read(cx).snapshot();
assert_eq!(


@@ -1,54 +0,0 @@
[package]
name = "assistant2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/assistant.rs"
doctest = false
[dependencies]
ai = { package = "ai2", path = "../ai2" }
client = { package = "client2", path = "../client2" }
collections = { path = "../collections"}
editor = { path = "../editor" }
fs = { package = "fs2", path = "../fs2" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" }
menu = { package = "menu2", path = "../menu2" }
multi_buffer = { path = "../multi_buffer" }
project = { package = "project2", path = "../project2" }
search = { path = "../search" }
semantic_index = { package = "semantic_index2", path = "../semantic_index2" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
ui = { package = "ui2", path = "../ui2" }
util = { path = "../util" }
workspace = { path = "../workspace" }
uuid.workspace = true
log.workspace = true
anyhow.workspace = true
chrono = { version = "0.4", features = ["serde"] }
futures.workspace = true
indoc.workspace = true
isahc.workspace = true
ordered-float.workspace = true
parking_lot.workspace = true
regex.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
smol.workspace = true
tiktoken-rs.workspace = true
[dev-dependencies]
ai = { package = "ai2", path = "../ai2", features = ["test-support"]}
editor = { path = "../editor", features = ["test-support"] }
project = { package = "project2", path = "../project2", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
log.workspace = true
rand.workspace = true


@@ -1,63 +0,0 @@
> Hello, I'm Nathan Sobo.
I'm currently your sole creator and the leader of Zed Industries.
Our goal is to create a shared mental model, first for ourselves and eventually for the world, that can be used to facilitate human cooperation.
< Hello, Nathan! Welcome back to Zed. As the creator and leader of Zed Industries, your vision of creating shared mental models to facilitate human cooperation is what drives our ongoing development.
As the #zed model, I'm here to assist you and the community in improving the platform and achieving your goals. Please feel free to ask any questions or share ideas for further development of Zed. Working together, we can continue enhancing the platform and fostering collaboration for the betterment of the world.
> I have a bunch of ideas that I'll dump out of my brain and you can help me organize. Today, Zed is a code editor written in Rust whose main selling points are performance, minimal design, and a solid start on real-time collaborative editing that could still use more polish.
We've decided that our next milestone will be open-sourcing Zed on the Zed platform.
< Nathan, it's exciting to hear that you have more ideas to enhance Zed! Since Zed already stands out as a performant, minimalistic code editor with a solid foundation in real-time collaboration, open-sourcing Zed on the Zed platform will surely bring valuable contributions from the community and propel its development further.
Let's organize your ideas and create a plan for your next milestone. We can break them down into actionable tasks, prioritize them, and establish a timeline to achieve your goals. Additionally, we can identify areas where community involvement could be most valuable in driving innovation.
Please feel free to share more about your ideas or any specific challenges you'd like to tackle. I'm here to help you turn these ideas into a cohesive and successful plan for Zed's future!
@nathan> #zed, can you describe the conversation UI as you currently understand it?
< Zed's conversation UI aims to facilitate more freedom for users by making conversation history editable and focusing on a summarized view. Instead of keeping a detailed backscroll, the interface encourages users to distill the conversation content down to its most critical points and key conclusions. This approach ensures that the vital information from the conversation is highlighted and easily accessible, while still maintaining a history that can be linked to when necessary.
Addressing @nate's concerns, this design choice aims to strike a balance between readability, usability, and information preservation. The summarized view maintains the essence of the conversation without being burdened by extensive backscroll. Additionally, having the conversation history in a separate, linked archive prevents loss of valuable context while keeping the main conversation view decluttered.
@nathan> What details around the conversation view are worth exploring?
< @nathan, there are several aspects of the conversation view that are worth exploring and refining:
1. Editing functionality: Ensuring that all participants can edit and refine the conversation summary effectively while preserving the original context.
2. Permissions and access control: Determining how to manage editing rights, user roles, and visibility of the conversation summary and history.
3. Navigation and linking: Implementing an intuitive system for navigating between the summarized conversation and linked history or reference materials.
4. Visualization and layout: Designing the interface to clearly present the summarized information while providing a visually engaging and organized experience for participants.
5. Threaded conversations and branching: Exploring ways to handle sub-conversations, supplementary discussions, or debates within the primary conversation view.
Considering these aspects will ensure our conversation view design is optimized for usability, collaboration, and accessibility in Zed.
@nathan> @nate, which of these would you like to explore?
@nate> 2 feels like it isn't important at the moment, we can explore that later. Let's start with 4, which I think will lead us to discussion 3 and 5.
#zed share your thoughts on the points we need to consider to design a layout and visualization for a conversation view between you (#zed) and multiple people, or between multiple people and multiple bots (you and other bots).
@nathan> Agreed. I'm interested in threading I think more than anything. Or 4 yeah. I think we need to scope the threading conversation. Also, asking #zed to propose the solution... not sure it will be that effective but it's worth a try...
#zed describe a hypothetical conversation UI that meets the description.
< @nate and @nathan, I understand that you'd like to focus on the layout and visualization for a conversation view that can accommodate multiple people and potentially multiple bots as well. Here's a proposal for a conversation UI that meets the requirements:
1. Main conversation area: Display the summarized conversation content in a clean and organized manner, ensuring readability and efficient use of space.
2. Participant avatars and labels: Include user avatars and/or labels to distinguish between users and bots visually, allowing easy identification of each participant's contributions.
3. Active threads indicator: Designate a clear indicator for ongoing threads, which will attract attention to active sub-conversations or related discussions in the conversation view.
4. Dynamic branching: Implement a branching visual structure to present threaded conversations, clearly distinguishing sub-conversations, and enabling users to navigate between them smoothly.
5. Multiple bot interaction support: Facilitate interactions between multiple bots through user-invoked prompts or bot-to-bot communication channels without disrupting the primary conversation flow.
6. Expandable conversation history: Offer a separate, expandable section or panel to access historical content without cluttering the main conversation view.
7. Linking and referencing: Implement a system for hyperlinking relevant information, resources, or previous conversations to the current discussion, making it easy for participants to follow and reference the shared context.
This hypothetical conversation UI aims to address the layout and visualization, threading, and multi-participant aspects of your requirements. Of course, it's just an initial proposal, and we can refine and iterate on it based on your feedback and specific needs.


@@ -1,3 +0,0 @@
Push content to a deeper layer.
A context can have multiple sublayers.
You can enable or disable arbitrary sublayers at arbitrary nesting depths when viewing the document.


@@ -1,129 +0,0 @@
pub mod assistant_panel;
mod assistant_settings;
mod codegen;
mod prompts;
mod streaming_diff;
use ai::providers::open_ai::Role;
use anyhow::Result;
pub use assistant_panel::AssistantPanel;
use assistant_settings::OpenAIModel;
use chrono::{DateTime, Local};
use collections::HashMap;
use fs::Fs;
use futures::StreamExt;
use gpui::{actions, AppContext, SharedString};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::{cmp::Reverse, ffi::OsStr, path::PathBuf, sync::Arc};
use util::paths::CONVERSATIONS_DIR;
actions!(
assistant,
[
NewConversation,
Assist,
Split,
CycleMessageRole,
QuoteSelection,
ToggleFocus,
ResetKey,
InlineAssist,
ToggleIncludeConversation,
ToggleRetrieveContext,
]
);
#[derive(
Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)]
struct MessageId(usize);
#[derive(Clone, Debug, Serialize, Deserialize)]
struct MessageMetadata {
role: Role,
sent_at: DateTime<Local>,
status: MessageStatus,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
enum MessageStatus {
Pending,
Done,
Error(SharedString),
}
#[derive(Serialize, Deserialize)]
struct SavedMessage {
id: MessageId,
start: usize,
}
#[derive(Serialize, Deserialize)]
struct SavedConversation {
id: Option<String>,
zed: String,
version: String,
text: String,
messages: Vec<SavedMessage>,
message_metadata: HashMap<MessageId, MessageMetadata>,
summary: String,
model: OpenAIModel,
}
impl SavedConversation {
const VERSION: &'static str = "0.1.0";
}
struct SavedConversationMetadata {
title: String,
path: PathBuf,
mtime: chrono::DateTime<chrono::Local>,
}
impl SavedConversationMetadata {
pub async fn list(fs: Arc<dyn Fs>) -> Result<Vec<Self>> {
fs.create_dir(&CONVERSATIONS_DIR).await?;
let mut paths = fs.read_dir(&CONVERSATIONS_DIR).await?;
let mut conversations = Vec::<SavedConversationMetadata>::new();
while let Some(path) = paths.next().await {
let path = path?;
if path.extension() != Some(OsStr::new("json")) {
continue;
}
let pattern = r" - \d+.zed.json$";
let re = Regex::new(pattern).unwrap();
let metadata = fs.metadata(&path).await?;
if let Some((file_name, metadata)) = path
.file_name()
.and_then(|name| name.to_str())
.zip(metadata)
{
let title = re.replace(file_name, "");
conversations.push(Self {
title: title.into_owned(),
path,
mtime: metadata.mtime.into(),
});
}
}
conversations.sort_unstable_by_key(|conversation| Reverse(conversation.mtime));
Ok(conversations)
}
}
pub fn init(cx: &mut AppContext) {
assistant_panel::init(cx);
}
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
if std::env::var("RUST_LOG").is_ok() {
env_logger::init();
}
}

File diff suppressed because it is too large


@@ -1,81 +0,0 @@
use anyhow;
use gpui::Pixels;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub enum OpenAIModel {
#[serde(rename = "gpt-3.5-turbo-0613")]
ThreePointFiveTurbo,
#[serde(rename = "gpt-4-0613")]
Four,
#[serde(rename = "gpt-4-1106-preview")]
FourTurbo,
}
impl OpenAIModel {
pub fn full_name(&self) -> &'static str {
match self {
OpenAIModel::ThreePointFiveTurbo => "gpt-3.5-turbo-0613",
OpenAIModel::Four => "gpt-4-0613",
OpenAIModel::FourTurbo => "gpt-4-1106-preview",
}
}
pub fn short_name(&self) -> &'static str {
match self {
OpenAIModel::ThreePointFiveTurbo => "gpt-3.5-turbo",
OpenAIModel::Four => "gpt-4",
OpenAIModel::FourTurbo => "gpt-4-turbo",
}
}
pub fn cycle(&self) -> Self {
match self {
OpenAIModel::ThreePointFiveTurbo => OpenAIModel::Four,
OpenAIModel::Four => OpenAIModel::FourTurbo,
OpenAIModel::FourTurbo => OpenAIModel::ThreePointFiveTurbo,
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
Left,
Right,
Bottom,
}
#[derive(Deserialize, Debug)]
pub struct AssistantSettings {
pub button: bool,
pub dock: AssistantDockPosition,
pub default_width: Pixels,
pub default_height: Pixels,
pub default_open_ai_model: OpenAIModel,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContent {
pub button: Option<bool>,
pub dock: Option<AssistantDockPosition>,
pub default_width: Option<f32>,
pub default_height: Option<f32>,
pub default_open_ai_model: Option<OpenAIModel>,
}
impl Settings for AssistantSettings {
const KEY: Option<&'static str> = Some("assistant");
type FileContent = AssistantSettingsContent;
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
}


@@ -1,688 +0,0 @@
use crate::streaming_diff::{Hunk, StreamingDiff};
use ai::completion::{CompletionProvider, CompletionRequest};
use anyhow::Result;
use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
use gpui::{EventEmitter, Model, ModelContext, Task};
use language::{Rope, TransactionId};
use multi_buffer;
use std::{cmp, future, ops::Range, sync::Arc};
pub enum Event {
Finished,
Undone,
}
#[derive(Clone)]
pub enum CodegenKind {
Transform { range: Range<Anchor> },
Generate { position: Anchor },
}
pub struct Codegen {
provider: Arc<dyn CompletionProvider>,
buffer: Model<MultiBuffer>,
snapshot: MultiBufferSnapshot,
kind: CodegenKind,
last_equal_ranges: Vec<Range<Anchor>>,
transaction_id: Option<TransactionId>,
error: Option<anyhow::Error>,
generation: Task<()>,
idle: bool,
_subscription: gpui::Subscription,
}
impl EventEmitter<Event> for Codegen {}
impl Codegen {
pub fn new(
buffer: Model<MultiBuffer>,
kind: CodegenKind,
provider: Arc<dyn CompletionProvider>,
cx: &mut ModelContext<Self>,
) -> Self {
let snapshot = buffer.read(cx).snapshot(cx);
Self {
provider,
buffer: buffer.clone(),
snapshot,
kind,
last_equal_ranges: Default::default(),
transaction_id: Default::default(),
error: Default::default(),
idle: true,
generation: Task::ready(()),
_subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
}
}
fn handle_buffer_event(
&mut self,
_buffer: Model<MultiBuffer>,
event: &multi_buffer::Event,
cx: &mut ModelContext<Self>,
) {
if let multi_buffer::Event::TransactionUndone { transaction_id } = event {
if self.transaction_id == Some(*transaction_id) {
self.transaction_id = None;
self.generation = Task::ready(());
cx.emit(Event::Undone);
}
}
}
pub fn range(&self) -> Range<Anchor> {
match &self.kind {
CodegenKind::Transform { range } => range.clone(),
CodegenKind::Generate { position } => position.bias_left(&self.snapshot)..*position,
}
}
pub fn kind(&self) -> &CodegenKind {
&self.kind
}
pub fn last_equal_ranges(&self) -> &[Range<Anchor>] {
&self.last_equal_ranges
}
pub fn idle(&self) -> bool {
self.idle
}
pub fn error(&self) -> Option<&anyhow::Error> {
self.error.as_ref()
}
pub fn start(&mut self, prompt: Box<dyn CompletionRequest>, cx: &mut ModelContext<Self>) {
let range = self.range();
let snapshot = self.snapshot.clone();
let selected_text = snapshot
.text_for_range(range.start..range.end)
.collect::<Rope>();
let selection_start = range.start.to_point(&snapshot);
let suggested_line_indent = snapshot
.suggested_indents(selection_start.row..selection_start.row + 1, cx)
.into_values()
.next()
.unwrap_or_else(|| snapshot.indent_size_for_line(selection_start.row));
let response = self.provider.complete(prompt);
self.generation = cx.spawn(|this, mut cx| {
async move {
let generate = async {
let mut edit_start = range.start.to_offset(&snapshot);
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
let diff = cx.background_executor().spawn(async move {
let chunks = strip_invalid_spans_from_codeblock(response.await?);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
while let Some(chunk) = chunks.next().await {
let chunk = chunk?;
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta = line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(selection_start.column as usize);
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
}
if line_indent.is_some() {
hunks_tx.send(diff.push_new(&new_text)).await?;
new_text.clear();
}
if lines.peek().is_some() {
hunks_tx.send(diff.push_new("\n")).await?;
line_indent = None;
first_line = false;
}
}
}
hunks_tx.send(diff.push_new(&new_text)).await?;
hunks_tx.send(diff.finish()).await?;
anyhow::Ok(())
});
while let Some(hunks) = hunks_rx.next().await {
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
let transaction = this.buffer.update(cx, |buffer, cx| {
// Avoid grouping assistant edits with user edits.
buffer.finalize_last_transaction(cx);
buffer.start_transaction(cx);
buffer.edit(
hunks.into_iter().filter_map(|hunk| match hunk {
Hunk::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
Hunk::Remove { len } => {
let edit_end = edit_start + len;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
Hunk::Keep { len } => {
let edit_end = edit_start + len;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
this.last_equal_ranges.push(edit_range);
None
}
}),
None,
cx,
);
buffer.end_transaction(cx)
});
if let Some(transaction) = transaction {
if let Some(first_transaction) = this.transaction_id {
// Group all assistant edits into the first transaction.
this.buffer.update(cx, |buffer, cx| {
buffer.merge_transactions(
transaction,
first_transaction,
cx,
)
});
} else {
this.transaction_id = Some(transaction);
this.buffer.update(cx, |buffer, cx| {
buffer.finalize_last_transaction(cx)
});
}
}
cx.notify();
})?;
}
diff.await?;
anyhow::Ok(())
};
let result = generate.await;
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
this.idle = true;
if let Err(error) = result {
this.error = Some(error);
}
cx.emit(Event::Finished);
cx.notify();
})
.ok();
}
});
self.error.take();
self.idle = false;
cx.notify();
}
pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
if let Some(transaction_id) = self.transaction_id {
self.buffer
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
}
}
}
fn strip_invalid_spans_from_codeblock(
stream: impl Stream<Item = Result<String>>,
) -> impl Stream<Item = Result<String>> {
let mut first_line = true;
let mut buffer = String::new();
let mut starts_with_markdown_codeblock = false;
let mut includes_start_or_end_span = false;
stream.filter_map(move |chunk| {
let chunk = match chunk {
Ok(chunk) => chunk,
Err(err) => return future::ready(Some(Err(err))),
};
buffer.push_str(&chunk);
if buffer.len() > "<|S|".len() && buffer.starts_with("<|S|") {
includes_start_or_end_span = true;
buffer = buffer
.strip_prefix("<|S|>")
.or_else(|| buffer.strip_prefix("<|S|"))
.unwrap_or(&buffer)
.to_string();
} else if buffer.ends_with("|E|>") {
includes_start_or_end_span = true;
} else if buffer.starts_with("<|")
|| buffer.starts_with("<|S")
|| buffer.starts_with("<|S|")
|| buffer.ends_with("|")
|| buffer.ends_with("|E")
|| buffer.ends_with("|E|")
{
return future::ready(None);
}
if first_line {
if buffer == "" || buffer == "`" || buffer == "``" {
return future::ready(None);
} else if buffer.starts_with("```") {
starts_with_markdown_codeblock = true;
if let Some(newline_ix) = buffer.find('\n') {
buffer.replace_range(..newline_ix + 1, "");
first_line = false;
} else {
return future::ready(None);
}
}
}
let mut text = buffer.to_string();
if starts_with_markdown_codeblock {
text = text
.strip_suffix("\n```\n")
.or_else(|| text.strip_suffix("\n```"))
.or_else(|| text.strip_suffix("\n``"))
.or_else(|| text.strip_suffix("\n`"))
.or_else(|| text.strip_suffix('\n'))
.unwrap_or(&text)
.to_string();
}
if includes_start_or_end_span {
text = text
.strip_suffix("|E|>")
.or_else(|| text.strip_suffix("E|>"))
.or_else(|| text.strip_prefix("|>"))
.or_else(|| text.strip_prefix(">"))
.unwrap_or(&text)
.to_string();
};
if text.contains('\n') {
first_line = false;
}
let remainder = buffer.split_off(text.len());
let result = if buffer.is_empty() {
None
} else {
Some(Ok(buffer.clone()))
};
buffer = remainder;
future::ready(result)
})
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use super::*;
use ai::test::FakeCompletionProvider;
use futures::stream::{self};
use gpui::{Context, TestAppContext};
use indoc::indoc;
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
use rand::prelude::*;
use serde::Serialize;
use settings::SettingsStore;
#[derive(Serialize)]
pub struct DummyCompletionRequest {
pub name: String,
}
impl CompletionRequest for DummyCompletionRequest {
fn data(&self) -> serde_json::Result<String> {
serde_json::to_string(self)
}
}
#[gpui::test(iterations = 10)]
async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: StdRng) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = indoc! {"
fn main() {
let x = 0;
for _ in 0..10 {
x += 1;
}
}
"};
let buffer =
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let range = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Transform { range },
provider.clone(),
cx,
)
});
let request = Box::new(DummyCompletionRequest {
name: "test".to_string(),
});
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
let mut new_text = concat!(
" let mut x = 0;\n",
" while x < 10 {\n",
" x += 1;\n",
" }",
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
println!("CHUNK: {:?}", &chunk);
provider.send_completion(chunk);
new_text = suffix;
cx.background_executor.run_until_parked();
}
provider.finish_completion();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
indoc! {"
fn main() {
let mut x = 0;
while x < 10 {
x += 1;
}
}
"}
);
}
#[gpui::test(iterations = 10)]
async fn test_autoindent_when_generating_past_indentation(
cx: &mut TestAppContext,
mut rng: StdRng,
) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = indoc! {"
fn main() {
le
}
"};
let buffer =
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 6))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Generate { position },
provider.clone(),
cx,
)
});
let request = Box::new(DummyCompletionRequest {
name: "test".to_string(),
});
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
let mut new_text = concat!(
"t mut x = 0;\n",
"while x < 10 {\n",
" x += 1;\n",
"}", //
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
provider.send_completion(chunk);
new_text = suffix;
cx.background_executor.run_until_parked();
}
provider.finish_completion();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
indoc! {"
fn main() {
let mut x = 0;
while x < 10 {
x += 1;
}
}
"}
);
}
#[gpui::test(iterations = 10)]
async fn test_autoindent_when_generating_before_indentation(
cx: &mut TestAppContext,
mut rng: StdRng,
) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = concat!(
"fn main() {\n",
" \n",
"}\n" //
);
let buffer =
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 2))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Generate { position },
provider.clone(),
cx,
)
});
let request = Box::new(DummyCompletionRequest {
name: "test".to_string(),
});
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
let mut new_text = concat!(
"let mut x = 0;\n",
"while x < 10 {\n",
" x += 1;\n",
"}", //
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
println!("{:?}", &chunk);
provider.send_completion(chunk);
new_text = suffix;
cx.background_executor.run_until_parked();
}
provider.finish_completion();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
indoc! {"
fn main() {
let mut x = 0;
while x < 10 {
x += 1;
}
}
"}
);
}
#[gpui::test]
async fn test_strip_invalid_spans_from_codeblock() {
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("Lorem ipsum dolor", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum dolor"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum dolor"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor\n```", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum dolor"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor\n```\n", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum dolor"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks(
"```html\n```js\nLorem ipsum dolor\n```\n```",
2
))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"```js\nLorem ipsum dolor\n```"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("``\nLorem ipsum dolor\n```", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"``\nLorem ipsum dolor\n```"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("<|S|Lorem ipsum|E|>", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("<|S|>Lorem ipsum", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\n<|S|>Lorem ipsum\n```", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\n<|S|Lorem ipsum|E|>\n```", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum"
);
fn chunks(text: &str, size: usize) -> impl Stream<Item = Result<String>> {
stream::iter(
text.chars()
.collect::<Vec<_>>()
.chunks(size)
.map(|chunk| Ok(chunk.iter().collect::<String>()))
.collect::<Vec<_>>(),
)
}
}
fn rust_lang() -> Language {
Language::new(
LanguageConfig {
name: "Rust".into(),
path_suffixes: vec!["rs".to_string()],
..Default::default()
},
Some(tree_sitter_rust::language()),
)
.with_indents_query(
r#"
(call_expression) @indent
(field_expression) @indent
(_ "(" ")" @end) @indent
(_ "{" "}" @end) @indent
"#,
)
.unwrap()
}
}


@@ -1,389 +0,0 @@
use ai::models::LanguageModel;
use ai::prompts::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
use ai::prompts::file_context::FileContext;
use ai::prompts::generate::GenerateInlineContent;
use ai::prompts::preamble::EngineerPreamble;
use ai::prompts::repository_context::{PromptCodeSnippet, RepositoryContext};
use ai::providers::open_ai::OpenAILanguageModel;
use language::{BufferSnapshot, OffsetRangeExt, ToOffset};
use std::cmp::{self, Reverse};
use std::ops::Range;
use std::sync::Arc;
#[allow(dead_code)]
fn summarize(buffer: &BufferSnapshot, selected_range: Range<impl ToOffset>) -> String {
#[derive(Debug)]
struct Match {
collapse: Range<usize>,
keep: Vec<Range<usize>>,
}
let selected_range = selected_range.to_offset(buffer);
let mut ts_matches = buffer.matches(0..buffer.len(), |grammar| {
Some(&grammar.embedding_config.as_ref()?.query)
});
let configs = ts_matches
.grammars()
.iter()
.map(|g| g.embedding_config.as_ref().unwrap())
.collect::<Vec<_>>();
let mut matches = Vec::new();
while let Some(mat) = ts_matches.peek() {
let config = &configs[mat.grammar_index];
if let Some(collapse) = mat.captures.iter().find_map(|cap| {
if Some(cap.index) == config.collapse_capture_ix {
Some(cap.node.byte_range())
} else {
None
}
}) {
let mut keep = Vec::new();
for capture in mat.captures.iter() {
if Some(capture.index) == config.keep_capture_ix {
keep.push(capture.node.byte_range());
} else {
continue;
}
}
ts_matches.advance();
matches.push(Match { collapse, keep });
} else {
ts_matches.advance();
}
}
matches.sort_unstable_by_key(|mat| (mat.collapse.start, Reverse(mat.collapse.end)));
let mut matches = matches.into_iter().peekable();
let mut summary = String::new();
let mut offset = 0;
let mut flushed_selection = false;
while let Some(mat) = matches.next() {
// Keep extending the collapsed range if the next match surrounds
// the current one.
while let Some(next_mat) = matches.peek() {
if mat.collapse.start <= next_mat.collapse.start
&& mat.collapse.end >= next_mat.collapse.end
{
matches.next().unwrap();
} else {
break;
}
}
if offset > mat.collapse.start {
// Skip collapsed nodes that have already been summarized.
offset = cmp::max(offset, mat.collapse.end);
continue;
}
if offset <= selected_range.start && selected_range.start <= mat.collapse.end {
if !flushed_selection {
// The collapsed node ends after the selection starts, so we'll flush the selection first.
summary.extend(buffer.text_for_range(offset..selected_range.start));
summary.push_str("<|S|");
if selected_range.end == selected_range.start {
summary.push_str(">");
} else {
summary.extend(buffer.text_for_range(selected_range.clone()));
summary.push_str("|E|>");
}
offset = selected_range.end;
flushed_selection = true;
}
// If the selection intersects the collapsed node, we won't collapse it.
if selected_range.end >= mat.collapse.start {
continue;
}
}
summary.extend(buffer.text_for_range(offset..mat.collapse.start));
for keep in mat.keep {
summary.extend(buffer.text_for_range(keep));
}
offset = mat.collapse.end;
}
// Flush selection if we haven't already done so.
if !flushed_selection && offset <= selected_range.start {
summary.extend(buffer.text_for_range(offset..selected_range.start));
summary.push_str("<|S|");
if selected_range.end == selected_range.start {
summary.push_str(">");
} else {
summary.extend(buffer.text_for_range(selected_range.clone()));
summary.push_str("|E|>");
}
offset = selected_range.end;
}
summary.extend(buffer.text_for_range(offset..buffer.len()));
summary
}
pub fn generate_content_prompt(
user_prompt: String,
language_name: Option<&str>,
buffer: BufferSnapshot,
range: Range<usize>,
search_results: Vec<PromptCodeSnippet>,
model: &str,
project_name: Option<String>,
) -> anyhow::Result<String> {
// Using new Prompt Templates
let openai_model: Arc<dyn LanguageModel> = Arc::new(OpenAILanguageModel::load(model));
let lang_name = if let Some(language_name) = language_name {
Some(language_name.to_string())
} else {
None
};
let args = PromptArguments {
model: openai_model,
language_name: lang_name.clone(),
project_name,
snippets: search_results.clone(),
reserved_tokens: 1000,
buffer: Some(buffer),
selected_range: Some(range),
user_prompt: Some(user_prompt.clone()),
};
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
(PromptPriority::Mandatory, Box::new(EngineerPreamble {})),
(
PromptPriority::Ordered { order: 1 },
Box::new(RepositoryContext {}),
),
(
PromptPriority::Ordered { order: 0 },
Box::new(FileContext {}),
),
(
PromptPriority::Mandatory,
Box::new(GenerateInlineContent {}),
),
];
let chain = PromptChain::new(args, templates);
let (prompt, _) = chain.generate(true)?;
anyhow::Ok(prompt)
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use std::sync::Arc;
use gpui::{AppContext, Context};
use indoc::indoc;
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
use settings::SettingsStore;
pub(crate) fn rust_lang() -> Language {
Language::new(
LanguageConfig {
name: "Rust".into(),
path_suffixes: vec!["rs".to_string()],
..Default::default()
},
Some(tree_sitter_rust::language()),
)
.with_embedding_query(
r#"
(
[(line_comment) (attribute_item)]* @context
.
[
(struct_item
name: (_) @name)
(enum_item
name: (_) @name)
(impl_item
trait: (_)? @name
"for"? @name
type: (_) @name)
(trait_item
name: (_) @name)
(function_item
name: (_) @name
body: (block
"{" @keep
"}" @keep) @collapse)
(macro_definition
name: (_) @name)
] @item
)
"#,
)
.unwrap()
}
#[gpui::test]
fn test_outline_for_prompt(cx: &mut AppContext) {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language_settings::init(cx);
let text = indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {
let a = 1;
let b = 2;
Self { a, b }
}
pub fn a(&self, param: bool) -> usize {
self.a
}
pub fn b(&self) -> usize {
self.b
}
}
"};
let buffer =
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
summarize(&snapshot, Point::new(1, 4)..Point::new(1, 4)),
indoc! {"
struct X {
<|S|>a: usize,
b: usize,
}
impl X {
fn new() -> Self {}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
"}
);
assert_eq!(
summarize(&snapshot, Point::new(8, 12)..Point::new(8, 14)),
indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {
let <|S|a |E|>= 1;
let b = 2;
Self { a, b }
}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
"}
);
assert_eq!(
summarize(&snapshot, Point::new(6, 0)..Point::new(6, 0)),
indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
<|S|>
fn new() -> Self {}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
"}
);
assert_eq!(
summarize(&snapshot, Point::new(21, 0)..Point::new(21, 0)),
indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
<|S|>"}
);
// Ensure nested functions get collapsed properly.
let text = indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {
let a = 1;
let b = 2;
Self { a, b }
}
pub fn a(&self, param: bool) -> usize {
let a = 30;
fn nested() -> usize {
3
}
self.a + nested()
}
pub fn b(&self) -> usize {
self.b
}
}
"};
buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
summarize(&snapshot, Point::new(0, 0)..Point::new(0, 0)),
indoc! {"
<|S|>struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
"}
);
}
}


@@ -1,293 +0,0 @@
use collections::HashMap;
use ordered_float::OrderedFloat;
use std::{
cmp,
fmt::{self, Debug},
ops::Range,
};
struct Matrix {
cells: Vec<f64>,
rows: usize,
cols: usize,
}
impl Matrix {
fn new() -> Self {
Self {
cells: Vec::new(),
rows: 0,
cols: 0,
}
}
fn resize(&mut self, rows: usize, cols: usize) {
self.cells.resize(rows * cols, 0.);
self.rows = rows;
self.cols = cols;
}
fn get(&self, row: usize, col: usize) -> f64 {
if row >= self.rows {
panic!("row out of bounds")
}
if col >= self.cols {
panic!("col out of bounds")
}
self.cells[col * self.rows + row]
}
fn set(&mut self, row: usize, col: usize, value: f64) {
if row >= self.rows {
panic!("row out of bounds")
}
if col >= self.cols {
panic!("col out of bounds")
}
self.cells[col * self.rows + row] = value;
}
}
impl Debug for Matrix {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f)?;
for i in 0..self.rows {
for j in 0..self.cols {
write!(f, "{:5}", self.get(i, j))?;
}
writeln!(f)?;
}
Ok(())
}
}
#[derive(Debug)]
pub enum Hunk {
Insert { text: String },
Remove { len: usize },
Keep { len: usize },
}
pub struct StreamingDiff {
old: Vec<char>,
new: Vec<char>,
scores: Matrix,
old_text_ix: usize,
new_text_ix: usize,
equal_runs: HashMap<(usize, usize), u32>,
}
impl StreamingDiff {
const INSERTION_SCORE: f64 = -1.;
const DELETION_SCORE: f64 = -20.;
const EQUALITY_BASE: f64 = 1.8;
const MAX_EQUALITY_EXPONENT: i32 = 16;
pub fn new(old: String) -> Self {
let old = old.chars().collect::<Vec<_>>();
let mut scores = Matrix::new();
scores.resize(old.len() + 1, 1);
for i in 0..=old.len() {
scores.set(i, 0, i as f64 * Self::DELETION_SCORE);
}
Self {
old,
new: Vec::new(),
scores,
old_text_ix: 0,
new_text_ix: 0,
equal_runs: Default::default(),
}
}
pub fn push_new(&mut self, text: &str) -> Vec<Hunk> {
self.new.extend(text.chars());
self.scores.resize(self.old.len() + 1, self.new.len() + 1);
for j in self.new_text_ix + 1..=self.new.len() {
self.scores.set(0, j, j as f64 * Self::INSERTION_SCORE);
for i in 1..=self.old.len() {
let insertion_score = self.scores.get(i, j - 1) + Self::INSERTION_SCORE;
let deletion_score = self.scores.get(i - 1, j) + Self::DELETION_SCORE;
let equality_score = if self.old[i - 1] == self.new[j - 1] {
let mut equal_run = self.equal_runs.get(&(i - 1, j - 1)).copied().unwrap_or(0);
equal_run += 1;
self.equal_runs.insert((i, j), equal_run);
let exponent = cmp::min(equal_run as i32 / 4, Self::MAX_EQUALITY_EXPONENT);
self.scores.get(i - 1, j - 1) + Self::EQUALITY_BASE.powi(exponent)
} else {
f64::NEG_INFINITY
};
let score = insertion_score.max(deletion_score).max(equality_score);
self.scores.set(i, j, score);
}
}
let mut max_score = f64::NEG_INFINITY;
let mut next_old_text_ix = self.old_text_ix;
let next_new_text_ix = self.new.len();
for i in self.old_text_ix..=self.old.len() {
let score = self.scores.get(i, next_new_text_ix);
if score > max_score {
max_score = score;
next_old_text_ix = i;
}
}
let hunks = self.backtrack(next_old_text_ix, next_new_text_ix);
self.old_text_ix = next_old_text_ix;
self.new_text_ix = next_new_text_ix;
hunks
}
fn backtrack(&self, old_text_ix: usize, new_text_ix: usize) -> Vec<Hunk> {
let mut pending_insert: Option<Range<usize>> = None;
let mut hunks = Vec::new();
let mut i = old_text_ix;
let mut j = new_text_ix;
while (i, j) != (self.old_text_ix, self.new_text_ix) {
let insertion_score = if j > self.new_text_ix {
Some((i, j - 1))
} else {
None
};
let deletion_score = if i > self.old_text_ix {
Some((i - 1, j))
} else {
None
};
let equality_score = if i > self.old_text_ix && j > self.new_text_ix {
if self.old[i - 1] == self.new[j - 1] {
Some((i - 1, j - 1))
} else {
None
}
} else {
None
};
let (prev_i, prev_j) = [insertion_score, deletion_score, equality_score]
.iter()
.max_by_key(|cell| cell.map(|(i, j)| OrderedFloat(self.scores.get(i, j))))
.unwrap()
.unwrap();
if prev_i == i && prev_j == j - 1 {
if let Some(pending_insert) = pending_insert.as_mut() {
pending_insert.start = prev_j;
} else {
pending_insert = Some(prev_j..j);
}
} else {
if let Some(range) = pending_insert.take() {
hunks.push(Hunk::Insert {
text: self.new[range].iter().collect(),
});
}
let char_len = self.old[i - 1].len_utf8();
if prev_i == i - 1 && prev_j == j {
if let Some(Hunk::Remove { len }) = hunks.last_mut() {
*len += char_len;
} else {
hunks.push(Hunk::Remove { len: char_len })
}
} else {
if let Some(Hunk::Keep { len }) = hunks.last_mut() {
*len += char_len;
} else {
hunks.push(Hunk::Keep { len: char_len })
}
}
}
i = prev_i;
j = prev_j;
}
if let Some(range) = pending_insert.take() {
hunks.push(Hunk::Insert {
text: self.new[range].iter().collect(),
});
}
hunks.reverse();
hunks
}
pub fn finish(self) -> Vec<Hunk> {
self.backtrack(self.old.len(), self.new.len())
}
}
#[cfg(test)]
mod tests {
use std::env;
use super::*;
use rand::prelude::*;
#[gpui::test(iterations = 100)]
fn test_random_diffs(mut rng: StdRng) {
let old_text_len = env::var("OLD_TEXT_LEN")
.map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable"))
.unwrap_or(10);
let new_text_len = env::var("NEW_TEXT_LEN")
.map(|i| i.parse().expect("invalid `NEW_TEXT_LEN` variable"))
.unwrap_or(10);
let old = util::RandomCharIter::new(&mut rng)
.take(old_text_len)
.collect::<String>();
log::info!("old text: {:?}", old);
let mut diff = StreamingDiff::new(old.clone());
let mut hunks = Vec::new();
let mut new_len = 0;
let mut new = String::new();
while new_len < new_text_len {
let new_chunk_len = rng.gen_range(1..=new_text_len - new_len);
let new_chunk = util::RandomCharIter::new(&mut rng)
.take(new_chunk_len)
.collect::<String>();
log::info!("new chunk: {:?}", new_chunk);
new_len += new_chunk_len;
new.push_str(&new_chunk);
let new_hunks = diff.push_new(&new_chunk);
log::info!("hunks: {:?}", new_hunks);
hunks.extend(new_hunks);
}
let final_hunks = diff.finish();
log::info!("final hunks: {:?}", final_hunks);
hunks.extend(final_hunks);
log::info!("new text: {:?}", new);
let mut old_ix = 0;
let mut new_ix = 0;
let mut patched = String::new();
for hunk in hunks {
match hunk {
Hunk::Keep { len } => {
assert_eq!(&old[old_ix..old_ix + len], &new[new_ix..new_ix + len]);
patched.push_str(&old[old_ix..old_ix + len]);
old_ix += len;
new_ix += len;
}
Hunk::Remove { len } => {
old_ix += len;
}
Hunk::Insert { text } => {
assert_eq!(text, &new[new_ix..new_ix + text.len()]);
patched.push_str(&text);
new_ix += text.len();
}
}
}
assert_eq!(patched, new);
}
}
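Since streaming_diff.rs appears in this commit only as a deletion, a short usage sketch of the public API defined above may help: feed the new text in as it streams, collect the hunks each push yields, then flush the remainder with `finish`.

```rust
use crate::streaming_diff::{Hunk, StreamingDiff};

// Illustrative only: diff `old` against replacement text that arrives in chunks.
fn diff_streaming(old: &str, chunks: &[&str]) -> Vec<Hunk> {
    let mut diff = StreamingDiff::new(old.to_string());
    let mut hunks = Vec::new();
    for chunk in chunks {
        hunks.extend(diff.push_new(chunk)); // hunks that are already stable
    }
    hunks.extend(diff.finish()); // consumes the diff, emitting the rest
    hunks
}
```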


@@ -10,10 +10,10 @@ doctest = false
[dependencies]
editor = { path = "../editor" }
gpui = { path = "../gpui" }
gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
workspace = { path = "../workspace" }
settings = { path = "../settings" }
settings2 = { path = "../settings2" }
anyhow.workspace = true
chrono = "0.4"

crates/journal/src/journal.rs
View File

@@ -1,15 +1,15 @@
use anyhow::Result;
use chrono::{Datelike, Local, NaiveTime, Timelike};
use editor::{scroll::autoscroll::Autoscroll, Editor};
use gpui::{actions, AppContext};
use gpui::{actions, AppContext, ViewContext};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings2::Settings;
use std::{
fs::OpenOptions,
path::{Path, PathBuf},
sync::Arc,
};
use workspace::AppState;
use workspace::{AppState, Workspace};
actions!(journal, [NewJournalEntry]);
@@ -36,24 +36,35 @@ pub enum HourFormat {
Hour24,
}
impl settings::Setting for JournalSettings {
impl settings2::Settings for JournalSettings {
const KEY: Option<&'static str> = Some("journal");
type FileContent = Self;
fn load(default_value: &Self, user_values: &[&Self], _: &AppContext) -> Result<Self> {
Self::load_via_json_merge(default_value, user_values)
fn load(
defaults: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
Self::load_via_json_merge(defaults, user_values)
}
}
pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
settings::register::<JournalSettings>(cx);
pub fn init(_: Arc<AppState>, cx: &mut AppContext) {
JournalSettings::register(cx);
cx.add_global_action(move |_: &NewJournalEntry, cx| new_journal_entry(app_state.clone(), cx));
cx.observe_new_views(
|workspace: &mut Workspace, _cx: &mut ViewContext<Workspace>| {
workspace.register_action(|workspace, _: &NewJournalEntry, cx| {
new_journal_entry(workspace.app_state().clone(), cx);
});
},
)
.detach();
}
pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut AppContext) {
let settings = settings::get::<JournalSettings>(cx);
let settings = JournalSettings::get_global(cx);
let journal_dir = match journal_dir(settings.path.as_ref().unwrap()) {
Some(journal_dir) => journal_dir,
None => {
@@ -68,9 +79,9 @@ pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut AppContext) {
.join(format!("{:02}", now.month()));
let entry_path = month_dir.join(format!("{:02}.md", now.day()));
let now = now.time();
let entry_heading = heading_entry(now, &settings.hour_format);
let _entry_heading = heading_entry(now, &settings.hour_format);
let create_entry = cx.background().spawn(async move {
let create_entry = cx.background_executor().spawn(async move {
std::fs::create_dir_all(month_dir)?;
OpenOptions::new()
.create(true)
@@ -82,30 +93,31 @@ pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut AppContext) {
cx.spawn(|mut cx| async move {
let (journal_dir, entry_path) = create_entry.await?;
let (workspace, _) = cx
.update(|cx| workspace::open_paths(&[journal_dir], &app_state, None, cx))
.update(|cx| workspace::open_paths(&[journal_dir], &app_state, None, cx))?
.await?;
let opened = workspace
let _opened = workspace
.update(&mut cx, |workspace, cx| {
workspace.open_paths(vec![entry_path], true, cx)
})?
.await;
if let Some(Some(Ok(item))) = opened.first() {
if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {
editor.update(&mut cx, |editor, cx| {
let len = editor.buffer().read(cx).len(cx);
editor.change_selections(Some(Autoscroll::center()), cx, |s| {
s.select_ranges([len..len])
});
if len > 0 {
editor.insert("\n\n", cx);
}
editor.insert(&entry_heading, cx);
editor.insert("\n\n", cx);
})?;
}
}
// todo!("editor")
// if let Some(Some(Ok(item))) = opened.first() {
// if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {
// editor.update(&mut cx, |editor, cx| {
// let len = editor.buffer().read(cx).len(cx);
// editor.change_selections(Some(Autoscroll::center()), cx, |s| {
// s.select_ranges([len..len])
// });
// if len > 0 {
// editor.insert("\n\n", cx);
// }
// editor.insert(&entry_heading, cx);
// editor.insert("\n\n", cx);
// })?;
// }
// }
anyhow::Ok(())
})

crates/journal2/Cargo.toml
View File

@@ -1,27 +0,0 @@
[package]
name = "journal2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/journal2.rs"
doctest = false
[dependencies]
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
workspace = { path = "../workspace" }
settings2 = { path = "../settings2" }
anyhow.workspace = true
chrono = "0.4"
dirs = "4.0"
serde.workspace = true
schemars.workspace = true
log.workspace = true
shellexpand = "2.1.0"
[dev-dependencies]
editor = { path = "../editor", features = ["test-support"] }

crates/journal2/src/journal2.rs
View File

@@ -1,181 +0,0 @@
use anyhow::Result;
use chrono::{Datelike, Local, NaiveTime, Timelike};
use gpui::{actions, AppContext, ViewContext};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings2::Settings;
use std::{
fs::OpenOptions,
path::{Path, PathBuf},
sync::Arc,
};
use workspace::{AppState, Workspace};
actions!(journal, [NewJournalEntry]);
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
pub struct JournalSettings {
pub path: Option<String>,
pub hour_format: Option<HourFormat>,
}
impl Default for JournalSettings {
fn default() -> Self {
Self {
path: Some("~".into()),
hour_format: Some(Default::default()),
}
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum HourFormat {
#[default]
Hour12,
Hour24,
}
impl settings2::Settings for JournalSettings {
const KEY: Option<&'static str> = Some("journal");
type FileContent = Self;
fn load(
defaults: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
Self::load_via_json_merge(defaults, user_values)
}
}
pub fn init(_: Arc<AppState>, cx: &mut AppContext) {
JournalSettings::register(cx);
cx.observe_new_views(
|workspace: &mut Workspace, _cx: &mut ViewContext<Workspace>| {
workspace.register_action(|workspace, _: &NewJournalEntry, cx| {
new_journal_entry(workspace.app_state().clone(), cx);
});
},
)
.detach();
}
pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut AppContext) {
let settings = JournalSettings::get_global(cx);
let journal_dir = match journal_dir(settings.path.as_ref().unwrap()) {
Some(journal_dir) => journal_dir,
None => {
log::error!("Can't determine journal directory");
return;
}
};
let now = Local::now();
let month_dir = journal_dir
.join(format!("{:02}", now.year()))
.join(format!("{:02}", now.month()));
let entry_path = month_dir.join(format!("{:02}.md", now.day()));
let now = now.time();
let _entry_heading = heading_entry(now, &settings.hour_format);
let create_entry = cx.background_executor().spawn(async move {
std::fs::create_dir_all(month_dir)?;
OpenOptions::new()
.create(true)
.write(true)
.open(&entry_path)?;
Ok::<_, std::io::Error>((journal_dir, entry_path))
});
cx.spawn(|mut cx| async move {
let (journal_dir, entry_path) = create_entry.await?;
let (workspace, _) = cx
.update(|cx| workspace::open_paths(&[journal_dir], &app_state, None, cx))?
.await?;
let _opened = workspace
.update(&mut cx, |workspace, cx| {
workspace.open_paths(vec![entry_path], true, cx)
})?
.await;
// todo!("editor")
// if let Some(Some(Ok(item))) = opened.first() {
// if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {
// editor.update(&mut cx, |editor, cx| {
// let len = editor.buffer().read(cx).len(cx);
// editor.change_selections(Some(Autoscroll::center()), cx, |s| {
// s.select_ranges([len..len])
// });
// if len > 0 {
// editor.insert("\n\n", cx);
// }
// editor.insert(&entry_heading, cx);
// editor.insert("\n\n", cx);
// })?;
// }
// }
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
fn journal_dir(path: &str) -> Option<PathBuf> {
let expanded_journal_dir = shellexpand::full(path) // TODO: handle this better
.ok()
.map(|dir| Path::new(&dir.to_string()).to_path_buf().join("journal"));
return expanded_journal_dir;
}
fn heading_entry(now: NaiveTime, hour_format: &Option<HourFormat>) -> String {
match hour_format {
Some(HourFormat::Hour24) => {
let hour = now.hour();
format!("# {}:{:02}", hour, now.minute())
}
_ => {
let (pm, hour) = now.hour12();
let am_or_pm = if pm { "PM" } else { "AM" };
format!("# {}:{:02} {}", hour, now.minute(), am_or_pm)
}
}
}
#[cfg(test)]
mod tests {
mod heading_entry_tests {
use super::super::*;
#[test]
fn test_heading_entry_defaults_to_hour_12() {
let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
let actual_heading_entry = heading_entry(naive_time, &None);
let expected_heading_entry = "# 3:00 PM";
assert_eq!(actual_heading_entry, expected_heading_entry);
}
#[test]
fn test_heading_entry_is_hour_12() {
let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
let actual_heading_entry = heading_entry(naive_time, &Some(HourFormat::Hour12));
let expected_heading_entry = "# 3:00 PM";
assert_eq!(actual_heading_entry, expected_heading_entry);
}
#[test]
fn test_heading_entry_is_hour_24() {
let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
let actual_heading_entry = heading_entry(naive_time, &Some(HourFormat::Hour24));
let expected_heading_entry = "# 15:00";
assert_eq!(actual_heading_entry, expected_heading_entry);
}
}
}
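Since the surviving `journal` crate keeps the same `JournalSettings` shape this deleted listing shows, the listing doubles as a reference for the user-facing JSON. A minimal deserialization sketch under that assumption, relying only on the `serde` derives above (the test name is illustrative):
#[test]
fn journal_settings_json_shape() {
    // Settings live under the "journal" key (see `KEY` above); variant
    // names follow `rename_all = "snake_case"`, e.g. "hour24".
    let user_json = r#"{ "path": "~/notes", "hour_format": "hour24" }"#;
    let parsed: JournalSettings = serde_json::from_str(user_json).unwrap();
    assert_eq!(parsed.path.as_deref(), Some("~/notes"));
    assert!(matches!(parsed.hour_format, Some(HourFormat::Hour24)));
}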

crates/quick_action_bar/Cargo.toml
View File

@@ -9,7 +9,7 @@ path = "src/quick_action_bar.rs"
doctest = false
[dependencies]
assistant = { package = "assistant2", path = "../assistant2" }
assistant = { path = "../assistant" }
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
search = { path = "../search" }

crates/zed/Cargo.toml
View File

@@ -42,7 +42,7 @@ fsevent = { path = "../fsevent" }
go_to_line = { path = "../go_to_line" }
gpui = { package = "gpui2", path = "../gpui2" }
install_cli = { package = "install_cli2", path = "../install_cli2" }
journal = { package = "journal2", path = "../journal2" }
journal = { path = "../journal" }
language = { package = "language2", path = "../language2" }
language_selector = { path = "../language_selector" }
lsp = { package = "lsp2", path = "../lsp2" }
@@ -50,7 +50,7 @@ menu = { package = "menu2", path = "../menu2" }
language_tools = { path = "../language_tools" }
node_runtime = { path = "../node_runtime" }
notifications = { package = "notifications2", path = "../notifications2" }
assistant = { package = "assistant2", path = "../assistant2" }
assistant = { path = "../assistant" }
outline = { path = "../outline" }
# plugin_runtime = { path = "../plugin_runtime",optional = true }
project = { package = "project2", path = "../project2" }