text: Wrap BufferId into a newtype

Piotr Osiewicz 2024-01-28 21:05:08 +01:00
parent 941e838be9
commit 5ab715aac9
34 changed files with 687 additions and 383 deletions
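
The hunks below replace raw u64 buffer ids with a BufferId newtype from the text crate. The type's definition is not part of this excerpt. Purely as an illustration of the shape the call sites imply (a fallible BufferId::new(u64) plus conversions back to u64), a minimal sketch under those assumptions could look like the following; the NonZeroU64 representation and the error message are guesses, not the actual implementation.

use std::num::NonZeroU64;

use anyhow::{anyhow, Result};

// Hypothetical sketch of the BufferId newtype; the real type in the `text`
// crate may differ. NonZeroU64 is an assumption that makes id 0 invalid,
// which is why the call sites below go through a fallible `new`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct BufferId(NonZeroU64);

impl BufferId {
    // Fallible constructor, matching the `BufferId::new(..)?` and
    // `BufferId::new(..).unwrap()` usage in the diff.
    pub fn new(id: u64) -> Result<Self> {
        NonZeroU64::new(id)
            .map(Self)
            .ok_or_else(|| anyhow!("invalid buffer id: 0"))
    }
}

// Conversion back to the wire representation, matching `buffer_id.into()`
// and `u64::from(buffer_id)` in the hunks below.
impl From<BufferId> for u64 {
    fn from(id: BufferId) -> Self {
        id.0.get()
    }
}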

View File

@@ -35,7 +35,7 @@ use gpui::{
 StatefulInteractiveElement, Styled, Subscription, Task, TextStyle, UniformListScrollHandle,
 View, ViewContext, VisualContext, WeakModel, WeakView, WhiteSpace, WindowContext,
 };
-use language::{language_settings::SoftWrap, Buffer, LanguageRegistry, ToOffset as _};
+use language::{language_settings::SoftWrap, Buffer, BufferId, LanguageRegistry, ToOffset as _};
 use project::Project;
 use search::{buffer_search::DivRegistrar, BufferSearchBar};
 use semantic_index::{SemanticIndex, SemanticIndexStatus};
@@ -1414,7 +1414,7 @@ impl Conversation {
 ) -> Self {
 let markdown = language_registry.language_for_name("Markdown");
 let buffer = cx.new_model(|cx| {
-let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "");
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "");
 buffer.set_language_registry(language_registry);
 cx.spawn(|buffer, mut cx| async move {
 let markdown = markdown.await?;
@@ -1515,7 +1515,11 @@ impl Conversation {
 let mut message_anchors = Vec::new();
 let mut next_message_id = MessageId(0);
 let buffer = cx.new_model(|cx| {
-let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), saved_conversation.text);
+let mut buffer = Buffer::new(
+    0,
+    BufferId::new(cx.entity_id().as_u64()).unwrap(),
+    saved_conversation.text,
+);
 for message in saved_conversation.messages {
 message_anchors.push(MessageAnchor {
 id: message.id,

View File

@@ -365,7 +365,9 @@ mod tests {
 use futures::stream::{self};
 use gpui::{Context, TestAppContext};
 use indoc::indoc;
-use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
+use language::{
+    language_settings, tree_sitter_rust, Buffer, BufferId, Language, LanguageConfig, Point,
+};
 use rand::prelude::*;
 use serde::Serialize;
 use settings::SettingsStore;
@@ -394,8 +396,9 @@ mod tests {
 }
 }
 "};
-let buffer =
-    cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let range = buffer.read_with(cx, |buffer, cx| {
 let snapshot = buffer.snapshot(cx);
@@ -460,8 +463,9 @@
 le
 }
 "};
-let buffer =
-    cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let position = buffer.read_with(cx, |buffer, cx| {
 let snapshot = buffer.snapshot(cx);
@@ -525,8 +529,9 @@
 " \n",
 "}\n" //
 );
-let buffer =
-    cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let position = buffer.read_with(cx, |buffer, cx| {
 let snapshot = buffer.snapshot(cx);

View File

@@ -178,7 +178,9 @@ pub(crate) mod tests {
 use gpui::{AppContext, Context};
 use indoc::indoc;
-use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
+use language::{
+    language_settings, tree_sitter_rust, Buffer, BufferId, Language, LanguageConfig, Point,
+};
 use settings::SettingsStore;
 pub(crate) fn rust_lang() -> Language {
@@ -253,8 +255,9 @@ pub(crate) mod tests {
 }
 }
 "};
-let buffer =
-    cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
+});
 let snapshot = buffer.read(cx).snapshot();
 assert_eq!(

View File

@@ -9,6 +9,7 @@ use rpc::{
 TypedEnvelope,
 };
 use std::{sync::Arc, time::Duration};
+use text::BufferId;
 use util::ResultExt;
 pub const ACKNOWLEDGE_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(250);
@@ -53,7 +54,7 @@ impl ChannelBuffer {
 channel_id: channel.id,
 })
 .await?;
+let buffer_id = BufferId::new(response.buffer_id)?;
 let base_text = response.base_text;
 let operations = response
 .operations
@@ -63,12 +64,7 @@ impl ChannelBuffer {
 let buffer = cx.new_model(|cx| {
 let capability = channel_store.read(cx).channel_capability(channel.id);
-language::Buffer::remote(
-    response.buffer_id,
-    response.replica_id as u16,
-    capability,
-    base_text,
-)
+language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text)
 })?;
 buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??;
@@ -107,7 +103,7 @@ impl ChannelBuffer {
 }
 }
-pub fn remote_id(&self, cx: &AppContext) -> u64 {
+pub fn remote_id(&self, cx: &AppContext) -> BufferId {
 self.buffer.read(cx).remote_id()
 }
@@ -210,7 +206,7 @@ impl ChannelBuffer {
 pub fn acknowledge_buffer_version(&mut self, cx: &mut ModelContext<'_, ChannelBuffer>) {
 let buffer = self.buffer.read(cx);
 let version = buffer.version();
-let buffer_id = buffer.remote_id();
+let buffer_id = buffer.remote_id().into();
 let client = self.client.clone();
 let epoch = self.epoch();

View File

@@ -693,7 +693,7 @@ impl Database {
 return Ok(());
 }
-let mut text_buffer = text::Buffer::new(0, 0, base_text);
+let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text);
 text_buffer
 .apply_ops(operations.into_iter().filter_map(operation_from_wire))
 .unwrap();

View File

@@ -67,7 +67,7 @@ async fn test_channel_buffers(db: &Arc<Database>) {
 .await
 .unwrap();
-let mut buffer_a = Buffer::new(0, 0, "".to_string());
+let mut buffer_a = Buffer::new(0, text::BufferId::new(0).unwrap(), "".to_string());
 let mut operations = Vec::new();
 operations.push(buffer_a.edit([(0..0, "hello world")]));
 operations.push(buffer_a.edit([(5..5, ", cruel")]));
@@ -90,7 +90,11 @@ async fn test_channel_buffers(db: &Arc<Database>) {
 .await
 .unwrap();
-let mut buffer_b = Buffer::new(0, 0, buffer_response_b.base_text);
+let mut buffer_b = Buffer::new(
+    0,
+    text::BufferId::new(0).unwrap(),
+    buffer_response_b.base_text,
+);
 buffer_b
 .apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
 let operation = proto::deserialize_operation(operation).unwrap();
@@ -223,7 +227,11 @@ async fn test_channel_buffers_last_operations(db: &Database) {
 .unwrap(),
 );
-text_buffers.push(Buffer::new(0, 0, "".to_string()));
+text_buffers.push(Buffer::new(
+    0,
+    text::BufferId::new(1).unwrap(),
+    "".to_string(),
+));
 }
 let operations = db
@@ -270,7 +278,7 @@ async fn test_channel_buffers_last_operations(db: &Database) {
 db.join_channel_buffer(buffers[1].channel_id, user_id, connection_id)
 .await
 .unwrap();
-text_buffers[1] = Buffer::new(1, 0, "def".to_string());
+text_buffers[1] = Buffer::new(1, text::BufferId::new(1).unwrap(), "def".to_string());
 update_buffer(
 buffers[1].channel_id,
 user_id,

View File

@@ -1023,12 +1023,15 @@ async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
 mod tests {
 use super::*;
 use gpui::TestAppContext;
+use language::BufferId;
 #[gpui::test(iterations = 10)]
 async fn test_buffer_management(cx: &mut TestAppContext) {
 let (copilot, mut lsp) = Copilot::fake(cx);
-let buffer_1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "Hello"));
+let buffer_1 = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "Hello")
+});
 let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64())
 .parse()
 .unwrap();
@@ -1046,7 +1049,13 @@ mod tests {
 }
 );
-let buffer_2 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "Goodbye"));
+let buffer_2 = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        "Goodbye",
+    )
+});
 let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64())
 .parse()
 .unwrap();
@@ -1235,7 +1244,7 @@ mod tests {
 fn buffer_reloaded(
 &self,
-_: u64,
+_: BufferId,
 _: &clock::Global,
 _: language::RopeFingerprint,
 _: language::LineEnding,

View File

@@ -1007,6 +1007,7 @@ pub mod tests {
 use settings::SettingsStore;
 use smol::stream::StreamExt;
 use std::{env, sync::Arc};
+use text::BufferId;
 use theme::{LoadThemes, SyntaxTheme};
 use util::test::{marked_text_ranges, sample_text};
 use Bias::*;
@@ -1467,7 +1468,8 @@ pub mod tests {
 cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap())));
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx)
+Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(language, cx)
 });
 cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
@@ -1553,7 +1555,8 @@ pub mod tests {
 cx.update(|cx| init_test(cx, |_| {}));
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx)
+Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(language, cx)
 });
 cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
@@ -1620,7 +1623,8 @@ pub mod tests {
 let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false);
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx)
+Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(language, cx)
 });
 cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;

View File

@@ -109,7 +109,7 @@ use std::{
 };
 pub use sum_tree::Bias;
 use sum_tree::TreeMap;
-use text::{OffsetUtf16, Rope};
+use text::{BufferId, OffsetUtf16, Rope};
 use theme::{
 observe_buffer_font_size_adjustment, ActiveTheme, PlayerColor, StatusColors, SyntaxTheme,
 ThemeColors, ThemeSettings,
@@ -1289,19 +1289,37 @@ impl InlayHintRefreshReason {
 impl Editor {
 pub fn single_line(cx: &mut ViewContext<Self>) -> Self {
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), String::new()));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        String::new(),
+    )
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 Self::new(EditorMode::SingleLine, buffer, None, cx)
 }
 pub fn multi_line(cx: &mut ViewContext<Self>) -> Self {
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), String::new()));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        String::new(),
+    )
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 Self::new(EditorMode::Full, buffer, None, cx)
 }
 pub fn auto_height(max_lines: usize, cx: &mut ViewContext<Self>) -> Self {
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), String::new()));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        String::new(),
+    )
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 Self::new(EditorMode::AutoHeight { max_lines }, buffer, None, cx)
 }

View File

@@ -39,7 +39,8 @@ fn test_edit_events(cx: &mut TestAppContext) {
 init_test(cx, |_| {});
 let buffer = cx.new_model(|cx| {
-let mut buffer = language::Buffer::new(0, cx.entity_id().as_u64(), "123456");
+let mut buffer =
+    language::Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "123456");
 buffer.set_group_interval(Duration::from_secs(1));
 buffer
 });
@@ -154,7 +155,9 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) {
 init_test(cx, |_| {});
 let mut now = Instant::now();
-let buffer = cx.new_model(|cx| language::Buffer::new(0, cx.entity_id().as_u64(), "123456"));
+let buffer = cx.new_model(|cx| {
+    language::Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "123456")
+});
 let group_interval = buffer.update(cx, |buffer, _| buffer.transaction_group_interval());
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let editor = cx.add_window(|cx| build_editor(buffer.clone(), cx));
@@ -225,7 +228,8 @@ fn test_ime_composition(cx: &mut TestAppContext) {
 init_test(cx, |_| {});
 let buffer = cx.new_model(|cx| {
-let mut buffer = language::Buffer::new(0, cx.entity_id().as_u64(), "abcde");
+let mut buffer =
+    language::Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "abcde");
 // Ensure automatic grouping doesn't occur.
 buffer.set_group_interval(Duration::ZERO);
 buffer
@@ -629,7 +633,7 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
 // Ensure we don't panic when navigation data contains invalid anchors *and* points.
 let mut invalid_anchor = editor.scroll_manager.anchor().anchor;
-invalid_anchor.text_anchor.buffer_id = Some(999);
+invalid_anchor.text_anchor.buffer_id = BufferId::new(999).ok();
 let invalid_point = Point::new(9999, 0);
 editor.navigate(
 Box::new(NavigationData {
@@ -2342,11 +2346,20 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
 ));
 let toml_buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), "a = 1\nb = 2\n").with_language(toml_language, cx)
+Buffer::new(
+    0,
+    BufferId::new(cx.entity_id().as_u64()).unwrap(),
+    "a = 1\nb = 2\n",
+)
+.with_language(toml_language, cx)
 });
 let rust_buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), "const c: usize = 3;\n")
-    .with_language(rust_language, cx)
+Buffer::new(
+    0,
+    BufferId::new(cx.entity_id().as_u64()).unwrap(),
+    "const c: usize = 3;\n",
+)
+.with_language(rust_language, cx)
 });
 let multibuffer = cx.new_model(|cx| {
 let mut multibuffer = MultiBuffer::new(0, ReadWrite);
@@ -3984,8 +3997,10 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
 "#
 .unindent();
-let buffer = cx
-    .new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+        .with_language(language, cx)
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
@@ -4149,8 +4164,10 @@ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
 let text = "fn a() {}";
-let buffer = cx
-    .new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+        .with_language(language, cx)
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
 editor
@@ -4713,8 +4730,10 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
 "#
 .unindent();
-let buffer = cx
-    .new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+        .with_language(language, cx)
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
 view.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@@ -4862,8 +4881,10 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) {
 "#
 .unindent();
-let buffer = cx
-    .new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+        .with_language(language, cx)
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
 editor
@@ -6095,7 +6116,13 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
 fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
 init_test(cx, |_| {});
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(3, 4, 'a')));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        sample_text(3, 4, 'a'),
+    )
+});
 let multibuffer = cx.new_model(|cx| {
 let mut multibuffer = MultiBuffer::new(0, ReadWrite);
 multibuffer.push_excerpts(
@@ -6179,7 +6206,13 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
 primary: None,
 }
 });
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), initial_text));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        initial_text,
+    )
+});
 let multibuffer = cx.new_model(|cx| {
 let mut multibuffer = MultiBuffer::new(0, ReadWrite);
 multibuffer.push_excerpts(buffer, excerpt_ranges, cx);
@@ -6237,7 +6270,13 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
 fn test_refresh_selections(cx: &mut TestAppContext) {
 init_test(cx, |_| {});
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(3, 4, 'a')));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        sample_text(3, 4, 'a'),
+    )
+});
 let mut excerpt1_id = None;
 let multibuffer = cx.new_model(|cx| {
 let mut multibuffer = MultiBuffer::new(0, ReadWrite);
@@ -6322,7 +6361,13 @@ fn test_refresh_selections(cx: &mut TestAppContext) {
 fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
 init_test(cx, |_| {});
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(3, 4, 'a')));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        sample_text(3, 4, 'a'),
+    )
+});
 let mut excerpt1_id = None;
 let multibuffer = cx.new_model(|cx| {
 let mut multibuffer = MultiBuffer::new(0, ReadWrite);
@@ -6417,8 +6462,10 @@ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
 "{{} }\n", //
 );
-let buffer = cx
-    .new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+        .with_language(language, cx)
+});
 let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
 let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
 view.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@@ -7498,8 +7545,20 @@ async fn test_copilot_multibuffer(executor: BackgroundExecutor, cx: &mut gpui::T
 let (copilot, copilot_lsp) = Copilot::fake(cx);
 _ = cx.update(|cx| cx.set_global(copilot));
-let buffer_1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "a = 1\nb = 2\n"));
-let buffer_2 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "c = 3\nd = 4\n"));
+let buffer_1 = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        "a = 1\nb = 2\n",
+    )
+});
+let buffer_2 = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        "c = 3\nd = 4\n",
+    )
+});
 let multibuffer = cx.new_model(|cx| {
 let mut multibuffer = MultiBuffer::new(0, ReadWrite);
 multibuffer.push_excerpts(

View File

@@ -28,7 +28,7 @@ use collections::{hash_map, HashMap, HashSet};
 use language::language_settings::InlayHintSettings;
 use smol::lock::Semaphore;
 use sum_tree::Bias;
-use text::{ToOffset, ToPoint};
+use text::{BufferId, ToOffset, ToPoint};
 use util::post_inc;
 pub struct InlayHintCache {
@@ -50,7 +50,7 @@ struct TasksForRanges {
 struct CachedExcerptHints {
 version: usize,
 buffer_version: Global,
-buffer_id: u64,
+buffer_id: BufferId,
 ordered_hints: Vec<InlayId>,
 hints_by_id: HashMap<InlayId, InlayHint>,
 }
@@ -93,7 +93,7 @@ struct ExcerptHintsUpdate {
 #[derive(Debug, Clone, Copy)]
 struct ExcerptQuery {
-buffer_id: u64,
+buffer_id: BufferId,
 excerpt_id: ExcerptId,
 cache_version: usize,
 invalidate: InvalidationStrategy,
@@ -553,7 +553,7 @@ impl InlayHintCache {
 /// Queries a certain hint from the cache for extra data via the LSP <a href="https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#inlayHint_resolve">resolve</a> request.
 pub(super) fn spawn_hint_resolve(
 &self,
-buffer_id: u64,
+buffer_id: BufferId,
 excerpt_id: ExcerptId,
 id: InlayId,
 cx: &mut ViewContext<'_, Editor>,

View File

@@ -30,7 +30,7 @@ use std::{
 path::{Path, PathBuf},
 sync::Arc,
 };
-use text::Selection;
+use text::{BufferId, Selection};
 use theme::Theme;
 use ui::{h_flex, prelude::*, Label};
 use util::{paths::PathExt, paths::FILE_ROW_COLUMN_DELIMITER, ResultExt, TryFutureExt};
@@ -73,12 +73,14 @@ impl FollowableItem for Editor {
 .iter()
 .map(|excerpt| excerpt.buffer_id)
 .collect::<HashSet<_>>();
-let buffers = project.update(cx, |project, cx| {
-    buffer_ids
-        .iter()
-        .map(|id| project.open_buffer_by_id(*id, cx))
-        .collect::<Vec<_>>()
-});
+let buffers = project
+    .update(cx, |project, cx| {
+        buffer_ids
+            .iter()
+            .map(|id| BufferId::new(*id).map(|id| project.open_buffer_by_id(id, cx)))
+            .collect::<Result<Vec<_>>>()
+    })
+    .ok()?;
 let pane = pane.downgrade();
 Some(cx.spawn(|mut cx| async move {
@@ -109,10 +111,12 @@ impl FollowableItem for Editor {
 MultiBuffer::new(replica_id, project.read(cx).capability());
 let mut excerpts = state.excerpts.into_iter().peekable();
 while let Some(excerpt) = excerpts.peek() {
-let buffer_id = excerpt.buffer_id;
+let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else {
+    continue;
+};
 let buffer_excerpts = iter::from_fn(|| {
 let excerpt = excerpts.peek()?;
-(excerpt.buffer_id == buffer_id)
+(excerpt.buffer_id == u64::from(buffer_id))
 .then(|| excerpts.next().unwrap())
 });
 let buffer =
@@ -189,7 +193,7 @@ impl FollowableItem for Editor {
 .excerpts()
 .map(|(id, buffer, range)| proto::Excerpt {
 id: id.to_proto(),
-buffer_id: buffer.remote_id(),
+buffer_id: buffer.remote_id().into(),
 context_start: Some(serialize_text_anchor(&range.context.start)),
 context_end: Some(serialize_text_anchor(&range.context.end)),
 primary_start: range
@@ -336,9 +340,9 @@ async fn update_editor_from_message(
 let inserted_excerpt_buffers = project.update(cx, |project, cx| {
 inserted_excerpt_buffer_ids
 .into_iter()
-.map(|id| project.open_buffer_by_id(id, cx))
-.collect::<Vec<_>>()
-})?;
+.map(|id| BufferId::new(id).map(|id| project.open_buffer_by_id(id, cx)))
+.collect::<Result<Vec<_>>>()
+})??;
 let _inserted_excerpt_buffers = try_join_all(inserted_excerpt_buffers).await?;
 // Update the editor's excerpts.
@@ -362,7 +366,7 @@ async fn update_editor_from_message(
 let Some(previous_excerpt_id) = insertion.previous_excerpt_id else {
 continue;
 };
-let buffer_id = excerpt.buffer_id;
+let buffer_id = BufferId::new(excerpt.buffer_id)?;
 let Some(buffer) = project.read(cx).buffer_for_id(buffer_id) else {
 continue;
 };
@@ -370,7 +374,7 @@ async fn update_editor_from_message(
 let adjacent_excerpts = iter::from_fn(|| {
 let insertion = insertions.peek()?;
 if insertion.previous_excerpt_id.is_none()
-&& insertion.excerpt.as_ref()?.buffer_id == buffer_id
+&& insertion.excerpt.as_ref()?.buffer_id == u64::from(buffer_id)
 {
 insertions.next()?.excerpt
 } else {
@@ -395,8 +399,9 @@ async fn update_editor_from_message(
 }
 multibuffer.remove_excerpts(removed_excerpt_ids, cx);
-});
-})?;
+Result::<(), anyhow::Error>::Ok(())
+})
+})??;
 // Deserialize the editor state.
 let (selections, pending_selection, scroll_top_anchor) = this.update(cx, |editor, cx| {
@@ -450,13 +455,13 @@ async fn update_editor_from_message(
 }
 fn serialize_excerpt(
-buffer_id: u64,
+buffer_id: BufferId,
 id: &ExcerptId,
 range: &ExcerptRange<language::Anchor>,
 ) -> Option<proto::Excerpt> {
 Some(proto::Excerpt {
 id: id.to_proto(),
-buffer_id,
+buffer_id: buffer_id.into(),
 context_start: Some(serialize_text_anchor(&range.context.start)),
 context_end: Some(serialize_text_anchor(&range.context.end)),
 primary_start: range

View File

@@ -522,6 +522,7 @@ mod tests {
 use language::Capability;
 use project::Project;
 use settings::SettingsStore;
+use text::BufferId;
 use util::post_inc;
 #[gpui::test]
@@ -822,8 +823,13 @@ mod tests {
 let font = font("Helvetica");
-let buffer =
-    cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abc\ndefg\nhijkl\nmn"));
+let buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        "abc\ndefg\nhijkl\nmn",
+    )
+});
 let multibuffer = cx.new_model(|cx| {
 let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite);
 multibuffer.push_excerpts(

View File

@@ -314,7 +314,7 @@ mod tests {
 use std::assert_eq;
 use super::*;
-use text::Buffer;
+use text::{Buffer, BufferId};
 use unindent::Unindent as _;
 #[test]
@@ -333,7 +333,7 @@ mod tests {
 "
 .unindent();
-let mut buffer = Buffer::new(0, 0, buffer_text);
+let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
 let mut diff = BufferDiff::new();
 smol::block_on(diff.update(&diff_base, &buffer));
 assert_hunks(
@@ -393,7 +393,7 @@ mod tests {
 "
 .unindent();
-let buffer = Buffer::new(0, 0, buffer_text);
+let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
 let mut diff = BufferDiff::new();
 smol::block_on(diff.update(&diff_base, &buffer));
 assert_eq!(diff.hunks(&buffer).count(), 8);

View File

@@ -15,7 +15,7 @@ use crate::{
 },
 CodeLabel, LanguageScope, Outline,
 };
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context, Result};
 pub use clock::ReplicaId;
 use futures::channel::oneshot;
 use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
@@ -44,10 +44,10 @@ use sum_tree::TreeMap;
 use text::operation_queue::OperationQueue;
 use text::*;
 pub use text::{
-Anchor, Bias, Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Edit, OffsetRangeExt,
-OffsetUtf16, Patch, Point, PointUtf16, Rope, RopeFingerprint, Selection, SelectionGoal,
-Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
-Transaction, TransactionId, Unclipped,
+Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
+OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, RopeFingerprint, Selection,
+SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
+ToPointUtf16, Transaction, TransactionId, Unclipped,
 };
 use theme::SyntaxTheme;
 #[cfg(any(test, feature = "test-support"))]
@@ -396,7 +396,7 @@ pub trait LocalFile: File {
 /// Called when the buffer is reloaded from disk.
 fn buffer_reloaded(
 &self,
-buffer_id: u64,
+buffer_id: BufferId,
 version: &clock::Global,
 fingerprint: RopeFingerprint,
 line_ending: LineEnding,
@@ -517,7 +517,7 @@ pub enum CharKind {
 impl Buffer {
 /// Create a new buffer with the given base text.
-pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
+pub fn new<T: Into<String>>(replica_id: ReplicaId, id: BufferId, base_text: T) -> Self {
 Self::build(
 TextBuffer::new(replica_id, id, base_text.into()),
 None,
@@ -528,7 +528,7 @@ impl Buffer {
 /// Create a new buffer that is a replica of a remote buffer.
 pub fn remote(
-remote_id: u64,
+remote_id: BufferId,
 replica_id: ReplicaId,
 capability: Capability,
 base_text: String,
@@ -549,7 +549,9 @@ impl Buffer {
 message: proto::BufferState,
 file: Option<Arc<dyn File>>,
 ) -> Result<Self> {
-let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
+let buffer_id = BufferId::new(message.id)
+    .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
+let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
 let mut this = Self::build(
 buffer,
 message.diff_base.map(|text| text.into_boxed_str().into()),
@@ -572,7 +574,7 @@ impl Buffer {
 /// Serialize the buffer's state to a protobuf message.
 pub fn to_proto(&self) -> proto::BufferState {
 proto::BufferState {
-id: self.remote_id(),
+id: self.remote_id().into(),
 file: self.file.as_ref().map(|f| f.to_proto()),
 base_text: self.base_text().to_string(),
 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),

View File

@@ -18,7 +18,7 @@ use std::{
 time::{Duration, Instant},
 };
 use text::network::Network;
-use text::LineEnding;
+use text::{BufferId, LineEnding};
 use text::{Point, ToPoint};
 use unindent::Unindent as _;
 use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};
@@ -43,8 +43,12 @@ fn test_line_endings(cx: &mut gpui::AppContext) {
 init_settings(cx, |_| {});
 cx.new_model(|cx| {
-let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "one\r\ntwo\rthree")
-    .with_language(Arc::new(rust_lang()), cx);
+let mut buffer = Buffer::new(
+    0,
+    BufferId::new(cx.entity_id().as_u64()).unwrap(),
+    "one\r\ntwo\rthree",
+)
+.with_language(Arc::new(rust_lang()), cx);
 assert_eq!(buffer.text(), "one\ntwo\nthree");
 assert_eq!(buffer.line_ending(), LineEnding::Windows);
@@ -138,8 +142,10 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
 let buffer_1_events = Arc::new(Mutex::new(Vec::new()));
 let buffer_2_events = Arc::new(Mutex::new(Vec::new()));
-let buffer1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcdef"));
-let buffer2 = cx.new_model(|cx| Buffer::new(1, cx.entity_id().as_u64(), "abcdef"));
+let buffer1 = cx
+    .new_model(|cx| Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "abcdef"));
+let buffer2 = cx
+    .new_model(|cx| Buffer::new(1, BufferId::new(cx.entity_id().as_u64()).unwrap(), "abcdef"));
 let buffer1_ops = Arc::new(Mutex::new(Vec::new()));
 buffer1.update(cx, {
 let buffer1_ops = buffer1_ops.clone();
@@ -218,7 +224,8 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
 #[gpui::test]
 async fn test_apply_diff(cx: &mut TestAppContext) {
 let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
+let buffer =
+    cx.new_model(|cx| Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text));
 let anchor = buffer.update(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
 let text = "a\nccc\ndddd\nffffff\n";
@@ -250,7 +257,8 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
 ]
 .join("\n");
-let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
+let buffer =
+    cx.new_model(|cx| Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text));
 // Spawn a task to format the buffer's whitespace.
 // Pause so that the foratting task starts running.
@@ -315,7 +323,8 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
 async fn test_reparse(cx: &mut gpui::TestAppContext) {
 let text = "fn a() {}";
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
+Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx)
 });
 // Wait for the initial text to parse
@@ -443,8 +452,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
 #[gpui::test]
 async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
 let buffer = cx.new_model(|cx| {
-let mut buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), "{}").with_language(Arc::new(rust_lang()), cx);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "{}")
+    .with_language(Arc::new(rust_lang()), cx);
 buffer.set_sync_parse_timeout(Duration::ZERO);
 buffer
 });
@@ -493,7 +502,8 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
 .unindent();
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
+Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx)
 });
 let outline = buffer
 .update(cx, |buffer, _| buffer.snapshot().outline(None))
@@ -579,7 +589,8 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
 .unindent();
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
+Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx)
 });
 let outline = buffer
 .update(cx, |buffer, _| buffer.snapshot().outline(None))
@@ -617,7 +628,8 @@ async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
 .unindent();
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(language), cx)
+Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(language), cx)
 });
 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
@@ -661,7 +673,8 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
 .unindent();
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
+Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx)
 });
 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
@@ -883,8 +896,8 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &
 fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
 cx.new_model(|cx| {
 let text = "fn a() { b(|c| {}) }";
-let buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
+let buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx);
 let snapshot = buffer.snapshot();
 assert_eq!(
@@ -924,8 +937,8 @@ fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
 cx.new_model(|cx| {
 let text = "fn a() {}";
-let mut buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx);
 buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
 assert_eq!(buffer.text(), "fn a() {\n \n}");
@@ -967,8 +980,8 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
 cx.new_model(|cx| {
 let text = "fn a() {}";
-let mut buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx);
 buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
 assert_eq!(buffer.text(), "fn a() {\n\t\n}");
@@ -1007,10 +1020,9 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
 init_settings(cx, |_| {});
 cx.new_model(|cx| {
-let entity_id = cx.entity_id();
 let mut buffer = Buffer::new(
 0,
-entity_id.as_u64(),
+BufferId::new(cx.entity_id().as_u64()).unwrap(),
 "
 fn a() {
 c;
@@ -1085,7 +1097,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
 let mut buffer = Buffer::new(
 0,
-cx.entity_id().as_u64(),
+BufferId::new(cx.entity_id().as_u64()).unwrap(),
 "
 fn a() {
 b();
@@ -1150,7 +1162,7 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap
 cx.new_model(|cx| {
 let mut buffer = Buffer::new(
 0,
-cx.entity_id().as_u64(),
+BufferId::new(cx.entity_id().as_u64()).unwrap(),
 "
 fn a() {
 i
@@ -1212,7 +1224,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
 cx.new_model(|cx| {
 let mut buffer = Buffer::new(
 0,
-cx.entity_id().as_u64(),
+BufferId::new(cx.entity_id().as_u64()).unwrap(),
 "
 fn a() {}
 "
@@ -1268,8 +1280,8 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
 cx.new_model(|cx| {
 let text = "a\nb";
-let mut buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx);
 buffer.edit(
 [(0..1, "\n"), (2..3, "\n")],
 Some(AutoindentMode::EachLine),
@@ -1295,8 +1307,8 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
 "
 .unindent();
-let mut buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx);
 buffer.edit(
 [(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
 Some(AutoindentMode::EachLine),
@@ -1333,8 +1345,8 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
 }
 "#
 .unindent();
-let mut buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx);
 // When this text was copied, both of the quotation marks were at the same
 // indent level, but the indentation of the first line was not included in
@@ -1419,8 +1431,8 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
 }
 "#
 .unindent();
-let mut buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(Arc::new(rust_lang()), cx);
 // The original indent columns are not known, so this text is
 // auto-indented in a block as if the first line was copied in
@@ -1499,17 +1511,18 @@ fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
 "
 .unindent();
-let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text).with_language(
-    Arc::new(Language::new(
-        LanguageConfig {
-            name: "Markdown".into(),
-            auto_indent_using_last_non_empty_line: false,
-            ..Default::default()
-        },
-        Some(tree_sitter_json::language()),
-    )),
-    cx,
-);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
+    .with_language(
+        Arc::new(Language::new(
+            LanguageConfig {
+                name: "Markdown".into(),
+                auto_indent_using_last_non_empty_line: false,
+                ..Default::default()
+            },
+            Some(tree_sitter_json::language()),
+        )),
+        cx,
+    );
 buffer.edit(
 [(Point::new(3, 0)..Point::new(3, 0), "\n")],
 Some(AutoindentMode::EachLine),
@@ -1575,7 +1588,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
 false,
 );
-let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text);
 buffer.set_language_registry(language_registry);
 buffer.set_language(Some(html_language), cx);
 buffer.edit(
@@ -1611,8 +1624,8 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
 });
 cx.new_model(|cx| {
-let mut buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), "").with_language(Arc::new(ruby_lang()), cx);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "")
+    .with_language(Arc::new(ruby_lang()), cx);
 let text = r#"
 class C
@@ -1713,8 +1726,8 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
 "#
 .unindent();
-let buffer =
-    Buffer::new(0, cx.entity_id().as_u64(), &text).with_language(Arc::new(language), cx);
+let buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), &text)
+    .with_language(Arc::new(language), cx);
 let snapshot = buffer.snapshot();
 let config = snapshot.language_scope_at(0).unwrap();
@@ -1831,8 +1844,12 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
 "#
 .unindent();
-let buffer = Buffer::new(0, cx.entity_id().as_u64(), text.clone())
-    .with_language(Arc::new(language), cx);
+let buffer = Buffer::new(
+    0,
+    BufferId::new(cx.entity_id().as_u64()).unwrap(),
+    text.clone(),
+)
+.with_language(Arc::new(language), cx);
 let snapshot = buffer.snapshot();
 // By default, all brackets are enabled
@@ -1876,7 +1893,7 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
 language_registry.add(Arc::new(html_lang()));
 language_registry.add(Arc::new(erb_lang()));
-let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text);
 buffer.set_language_registry(language_registry.clone());
 buffer.set_language(
 language_registry
@@ -1911,7 +1928,7 @@ fn test_serialization(cx: &mut gpui::AppContext) {
 let mut now = Instant::now();
 let buffer1 = cx.new_model(|cx| {
-let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "abc");
+let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "abc");
 buffer.edit([(3..3, "D")], None, cx);
 now += Duration::from_secs(1);
@@ -1966,8 +1983,13 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
 let mut replica_ids = Vec::new();
 let mut buffers = Vec::new();
 let network = Arc::new(Mutex::new(Network::new(rng.clone())));
-let base_buffer =
-    cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), base_text.as_str()));
+let base_buffer = cx.new_model(|cx| {
+    Buffer::new(
+        0,
+        BufferId::new(cx.entity_id().as_u64()).unwrap(),
+        base_text.as_str(),
+    )
+});
 for i in 0..rng.gen_range(min_peers..=max_peers) {
 let buffer = cx.new_model(|cx| {
@@ -2475,8 +2497,12 @@ fn assert_bracket_pairs(
 ) {
 let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
 let buffer = cx.new_model(|cx| {
-Buffer::new(0, cx.entity_id().as_u64(), expected_text.clone())
-    .with_language(Arc::new(language), cx)
+Buffer::new(
+    0,
+    BufferId::new(cx.entity_id().as_u64()).unwrap(),
+    expected_text.clone(),
+)
+.with_language(Arc::new(language), cx)
 });
 let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());

View File

@ -241,7 +241,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
Bias::Left => proto::Bias::Left as i32, Bias::Left => proto::Bias::Left as i32,
Bias::Right => proto::Bias::Right as i32, Bias::Right => proto::Bias::Right as i32,
}, },
buffer_id: anchor.buffer_id, buffer_id: anchor.buffer_id.map(Into::into),
} }
} }
@ -420,6 +420,11 @@ pub fn deserialize_diagnostics(
/// Deserializes an [`Anchor`] from the RPC representation. /// Deserializes an [`Anchor`] from the RPC representation.
pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> { pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
let buffer_id = if let Some(id) = anchor.buffer_id {
Some(BufferId::new(id).ok()?)
} else {
None
};
Some(Anchor { Some(Anchor {
timestamp: clock::Lamport { timestamp: clock::Lamport {
replica_id: anchor.replica_id as ReplicaId, replica_id: anchor.replica_id as ReplicaId,
@ -430,7 +435,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
proto::Bias::Left => Bias::Left, proto::Bias::Left => Bias::Left,
proto::Bias::Right => Bias::Right, proto::Bias::Right => Bias::Right,
}, },
buffer_id: anchor.buffer_id, buffer_id,
}) })
} }
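
For orientation while reading the rest of the diff: the wire format keeps plain u64 ids, while the in-memory type becomes the newtype, which is why serialization maps through Into and deserialization goes through the fallible constructor. A minimal sketch of what text::BufferId looks like, inferred only from the call sites in this commit (the NonZeroU64 backing and the exact error type are assumptions, not confirmed by the diff):

use std::num::NonZeroU64;

#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct BufferId(NonZeroU64);

impl BufferId {
    /// Fallible constructor; rejecting unrepresentable ids is what lets
    /// deserialize_anchor write `BufferId::new(id).ok()?` above.
    pub fn new(id: u64) -> anyhow::Result<Self> {
        NonZeroU64::new(id)
            .map(Self)
            .ok_or_else(|| anyhow::anyhow!("buffer id cannot be zero"))
    }
}

impl From<BufferId> for u64 {
    fn from(id: BufferId) -> u64 {
        id.0.get()
    }
}

impl std::fmt::Display for BufferId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}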

View File

@ -2,7 +2,7 @@ use super::*;
use crate::LanguageConfig; use crate::LanguageConfig;
use rand::rngs::StdRng; use rand::rngs::StdRng;
use std::{env, ops::Range, sync::Arc}; use std::{env, ops::Range, sync::Arc};
use text::Buffer; use text::{Buffer, BufferId};
use tree_sitter::Node; use tree_sitter::Node;
use unindent::Unindent as _; use unindent::Unindent as _;
use util::test::marked_text_ranges; use util::test::marked_text_ranges;
@ -86,7 +86,7 @@ fn test_syntax_map_layers_for_range() {
let mut buffer = Buffer::new( let mut buffer = Buffer::new(
0, 0,
0, BufferId::new(1).unwrap(),
r#" r#"
fn a() { fn a() {
assert_eq!( assert_eq!(
@ -185,7 +185,7 @@ fn test_dynamic_language_injection() {
let mut buffer = Buffer::new( let mut buffer = Buffer::new(
0, 0,
0, BufferId::new(1).unwrap(),
r#" r#"
This is a code block: This is a code block:
@ -860,7 +860,7 @@ fn test_random_edits(
.map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10); .unwrap_or(10);
let mut buffer = Buffer::new(0, 0, text); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), text);
let mut syntax_map = SyntaxMap::new(); let mut syntax_map = SyntaxMap::new();
syntax_map.set_language_registry(registry.clone()); syntax_map.set_language_registry(registry.clone());
@ -1040,7 +1040,7 @@ fn test_edit_sequence(language_name: &str, steps: &[&str]) -> (Buffer, SyntaxMap
.now_or_never() .now_or_never()
.unwrap() .unwrap()
.unwrap(); .unwrap();
let mut buffer = Buffer::new(0, 0, Default::default()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), Default::default());
let mut mutated_syntax_map = SyntaxMap::new(); let mut mutated_syntax_map = SyntaxMap::new();
mutated_syntax_map.set_language_registry(registry.clone()); mutated_syntax_map.set_language_registry(registry.clone());
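
The test fixtures switch from the old literal 0 to BufferId::new(1).unwrap(), which fits the assumption that zero is no longer a representable id. Two fixture styles recur through the updated tests; a hedged sketch (cx stands for the gpui test context, and a non-zero entity id is assumed):

// Standalone text::Buffer tests (as in syntax_map) use a constant id,
// since there is no gpui entity to derive one from.
let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "fn a() {}");

// Model-based tests (language::Buffer) derive the id from the entity,
// unwrapping because a live entity id is assumed to be non-zero.
let buffer = cx.new_model(|cx| {
    Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "fn a() {}")
});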

View File

@ -5,10 +5,11 @@ use std::{
ops::{Range, Sub}, ops::{Range, Sub},
}; };
use sum_tree::Bias; use sum_tree::Bias;
use text::BufferId;
#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)] #[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
pub struct Anchor { pub struct Anchor {
pub buffer_id: Option<u64>, pub buffer_id: Option<BufferId>,
pub excerpt_id: ExcerptId, pub excerpt_id: ExcerptId,
pub text_anchor: text::Anchor, pub text_anchor: text::Anchor,
} }

View File

@ -33,7 +33,7 @@ use sum_tree::{Bias, Cursor, SumTree};
use text::{ use text::{
locator::Locator, locator::Locator,
subscription::{Subscription, Topic}, subscription::{Subscription, Topic},
Edit, TextSummary, BufferId, Edit, TextSummary,
}; };
use theme::SyntaxTheme; use theme::SyntaxTheme;
use util::post_inc; use util::post_inc;
@ -48,7 +48,7 @@ pub struct ExcerptId(usize);
pub struct MultiBuffer { pub struct MultiBuffer {
snapshot: RefCell<MultiBufferSnapshot>, snapshot: RefCell<MultiBufferSnapshot>,
buffers: RefCell<HashMap<u64, BufferState>>, buffers: RefCell<HashMap<BufferId, BufferState>>,
next_excerpt_id: usize, next_excerpt_id: usize,
subscriptions: Topic, subscriptions: Topic,
singleton: bool, singleton: bool,
@ -101,7 +101,7 @@ struct History {
#[derive(Clone)] #[derive(Clone)]
struct Transaction { struct Transaction {
id: TransactionId, id: TransactionId,
buffer_transactions: HashMap<u64, text::TransactionId>, buffer_transactions: HashMap<BufferId, text::TransactionId>,
first_edit_at: Instant, first_edit_at: Instant,
last_edit_at: Instant, last_edit_at: Instant,
suppress_grouping: bool, suppress_grouping: bool,
@ -161,7 +161,7 @@ pub struct ExcerptBoundary {
struct Excerpt { struct Excerpt {
id: ExcerptId, id: ExcerptId,
locator: Locator, locator: Locator,
buffer_id: u64, buffer_id: BufferId,
buffer: BufferSnapshot, buffer: BufferSnapshot,
range: ExcerptRange<text::Anchor>, range: ExcerptRange<text::Anchor>,
max_buffer_row: u32, max_buffer_row: u32,
@ -366,7 +366,7 @@ impl MultiBuffer {
offset: T, offset: T,
theme: Option<&SyntaxTheme>, theme: Option<&SyntaxTheme>,
cx: &AppContext, cx: &AppContext,
) -> Option<(u64, Vec<OutlineItem<Anchor>>)> { ) -> Option<(BufferId, Vec<OutlineItem<Anchor>>)> {
self.read(cx).symbols_containing(offset, theme) self.read(cx).symbols_containing(offset, theme)
} }
@ -412,7 +412,7 @@ impl MultiBuffer {
is_insertion: bool, is_insertion: bool,
original_indent_column: u32, original_indent_column: u32,
} }
let mut buffer_edits: HashMap<u64, Vec<BufferEdit>> = Default::default(); let mut buffer_edits: HashMap<BufferId, Vec<BufferEdit>> = Default::default();
let mut edited_excerpt_ids = Vec::new(); let mut edited_excerpt_ids = Vec::new();
let mut cursor = snapshot.excerpts.cursor::<usize>(); let mut cursor = snapshot.excerpts.cursor::<usize>();
for (ix, (range, new_text)) in edits.enumerate() { for (ix, (range, new_text)) in edits.enumerate() {
@ -514,7 +514,7 @@ impl MultiBuffer {
// Non-generic part of edit, hoisted out to avoid blowing up LLVM IR. // Non-generic part of edit, hoisted out to avoid blowing up LLVM IR.
fn tail( fn tail(
this: &mut MultiBuffer, this: &mut MultiBuffer,
buffer_edits: HashMap<u64, Vec<BufferEdit>>, buffer_edits: HashMap<BufferId, Vec<BufferEdit>>,
autoindent_mode: Option<AutoindentMode>, autoindent_mode: Option<AutoindentMode>,
edited_excerpt_ids: Vec<ExcerptId>, edited_excerpt_ids: Vec<ExcerptId>,
cx: &mut ModelContext<MultiBuffer>, cx: &mut ModelContext<MultiBuffer>,
@ -720,7 +720,7 @@ impl MultiBuffer {
cursor_shape: CursorShape, cursor_shape: CursorShape,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) { ) {
let mut selections_by_buffer: HashMap<u64, Vec<Selection<text::Anchor>>> = let mut selections_by_buffer: HashMap<BufferId, Vec<Selection<text::Anchor>>> =
Default::default(); Default::default();
let snapshot = self.read(cx); let snapshot = self.read(cx);
let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(); let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>();
@ -1440,7 +1440,7 @@ impl MultiBuffer {
.collect() .collect()
} }
pub fn buffer(&self, buffer_id: u64) -> Option<Model<Buffer>> { pub fn buffer(&self, buffer_id: BufferId) -> Option<Model<Buffer>> {
self.buffers self.buffers
.borrow() .borrow()
.get(&buffer_id) .get(&buffer_id)
@ -1661,7 +1661,8 @@ impl MultiBuffer {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
impl MultiBuffer { impl MultiBuffer {
pub fn build_simple(text: &str, cx: &mut gpui::AppContext) -> Model<Self> { pub fn build_simple(text: &str, cx: &mut gpui::AppContext) -> Model<Self> {
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text)); let buffer = cx
.new_model(|cx| Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text));
cx.new_model(|cx| Self::singleton(buffer, cx)) cx.new_model(|cx| Self::singleton(buffer, cx))
} }
@ -1671,7 +1672,9 @@ impl MultiBuffer {
) -> Model<Self> { ) -> Model<Self> {
let multi = cx.new_model(|_| Self::new(0, Capability::ReadWrite)); let multi = cx.new_model(|_| Self::new(0, Capability::ReadWrite));
for (text, ranges) in excerpts { for (text, ranges) in excerpts {
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text)); let buffer = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
});
let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange { let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange {
context: range, context: range,
primary: None, primary: None,
@ -1760,7 +1763,9 @@ impl MultiBuffer {
if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) { if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) {
let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() { let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() {
let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>(); let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();
buffers.push(cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text))); buffers.push(cx.new_model(|cx| {
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
}));
let buffer = buffers.last().unwrap().read(cx); let buffer = buffers.last().unwrap().read(cx);
log::info!( log::info!(
"Creating new buffer {} with text: {:?}", "Creating new buffer {} with text: {:?}",
@ -1987,7 +1992,7 @@ impl MultiBufferSnapshot {
(start..end, word_kind) (start..end, word_kind)
} }
pub fn as_singleton(&self) -> Option<(&ExcerptId, u64, &BufferSnapshot)> { pub fn as_singleton(&self) -> Option<(&ExcerptId, BufferId, &BufferSnapshot)> {
if self.singleton { if self.singleton {
self.excerpts self.excerpts
.iter() .iter()
@ -3209,7 +3214,7 @@ impl MultiBufferSnapshot {
&self, &self,
offset: T, offset: T,
theme: Option<&SyntaxTheme>, theme: Option<&SyntaxTheme>,
) -> Option<(u64, Vec<OutlineItem<Anchor>>)> { ) -> Option<(BufferId, Vec<OutlineItem<Anchor>>)> {
let anchor = self.anchor_before(offset); let anchor = self.anchor_before(offset);
let excerpt_id = anchor.excerpt_id; let excerpt_id = anchor.excerpt_id;
let excerpt = self.excerpt(excerpt_id)?; let excerpt = self.excerpt(excerpt_id)?;
@ -3249,7 +3254,7 @@ impl MultiBufferSnapshot {
} }
} }
pub fn buffer_id_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<u64> { pub fn buffer_id_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<BufferId> {
Some(self.excerpt(excerpt_id)?.buffer_id) Some(self.excerpt(excerpt_id)?.buffer_id)
} }
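
With buffers keyed by BufferId throughout MultiBuffer, excerpt-to-buffer lookups are now type-checked end to end. A hypothetical helper (the function itself is not in the diff; it only chains the two methods whose signatures change above, and assumes the crate's usual imports are in scope):

fn buffer_for_excerpt(
    multibuffer: &MultiBuffer,
    snapshot: &MultiBufferSnapshot,
    excerpt_id: ExcerptId,
) -> Option<Model<Buffer>> {
    // Both methods now speak BufferId, so a raw u64 off the wire has to go
    // through BufferId::new before it can reach this path.
    let buffer_id = snapshot.buffer_id_for_excerpt(excerpt_id)?;
    multibuffer.buffer(buffer_id)
}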
@ -3387,7 +3392,7 @@ impl History {
fn end_transaction( fn end_transaction(
&mut self, &mut self,
now: Instant, now: Instant,
buffer_transactions: HashMap<u64, TransactionId>, buffer_transactions: HashMap<BufferId, TransactionId>,
) -> bool { ) -> bool {
assert_ne!(self.transaction_depth, 0); assert_ne!(self.transaction_depth, 0);
self.transaction_depth -= 1; self.transaction_depth -= 1;
@ -3561,7 +3566,7 @@ impl Excerpt {
fn new( fn new(
id: ExcerptId, id: ExcerptId,
locator: Locator, locator: Locator,
buffer_id: u64, buffer_id: BufferId,
buffer: BufferSnapshot, buffer: BufferSnapshot,
range: ExcerptRange<text::Anchor>, range: ExcerptRange<text::Anchor>,
has_trailing_newline: bool, has_trailing_newline: bool,
@ -4154,8 +4159,13 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_singleton(cx: &mut AppContext) { fn test_singleton(cx: &mut AppContext) {
let buffer = let buffer = cx.new_model(|cx| {
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(6, 6, 'a'))); Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
sample_text(6, 6, 'a'),
)
});
let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let snapshot = multibuffer.read(cx).snapshot(cx); let snapshot = multibuffer.read(cx).snapshot(cx);
@ -4182,7 +4192,8 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_remote(cx: &mut AppContext) { fn test_remote(cx: &mut AppContext) {
let host_buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "a")); let host_buffer =
cx.new_model(|cx| Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "a"));
let guest_buffer = cx.new_model(|cx| { let guest_buffer = cx.new_model(|cx| {
let state = host_buffer.read(cx).to_proto(); let state = host_buffer.read(cx).to_proto();
let ops = cx let ops = cx
@ -4213,10 +4224,20 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) { fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) {
let buffer_1 = let buffer_1 = cx.new_model(|cx| {
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(6, 6, 'a'))); Buffer::new(
let buffer_2 = 0,
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(6, 6, 'g'))); BufferId::new(cx.entity_id().as_u64()).unwrap(),
sample_text(6, 6, 'a'),
)
});
let buffer_2 = cx.new_model(|cx| {
Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
sample_text(6, 6, 'g'),
)
});
let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
let events = Arc::new(RwLock::new(Vec::<Event>::new())); let events = Arc::new(RwLock::new(Vec::<Event>::new()));
@ -4449,10 +4470,20 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_excerpt_events(cx: &mut AppContext) { fn test_excerpt_events(cx: &mut AppContext) {
let buffer_1 = let buffer_1 = cx.new_model(|cx| {
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(10, 3, 'a'))); Buffer::new(
let buffer_2 = 0,
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(10, 3, 'm'))); BufferId::new(cx.entity_id().as_u64()).unwrap(),
sample_text(10, 3, 'a'),
)
});
let buffer_2 = cx.new_model(|cx| {
Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
sample_text(10, 3, 'm'),
)
});
let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
@ -4557,8 +4588,13 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_push_excerpts_with_context_lines(cx: &mut AppContext) { fn test_push_excerpts_with_context_lines(cx: &mut AppContext) {
let buffer = let buffer = cx.new_model(|cx| {
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(20, 3, 'a'))); Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
sample_text(20, 3, 'a'),
)
});
let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
multibuffer.push_excerpts_with_context_lines( multibuffer.push_excerpts_with_context_lines(
@ -4594,8 +4630,13 @@ mod tests {
#[gpui::test] #[gpui::test]
async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) { async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
let buffer = let buffer = cx.new_model(|cx| {
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(20, 3, 'a'))); Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
sample_text(20, 3, 'a'),
)
});
let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
let snapshot = buffer.read(cx); let snapshot = buffer.read(cx);
@ -4641,7 +4682,9 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_singleton_multibuffer_anchors(cx: &mut AppContext) { fn test_singleton_multibuffer_anchors(cx: &mut AppContext) {
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcd")); let buffer = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "abcd")
});
let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let old_snapshot = multibuffer.read(cx).snapshot(cx); let old_snapshot = multibuffer.read(cx).snapshot(cx);
buffer.update(cx, |buffer, cx| { buffer.update(cx, |buffer, cx| {
@ -4661,8 +4704,12 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_multibuffer_anchors(cx: &mut AppContext) { fn test_multibuffer_anchors(cx: &mut AppContext) {
let buffer_1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcd")); let buffer_1 = cx.new_model(|cx| {
let buffer_2 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "efghi")); Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "abcd")
});
let buffer_2 = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "efghi")
});
let multibuffer = cx.new_model(|cx| { let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite);
multibuffer.push_excerpts( multibuffer.push_excerpts(
@ -4719,9 +4766,16 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) { fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) {
let buffer_1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcd")); let buffer_1 = cx.new_model(|cx| {
let buffer_2 = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "abcd")
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "ABCDEFGHIJKLMNOP")); });
let buffer_2 = cx.new_model(|cx| {
Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
"ABCDEFGHIJKLMNOP",
)
});
let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
// Create an insertion id in buffer 1 that doesn't exist in buffer 2. // Create an insertion id in buffer 1 that doesn't exist in buffer 2.
@ -4932,9 +4986,13 @@ mod tests {
let base_text = util::RandomCharIter::new(&mut rng) let base_text = util::RandomCharIter::new(&mut rng)
.take(10) .take(10)
.collect::<String>(); .collect::<String>();
buffers.push( buffers.push(cx.new_model(|cx| {
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), base_text)), Buffer::new(
); 0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
base_text,
)
}));
buffers.last().unwrap() buffers.last().unwrap()
} else { } else {
buffers.choose(&mut rng).unwrap() buffers.choose(&mut rng).unwrap()
@ -5276,8 +5334,12 @@ mod tests {
let test_settings = SettingsStore::test(cx); let test_settings = SettingsStore::test(cx);
cx.set_global(test_settings); cx.set_global(test_settings);
let buffer_1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "1234")); let buffer_1 = cx.new_model(|cx| {
let buffer_2 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "5678")); Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "1234")
});
let buffer_2 = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "5678")
});
let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
let group_interval = multibuffer.read(cx).history.group_interval; let group_interval = multibuffer.read(cx).history.group_interval;
multibuffer.update(cx, |multibuffer, cx| { multibuffer.update(cx, |multibuffer, cx| {

View File

@ -21,7 +21,7 @@ use lsp::{
OneOf, ServerCapabilities, OneOf, ServerCapabilities,
}; };
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc}; use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
use text::LineEnding; use text::{BufferId, LineEnding};
pub fn lsp_formatting_options(tab_size: u32) -> lsp::FormattingOptions { pub fn lsp_formatting_options(tab_size: u32) -> lsp::FormattingOptions {
lsp::FormattingOptions { lsp::FormattingOptions {
@ -84,7 +84,7 @@ pub trait LspCommand: 'static + Sized + Send {
cx: AsyncAppContext, cx: AsyncAppContext,
) -> Result<Self::Response>; ) -> Result<Self::Response>;
fn buffer_id_from_proto(message: &Self::ProtoRequest) -> u64; fn buffer_id_from_proto(message: &Self::ProtoRequest) -> Result<BufferId>;
} }
pub(crate) struct PrepareRename { pub(crate) struct PrepareRename {
@ -205,7 +205,7 @@ impl LspCommand for PrepareRename {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PrepareRename { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PrepareRename {
proto::PrepareRename { proto::PrepareRename {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -274,8 +274,8 @@ impl LspCommand for PrepareRename {
} }
} }
fn buffer_id_from_proto(message: &proto::PrepareRename) -> u64 { fn buffer_id_from_proto(message: &proto::PrepareRename) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -332,7 +332,7 @@ impl LspCommand for PerformRename {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PerformRename { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PerformRename {
proto::PerformRename { proto::PerformRename {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -393,8 +393,8 @@ impl LspCommand for PerformRename {
.await .await
} }
fn buffer_id_from_proto(message: &proto::PerformRename) -> u64 { fn buffer_id_from_proto(message: &proto::PerformRename) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -437,7 +437,7 @@ impl LspCommand for GetDefinition {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDefinition { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDefinition {
proto::GetDefinition { proto::GetDefinition {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -486,8 +486,8 @@ impl LspCommand for GetDefinition {
location_links_from_proto(message.links, project, cx).await location_links_from_proto(message.links, project, cx).await
} }
fn buffer_id_from_proto(message: &proto::GetDefinition) -> u64 { fn buffer_id_from_proto(message: &proto::GetDefinition) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -538,7 +538,7 @@ impl LspCommand for GetTypeDefinition {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetTypeDefinition { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetTypeDefinition {
proto::GetTypeDefinition { proto::GetTypeDefinition {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -587,8 +587,8 @@ impl LspCommand for GetTypeDefinition {
location_links_from_proto(message.links, project, cx).await location_links_from_proto(message.links, project, cx).await
} }
fn buffer_id_from_proto(message: &proto::GetTypeDefinition) -> u64 { fn buffer_id_from_proto(message: &proto::GetTypeDefinition) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -617,9 +617,10 @@ async fn location_links_from_proto(
for link in proto_links { for link in proto_links {
let origin = match link.origin { let origin = match link.origin {
Some(origin) => { Some(origin) => {
let buffer_id = BufferId::new(origin.buffer_id)?;
let buffer = project let buffer = project
.update(&mut cx, |this, cx| { .update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(origin.buffer_id, cx) this.wait_for_remote_buffer(buffer_id, cx)
})? })?
.await?; .await?;
let start = origin let start = origin
@ -642,9 +643,10 @@ async fn location_links_from_proto(
}; };
let target = link.target.ok_or_else(|| anyhow!("missing target"))?; let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
let buffer_id = BufferId::new(target.buffer_id)?;
let buffer = project let buffer = project
.update(&mut cx, |this, cx| { .update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(target.buffer_id, cx) this.wait_for_remote_buffer(buffer_id, cx)
})? })?
.await?; .await?;
let start = target let start = target
@ -761,7 +763,9 @@ fn location_links_to_proto(
.into_iter() .into_iter()
.map(|definition| { .map(|definition| {
let origin = definition.origin.map(|origin| { let origin = definition.origin.map(|origin| {
let buffer_id = project.create_buffer_for_peer(&origin.buffer, peer_id, cx); let buffer_id = project
.create_buffer_for_peer(&origin.buffer, peer_id, cx)
.into();
proto::Location { proto::Location {
start: Some(serialize_anchor(&origin.range.start)), start: Some(serialize_anchor(&origin.range.start)),
end: Some(serialize_anchor(&origin.range.end)), end: Some(serialize_anchor(&origin.range.end)),
@ -769,7 +773,9 @@ fn location_links_to_proto(
} }
}); });
let buffer_id = project.create_buffer_for_peer(&definition.target.buffer, peer_id, cx); let buffer_id = project
.create_buffer_for_peer(&definition.target.buffer, peer_id, cx)
.into();
let target = proto::Location { let target = proto::Location {
start: Some(serialize_anchor(&definition.target.range.start)), start: Some(serialize_anchor(&definition.target.range.start)),
end: Some(serialize_anchor(&definition.target.range.end)), end: Some(serialize_anchor(&definition.target.range.end)),
@ -859,7 +865,7 @@ impl LspCommand for GetReferences {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetReferences { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetReferences {
proto::GetReferences { proto::GetReferences {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -901,7 +907,7 @@ impl LspCommand for GetReferences {
proto::Location { proto::Location {
start: Some(serialize_anchor(&definition.range.start)), start: Some(serialize_anchor(&definition.range.start)),
end: Some(serialize_anchor(&definition.range.end)), end: Some(serialize_anchor(&definition.range.end)),
buffer_id, buffer_id: buffer_id.into(),
} }
}) })
.collect(); .collect();
@ -917,9 +923,10 @@ impl LspCommand for GetReferences {
) -> Result<Vec<Location>> { ) -> Result<Vec<Location>> {
let mut locations = Vec::new(); let mut locations = Vec::new();
for location in message.locations { for location in message.locations {
let buffer_id = BufferId::new(location.buffer_id)?;
let target_buffer = project let target_buffer = project
.update(&mut cx, |this, cx| { .update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(location.buffer_id, cx) this.wait_for_remote_buffer(buffer_id, cx)
})? })?
.await?; .await?;
let start = location let start = location
@ -941,8 +948,8 @@ impl LspCommand for GetReferences {
Ok(locations) Ok(locations)
} }
fn buffer_id_from_proto(message: &proto::GetReferences) -> u64 { fn buffer_id_from_proto(message: &proto::GetReferences) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -1007,7 +1014,7 @@ impl LspCommand for GetDocumentHighlights {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDocumentHighlights { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDocumentHighlights {
proto::GetDocumentHighlights { proto::GetDocumentHighlights {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -1092,8 +1099,8 @@ impl LspCommand for GetDocumentHighlights {
Ok(highlights) Ok(highlights)
} }
fn buffer_id_from_proto(message: &proto::GetDocumentHighlights) -> u64 { fn buffer_id_from_proto(message: &proto::GetDocumentHighlights) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -1195,7 +1202,7 @@ impl LspCommand for GetHover {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest {
proto::GetHover { proto::GetHover {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -1308,8 +1315,8 @@ impl LspCommand for GetHover {
})) }))
} }
fn buffer_id_from_proto(message: &Self::ProtoRequest) -> u64 { fn buffer_id_from_proto(message: &Self::ProtoRequest) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -1492,7 +1499,7 @@ impl LspCommand for GetCompletions {
let anchor = buffer.anchor_after(self.position); let anchor = buffer.anchor_after(self.position);
proto::GetCompletions { proto::GetCompletions {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor(&anchor)), position: Some(language::proto::serialize_anchor(&anchor)),
version: serialize_version(&buffer.version()), version: serialize_version(&buffer.version()),
} }
@ -1556,8 +1563,8 @@ impl LspCommand for GetCompletions {
future::try_join_all(completions).await future::try_join_all(completions).await
} }
fn buffer_id_from_proto(message: &proto::GetCompletions) -> u64 { fn buffer_id_from_proto(message: &proto::GetCompletions) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -1630,7 +1637,7 @@ impl LspCommand for GetCodeActions {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCodeActions { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCodeActions {
proto::GetCodeActions { proto::GetCodeActions {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
start: Some(language::proto::serialize_anchor(&self.range.start)), start: Some(language::proto::serialize_anchor(&self.range.start)),
end: Some(language::proto::serialize_anchor(&self.range.end)), end: Some(language::proto::serialize_anchor(&self.range.end)),
version: serialize_version(&buffer.version()), version: serialize_version(&buffer.version()),
@ -1695,8 +1702,8 @@ impl LspCommand for GetCodeActions {
.collect() .collect()
} }
fn buffer_id_from_proto(message: &proto::GetCodeActions) -> u64 { fn buffer_id_from_proto(message: &proto::GetCodeActions) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -1768,7 +1775,7 @@ impl LspCommand for OnTypeFormatting {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::OnTypeFormatting { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::OnTypeFormatting {
proto::OnTypeFormatting { proto::OnTypeFormatting {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -1831,8 +1838,8 @@ impl LspCommand for OnTypeFormatting {
Ok(Some(language::proto::deserialize_transaction(transaction)?)) Ok(Some(language::proto::deserialize_transaction(transaction)?))
} }
fn buffer_id_from_proto(message: &proto::OnTypeFormatting) -> u64 { fn buffer_id_from_proto(message: &proto::OnTypeFormatting) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
@ -2291,7 +2298,7 @@ impl LspCommand for InlayHints {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::InlayHints { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::InlayHints {
proto::InlayHints { proto::InlayHints {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
start: Some(language::proto::serialize_anchor(&self.range.start)), start: Some(language::proto::serialize_anchor(&self.range.start)),
end: Some(language::proto::serialize_anchor(&self.range.end)), end: Some(language::proto::serialize_anchor(&self.range.end)),
version: serialize_version(&buffer.version()), version: serialize_version(&buffer.version()),
@ -2358,7 +2365,7 @@ impl LspCommand for InlayHints {
Ok(hints) Ok(hints)
} }
fn buffer_id_from_proto(message: &proto::InlayHints) -> u64 { fn buffer_id_from_proto(message: &proto::InlayHints) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }
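
Every LspCommand implementation changes the same way: buffer_id_from_proto now returns Result<BufferId>, so a malformed id from the wire becomes an error at the dispatch site instead of flowing through as a bare u64. A sketch of how a dispatcher might consume it (the helper below is illustrative; only the trait method and Project::buffer_for_id signatures come from this commit):

// Illustrative only: validate the incoming id once, then work with the
// typed BufferId for the rest of the request.
fn buffer_for_request<T: LspCommand>(
    project: &Project,
    message: &T::ProtoRequest,
) -> Result<Model<Buffer>> {
    let buffer_id = T::buffer_id_from_proto(message)?; // fails fast on a bad id
    project
        .buffer_for_id(buffer_id)
        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
}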

View File

@ -1,13 +1,13 @@
use std::{path::Path, sync::Arc}; use std::{path::Path, sync::Arc};
use anyhow::Context; use anyhow::{Context, Result};
use async_trait::async_trait; use async_trait::async_trait;
use gpui::{AppContext, AsyncAppContext, Model}; use gpui::{AppContext, AsyncAppContext, Model};
use language::{point_to_lsp, proto::deserialize_anchor, Buffer}; use language::{point_to_lsp, proto::deserialize_anchor, Buffer};
use lsp::{LanguageServer, LanguageServerId}; use lsp::{LanguageServer, LanguageServerId};
use rpc::proto::{self, PeerId}; use rpc::proto::{self, PeerId};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use text::{PointUtf16, ToPointUtf16}; use text::{BufferId, PointUtf16, ToPointUtf16};
use crate::{lsp_command::LspCommand, Project}; use crate::{lsp_command::LspCommand, Project};
@ -83,7 +83,7 @@ impl LspCommand for ExpandMacro {
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::LspExtExpandMacro { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::LspExtExpandMacro {
proto::LspExtExpandMacro { proto::LspExtExpandMacro {
project_id, project_id,
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id().into(),
position: Some(language::proto::serialize_anchor( position: Some(language::proto::serialize_anchor(
&buffer.anchor_before(self.position), &buffer.anchor_before(self.position),
)), )),
@ -131,7 +131,7 @@ impl LspCommand for ExpandMacro {
}) })
} }
fn buffer_id_from_proto(message: &proto::LspExtExpandMacro) -> u64 { fn buffer_id_from_proto(message: &proto::LspExtExpandMacro) -> Result<BufferId> {
message.buffer_id BufferId::new(message.buffer_id)
} }
} }

View File

@ -12,7 +12,7 @@ mod project_tests;
#[cfg(test)] #[cfg(test)]
mod worktree_tests; mod worktree_tests;
use anyhow::{anyhow, Context as _, Result}; use anyhow::{anyhow, bail, Context as _, Result};
use client::{proto, Client, Collaborator, TypedEnvelope, UserStore}; use client::{proto, Client, Collaborator, TypedEnvelope, UserStore};
use clock::ReplicaId; use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque}; use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
@ -81,7 +81,7 @@ use std::{
time::{Duration, Instant}, time::{Duration, Instant},
}; };
use terminals::Terminals; use terminals::Terminals;
use text::Anchor; use text::{Anchor, BufferId};
use util::{ use util::{
debug_panic, defer, http::HttpClient, merge_json_value_into, debug_panic, defer, http::HttpClient, merge_json_value_into,
paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _, paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
@ -120,9 +120,9 @@ pub struct Project {
collaborators: HashMap<proto::PeerId, Collaborator>, collaborators: HashMap<proto::PeerId, Collaborator>,
client_subscriptions: Vec<client::Subscription>, client_subscriptions: Vec<client::Subscription>,
_subscriptions: Vec<gpui::Subscription>, _subscriptions: Vec<gpui::Subscription>,
next_buffer_id: u64, next_buffer_id: BufferId,
opened_buffer: (watch::Sender<()>, watch::Receiver<()>), opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
shared_buffers: HashMap<proto::PeerId, HashSet<u64>>, shared_buffers: HashMap<proto::PeerId, HashSet<BufferId>>,
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
loading_buffers_by_path: HashMap< loading_buffers_by_path: HashMap<
ProjectPath, ProjectPath,
@ -131,14 +131,14 @@ pub struct Project {
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
loading_local_worktrees: loading_local_worktrees:
HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>, HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
opened_buffers: HashMap<u64, OpenBuffer>, opened_buffers: HashMap<BufferId, OpenBuffer>,
local_buffer_ids_by_path: HashMap<ProjectPath, u64>, local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>, local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
/// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it. /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
/// Used for re-issuing buffer requests when peers temporarily disconnect /// Used for re-issuing buffer requests when peers temporarily disconnect
incomplete_remote_buffers: HashMap<u64, Option<Model<Buffer>>>, incomplete_remote_buffers: HashMap<BufferId, Option<Model<Buffer>>>,
buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots buffer_snapshots: HashMap<BufferId, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
buffers_being_formatted: HashSet<u64>, buffers_being_formatted: HashSet<BufferId>,
buffers_needing_diff: HashSet<WeakModel<Buffer>>, buffers_needing_diff: HashSet<WeakModel<Buffer>>,
git_diff_debouncer: DelayedDebounced, git_diff_debouncer: DelayedDebounced,
nonce: u128, nonce: u128,
@ -210,7 +210,7 @@ struct LspBufferSnapshot {
/// Message ordered with respect to buffer operations /// Message ordered with respect to buffer operations
enum BufferOrderedMessage { enum BufferOrderedMessage {
Operation { Operation {
buffer_id: u64, buffer_id: BufferId,
operation: proto::Operation, operation: proto::Operation,
}, },
LanguageServerUpdate { LanguageServerUpdate {
@ -224,7 +224,7 @@ enum LocalProjectUpdate {
WorktreesChanged, WorktreesChanged,
CreateBufferForPeer { CreateBufferForPeer {
peer_id: proto::PeerId, peer_id: proto::PeerId,
buffer_id: u64, buffer_id: BufferId,
}, },
} }
@ -636,7 +636,7 @@ impl Project {
worktrees: Vec::new(), worktrees: Vec::new(),
buffer_ordered_messages_tx: tx, buffer_ordered_messages_tx: tx,
collaborators: Default::default(), collaborators: Default::default(),
next_buffer_id: 0, next_buffer_id: BufferId::new(1).unwrap(),
opened_buffers: Default::default(), opened_buffers: Default::default(),
shared_buffers: Default::default(), shared_buffers: Default::default(),
incomplete_remote_buffers: Default::default(), incomplete_remote_buffers: Default::default(),
@ -722,7 +722,7 @@ impl Project {
worktrees: Vec::new(), worktrees: Vec::new(),
buffer_ordered_messages_tx: tx, buffer_ordered_messages_tx: tx,
loading_buffers_by_path: Default::default(), loading_buffers_by_path: Default::default(),
next_buffer_id: 0, next_buffer_id: BufferId::default(),
opened_buffer: watch::channel(), opened_buffer: watch::channel(),
shared_buffers: Default::default(), shared_buffers: Default::default(),
incomplete_remote_buffers: Default::default(), incomplete_remote_buffers: Default::default(),
@ -997,7 +997,7 @@ impl Project {
cx.notify(); cx.notify();
} }
pub fn buffer_for_id(&self, remote_id: u64) -> Option<Model<Buffer>> { pub fn buffer_for_id(&self, remote_id: BufferId) -> Option<Model<Buffer>> {
self.opened_buffers self.opened_buffers
.get(&remote_id) .get(&remote_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
@ -1479,7 +1479,7 @@ impl Project {
variant: Some( variant: Some(
proto::create_buffer_for_peer::Variant::Chunk( proto::create_buffer_for_peer::Variant::Chunk(
proto::BufferChunk { proto::BufferChunk {
buffer_id, buffer_id: buffer_id.into(),
operations: chunk, operations: chunk,
is_last, is_last,
}, },
@ -1713,7 +1713,7 @@ impl Project {
if self.is_remote() { if self.is_remote() {
return Err(anyhow!("creating buffers as a guest is not supported yet")); return Err(anyhow!("creating buffers as a guest is not supported yet"));
} }
let id = post_inc(&mut self.next_buffer_id); let id = self.next_buffer_id.next();
let buffer = cx.new_model(|cx| { let buffer = cx.new_model(|cx| {
Buffer::new(self.replica_id(), id, text) Buffer::new(self.replica_id(), id, text)
.with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx) .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
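
post_inc worked on the old u64 counter; the newtype gets its own next(). The diff only shows the call sites, so the following is a plausible implementation under the NonZeroU64 assumption, not the actual one:

impl BufferId {
    /// Returns the current value and advances the counter, mirroring what
    /// util::post_inc did for the old u64 field.
    pub fn next(&mut self) -> Self {
        let id = *self;
        self.0 = self.0.checked_add(1).expect("buffer id overflow");
        id
    }
}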
@ -1814,7 +1814,7 @@ impl Project {
worktree: &Model<Worktree>, worktree: &Model<Worktree>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> { ) -> Task<Result<Model<Buffer>>> {
let buffer_id = post_inc(&mut self.next_buffer_id); let buffer_id = self.next_buffer_id.next();
let load_buffer = worktree.update(cx, |worktree, cx| { let load_buffer = worktree.update(cx, |worktree, cx| {
let worktree = worktree.as_local_mut().unwrap(); let worktree = worktree.as_local_mut().unwrap();
worktree.load_buffer(buffer_id, path, cx) worktree.load_buffer(buffer_id, path, cx)
@ -1845,8 +1845,9 @@ impl Project {
path: path_string, path: path_string,
}) })
.await?; .await?;
let buffer_id = BufferId::new(response.buffer_id)?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(response.buffer_id, cx) this.wait_for_remote_buffer(buffer_id, cx)
})? })?
.await .await
}) })
@ -1895,7 +1896,7 @@ impl Project {
pub fn open_buffer_by_id( pub fn open_buffer_by_id(
&mut self, &mut self,
id: u64, id: BufferId,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> { ) -> Task<Result<Model<Buffer>>> {
if let Some(buffer) = self.buffer_for_id(id) { if let Some(buffer) = self.buffer_for_id(id) {
@ -1903,11 +1904,12 @@ impl Project {
} else if self.is_local() { } else if self.is_local() {
Task::ready(Err(anyhow!("buffer {} does not exist", id))) Task::ready(Err(anyhow!("buffer {} does not exist", id)))
} else if let Some(project_id) = self.remote_id() { } else if let Some(project_id) = self.remote_id() {
let request = self let request = self.client.request(proto::OpenBufferById {
.client project_id,
.request(proto::OpenBufferById { project_id, id }); id: id.into(),
});
cx.spawn(move |this, mut cx| async move { cx.spawn(move |this, mut cx| async move {
let buffer_id = request.await?.buffer_id; let buffer_id = BufferId::new(request.await?.buffer_id)?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(buffer_id, cx) this.wait_for_remote_buffer(buffer_id, cx)
})? })?
@ -2223,7 +2225,7 @@ impl Project {
let mut operations_by_buffer_id = HashMap::default(); let mut operations_by_buffer_id = HashMap::default();
async fn flush_operations( async fn flush_operations(
this: &WeakModel<Project>, this: &WeakModel<Project>,
operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>, operations_by_buffer_id: &mut HashMap<BufferId, Vec<proto::Operation>>,
needs_resync_with_host: &mut bool, needs_resync_with_host: &mut bool,
is_local: bool, is_local: bool,
cx: &mut AsyncAppContext, cx: &mut AsyncAppContext,
@ -2232,7 +2234,7 @@ impl Project {
let request = this.update(cx, |this, _| { let request = this.update(cx, |this, _| {
let project_id = this.remote_id()?; let project_id = this.remote_id()?;
Some(this.client.request(proto::UpdateBuffer { Some(this.client.request(proto::UpdateBuffer {
buffer_id, buffer_id: buffer_id.into(),
project_id, project_id,
operations, operations,
})) }))
@ -4078,7 +4080,9 @@ impl Project {
buffer_ids: remote_buffers buffer_ids: remote_buffers
.iter() .iter()
.filter_map(|buffer| { .filter_map(|buffer| {
buffer.update(&mut cx, |buffer, _| buffer.remote_id()).ok() buffer
.update(&mut cx, |buffer, _| buffer.remote_id().into())
.ok()
}) })
.collect(), .collect(),
}) })
@ -4324,7 +4328,7 @@ impl Project {
buffer_ids: buffers buffer_ids: buffers
.iter() .iter()
.map(|buffer| { .map(|buffer| {
buffer.update(&mut cx, |buffer, _| buffer.remote_id()) buffer.update(&mut cx, |buffer, _| buffer.remote_id().into())
}) })
.collect::<Result<_>>()?, .collect::<Result<_>>()?,
}) })
@ -4720,8 +4724,9 @@ impl Project {
}); });
cx.spawn(move |this, mut cx| async move { cx.spawn(move |this, mut cx| async move {
let response = request.await?; let response = request.await?;
let buffer_id = BufferId::new(response.buffer_id)?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(response.buffer_id, cx) this.wait_for_remote_buffer(buffer_id, cx)
})? })?
.await .await
}) })
@ -5047,7 +5052,7 @@ impl Project {
let response = client let response = client
.request(proto::ApplyCompletionAdditionalEdits { .request(proto::ApplyCompletionAdditionalEdits {
project_id, project_id,
buffer_id, buffer_id: buffer_id.into(),
completion: Some(language::proto::serialize_completion(&completion)), completion: Some(language::proto::serialize_completion(&completion)),
}) })
.await?; .await?;
@ -5179,7 +5184,7 @@ impl Project {
let client = self.client.clone(); let client = self.client.clone();
let request = proto::ApplyCodeAction { let request = proto::ApplyCodeAction {
project_id, project_id,
buffer_id: buffer_handle.read(cx).remote_id(), buffer_id: buffer_handle.read(cx).remote_id().into(),
action: Some(language::proto::serialize_code_action(&action)), action: Some(language::proto::serialize_code_action(&action)),
}; };
cx.spawn(move |this, mut cx| async move { cx.spawn(move |this, mut cx| async move {
@ -5242,7 +5247,7 @@ impl Project {
let client = self.client.clone(); let client = self.client.clone();
let request = proto::OnTypeFormatting { let request = proto::OnTypeFormatting {
project_id, project_id,
buffer_id: buffer.read(cx).remote_id(), buffer_id: buffer.read(cx).remote_id().into(),
position: Some(serialize_anchor(&position)), position: Some(serialize_anchor(&position)),
trigger, trigger,
version: serialize_version(&buffer.read(cx).version()), version: serialize_version(&buffer.read(cx).version()),
@ -5531,7 +5536,7 @@ impl Project {
let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
let range_start = range.start; let range_start = range.start;
let range_end = range.end; let range_end = range.end;
let buffer_id = buffer.remote_id(); let buffer_id = buffer.remote_id().into();
let buffer_version = buffer.version().clone(); let buffer_version = buffer.version().clone();
let lsp_request = InlayHints { range }; let lsp_request = InlayHints { range };
@ -5624,7 +5629,7 @@ impl Project {
let client = self.client.clone(); let client = self.client.clone();
let request = proto::ResolveInlayHint { let request = proto::ResolveInlayHint {
project_id, project_id,
buffer_id: buffer_handle.read(cx).remote_id(), buffer_id: buffer_handle.read(cx).remote_id().into(),
language_server_id: server_id.0 as u64, language_server_id: server_id.0 as u64,
hint: Some(InlayHints::project_to_proto_hint(hint.clone())), hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
}; };
@ -5659,9 +5664,10 @@ impl Project {
let response = request.await?; let response = request.await?;
let mut result = HashMap::default(); let mut result = HashMap::default();
for location in response.locations { for location in response.locations {
let buffer_id = BufferId::new(location.buffer_id)?;
let target_buffer = this let target_buffer = this
.update(&mut cx, |this, cx| { .update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(location.buffer_id, cx) this.wait_for_remote_buffer(buffer_id, cx)
})? })?
.await?; .await?;
let start = location let start = location
@ -6555,7 +6561,7 @@ impl Project {
self.client self.client
.send(proto::UpdateBufferFile { .send(proto::UpdateBufferFile {
project_id, project_id,
buffer_id: buffer_id as u64, buffer_id: buffer_id.into(),
file: Some(new_file.to_proto()), file: Some(new_file.to_proto()),
}) })
.log_err(); .log_err();
@ -6721,7 +6727,7 @@ impl Project {
for (buffer, diff_base) in diff_bases_by_buffer { for (buffer, diff_base) in diff_bases_by_buffer {
let buffer_id = buffer.update(&mut cx, |buffer, cx| { let buffer_id = buffer.update(&mut cx, |buffer, cx| {
buffer.set_diff_base(diff_base.clone(), cx); buffer.set_diff_base(diff_base.clone(), cx);
buffer.remote_id() buffer.remote_id().into()
})?; })?;
if let Some(project_id) = remote_id { if let Some(project_id) = remote_id {
client client
@ -7353,7 +7359,7 @@ impl Project {
) -> Result<proto::Ack> { ) -> Result<proto::Ack> {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
let payload = envelope.payload.clone(); let payload = envelope.payload.clone();
let buffer_id = payload.buffer_id; let buffer_id = BufferId::new(payload.buffer_id)?;
let ops = payload let ops = payload
.operations .operations
.into_iter() .into_iter()
@ -7404,7 +7410,7 @@ impl Project {
as Arc<dyn language::File>); as Arc<dyn language::File>);
} }
let buffer_id = state.id; let buffer_id = BufferId::new(state.id)?;
let buffer = cx.new_model(|_| { let buffer = cx.new_model(|_| {
Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file) Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
.unwrap() .unwrap()
@ -7413,9 +7419,10 @@ impl Project {
.insert(buffer_id, Some(buffer)); .insert(buffer_id, Some(buffer));
} }
proto::create_buffer_for_peer::Variant::Chunk(chunk) => { proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
let buffer_id = BufferId::new(chunk.buffer_id)?;
let buffer = this let buffer = this
.incomplete_remote_buffers .incomplete_remote_buffers
.get(&chunk.buffer_id) .get(&buffer_id)
.cloned() .cloned()
.flatten() .flatten()
.ok_or_else(|| { .ok_or_else(|| {
@ -7432,7 +7439,7 @@ impl Project {
buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?; buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
if chunk.is_last { if chunk.is_last {
this.incomplete_remote_buffers.remove(&chunk.buffer_id); this.incomplete_remote_buffers.remove(&buffer_id);
this.register_buffer(&buffer, cx)?; this.register_buffer(&buffer, cx)?;
} }
} }
@ -7450,6 +7457,7 @@ impl Project {
) -> Result<()> { ) -> Result<()> {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
let buffer_id = envelope.payload.buffer_id; let buffer_id = envelope.payload.buffer_id;
let buffer_id = BufferId::new(buffer_id)?;
let diff_base = envelope.payload.diff_base; let diff_base = envelope.payload.diff_base;
if let Some(buffer) = this if let Some(buffer) = this
.opened_buffers .opened_buffers
@ -7475,6 +7483,7 @@ impl Project {
mut cx: AsyncAppContext, mut cx: AsyncAppContext,
) -> Result<()> { ) -> Result<()> {
let buffer_id = envelope.payload.buffer_id; let buffer_id = envelope.payload.buffer_id;
let buffer_id = BufferId::new(buffer_id)?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
let payload = envelope.payload.clone(); let payload = envelope.payload.clone();
@ -7509,7 +7518,7 @@ impl Project {
_: Arc<Client>, _: Arc<Client>,
mut cx: AsyncAppContext, mut cx: AsyncAppContext,
) -> Result<proto::BufferSaved> { ) -> Result<proto::BufferSaved> {
let buffer_id = envelope.payload.buffer_id; let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let (project_id, buffer) = this.update(&mut cx, |this, _cx| { let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?; let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
let buffer = this let buffer = this
@ -7530,7 +7539,7 @@ impl Project {
.await?; .await?;
Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved { Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
project_id, project_id,
buffer_id, buffer_id: buffer_id.into(),
version: serialize_version(buffer.saved_version()), version: serialize_version(buffer.saved_version()),
mtime: Some(buffer.saved_mtime().into()), mtime: Some(buffer.saved_mtime().into()),
fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()), fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
@ -7547,9 +7556,10 @@ impl Project {
let reload = this.update(&mut cx, |this, cx| { let reload = this.update(&mut cx, |this, cx| {
let mut buffers = HashSet::default(); let mut buffers = HashSet::default();
for buffer_id in &envelope.payload.buffer_ids { for buffer_id in &envelope.payload.buffer_ids {
let buffer_id = BufferId::new(*buffer_id)?;
buffers.insert( buffers.insert(
this.opened_buffers this.opened_buffers
.get(buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?, .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
); );
@ -7580,12 +7590,12 @@ impl Project {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
let Some(guest_id) = envelope.original_sender_id else { let Some(guest_id) = envelope.original_sender_id else {
error!("missing original_sender_id on SynchronizeBuffers request"); error!("missing original_sender_id on SynchronizeBuffers request");
return; bail!("missing original_sender_id on SynchronizeBuffers request");
}; };
this.shared_buffers.entry(guest_id).or_default().clear(); this.shared_buffers.entry(guest_id).or_default().clear();
for buffer in envelope.payload.buffers { for buffer in envelope.payload.buffers {
let buffer_id = buffer.id; let buffer_id = BufferId::new(buffer.id)?;
let remote_version = language::proto::deserialize_version(&buffer.version); let remote_version = language::proto::deserialize_version(&buffer.version);
if let Some(buffer) = this.buffer_for_id(buffer_id) { if let Some(buffer) = this.buffer_for_id(buffer_id) {
this.shared_buffers this.shared_buffers
@ -7595,7 +7605,7 @@ impl Project {
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
response.buffers.push(proto::BufferVersion { response.buffers.push(proto::BufferVersion {
id: buffer_id, id: buffer_id.into(),
version: language::proto::serialize_version(&buffer.version), version: language::proto::serialize_version(&buffer.version),
}); });
@ -7605,7 +7615,7 @@ impl Project {
client client
.send(proto::UpdateBufferFile { .send(proto::UpdateBufferFile {
project_id, project_id,
buffer_id: buffer_id as u64, buffer_id: buffer_id.into(),
file: Some(file.to_proto()), file: Some(file.to_proto()),
}) })
.log_err(); .log_err();
@ -7614,7 +7624,7 @@ impl Project {
client client
.send(proto::UpdateDiffBase { .send(proto::UpdateDiffBase {
project_id, project_id,
buffer_id: buffer_id as u64, buffer_id: buffer_id.into(),
diff_base: buffer.diff_base().map(Into::into), diff_base: buffer.diff_base().map(Into::into),
}) })
.log_err(); .log_err();
@ -7622,7 +7632,7 @@ impl Project {
client client
.send(proto::BufferReloaded { .send(proto::BufferReloaded {
project_id, project_id,
buffer_id, buffer_id: buffer_id.into(),
version: language::proto::serialize_version(buffer.saved_version()), version: language::proto::serialize_version(buffer.saved_version()),
mtime: Some(buffer.saved_mtime().into()), mtime: Some(buffer.saved_mtime().into()),
fingerprint: language::proto::serialize_fingerprint( fingerprint: language::proto::serialize_fingerprint(
@ -7642,7 +7652,7 @@ impl Project {
client client
.request(proto::UpdateBuffer { .request(proto::UpdateBuffer {
project_id, project_id,
buffer_id, buffer_id: buffer_id.into(),
operations: chunk, operations: chunk,
}) })
.await?; .await?;
@ -7654,7 +7664,8 @@ impl Project {
.detach(); .detach();
} }
} }
})?; Ok(())
})??;
Ok(response) Ok(response)
} }
@ -7669,9 +7680,10 @@ impl Project {
let format = this.update(&mut cx, |this, cx| { let format = this.update(&mut cx, |this, cx| {
let mut buffers = HashSet::default(); let mut buffers = HashSet::default();
for buffer_id in &envelope.payload.buffer_ids { for buffer_id in &envelope.payload.buffer_ids {
let buffer_id = BufferId::new(*buffer_id)?;
buffers.insert( buffers.insert(
this.opened_buffers this.opened_buffers
.get(buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?, .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
); );
@ -7696,11 +7708,12 @@ impl Project {
mut cx: AsyncAppContext, mut cx: AsyncAppContext,
) -> Result<proto::ApplyCompletionAdditionalEditsResponse> { ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
let (buffer, completion) = this.update(&mut cx, |this, cx| { let (buffer, completion) = this.update(&mut cx, |this, cx| {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this let buffer = this
.opened_buffers .opened_buffers
.get(&envelope.payload.buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
let language = buffer.read(cx).language(); let language = buffer.read(cx).language();
let completion = language::proto::deserialize_completion( let completion = language::proto::deserialize_completion(
envelope envelope
@ -7774,9 +7787,10 @@ impl Project {
.ok_or_else(|| anyhow!("invalid action"))?, .ok_or_else(|| anyhow!("invalid action"))?,
)?; )?;
let apply_code_action = this.update(&mut cx, |this, cx| { let apply_code_action = this.update(&mut cx, |this, cx| {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this let buffer = this
.opened_buffers .opened_buffers
.get(&envelope.payload.buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx)) Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
@ -7798,11 +7812,12 @@ impl Project {
mut cx: AsyncAppContext, mut cx: AsyncAppContext,
) -> Result<proto::OnTypeFormattingResponse> { ) -> Result<proto::OnTypeFormattingResponse> {
let on_type_formatting = this.update(&mut cx, |this, cx| { let on_type_formatting = this.update(&mut cx, |this, cx| {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this let buffer = this
.opened_buffers .opened_buffers
.get(&envelope.payload.buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
let position = envelope let position = envelope
.payload .payload
.position .position
@ -7830,9 +7845,10 @@ impl Project {
mut cx: AsyncAppContext, mut cx: AsyncAppContext,
) -> Result<proto::InlayHintsResponse> { ) -> Result<proto::InlayHintsResponse> {
let sender_id = envelope.original_sender_id()?; let sender_id = envelope.original_sender_id()?;
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this.update(&mut cx, |this, _| { let buffer = this.update(&mut cx, |this, _| {
this.opened_buffers this.opened_buffers
.get(&envelope.payload.buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id)) .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
})??; })??;
@ -7886,10 +7902,11 @@ impl Project {
let hint = InlayHints::proto_to_project_hint(proto_hint) let hint = InlayHints::proto_to_project_hint(proto_hint)
.context("resolved proto inlay hint conversion")?; .context("resolved proto inlay hint conversion")?;
let buffer = this.update(&mut cx, |this, _cx| { let buffer = this.update(&mut cx, |this, _cx| {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
this.opened_buffers this.opened_buffers
.get(&envelope.payload.buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id)) .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
})??; })??;
let response_hint = this let response_hint = this
.update(&mut cx, |project, cx| { .update(&mut cx, |project, cx| {
@ -7930,7 +7947,7 @@ impl Project {
<T::LspRequest as lsp::request::Request>::Result: Send, <T::LspRequest as lsp::request::Request>::Result: Send,
{ {
let sender_id = envelope.original_sender_id()?; let sender_id = envelope.original_sender_id()?;
let buffer_id = T::buffer_id_from_proto(&envelope.payload); let buffer_id = T::buffer_id_from_proto(&envelope.payload)?;
let buffer_handle = this.update(&mut cx, |this, _cx| { let buffer_handle = this.update(&mut cx, |this, _cx| {
this.opened_buffers this.opened_buffers
.get(&buffer_id) .get(&buffer_id)
@ -7995,7 +8012,7 @@ impl Project {
let start = serialize_anchor(&range.start); let start = serialize_anchor(&range.start);
let end = serialize_anchor(&range.end); let end = serialize_anchor(&range.end);
let buffer_id = this.update(&mut cx, |this, cx| { let buffer_id = this.update(&mut cx, |this, cx| {
this.create_buffer_for_peer(&buffer, peer_id, cx) this.create_buffer_for_peer(&buffer, peer_id, cx).into()
})?; })?;
locations.push(proto::Location { locations.push(proto::Location {
buffer_id, buffer_id,
@ -8037,7 +8054,7 @@ impl Project {
Ok(proto::OpenBufferForSymbolResponse { Ok(proto::OpenBufferForSymbolResponse {
buffer_id: this.update(&mut cx, |this, cx| { buffer_id: this.update(&mut cx, |this, cx| {
this.create_buffer_for_peer(&buffer, peer_id, cx) this.create_buffer_for_peer(&buffer, peer_id, cx).into()
})?, })?,
}) })
} }
@ -8057,14 +8074,13 @@ impl Project {
mut cx: AsyncAppContext, mut cx: AsyncAppContext,
) -> Result<proto::OpenBufferResponse> { ) -> Result<proto::OpenBufferResponse> {
let peer_id = envelope.original_sender_id()?; let peer_id = envelope.original_sender_id()?;
let buffer_id = BufferId::new(envelope.payload.id)?;
let buffer = this let buffer = this
.update(&mut cx, |this, cx| { .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx))?
this.open_buffer_by_id(envelope.payload.id, cx)
})?
.await?; .await?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
Ok(proto::OpenBufferResponse { Ok(proto::OpenBufferResponse {
buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx), buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
}) })
})? })?
} }
@ -8090,7 +8106,7 @@ impl Project {
let buffer = open_buffer.await?; let buffer = open_buffer.await?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
Ok(proto::OpenBufferResponse { Ok(proto::OpenBufferResponse {
buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx), buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
}) })
})? })?
} }
@ -8108,7 +8124,7 @@ impl Project {
for (buffer, transaction) in project_transaction.0 { for (buffer, transaction) in project_transaction.0 {
serialized_transaction serialized_transaction
.buffer_ids .buffer_ids
.push(self.create_buffer_for_peer(&buffer, peer_id, cx)); .push(self.create_buffer_for_peer(&buffer, peer_id, cx).into());
serialized_transaction serialized_transaction
.transactions .transactions
.push(language::proto::serialize_transaction(&transaction)); .push(language::proto::serialize_transaction(&transaction));
@ -8126,6 +8142,7 @@ impl Project {
let mut project_transaction = ProjectTransaction::default(); let mut project_transaction = ProjectTransaction::default();
for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions) for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
{ {
let buffer_id = BufferId::new(buffer_id)?;
let buffer = this let buffer = this
.update(&mut cx, |this, cx| { .update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(buffer_id, cx) this.wait_for_remote_buffer(buffer_id, cx)
@ -8158,7 +8175,7 @@ impl Project {
buffer: &Model<Buffer>, buffer: &Model<Buffer>,
peer_id: proto::PeerId, peer_id: proto::PeerId,
cx: &mut AppContext, cx: &mut AppContext,
) -> u64 { ) -> BufferId {
let buffer_id = buffer.read(cx).remote_id(); let buffer_id = buffer.read(cx).remote_id();
if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state { if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
updates_tx updates_tx
@ -8170,7 +8187,7 @@ impl Project {
fn wait_for_remote_buffer( fn wait_for_remote_buffer(
&mut self, &mut self,
id: u64, id: BufferId,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> { ) -> Task<Result<Model<Buffer>>> {
let mut opened_buffer_rx = self.opened_buffer.1.clone(); let mut opened_buffer_rx = self.opened_buffer.1.clone();
@ -8239,7 +8256,7 @@ impl Project {
.filter_map(|(id, buffer)| { .filter_map(|(id, buffer)| {
let buffer = buffer.upgrade()?; let buffer = buffer.upgrade()?;
Some(proto::BufferVersion { Some(proto::BufferVersion {
id: *id, id: (*id).into(),
version: language::proto::serialize_version(&buffer.read(cx).version), version: language::proto::serialize_version(&buffer.read(cx).version),
}) })
}) })
@ -8265,7 +8282,12 @@ impl Project {
.into_iter() .into_iter()
.map(|buffer| { .map(|buffer| {
let client = client.clone(); let client = client.clone();
let buffer_id = buffer.id; let buffer_id = match BufferId::new(buffer.id) {
Ok(id) => id,
Err(e) => {
return Task::ready(Err(e));
}
};
let remote_version = language::proto::deserialize_version(&buffer.version); let remote_version = language::proto::deserialize_version(&buffer.version);
if let Some(buffer) = this.buffer_for_id(buffer_id) { if let Some(buffer) = this.buffer_for_id(buffer_id) {
let operations = let operations =
@ -8276,7 +8298,7 @@ impl Project {
client client
.request(proto::UpdateBuffer { .request(proto::UpdateBuffer {
project_id, project_id,
buffer_id, buffer_id: buffer_id.into(),
operations: chunk, operations: chunk,
}) })
.await?; .await?;
@ -8294,7 +8316,10 @@ impl Project {
// creates these buffers for us again to unblock any waiting futures. // creates these buffers for us again to unblock any waiting futures.
for id in incomplete_buffer_ids { for id in incomplete_buffer_ids {
cx.background_executor() cx.background_executor()
.spawn(client.request(proto::OpenBufferById { project_id, id })) .spawn(client.request(proto::OpenBufferById {
project_id,
id: id.into(),
}))
.detach(); .detach();
} }
@ -8436,6 +8461,7 @@ impl Project {
) -> Result<()> { ) -> Result<()> {
let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?; let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
let version = deserialize_version(&envelope.payload.version); let version = deserialize_version(&envelope.payload.version);
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let mtime = envelope let mtime = envelope
.payload .payload
.mtime .mtime
@ -8445,11 +8471,11 @@ impl Project {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
let buffer = this let buffer = this
.opened_buffers .opened_buffers
.get(&envelope.payload.buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.or_else(|| { .or_else(|| {
this.incomplete_remote_buffers this.incomplete_remote_buffers
.get(&envelope.payload.buffer_id) .get(&buffer_id)
.and_then(|b| b.clone()) .and_then(|b| b.clone())
}); });
if let Some(buffer) = buffer { if let Some(buffer) = buffer {
@ -8478,14 +8504,15 @@ impl Project {
.mtime .mtime
.ok_or_else(|| anyhow!("missing mtime"))? .ok_or_else(|| anyhow!("missing mtime"))?
.into(); .into();
let buffer_id = BufferId::new(payload.buffer_id)?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
let buffer = this let buffer = this
.opened_buffers .opened_buffers
.get(&payload.buffer_id) .get(&buffer_id)
.and_then(|buffer| buffer.upgrade()) .and_then(|buffer| buffer.upgrade())
.or_else(|| { .or_else(|| {
this.incomplete_remote_buffers this.incomplete_remote_buffers
.get(&payload.buffer_id) .get(&buffer_id)
.cloned() .cloned()
.flatten() .flatten()
}); });
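
Illustrative sketch (not part of the commit): the handler hunks above all converge on the same shape — the wire payload still carries a plain u64, and each handler promotes it to a BufferId with the fallible constructor before doing a typed lookup. The crate path text::BufferId and the FakeProject/opened_buffers names below are assumptions made so the example is self-contained.

    use std::collections::HashMap;
    use anyhow::{anyhow, Result};
    use text::BufferId;

    struct FakeProject {
        // Stand-in for the real map of buffer handles keyed by BufferId.
        opened_buffers: HashMap<BufferId, String>,
    }

    impl FakeProject {
        // Simplified analogue of the handlers above: validate the raw id from the
        // proto payload, then look the buffer up by the typed key.
        fn handle_request(&self, raw_buffer_id: u64) -> Result<&String> {
            let buffer_id = BufferId::new(raw_buffer_id)?;
            self.opened_buffers
                .get(&buffer_id)
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        }
    }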


@ -62,6 +62,7 @@ use std::{
time::{Duration, SystemTime}, time::{Duration, SystemTime},
}; };
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use text::BufferId;
use util::{ use util::{
paths::{PathMatcher, HOME}, paths::{PathMatcher, HOME},
ResultExt, ResultExt,
@ -672,7 +673,7 @@ impl LocalWorktree {
pub(crate) fn load_buffer( pub(crate) fn load_buffer(
&mut self, &mut self,
id: u64, id: BufferId,
path: &Path, path: &Path,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<Model<Buffer>>> { ) -> Task<Result<Model<Buffer>>> {
@ -1043,7 +1044,7 @@ impl LocalWorktree {
let buffer = buffer_handle.read(cx); let buffer = buffer_handle.read(cx);
let rpc = self.client.clone(); let rpc = self.client.clone();
let buffer_id = buffer.remote_id(); let buffer_id: u64 = buffer.remote_id().into();
let project_id = self.share.as_ref().map(|share| share.project_id); let project_id = self.share.as_ref().map(|share| share.project_id);
let text = buffer.as_rope().clone(); let text = buffer.as_rope().clone();
@ -1481,7 +1482,7 @@ impl RemoteWorktree {
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> { ) -> Task<Result<()>> {
let buffer = buffer_handle.read(cx); let buffer = buffer_handle.read(cx);
let buffer_id = buffer.remote_id(); let buffer_id = buffer.remote_id().into();
let version = buffer.version(); let version = buffer.version();
let rpc = self.client.clone(); let rpc = self.client.clone();
let project_id = self.project_id; let project_id = self.project_id;
@ -2840,7 +2841,7 @@ impl language::LocalFile for File {
fn buffer_reloaded( fn buffer_reloaded(
&self, &self,
buffer_id: u64, buffer_id: BufferId,
version: &clock::Global, version: &clock::Global,
fingerprint: RopeFingerprint, fingerprint: RopeFingerprint,
line_ending: LineEnding, line_ending: LineEnding,
@ -2853,7 +2854,7 @@ impl language::LocalFile for File {
.client .client
.send(proto::BufferReloaded { .send(proto::BufferReloaded {
project_id, project_id,
buffer_id, buffer_id: buffer_id.into(),
version: serialize_version(version), version: serialize_version(version),
mtime: Some(mtime.into()), mtime: Some(mtime.into()),
fingerprint: serialize_fingerprint(fingerprint), fingerprint: serialize_fingerprint(fingerprint),
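
In the outgoing direction, the hunks above erase the newtype at the RPC boundary with .into(), since the generated proto fields stay u64. A minimal hedged sketch; BufferReloadedWire below is a stand-in for the generated proto::BufferReloaded type, and text::BufferId is an assumed path.

    use text::BufferId;

    // Stand-in for the generated proto struct whose buffer_id field is still a u64.
    struct BufferReloadedWire {
        project_id: u64,
        buffer_id: u64,
    }

    fn to_wire(project_id: u64, buffer_id: BufferId) -> BufferReloadedWire {
        BufferReloadedWire {
            project_id,
            // From<BufferId> for u64 lets the typed id flow back onto the wire.
            buffer_id: buffer_id.into(),
        }
    }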


@ -21,6 +21,7 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
use text::BufferId;
use util::{http::FakeHttpClient, test::temp_tree, ResultExt}; use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
#[gpui::test] #[gpui::test]
@ -511,9 +512,11 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
let prev_read_dir_count = fs.read_dir_call_count(); let prev_read_dir_count = fs.read_dir_call_count();
let buffer = tree let buffer = tree
.update(cx, |tree, cx| { .update(cx, |tree, cx| {
tree.as_local_mut() tree.as_local_mut().unwrap().load_buffer(
.unwrap() BufferId::new(1).unwrap(),
.load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx) "one/node_modules/b/b1.js".as_ref(),
cx,
)
}) })
.await .await
.unwrap(); .unwrap();
@ -553,9 +556,11 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
let prev_read_dir_count = fs.read_dir_call_count(); let prev_read_dir_count = fs.read_dir_call_count();
let buffer = tree let buffer = tree
.update(cx, |tree, cx| { .update(cx, |tree, cx| {
tree.as_local_mut() tree.as_local_mut().unwrap().load_buffer(
.unwrap() BufferId::new(1).unwrap(),
.load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx) "one/node_modules/a/a2.js".as_ref(),
cx,
)
}) })
.await .await
.unwrap(); .unwrap();


@ -1007,7 +1007,7 @@ mod tests {
use super::*; use super::*;
use editor::{DisplayPoint, Editor}; use editor::{DisplayPoint, Editor};
use gpui::{Context, Hsla, TestAppContext, VisualTestContext}; use gpui::{Context, Hsla, TestAppContext, VisualTestContext};
use language::Buffer; use language::{Buffer, BufferId};
use smol::stream::StreamExt as _; use smol::stream::StreamExt as _;
use unindent::Unindent as _; use unindent::Unindent as _;
@ -1029,7 +1029,7 @@ mod tests {
let buffer = cx.new_model(|cx| { let buffer = cx.new_model(|cx| {
Buffer::new( Buffer::new(
0, 0,
cx.entity_id().as_u64(), BufferId::new(cx.entity_id().as_u64()).unwrap(),
r#" r#"
A regular expression (shortened as regex or regexp;[1] also referred to as A regular expression (shortened as regex or regexp;[1] also referred to as
rational expression[2][3]) is a sequence of characters that specifies a search rational expression[2][3]) is a sequence of characters that specifies a search
@ -1385,7 +1385,13 @@ mod tests {
expected_query_matches_count > 1, expected_query_matches_count > 1,
"Should pick a query with multiple results" "Should pick a query with multiple results"
); );
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), buffer_text)); let buffer = cx.new_model(|cx| {
Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
buffer_text,
)
});
let window = cx.add_window(|_| ()); let window = cx.add_window(|_| ());
let editor = window.build_view(cx, |cx| Editor::for_buffer(buffer.clone(), None, cx)); let editor = window.build_view(cx, |cx| Editor::for_buffer(buffer.clone(), None, cx));
@ -1581,7 +1587,13 @@ mod tests {
for "find" or "find and replace" operations on strings, or for input validation. for "find" or "find and replace" operations on strings, or for input validation.
"# "#
.unindent(); .unindent();
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), buffer_text)); let buffer = cx.new_model(|cx| {
Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
buffer_text,
)
});
let cx = cx.add_empty_window(); let cx = cx.add_empty_window();
let editor = cx.new_view(|cx| Editor::for_buffer(buffer.clone(), None, cx)); let editor = cx.new_view(|cx| Editor::for_buffer(buffer.clone(), None, cx));


@ -1,6 +1,6 @@
use crate::{ use crate::{
locator::Locator, BufferSnapshot, Point, PointUtf16, TextDimension, ToOffset, ToPoint, locator::Locator, BufferId, BufferSnapshot, Point, PointUtf16, TextDimension, ToOffset,
ToPointUtf16, ToPoint, ToPointUtf16,
}; };
use anyhow::Result; use anyhow::Result;
use std::{cmp::Ordering, fmt::Debug, ops::Range}; use std::{cmp::Ordering, fmt::Debug, ops::Range};
@ -11,7 +11,7 @@ pub struct Anchor {
pub timestamp: clock::Lamport, pub timestamp: clock::Lamport,
pub offset: usize, pub offset: usize,
pub bias: Bias, pub bias: Bias,
pub buffer_id: Option<u64>, pub buffer_id: Option<BufferId>,
} }
impl Anchor { impl Anchor {


@ -18,7 +18,7 @@ fn init_logger() {
#[test] #[test]
fn test_edit() { fn test_edit() {
let mut buffer = Buffer::new(0, 0, "abc".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "abc".into());
assert_eq!(buffer.text(), "abc"); assert_eq!(buffer.text(), "abc");
buffer.edit([(3..3, "def")]); buffer.edit([(3..3, "def")]);
assert_eq!(buffer.text(), "abcdef"); assert_eq!(buffer.text(), "abcdef");
@ -42,7 +42,7 @@ fn test_random_edits(mut rng: StdRng) {
let mut reference_string = RandomCharIter::new(&mut rng) let mut reference_string = RandomCharIter::new(&mut rng)
.take(reference_string_len) .take(reference_string_len)
.collect::<String>(); .collect::<String>();
let mut buffer = Buffer::new(0, 0, reference_string.clone()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), reference_string.clone());
LineEnding::normalize(&mut reference_string); LineEnding::normalize(&mut reference_string);
buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
@ -164,7 +164,7 @@ fn test_line_endings() {
LineEnding::Windows LineEnding::Windows
); );
let mut buffer = Buffer::new(0, 0, "one\r\ntwo\rthree".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "one\r\ntwo\rthree".into());
assert_eq!(buffer.text(), "one\ntwo\nthree"); assert_eq!(buffer.text(), "one\ntwo\nthree");
assert_eq!(buffer.line_ending(), LineEnding::Windows); assert_eq!(buffer.line_ending(), LineEnding::Windows);
buffer.check_invariants(); buffer.check_invariants();
@ -178,7 +178,7 @@ fn test_line_endings() {
#[test] #[test]
fn test_line_len() { fn test_line_len() {
let mut buffer = Buffer::new(0, 0, "".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "".into());
buffer.edit([(0..0, "abcd\nefg\nhij")]); buffer.edit([(0..0, "abcd\nefg\nhij")]);
buffer.edit([(12..12, "kl\nmno")]); buffer.edit([(12..12, "kl\nmno")]);
buffer.edit([(18..18, "\npqrs\n")]); buffer.edit([(18..18, "\npqrs\n")]);
@ -195,7 +195,7 @@ fn test_line_len() {
#[test] #[test]
fn test_common_prefix_at_position() { fn test_common_prefix_at_position() {
let text = "a = str; b = δα"; let text = "a = str; b = δα";
let buffer = Buffer::new(0, 0, text.into()); let buffer = Buffer::new(0, BufferId::new(1).unwrap(), text.into());
let offset1 = offset_after(text, "str"); let offset1 = offset_after(text, "str");
let offset2 = offset_after(text, "δα"); let offset2 = offset_after(text, "δα");
@ -243,7 +243,11 @@ fn test_common_prefix_at_position() {
#[test] #[test]
fn test_text_summary_for_range() { fn test_text_summary_for_range() {
let buffer = Buffer::new(0, 0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz".into()); let buffer = Buffer::new(
0,
BufferId::new(1).unwrap(),
"ab\nefg\nhklm\nnopqrs\ntuvwxyz".into(),
);
assert_eq!( assert_eq!(
buffer.text_summary_for_range::<TextSummary, _>(1..3), buffer.text_summary_for_range::<TextSummary, _>(1..3),
TextSummary { TextSummary {
@ -313,7 +317,7 @@ fn test_text_summary_for_range() {
#[test] #[test]
fn test_chars_at() { fn test_chars_at() {
let mut buffer = Buffer::new(0, 0, "".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "".into());
buffer.edit([(0..0, "abcd\nefgh\nij")]); buffer.edit([(0..0, "abcd\nefgh\nij")]);
buffer.edit([(12..12, "kl\nmno")]); buffer.edit([(12..12, "kl\nmno")]);
buffer.edit([(18..18, "\npqrs")]); buffer.edit([(18..18, "\npqrs")]);
@ -335,7 +339,7 @@ fn test_chars_at() {
assert_eq!(chars.collect::<String>(), "PQrs"); assert_eq!(chars.collect::<String>(), "PQrs");
// Regression test: // Regression test:
let mut buffer = Buffer::new(0, 0, "".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "".into());
buffer.edit([(0..0, "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n")]); buffer.edit([(0..0, "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n")]);
buffer.edit([(60..60, "\n")]); buffer.edit([(60..60, "\n")]);
@ -345,7 +349,7 @@ fn test_chars_at() {
#[test] #[test]
fn test_anchors() { fn test_anchors() {
let mut buffer = Buffer::new(0, 0, "".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "".into());
buffer.edit([(0..0, "abc")]); buffer.edit([(0..0, "abc")]);
let left_anchor = buffer.anchor_before(2); let left_anchor = buffer.anchor_before(2);
let right_anchor = buffer.anchor_after(2); let right_anchor = buffer.anchor_after(2);
@ -463,7 +467,7 @@ fn test_anchors() {
#[test] #[test]
fn test_anchors_at_start_and_end() { fn test_anchors_at_start_and_end() {
let mut buffer = Buffer::new(0, 0, "".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "".into());
let before_start_anchor = buffer.anchor_before(0); let before_start_anchor = buffer.anchor_before(0);
let after_end_anchor = buffer.anchor_after(0); let after_end_anchor = buffer.anchor_after(0);
@ -486,7 +490,7 @@ fn test_anchors_at_start_and_end() {
#[test] #[test]
fn test_undo_redo() { fn test_undo_redo() {
let mut buffer = Buffer::new(0, 0, "1234".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "1234".into());
// Set group interval to zero so as to not group edits in the undo stack. // Set group interval to zero so as to not group edits in the undo stack.
buffer.set_group_interval(Duration::from_secs(0)); buffer.set_group_interval(Duration::from_secs(0));
@ -523,7 +527,7 @@ fn test_undo_redo() {
#[test] #[test]
fn test_history() { fn test_history() {
let mut now = Instant::now(); let mut now = Instant::now();
let mut buffer = Buffer::new(0, 0, "123456".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "123456".into());
buffer.set_group_interval(Duration::from_millis(300)); buffer.set_group_interval(Duration::from_millis(300));
let transaction_1 = buffer.start_transaction_at(now).unwrap(); let transaction_1 = buffer.start_transaction_at(now).unwrap();
@ -590,7 +594,7 @@ fn test_history() {
#[test] #[test]
fn test_finalize_last_transaction() { fn test_finalize_last_transaction() {
let now = Instant::now(); let now = Instant::now();
let mut buffer = Buffer::new(0, 0, "123456".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "123456".into());
buffer.start_transaction_at(now); buffer.start_transaction_at(now);
buffer.edit([(2..4, "cd")]); buffer.edit([(2..4, "cd")]);
@ -625,7 +629,7 @@ fn test_finalize_last_transaction() {
#[test] #[test]
fn test_edited_ranges_for_transaction() { fn test_edited_ranges_for_transaction() {
let now = Instant::now(); let now = Instant::now();
let mut buffer = Buffer::new(0, 0, "1234567".into()); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "1234567".into());
buffer.start_transaction_at(now); buffer.start_transaction_at(now);
buffer.edit([(2..4, "cd")]); buffer.edit([(2..4, "cd")]);
@ -664,9 +668,9 @@ fn test_edited_ranges_for_transaction() {
fn test_concurrent_edits() { fn test_concurrent_edits() {
let text = "abcdef"; let text = "abcdef";
let mut buffer1 = Buffer::new(1, 0, text.into()); let mut buffer1 = Buffer::new(1, BufferId::new(1).unwrap(), text.into());
let mut buffer2 = Buffer::new(2, 0, text.into()); let mut buffer2 = Buffer::new(2, BufferId::new(1).unwrap(), text.into());
let mut buffer3 = Buffer::new(3, 0, text.into()); let mut buffer3 = Buffer::new(3, BufferId::new(1).unwrap(), text.into());
let buf1_op = buffer1.edit([(1..2, "12")]); let buf1_op = buffer1.edit([(1..2, "12")]);
assert_eq!(buffer1.text(), "a12cdef"); assert_eq!(buffer1.text(), "a12cdef");
@ -705,7 +709,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
let mut network = Network::new(rng.clone()); let mut network = Network::new(rng.clone());
for i in 0..peers { for i in 0..peers {
let mut buffer = Buffer::new(i as ReplicaId, 0, base_text.clone()); let mut buffer = Buffer::new(i as ReplicaId, BufferId::new(1).unwrap(), base_text.clone());
buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
buffers.push(buffer); buffers.push(buffer);
replica_ids.push(i as u16); replica_ids.push(i as u16);


@ -26,6 +26,7 @@ pub use selection::*;
use std::{ use std::{
borrow::Cow, borrow::Cow,
cmp::{self, Ordering, Reverse}, cmp::{self, Ordering, Reverse},
fmt::Display,
future::Future, future::Future,
iter::Iterator, iter::Iterator,
ops::{self, Deref, Range, Sub}, ops::{self, Deref, Range, Sub},
@ -59,10 +60,39 @@ pub struct Buffer {
wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>, wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
} }
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct BufferId(u64);
impl Display for BufferId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
impl BufferId {
/// Returns Err if `id` is outside of BufferId domain.
pub fn new(id: u64) -> anyhow::Result<Self> {
Ok(Self(id))
}
/// Increments this buffer id, returning the old value.
/// So that's a post-increment operator in disguise.
pub fn next(&mut self) -> Self {
let old = *self;
self.0 += 1;
old
}
}
impl From<BufferId> for u64 {
fn from(id: BufferId) -> Self {
id.0
}
}
#[derive(Clone)] #[derive(Clone)]
pub struct BufferSnapshot { pub struct BufferSnapshot {
replica_id: ReplicaId, replica_id: ReplicaId,
remote_id: u64, remote_id: BufferId,
visible_text: Rope, visible_text: Rope,
deleted_text: Rope, deleted_text: Rope,
line_ending: LineEnding, line_ending: LineEnding,
@ -369,7 +399,7 @@ struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
old_end: D, old_end: D,
new_end: D, new_end: D,
range: Range<(&'a Locator, usize)>, range: Range<(&'a Locator, usize)>,
buffer_id: u64, buffer_id: BufferId,
} }
#[derive(Clone, Debug, Default, Eq, PartialEq)] #[derive(Clone, Debug, Default, Eq, PartialEq)]
@ -478,7 +508,7 @@ pub struct UndoOperation {
} }
impl Buffer { impl Buffer {
pub fn new(replica_id: u16, remote_id: u64, mut base_text: String) -> Buffer { pub fn new(replica_id: u16, remote_id: BufferId, mut base_text: String) -> Buffer {
let line_ending = LineEnding::detect(&base_text); let line_ending = LineEnding::detect(&base_text);
LineEnding::normalize(&mut base_text); LineEnding::normalize(&mut base_text);
@ -545,7 +575,7 @@ impl Buffer {
self.lamport_clock.replica_id self.lamport_clock.replica_id
} }
pub fn remote_id(&self) -> u64 { pub fn remote_id(&self) -> BufferId {
self.remote_id self.remote_id
} }
@ -1590,7 +1620,7 @@ impl BufferSnapshot {
&self.visible_text &self.visible_text
} }
pub fn remote_id(&self) -> u64 { pub fn remote_id(&self) -> BufferId {
self.remote_id self.remote_id
} }
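
A hedged usage sketch (not part of the commit) exercising the BufferId API defined in the hunk above: the fallible constructor, Display, the post-increment next, and the conversion back to u64 used at RPC boundaries. The text:: path is assumed from the use text::BufferId imports elsewhere in this diff.

    use text::BufferId;

    fn main() -> anyhow::Result<()> {
        // With the u64-backed definition above, new() always succeeds, but callers
        // still get a Result they can propagate with `?`.
        let mut id = BufferId::new(1)?;
        assert_eq!(u64::from(id), 1);     // From<BufferId> for u64
        assert_eq!(id.to_string(), "1");  // Display

        // next() is a post-increment: it returns the old value and bumps self.
        let previous = id.next();
        assert_eq!(u64::from(previous), 1);
        assert_eq!(u64::from(id), 2);
        Ok(())
    }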


@ -69,14 +69,14 @@ mod test {
use crate::{test::VimTestContext, Vim}; use crate::{test::VimTestContext, Vim};
use editor::Editor; use editor::Editor;
use gpui::{Context, Entity, VisualTestContext}; use gpui::{Context, Entity, VisualTestContext};
use language::Buffer; use language::{Buffer, BufferId};
// regression test for blur called with a different active editor // regression test for blur called with a different active editor
#[gpui::test] #[gpui::test]
async fn test_blur_focus(cx: &mut gpui::TestAppContext) { async fn test_blur_focus(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await; let mut cx = VimTestContext::new(cx, true).await;
let buffer = cx.new_model(|_| Buffer::new(0, 0, "a = 1\nb = 2\n")); let buffer = cx.new_model(|_| Buffer::new(0, BufferId::new(1).unwrap(), "a = 1\nb = 2\n"));
let window2 = cx.add_window(|cx| Editor::for_buffer(buffer, None, cx)); let window2 = cx.add_window(|cx| Editor::for_buffer(buffer, None, cx));
let editor2 = cx let editor2 = cx
.update(|cx| { .update(|cx| {
@ -111,7 +111,7 @@ mod test {
let mut cx1 = VisualTestContext::from_window(cx.window, &cx); let mut cx1 = VisualTestContext::from_window(cx.window, &cx);
let editor1 = cx.editor.clone(); let editor1 = cx.editor.clone();
let buffer = cx.new_model(|_| Buffer::new(0, 0, "a = 1\nb = 2\n")); let buffer = cx.new_model(|_| Buffer::new(0, BufferId::new(1).unwrap(), "a = 1\nb = 2\n"));
let (editor2, cx2) = cx.add_window_view(|cx| Editor::for_buffer(buffer, None, cx)); let (editor2, cx2) = cx.add_window_view(|cx| Editor::for_buffer(buffer, None, cx));
editor2.update(cx2, |_, cx| { editor2.update(cx2, |_, cx| {


@ -278,6 +278,7 @@ mod tests {
use language::{language_settings::AllLanguageSettings, AutoindentMode, Buffer}; use language::{language_settings::AllLanguageSettings, AutoindentMode, Buffer};
use settings::SettingsStore; use settings::SettingsStore;
use std::num::NonZeroU32; use std::num::NonZeroU32;
use text::BufferId;
#[gpui::test] #[gpui::test]
async fn test_c_autoindent(cx: &mut TestAppContext) { async fn test_c_autoindent(cx: &mut TestAppContext) {
@ -295,8 +296,8 @@ mod tests {
let language = crate::languages::language("c", tree_sitter_c::language(), None).await; let language = crate::languages::language("c", tree_sitter_c::language(), None).await;
cx.new_model(|cx| { cx.new_model(|cx| {
let mut buffer = let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "")
Buffer::new(0, cx.entity_id().as_u64(), "").with_language(language, cx); .with_language(language, cx);
// empty function // empty function
buffer.edit([(0..0, "int main() {}")], None, cx); buffer.edit([(0..0, "int main() {}")], None, cx);


@ -181,6 +181,7 @@ mod tests {
use language::{language_settings::AllLanguageSettings, AutoindentMode, Buffer}; use language::{language_settings::AllLanguageSettings, AutoindentMode, Buffer};
use settings::SettingsStore; use settings::SettingsStore;
use std::num::NonZeroU32; use std::num::NonZeroU32;
use text::BufferId;
#[gpui::test] #[gpui::test]
async fn test_python_autoindent(cx: &mut TestAppContext) { async fn test_python_autoindent(cx: &mut TestAppContext) {
@ -199,8 +200,8 @@ mod tests {
}); });
cx.new_model(|cx| { cx.new_model(|cx| {
let mut buffer = let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "")
Buffer::new(0, cx.entity_id().as_u64(), "").with_language(language, cx); .with_language(language, cx);
let append = |buffer: &mut Buffer, text: &str, cx: &mut ModelContext<Buffer>| { let append = |buffer: &mut Buffer, text: &str, cx: &mut ModelContext<Buffer>| {
let ix = buffer.len(); let ix = buffer.len();
buffer.edit([(ix..ix, text)], Some(AutoindentMode::EachLine), cx); buffer.edit([(ix..ix, text)], Some(AutoindentMode::EachLine), cx);


@ -297,6 +297,7 @@ mod tests {
use gpui::{Context, Hsla, TestAppContext}; use gpui::{Context, Hsla, TestAppContext};
use language::language_settings::AllLanguageSettings; use language::language_settings::AllLanguageSettings;
use settings::SettingsStore; use settings::SettingsStore;
use text::BufferId;
use theme::SyntaxTheme; use theme::SyntaxTheme;
#[gpui::test] #[gpui::test]
@ -509,8 +510,8 @@ mod tests {
let language = crate::languages::language("rust", tree_sitter_rust::language(), None).await; let language = crate::languages::language("rust", tree_sitter_rust::language(), None).await;
cx.new_model(|cx| { cx.new_model(|cx| {
let mut buffer = let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "")
Buffer::new(0, cx.entity_id().as_u64(), "").with_language(language, cx); .with_language(language, cx);
// indent between braces // indent between braces
buffer.set_text("fn a() {}", cx); buffer.set_text("fn a() {}", cx);


@ -349,6 +349,7 @@ async fn get_cached_eslint_server_binary(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use gpui::{Context, TestAppContext}; use gpui::{Context, TestAppContext};
use text::BufferId;
use unindent::Unindent; use unindent::Unindent;
#[gpui::test] #[gpui::test]
@ -376,7 +377,8 @@ mod tests {
.unindent(); .unindent();
let buffer = cx.new_model(|cx| { let buffer = cx.new_model(|cx| {
language::Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx) language::Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
.with_language(language, cx)
}); });
let outline = buffer.update(cx, |buffer, _| buffer.snapshot().outline(None).unwrap()); let outline = buffer.update(cx, |buffer, _| buffer.snapshot().outline(None).unwrap());
assert_eq!( assert_eq!(