Antonio Scandurra 2023-10-22 19:56:25 +02:00
parent e7c04d4aca
commit a0b667a2ca
21 changed files with 12361 additions and 10 deletions

Cargo.lock (generated): 54 changed lines

@@ -4160,6 +4160,60 @@ dependencies = [
"util",
]
[[package]]
name = "language2"
version = "0.1.0"
dependencies = [
"anyhow",
"async-broadcast",
"async-trait",
"client2",
"clock",
"collections",
"ctor",
"env_logger 0.9.3",
"fs",
"futures 0.3.28",
"fuzzy",
"git",
"globset",
"gpui2",
"indoc",
"lazy_static",
"log",
"lsp",
"parking_lot 0.11.2",
"postage",
"rand 0.8.5",
"regex",
"rpc",
"schemars",
"serde",
"serde_derive",
"serde_json",
"settings",
"settings2",
"similar",
"smallvec",
"smol",
"sum_tree",
"text",
"tree-sitter",
"tree-sitter-elixir",
"tree-sitter-embedded-template",
"tree-sitter-heex",
"tree-sitter-html",
"tree-sitter-json 0.20.0",
"tree-sitter-markdown",
"tree-sitter-python",
"tree-sitter-ruby",
"tree-sitter-rust",
"tree-sitter-typescript",
"unicase",
"unindent",
"util",
]
[[package]]
name = "language_selector"
version = "0.1.0"


@@ -43,6 +43,7 @@ members = [
"crates/install_cli",
"crates/journal",
"crates/language",
"crates/language2",
"crates/language_selector",
"crates/language_tools",
"crates/live_kit_client",


@@ -22,6 +22,8 @@ fn generate_dispatch_bindings() {
.allowlist_var("DISPATCH_QUEUE_PRIORITY_DEFAULT")
.allowlist_function("dispatch_get_global_queue")
.allowlist_function("dispatch_async_f")
.allowlist_function("dispatch_after_f")
.allowlist_function("dispatch_time")
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
.layout_tests(false)
.generate()


@@ -2,9 +2,13 @@ use crate::{
AppContext, AsyncAppContext, Context, Effect, EntityId, EventEmitter, Handle, Reference,
Subscription, Task, WeakHandle,
};
use derive_more::{Deref, DerefMut};
use std::{future::Future, marker::PhantomData};
#[derive(Deref, DerefMut)]
pub struct ModelContext<'a, T> {
#[deref]
#[deref_mut]
app: Reference<'a, AppContext>,
entity_type: PhantomData<T>,
entity_id: EntityId,


@@ -152,6 +152,32 @@ impl Executor {
}
}
pub fn block_with_timeout<F, R>(
&self,
duration: Duration,
future: F,
) -> Result<R, impl Future<Output = R>>
where
F: Future<Output = R> + Send + Sync + 'static,
{
let mut future = Box::pin(future);
let timeout = {
let future = &mut future;
async {
let timer = async {
self.timer(duration).await;
Err(())
};
let future = async move { Ok(future.await) };
timer.race(future).await
}
};
match self.block(timeout) {
Ok(value) => Ok(value),
Err(_) => Err(future),
}
}
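
For orientation, a minimal usage sketch of the new block_with_timeout (the caller, the 250 ms budget, and the literal future are illustrative, not from this commit): the caller blocks for a bounded time and, on timeout, gets the still-pending future back.

use std::time::Duration;
use gpui2::Executor;

// Hypothetical caller: block for up to 250ms; on timeout the unfinished
// future is handed back so it can keep being polled elsewhere.
fn example(executor: &Executor) {
    let request = async { 42 };
    match executor.block_with_timeout(Duration::from_millis(250), request) {
        Ok(value) => println!("completed in time: {value}"),
        Err(_pending) => println!("timed out; caller keeps the unfinished future"),
    }
}
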
pub async fn scoped<'scope, F>(&self, scheduler: F)
where
F: FnOnce(&mut Scope<'scope>),
@@ -167,9 +193,13 @@ impl Executor {
}
}
pub fn timer(&self, duration: Duration) -> smol::Timer {
// todo!("integrate with deterministic dispatcher")
smol::Timer::after(duration)
pub fn timer(&self, duration: Duration) -> Task<()> {
let (runnable, task) = async_task::spawn(async move {}, {
let dispatcher = self.dispatcher.clone();
move |runnable| dispatcher.dispatch_after(duration, runnable)
});
runnable.schedule();
Task::Spawned(task)
}
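
The timer now returns a Task driven by the platform dispatcher instead of a smol::Timer; as the block_with_timeout body above already does, the returned task can simply be awaited. A hedged sketch of a call site (the function name is illustrative):

use std::time::Duration;
use gpui2::Executor;

// Illustrative only: the returned Task is itself a future, so any async
// context can await the dispatcher-backed timer.
async fn wait_a_second(executor: &Executor) {
    executor.timer(Duration::from_secs(1)).await;
}
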
pub fn is_main_thread(&self) -> bool {


@@ -16,6 +16,7 @@ use seahash::SeaHasher;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::hash::{Hash, Hasher};
use std::time::Duration;
use std::{
any::Any,
fmt::{self, Debug, Display},
@@ -157,8 +158,9 @@ pub(crate) trait PlatformWindow {
pub trait PlatformDispatcher: Send + Sync {
fn is_main_thread(&self) -> bool;
fn dispatch(&self, task: Runnable);
fn dispatch_on_main_thread(&self, task: Runnable);
fn dispatch(&self, runnable: Runnable);
fn dispatch_on_main_thread(&self, runnable: Runnable);
fn dispatch_after(&self, duration: Duration, runnable: Runnable);
}
pub trait PlatformTextSystem: Send + Sync {


@@ -9,7 +9,10 @@ use objc::{
runtime::{BOOL, YES},
sel, sel_impl,
};
use std::ffi::c_void;
use std::{
ffi::c_void,
time::{Duration, SystemTime},
};
include!(concat!(env!("OUT_DIR"), "/dispatch_sys.rs"));
@@ -44,6 +47,26 @@ impl PlatformDispatcher for MacDispatcher {
);
}
}
fn dispatch_after(&self, duration: Duration, runnable: Runnable) {
let now = SystemTime::now();
let after_duration = now
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_nanos() as u64
+ duration.as_nanos() as u64;
unsafe {
let queue =
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT.try_into().unwrap(), 0);
let when = dispatch_time(0, after_duration as i64);
dispatch_after_f(
when,
queue,
runnable.into_raw() as *mut c_void,
Some(trampoline),
);
}
}
}
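
The removed todo about the deterministic dispatcher hints at the other side of this trait. Purely as a hedged sketch (this is not the commit's test dispatcher; every name here is hypothetical), a deterministic implementation could record delayed runnables and only run them when virtual time is advanced:

use std::{sync::Mutex, time::Duration};
use async_task::Runnable;

// Hypothetical test dispatcher: instead of handing runnables to GCD, it
// queues them with a due time relative to a virtual clock.
struct FakeDispatcher {
    now: Mutex<Duration>,
    delayed: Mutex<Vec<(Duration, Runnable)>>,
}

impl FakeDispatcher {
    fn dispatch_after(&self, duration: Duration, runnable: Runnable) {
        let due = *self.now.lock().unwrap() + duration;
        self.delayed.lock().unwrap().push((due, runnable));
    }

    // Advance virtual time and run every runnable whose timer has elapsed.
    fn advance(&self, by: Duration) {
        let now = {
            let mut now = self.now.lock().unwrap();
            *now += by;
            *now
        };
        let mut delayed = self.delayed.lock().unwrap();
        let (ready, pending): (Vec<_>, Vec<_>) = std::mem::take(&mut *delayed)
            .into_iter()
            .partition(|(due, _)| *due <= now);
        *delayed = pending;
        drop(delayed);
        for (_, runnable) in ready {
            runnable.run();
        }
    }
}
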
extern "C" fn trampoline(runnable: *mut c_void) {


@@ -182,7 +182,7 @@ impl TextStyle {
}
}
#[derive(Clone, Debug, Default, PartialEq)]
#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub struct HighlightStyle {
pub color: Option<Hsla>,
pub font_weight: Option<FontWeight>,
@@ -324,7 +324,7 @@ impl Default for Style {
}
}
#[derive(Refineable, Clone, Default, Debug, PartialEq, Eq)]
#[derive(Refineable, Copy, Clone, Default, Debug, PartialEq, Eq)]
#[refineable(debug)]
pub struct UnderlineStyle {
pub thickness: Pixels,


@@ -0,0 +1,86 @@
[package]
name = "language2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/language2.rs"
doctest = false
[features]
test-support = [
"rand",
"client2/test-support",
"collections/test-support",
"lsp/test-support",
"text/test-support",
"tree-sitter-rust",
"tree-sitter-typescript",
"settings/test-support",
"util/test-support",
]
[dependencies]
clock = { path = "../clock" }
collections = { path = "../collections" }
fuzzy = { path = "../fuzzy" }
fs = { path = "../fs" }
git = { path = "../git" }
gpui2 = { path = "../gpui2" }
lsp = { path = "../lsp" }
rpc = { path = "../rpc" }
settings2 = { path = "../settings2" }
sum_tree = { path = "../sum_tree" }
text = { path = "../text" }
# theme = { path = "../theme" }
util = { path = "../util" }
anyhow.workspace = true
async-broadcast = "0.4"
async-trait.workspace = true
futures.workspace = true
globset.workspace = true
lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
postage.workspace = true
regex.workspace = true
schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
similar = "1.3"
smallvec.workspace = true
smol.workspace = true
tree-sitter.workspace = true
unicase = "2.6"
rand = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }
[dev-dependencies]
client2 = { path = "../client2", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui2 = { path = "../gpui2", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
text = { path = "../text", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
indoc.workspace = true
rand.workspace = true
unindent.workspace = true
tree-sitter-embedded-template.workspace = true
tree-sitter-html.workspace = true
tree-sitter-json.workspace = true
tree-sitter-markdown.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-python.workspace = true
tree-sitter-typescript.workspace = true
tree-sitter-ruby.workspace = true
tree-sitter-elixir.workspace = true
tree-sitter-heex.workspace = true


@@ -0,0 +1,5 @@
fn main() {
if let Ok(bundled) = std::env::var("ZED_BUNDLE") {
println!("cargo:rustc-env=ZED_BUNDLE={}", bundled);
}
}
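
For context, this build script only re-exports ZED_BUNDLE from the build environment into the compile-time environment. A hedged sketch of how downstream code could read it (the helper name is illustrative):

// Hypothetical consumer of the variable the build script re-exported.
// option_env! yields None when ZED_BUNDLE was not set at compile time.
pub fn running_as_bundle() -> bool {
    option_env!("ZED_BUNDLE").map_or(false, |value| value == "true")
}
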

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -0,0 +1,236 @@
use crate::Diagnostic;
use collections::HashMap;
use lsp::LanguageServerId;
use std::{
cmp::{Ordering, Reverse},
iter,
ops::Range,
};
use sum_tree::{self, Bias, SumTree};
use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
#[derive(Clone, Debug, Default)]
pub struct DiagnosticSet {
diagnostics: SumTree<DiagnosticEntry<Anchor>>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DiagnosticEntry<T> {
pub range: Range<T>,
pub diagnostic: Diagnostic,
}
#[derive(Debug)]
pub struct DiagnosticGroup<T> {
pub entries: Vec<DiagnosticEntry<T>>,
pub primary_ix: usize,
}
#[derive(Clone, Debug)]
pub struct Summary {
start: Anchor,
end: Anchor,
min_start: Anchor,
max_end: Anchor,
count: usize,
}
impl<T> DiagnosticEntry<T> {
// Used to provide diagnostic context to lsp codeAction request
pub fn to_lsp_diagnostic_stub(&self) -> lsp::Diagnostic {
let code = self
.diagnostic
.code
.clone()
.map(lsp::NumberOrString::String);
lsp::Diagnostic {
code,
severity: Some(self.diagnostic.severity),
..Default::default()
}
}
}
impl DiagnosticSet {
pub fn from_sorted_entries<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
where
I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
{
Self {
diagnostics: SumTree::from_iter(iter, buffer),
}
}
pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
where
I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
{
let mut entries = iter.into_iter().collect::<Vec<_>>();
entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
Self {
diagnostics: SumTree::from_iter(
entries.into_iter().map(|entry| DiagnosticEntry {
range: buffer.anchor_before(entry.range.start)
..buffer.anchor_before(entry.range.end),
diagnostic: entry.diagnostic,
}),
buffer,
),
}
}
pub fn len(&self) -> usize {
self.diagnostics.summary().count
}
pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
self.diagnostics.iter()
}
pub fn range<'a, T, O>(
&'a self,
range: Range<T>,
buffer: &'a text::BufferSnapshot,
inclusive: bool,
reversed: bool,
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
where
T: 'a + ToOffset,
O: FromAnchor,
{
let end_bias = if inclusive { Bias::Right } else { Bias::Left };
let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
let mut cursor = self.diagnostics.filter::<_, ()>({
move |summary: &Summary| {
let start_cmp = range.start.cmp(&summary.max_end, buffer);
let end_cmp = range.end.cmp(&summary.min_start, buffer);
if inclusive {
start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
} else {
start_cmp == Ordering::Less && end_cmp == Ordering::Greater
}
}
});
if reversed {
cursor.prev(buffer);
} else {
cursor.next(buffer);
}
iter::from_fn({
move || {
if let Some(diagnostic) = cursor.item() {
if reversed {
cursor.prev(buffer);
} else {
cursor.next(buffer);
}
Some(diagnostic.resolve(buffer))
} else {
None
}
}
})
}
pub fn groups(
&self,
language_server_id: LanguageServerId,
output: &mut Vec<(LanguageServerId, DiagnosticGroup<Anchor>)>,
buffer: &text::BufferSnapshot,
) {
let mut groups = HashMap::default();
for entry in self.diagnostics.iter() {
groups
.entry(entry.diagnostic.group_id)
.or_insert(Vec::new())
.push(entry.clone());
}
let start_ix = output.len();
output.extend(groups.into_values().filter_map(|mut entries| {
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer));
entries
.iter()
.position(|entry| entry.diagnostic.is_primary)
.map(|primary_ix| {
(
language_server_id,
DiagnosticGroup {
entries,
primary_ix,
},
)
})
}));
output[start_ix..].sort_unstable_by(|(id_a, group_a), (id_b, group_b)| {
group_a.entries[group_a.primary_ix]
.range
.start
.cmp(&group_b.entries[group_b.primary_ix].range.start, buffer)
.then_with(|| id_a.cmp(&id_b))
});
}
pub fn group<'a, O: FromAnchor>(
&'a self,
group_id: usize,
buffer: &'a text::BufferSnapshot,
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
self.iter()
.filter(move |entry| entry.diagnostic.group_id == group_id)
.map(|entry| entry.resolve(buffer))
}
}
impl sum_tree::Item for DiagnosticEntry<Anchor> {
type Summary = Summary;
fn summary(&self) -> Self::Summary {
Summary {
start: self.range.start,
end: self.range.end,
min_start: self.range.start,
max_end: self.range.end,
count: 1,
}
}
}
impl DiagnosticEntry<Anchor> {
pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticEntry<O> {
DiagnosticEntry {
range: O::from_anchor(&self.range.start, buffer)
..O::from_anchor(&self.range.end, buffer),
diagnostic: self.diagnostic.clone(),
}
}
}
impl Default for Summary {
fn default() -> Self {
Self {
start: Anchor::MIN,
end: Anchor::MAX,
min_start: Anchor::MAX,
max_end: Anchor::MIN,
count: 0,
}
}
}
impl sum_tree::Summary for Summary {
type Context = text::BufferSnapshot;
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
if other.min_start.cmp(&self.min_start, buffer).is_lt() {
self.min_start = other.min_start;
}
if other.max_end.cmp(&self.max_end, buffer).is_gt() {
self.max_end = other.max_end;
}
self.start = other.start;
self.end = other.end;
self.count += other.count;
}
}
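
A hedged usage sketch of the set above (the import path, the in-scope BufferSnapshot, and the entry vector are assumptions; the range query resolves anchors back into the caller's coordinate type):

use text::{BufferSnapshot, PointUtf16};

// Sketch: build a set from unsorted point-based entries, then list every
// diagnostic overlapping the first ten rows in buffer order.
fn diagnostics_in_first_ten_rows(
    entries: Vec<DiagnosticEntry<PointUtf16>>,
    snapshot: &BufferSnapshot,
) {
    let set = DiagnosticSet::new(entries, snapshot);
    let range = PointUtf16::new(0, 0)..PointUtf16::new(10, 0);
    for entry in set.range::<_, PointUtf16>(range, snapshot, true, false) {
        println!("{:?}: {}", entry.range, entry.diagnostic.message);
    }
}
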


@@ -0,0 +1,111 @@
use gpui2::HighlightStyle;
use std::sync::Arc;
use theme::SyntaxTheme;
#[derive(Clone, Debug)]
pub struct HighlightMap(Arc<[HighlightId]>);
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct HighlightId(pub u32);
const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);
impl HighlightMap {
pub fn new(capture_names: &[String], theme: &SyntaxTheme) -> Self {
// For each capture name in the highlight query, find the longest
// key in the theme's syntax styles that matches all of the
// dot-separated components of the capture name.
HighlightMap(
capture_names
.iter()
.map(|capture_name| {
theme
.highlights
.iter()
.enumerate()
.filter_map(|(i, (key, _))| {
let mut len = 0;
let capture_parts = capture_name.split('.');
for key_part in key.split('.') {
if capture_parts.clone().any(|part| part == key_part) {
len += 1;
} else {
return None;
}
}
Some((i, len))
})
.max_by_key(|(_, len)| *len)
.map_or(DEFAULT_SYNTAX_HIGHLIGHT_ID, |(i, _)| HighlightId(i as u32))
})
.collect(),
)
}
pub fn get(&self, capture_id: u32) -> HighlightId {
self.0
.get(capture_id as usize)
.copied()
.unwrap_or(DEFAULT_SYNTAX_HIGHLIGHT_ID)
}
}
impl HighlightId {
pub fn is_default(&self) -> bool {
*self == DEFAULT_SYNTAX_HIGHLIGHT_ID
}
pub fn style(&self, theme: &SyntaxTheme) -> Option<HighlightStyle> {
theme.highlights.get(self.0 as usize).map(|entry| entry.1)
}
pub fn name<'a>(&self, theme: &'a SyntaxTheme) -> Option<&'a str> {
theme.highlights.get(self.0 as usize).map(|e| e.0.as_str())
}
}
impl Default for HighlightMap {
fn default() -> Self {
Self(Arc::new([]))
}
}
impl Default for HighlightId {
fn default() -> Self {
DEFAULT_SYNTAX_HIGHLIGHT_ID
}
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::color::Color;
#[test]
fn test_highlight_map() {
let theme = SyntaxTheme::new(
[
("function", Color::from_u32(0x100000ff)),
("function.method", Color::from_u32(0x200000ff)),
("function.async", Color::from_u32(0x300000ff)),
("variable.builtin.self.rust", Color::from_u32(0x400000ff)),
("variable.builtin", Color::from_u32(0x500000ff)),
("variable", Color::from_u32(0x600000ff)),
]
.iter()
.map(|(name, color)| (name.to_string(), (*color).into()))
.collect(),
);
let capture_names = &[
"function.special".to_string(),
"function.async.rust".to_string(),
"variable.builtin.self".to_string(),
];
let map = HighlightMap::new(capture_names, &theme);
assert_eq!(map.get(0).name(&theme), Some("function"));
assert_eq!(map.get(1).name(&theme), Some("function.async"));
assert_eq!(map.get(2).name(&theme), Some("variable.builtin"));
}
}
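
A small hedged sketch of the intended lookup path during highlighting (the capture index, map, and theme are assumed to come from surrounding highlighting code):

// Hypothetical highlighting step: translate a tree-sitter capture index into
// a theme style, skipping captures the theme does not cover.
fn style_for_capture(
    map: &HighlightMap,
    theme: &SyntaxTheme,
    capture_index: u32,
) -> Option<HighlightStyle> {
    let id = map.get(capture_index);
    if id.is_default() {
        None // no matching entry in the theme's syntax styles
    } else {
        id.style(theme)
    }
}
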

File diff suppressed because it is too large.


@@ -0,0 +1,430 @@
use crate::{File, Language};
use anyhow::Result;
use collections::{HashMap, HashSet};
use globset::GlobMatcher;
use gpui2::AppContext;
use schemars::{
schema::{InstanceType, ObjectValidation, Schema, SchemaObject},
JsonSchema,
};
use serde::{Deserialize, Serialize};
use std::{num::NonZeroU32, path::Path, sync::Arc};
pub fn init(cx: &mut AppContext) {
settings2::register::<AllLanguageSettings>(cx);
}
pub fn language_settings<'a>(
language: Option<&Arc<Language>>,
file: Option<&Arc<dyn File>>,
cx: &'a AppContext,
) -> &'a LanguageSettings {
let language_name = language.map(|l| l.name());
all_language_settings(file, cx).language(language_name.as_deref())
}
pub fn all_language_settings<'a>(
file: Option<&Arc<dyn File>>,
cx: &'a AppContext,
) -> &'a AllLanguageSettings {
let location = file.map(|f| (f.worktree_id(), f.path().as_ref()));
settings2::get_local(location, cx)
}
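
Purely as an illustrative call site (the buffer's language, file handle, and cx are assumptions from surrounding editor code), resolving the effective settings for a buffer and reading one field:

// Hypothetical caller: settings fall back from language-specific overrides
// to the merged defaults, so this works with or without a language.
fn effective_tab_size(
    language: Option<&Arc<Language>>,
    file: Option<&Arc<dyn File>>,
    cx: &AppContext,
) -> u32 {
    language_settings(language, file, cx).tab_size.get()
}
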
#[derive(Debug, Clone)]
pub struct AllLanguageSettings {
pub copilot: CopilotSettings,
defaults: LanguageSettings,
languages: HashMap<Arc<str>, LanguageSettings>,
}
#[derive(Debug, Clone, Deserialize)]
pub struct LanguageSettings {
pub tab_size: NonZeroU32,
pub hard_tabs: bool,
pub soft_wrap: SoftWrap,
pub preferred_line_length: u32,
pub show_wrap_guides: bool,
pub wrap_guides: Vec<usize>,
pub format_on_save: FormatOnSave,
pub remove_trailing_whitespace_on_save: bool,
pub ensure_final_newline_on_save: bool,
pub formatter: Formatter,
pub prettier: HashMap<String, serde_json::Value>,
pub enable_language_server: bool,
pub show_copilot_suggestions: bool,
pub show_whitespaces: ShowWhitespaceSetting,
pub extend_comment_on_newline: bool,
pub inlay_hints: InlayHintSettings,
}
#[derive(Clone, Debug, Default)]
pub struct CopilotSettings {
pub feature_enabled: bool,
pub disabled_globs: Vec<GlobMatcher>,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct AllLanguageSettingsContent {
#[serde(default)]
pub features: Option<FeaturesContent>,
#[serde(default)]
pub copilot: Option<CopilotSettingsContent>,
#[serde(flatten)]
pub defaults: LanguageSettingsContent,
#[serde(default, alias = "language_overrides")]
pub languages: HashMap<Arc<str>, LanguageSettingsContent>,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct LanguageSettingsContent {
#[serde(default)]
pub tab_size: Option<NonZeroU32>,
#[serde(default)]
pub hard_tabs: Option<bool>,
#[serde(default)]
pub soft_wrap: Option<SoftWrap>,
#[serde(default)]
pub preferred_line_length: Option<u32>,
#[serde(default)]
pub show_wrap_guides: Option<bool>,
#[serde(default)]
pub wrap_guides: Option<Vec<usize>>,
#[serde(default)]
pub format_on_save: Option<FormatOnSave>,
#[serde(default)]
pub remove_trailing_whitespace_on_save: Option<bool>,
#[serde(default)]
pub ensure_final_newline_on_save: Option<bool>,
#[serde(default)]
pub formatter: Option<Formatter>,
#[serde(default)]
pub prettier: Option<HashMap<String, serde_json::Value>>,
#[serde(default)]
pub enable_language_server: Option<bool>,
#[serde(default)]
pub show_copilot_suggestions: Option<bool>,
#[serde(default)]
pub show_whitespaces: Option<ShowWhitespaceSetting>,
#[serde(default)]
pub extend_comment_on_newline: Option<bool>,
#[serde(default)]
pub inlay_hints: Option<InlayHintSettings>,
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
pub struct CopilotSettingsContent {
#[serde(default)]
pub disabled_globs: Option<Vec<String>>,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct FeaturesContent {
pub copilot: Option<bool>,
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum SoftWrap {
None,
EditorWidth,
PreferredLineLength,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum FormatOnSave {
On,
Off,
LanguageServer,
External {
command: Arc<str>,
arguments: Arc<[String]>,
},
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum ShowWhitespaceSetting {
Selection,
None,
All,
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum Formatter {
#[default]
Auto,
LanguageServer,
Prettier,
External {
command: Arc<str>,
arguments: Arc<[String]>,
},
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
pub struct InlayHintSettings {
#[serde(default)]
pub enabled: bool,
#[serde(default = "default_true")]
pub show_type_hints: bool,
#[serde(default = "default_true")]
pub show_parameter_hints: bool,
#[serde(default = "default_true")]
pub show_other_hints: bool,
}
fn default_true() -> bool {
true
}
impl InlayHintSettings {
pub fn enabled_inlay_hint_kinds(&self) -> HashSet<Option<InlayHintKind>> {
let mut kinds = HashSet::default();
if self.show_type_hints {
kinds.insert(Some(InlayHintKind::Type));
}
if self.show_parameter_hints {
kinds.insert(Some(InlayHintKind::Parameter));
}
if self.show_other_hints {
kinds.insert(None);
}
kinds
}
}
impl AllLanguageSettings {
pub fn language<'a>(&'a self, language_name: Option<&str>) -> &'a LanguageSettings {
if let Some(name) = language_name {
if let Some(overrides) = self.languages.get(name) {
return overrides;
}
}
&self.defaults
}
pub fn copilot_enabled_for_path(&self, path: &Path) -> bool {
!self
.copilot
.disabled_globs
.iter()
.any(|glob| glob.is_match(path))
}
pub fn copilot_enabled(&self, language: Option<&Arc<Language>>, path: Option<&Path>) -> bool {
if !self.copilot.feature_enabled {
return false;
}
if let Some(path) = path {
if !self.copilot_enabled_for_path(path) {
return false;
}
}
self.language(language.map(|l| l.name()).as_deref())
.show_copilot_suggestions
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum InlayHintKind {
Type,
Parameter,
}
impl InlayHintKind {
pub fn from_name(name: &str) -> Option<Self> {
match name {
"type" => Some(InlayHintKind::Type),
"parameter" => Some(InlayHintKind::Parameter),
_ => None,
}
}
pub fn name(&self) -> &'static str {
match self {
InlayHintKind::Type => "type",
InlayHintKind::Parameter => "parameter",
}
}
}
impl settings2::Setting for AllLanguageSettings {
const KEY: Option<&'static str> = None;
type FileContent = AllLanguageSettingsContent;
fn load(
default_value: &Self::FileContent,
user_settings: &[&Self::FileContent],
_: &AppContext,
) -> Result<Self> {
// A default is provided for all settings.
let mut defaults: LanguageSettings =
serde_json::from_value(serde_json::to_value(&default_value.defaults)?)?;
let mut languages = HashMap::default();
for (language_name, settings) in &default_value.languages {
let mut language_settings = defaults.clone();
merge_settings(&mut language_settings, &settings);
languages.insert(language_name.clone(), language_settings);
}
let mut copilot_enabled = default_value
.features
.as_ref()
.and_then(|f| f.copilot)
.ok_or_else(Self::missing_default)?;
let mut copilot_globs = default_value
.copilot
.as_ref()
.and_then(|c| c.disabled_globs.as_ref())
.ok_or_else(Self::missing_default)?;
for user_settings in user_settings {
if let Some(copilot) = user_settings.features.as_ref().and_then(|f| f.copilot) {
copilot_enabled = copilot;
}
if let Some(globs) = user_settings
.copilot
.as_ref()
.and_then(|f| f.disabled_globs.as_ref())
{
copilot_globs = globs;
}
// A user's global settings override the default global settings and
// all default language-specific settings.
merge_settings(&mut defaults, &user_settings.defaults);
for language_settings in languages.values_mut() {
merge_settings(language_settings, &user_settings.defaults);
}
// A user's language-specific settings override default language-specific settings.
for (language_name, user_language_settings) in &user_settings.languages {
merge_settings(
languages
.entry(language_name.clone())
.or_insert_with(|| defaults.clone()),
&user_language_settings,
);
}
}
Ok(Self {
copilot: CopilotSettings {
feature_enabled: copilot_enabled,
disabled_globs: copilot_globs
.iter()
.filter_map(|g| Some(globset::Glob::new(g).ok()?.compile_matcher()))
.collect(),
},
defaults,
languages,
})
}
fn json_schema(
generator: &mut schemars::gen::SchemaGenerator,
params: &settings2::SettingsJsonSchemaParams,
_: &AppContext,
) -> schemars::schema::RootSchema {
let mut root_schema = generator.root_schema_for::<Self::FileContent>();
// Create a schema for a 'languages overrides' object, associating editor
// settings with specific languages.
assert!(root_schema
.definitions
.contains_key("LanguageSettingsContent"));
let languages_object_schema = SchemaObject {
instance_type: Some(InstanceType::Object.into()),
object: Some(Box::new(ObjectValidation {
properties: params
.language_names
.iter()
.map(|name| {
(
name.clone(),
Schema::new_ref("#/definitions/LanguageSettingsContent".into()),
)
})
.collect(),
..Default::default()
})),
..Default::default()
};
root_schema
.definitions
.extend([("Languages".into(), languages_object_schema.into())]);
root_schema
.schema
.object
.as_mut()
.unwrap()
.properties
.extend([
(
"languages".to_owned(),
Schema::new_ref("#/definitions/Languages".into()),
),
// For backward compatibility
(
"language_overrides".to_owned(),
Schema::new_ref("#/definitions/Languages".into()),
),
]);
root_schema
}
}
fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent) {
merge(&mut settings.tab_size, src.tab_size);
merge(&mut settings.hard_tabs, src.hard_tabs);
merge(&mut settings.soft_wrap, src.soft_wrap);
merge(&mut settings.show_wrap_guides, src.show_wrap_guides);
merge(&mut settings.wrap_guides, src.wrap_guides.clone());
merge(
&mut settings.preferred_line_length,
src.preferred_line_length,
);
merge(&mut settings.formatter, src.formatter.clone());
merge(&mut settings.prettier, src.prettier.clone());
merge(&mut settings.format_on_save, src.format_on_save.clone());
merge(
&mut settings.remove_trailing_whitespace_on_save,
src.remove_trailing_whitespace_on_save,
);
merge(
&mut settings.ensure_final_newline_on_save,
src.ensure_final_newline_on_save,
);
merge(
&mut settings.enable_language_server,
src.enable_language_server,
);
merge(
&mut settings.show_copilot_suggestions,
src.show_copilot_suggestions,
);
merge(&mut settings.show_whitespaces, src.show_whitespaces);
merge(
&mut settings.extend_comment_on_newline,
src.extend_comment_on_newline,
);
merge(&mut settings.inlay_hints, src.inlay_hints);
fn merge<T>(target: &mut T, value: Option<T>) {
if let Some(value) = value {
*target = value;
}
}
}


@@ -0,0 +1,138 @@
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui2::{Executor, HighlightStyle};
use std::{ops::Range, sync::Arc};
#[derive(Debug)]
pub struct Outline<T> {
pub items: Vec<OutlineItem<T>>,
candidates: Vec<StringMatchCandidate>,
path_candidates: Vec<StringMatchCandidate>,
path_candidate_prefixes: Vec<usize>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct OutlineItem<T> {
pub depth: usize,
pub range: Range<T>,
pub text: String,
pub highlight_ranges: Vec<(Range<usize>, HighlightStyle)>,
pub name_ranges: Vec<Range<usize>>,
}
impl<T> Outline<T> {
pub fn new(items: Vec<OutlineItem<T>>) -> Self {
let mut candidates = Vec::new();
let mut path_candidates = Vec::new();
let mut path_candidate_prefixes = Vec::new();
let mut path_text = String::new();
let mut path_stack = Vec::new();
for (id, item) in items.iter().enumerate() {
if item.depth < path_stack.len() {
path_stack.truncate(item.depth);
path_text.truncate(path_stack.last().copied().unwrap_or(0));
}
if !path_text.is_empty() {
path_text.push(' ');
}
path_candidate_prefixes.push(path_text.len());
path_text.push_str(&item.text);
path_stack.push(path_text.len());
let candidate_text = item
.name_ranges
.iter()
.map(|range| &item.text[range.start as usize..range.end as usize])
.collect::<String>();
path_candidates.push(StringMatchCandidate::new(id, path_text.clone()));
candidates.push(StringMatchCandidate::new(id, candidate_text));
}
Self {
candidates,
path_candidates,
path_candidate_prefixes,
items,
}
}
pub async fn search(&self, query: &str, executor: Executor) -> Vec<StringMatch> {
let query = query.trim_start();
let is_path_query = query.contains(' ');
let smart_case = query.chars().any(|c| c.is_uppercase());
let mut matches = fuzzy::match_strings(
if is_path_query {
&self.path_candidates
} else {
&self.candidates
},
query,
smart_case,
100,
&Default::default(),
executor.clone(),
)
.await;
matches.sort_unstable_by_key(|m| m.candidate_id);
let mut tree_matches = Vec::new();
let mut prev_item_ix = 0;
for mut string_match in matches {
let outline_match = &self.items[string_match.candidate_id];
if is_path_query {
let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
string_match
.positions
.retain(|position| *position >= prefix_len);
for position in &mut string_match.positions {
*position -= prefix_len;
}
} else {
let mut name_ranges = outline_match.name_ranges.iter();
let mut name_range = name_ranges.next().unwrap();
let mut preceding_ranges_len = 0;
for position in &mut string_match.positions {
while *position >= preceding_ranges_len + name_range.len() as usize {
preceding_ranges_len += name_range.len();
name_range = name_ranges.next().unwrap();
}
*position = name_range.start as usize + (*position - preceding_ranges_len);
}
}
let insertion_ix = tree_matches.len();
let mut cur_depth = outline_match.depth;
for (ix, item) in self.items[prev_item_ix..string_match.candidate_id]
.iter()
.enumerate()
.rev()
{
if cur_depth == 0 {
break;
}
let candidate_index = ix + prev_item_ix;
if item.depth == cur_depth - 1 {
tree_matches.insert(
insertion_ix,
StringMatch {
candidate_id: candidate_index,
score: Default::default(),
positions: Default::default(),
string: Default::default(),
},
);
cur_depth -= 1;
}
}
prev_item_ix = string_match.candidate_id + 1;
tree_matches.push(string_match);
}
tree_matches
}
}
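
A hedged usage sketch of the search API above (the outline and executor are assumed to exist; as the code shows, matches come back in outline order with unmatched ancestor items interleaved to preserve the tree shape):

// Hypothetical caller: fuzzy-search the outline and print matching items
// indented by their depth.
async fn print_matches(outline: &Outline<usize>, executor: Executor) {
    for string_match in outline.search("render", executor).await {
        let item = &outline.items[string_match.candidate_id];
        println!("{}{}", "  ".repeat(item.depth), item.text);
    }
}
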


@@ -0,0 +1,589 @@
use crate::{
diagnostic_set::DiagnosticEntry, CodeAction, CodeLabel, Completion, CursorShape, Diagnostic,
Language,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use lsp::{DiagnosticSeverity, LanguageServerId};
use rpc::proto;
use std::{ops::Range, sync::Arc};
use text::*;
pub use proto::{BufferState, Operation};
pub fn serialize_fingerprint(fingerprint: RopeFingerprint) -> String {
fingerprint.to_hex()
}
pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
RopeFingerprint::from_hex(fingerprint)
.map_err(|error| anyhow!("invalid fingerprint: {}", error))
}
pub fn deserialize_line_ending(message: proto::LineEnding) -> text::LineEnding {
match message {
proto::LineEnding::Unix => text::LineEnding::Unix,
proto::LineEnding::Windows => text::LineEnding::Windows,
}
}
pub fn serialize_line_ending(message: text::LineEnding) -> proto::LineEnding {
match message {
text::LineEnding::Unix => proto::LineEnding::Unix,
text::LineEnding::Windows => proto::LineEnding::Windows,
}
}
pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
proto::Operation {
variant: Some(match operation {
crate::Operation::Buffer(text::Operation::Edit(edit)) => {
proto::operation::Variant::Edit(serialize_edit_operation(edit))
}
crate::Operation::Buffer(text::Operation::Undo(undo)) => {
proto::operation::Variant::Undo(proto::operation::Undo {
replica_id: undo.timestamp.replica_id as u32,
lamport_timestamp: undo.timestamp.value,
version: serialize_version(&undo.version),
counts: undo
.counts
.iter()
.map(|(edit_id, count)| proto::UndoCount {
replica_id: edit_id.replica_id as u32,
lamport_timestamp: edit_id.value,
count: *count,
})
.collect(),
})
}
crate::Operation::UpdateSelections {
selections,
line_mode,
lamport_timestamp,
cursor_shape,
} => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
replica_id: lamport_timestamp.replica_id as u32,
lamport_timestamp: lamport_timestamp.value,
selections: serialize_selections(selections),
line_mode: *line_mode,
cursor_shape: serialize_cursor_shape(cursor_shape) as i32,
}),
crate::Operation::UpdateDiagnostics {
lamport_timestamp,
server_id,
diagnostics,
} => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
replica_id: lamport_timestamp.replica_id as u32,
lamport_timestamp: lamport_timestamp.value,
server_id: server_id.0 as u64,
diagnostics: serialize_diagnostics(diagnostics.iter()),
}),
crate::Operation::UpdateCompletionTriggers {
triggers,
lamport_timestamp,
} => proto::operation::Variant::UpdateCompletionTriggers(
proto::operation::UpdateCompletionTriggers {
replica_id: lamport_timestamp.replica_id as u32,
lamport_timestamp: lamport_timestamp.value,
triggers: triggers.clone(),
},
),
}),
}
}
pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit {
proto::operation::Edit {
replica_id: operation.timestamp.replica_id as u32,
lamport_timestamp: operation.timestamp.value,
version: serialize_version(&operation.version),
ranges: operation.ranges.iter().map(serialize_range).collect(),
new_text: operation
.new_text
.iter()
.map(|text| text.to_string())
.collect(),
}
}
pub fn serialize_undo_map_entry(
(edit_id, counts): (&clock::Lamport, &[(clock::Lamport, u32)]),
) -> proto::UndoMapEntry {
proto::UndoMapEntry {
replica_id: edit_id.replica_id as u32,
local_timestamp: edit_id.value,
counts: counts
.iter()
.map(|(undo_id, count)| proto::UndoCount {
replica_id: undo_id.replica_id as u32,
lamport_timestamp: undo_id.value,
count: *count,
})
.collect(),
}
}
pub fn split_operations(
mut operations: Vec<proto::Operation>,
) -> impl Iterator<Item = Vec<proto::Operation>> {
#[cfg(any(test, feature = "test-support"))]
const CHUNK_SIZE: usize = 5;
#[cfg(not(any(test, feature = "test-support")))]
const CHUNK_SIZE: usize = 100;
let mut done = false;
std::iter::from_fn(move || {
if done {
return None;
}
let operations = operations
.drain(..std::cmp::min(CHUNK_SIZE, operations.len()))
.collect::<Vec<_>>();
if operations.is_empty() {
done = true;
}
Some(operations)
})
}
pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
selections.iter().map(serialize_selection).collect()
}
pub fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
proto::Selection {
id: selection.id as u64,
start: Some(proto::EditorAnchor {
anchor: Some(serialize_anchor(&selection.start)),
excerpt_id: 0,
}),
end: Some(proto::EditorAnchor {
anchor: Some(serialize_anchor(&selection.end)),
excerpt_id: 0,
}),
reversed: selection.reversed,
}
}
pub fn serialize_cursor_shape(cursor_shape: &CursorShape) -> proto::CursorShape {
match cursor_shape {
CursorShape::Bar => proto::CursorShape::CursorBar,
CursorShape::Block => proto::CursorShape::CursorBlock,
CursorShape::Underscore => proto::CursorShape::CursorUnderscore,
CursorShape::Hollow => proto::CursorShape::CursorHollow,
}
}
pub fn deserialize_cursor_shape(cursor_shape: proto::CursorShape) -> CursorShape {
match cursor_shape {
proto::CursorShape::CursorBar => CursorShape::Bar,
proto::CursorShape::CursorBlock => CursorShape::Block,
proto::CursorShape::CursorUnderscore => CursorShape::Underscore,
proto::CursorShape::CursorHollow => CursorShape::Hollow,
}
}
pub fn serialize_diagnostics<'a>(
diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
) -> Vec<proto::Diagnostic> {
diagnostics
.into_iter()
.map(|entry| proto::Diagnostic {
source: entry.diagnostic.source.clone(),
start: Some(serialize_anchor(&entry.range.start)),
end: Some(serialize_anchor(&entry.range.end)),
message: entry.diagnostic.message.clone(),
severity: match entry.diagnostic.severity {
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
_ => proto::diagnostic::Severity::None,
} as i32,
group_id: entry.diagnostic.group_id as u64,
is_primary: entry.diagnostic.is_primary,
is_valid: entry.diagnostic.is_valid,
code: entry.diagnostic.code.clone(),
is_disk_based: entry.diagnostic.is_disk_based,
is_unnecessary: entry.diagnostic.is_unnecessary,
})
.collect()
}
pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
proto::Anchor {
replica_id: anchor.timestamp.replica_id as u32,
timestamp: anchor.timestamp.value,
offset: anchor.offset as u64,
bias: match anchor.bias {
Bias::Left => proto::Bias::Left as i32,
Bias::Right => proto::Bias::Right as i32,
},
buffer_id: anchor.buffer_id,
}
}
// This behavior is currently copied in the collab database, for snapshotting channel notes
pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
Ok(
match message
.variant
.ok_or_else(|| anyhow!("missing operation variant"))?
{
proto::operation::Variant::Edit(edit) => {
crate::Operation::Buffer(text::Operation::Edit(deserialize_edit_operation(edit)))
}
proto::operation::Variant::Undo(undo) => {
crate::Operation::Buffer(text::Operation::Undo(UndoOperation {
timestamp: clock::Lamport {
replica_id: undo.replica_id as ReplicaId,
value: undo.lamport_timestamp,
},
version: deserialize_version(&undo.version),
counts: undo
.counts
.into_iter()
.map(|c| {
(
clock::Lamport {
replica_id: c.replica_id as ReplicaId,
value: c.lamport_timestamp,
},
c.count,
)
})
.collect(),
}))
}
proto::operation::Variant::UpdateSelections(message) => {
let selections = message
.selections
.into_iter()
.filter_map(|selection| {
Some(Selection {
id: selection.id as usize,
start: deserialize_anchor(selection.start?.anchor?)?,
end: deserialize_anchor(selection.end?.anchor?)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
})
})
.collect::<Vec<_>>();
crate::Operation::UpdateSelections {
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
selections: Arc::from(selections),
line_mode: message.line_mode,
cursor_shape: deserialize_cursor_shape(
proto::CursorShape::from_i32(message.cursor_shape)
.ok_or_else(|| anyhow!("Missing cursor shape"))?,
),
}
}
proto::operation::Variant::UpdateDiagnostics(message) => {
crate::Operation::UpdateDiagnostics {
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
server_id: LanguageServerId(message.server_id as usize),
diagnostics: deserialize_diagnostics(message.diagnostics),
}
}
proto::operation::Variant::UpdateCompletionTriggers(message) => {
crate::Operation::UpdateCompletionTriggers {
triggers: message.triggers,
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
}
}
},
)
}
pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation {
EditOperation {
timestamp: clock::Lamport {
replica_id: edit.replica_id as ReplicaId,
value: edit.lamport_timestamp,
},
version: deserialize_version(&edit.version),
ranges: edit.ranges.into_iter().map(deserialize_range).collect(),
new_text: edit.new_text.into_iter().map(Arc::from).collect(),
}
}
pub fn deserialize_undo_map_entry(
entry: proto::UndoMapEntry,
) -> (clock::Lamport, Vec<(clock::Lamport, u32)>) {
(
clock::Lamport {
replica_id: entry.replica_id as u16,
value: entry.local_timestamp,
},
entry
.counts
.into_iter()
.map(|undo_count| {
(
clock::Lamport {
replica_id: undo_count.replica_id as u16,
value: undo_count.lamport_timestamp,
},
undo_count.count,
)
})
.collect(),
)
}
pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selection<Anchor>]> {
Arc::from(
selections
.into_iter()
.filter_map(deserialize_selection)
.collect::<Vec<_>>(),
)
}
pub fn deserialize_selection(selection: proto::Selection) -> Option<Selection<Anchor>> {
Some(Selection {
id: selection.id as usize,
start: deserialize_anchor(selection.start?.anchor?)?,
end: deserialize_anchor(selection.end?.anchor?)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
})
}
pub fn deserialize_diagnostics(
diagnostics: Vec<proto::Diagnostic>,
) -> Arc<[DiagnosticEntry<Anchor>]> {
diagnostics
.into_iter()
.filter_map(|diagnostic| {
Some(DiagnosticEntry {
range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
diagnostic: Diagnostic {
source: diagnostic.source,
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION,
proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
proto::diagnostic::Severity::None => return None,
},
message: diagnostic.message,
group_id: diagnostic.group_id as usize,
code: diagnostic.code,
is_valid: diagnostic.is_valid,
is_primary: diagnostic.is_primary,
is_disk_based: diagnostic.is_disk_based,
is_unnecessary: diagnostic.is_unnecessary,
},
})
})
.collect()
}
pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
Some(Anchor {
timestamp: clock::Lamport {
replica_id: anchor.replica_id as ReplicaId,
value: anchor.timestamp,
},
offset: anchor.offset as usize,
bias: match proto::Bias::from_i32(anchor.bias)? {
proto::Bias::Left => Bias::Left,
proto::Bias::Right => Bias::Right,
},
buffer_id: anchor.buffer_id,
})
}
pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option<clock::Lamport> {
let replica_id;
let value;
match operation.variant.as_ref()? {
proto::operation::Variant::Edit(op) => {
replica_id = op.replica_id;
value = op.lamport_timestamp;
}
proto::operation::Variant::Undo(op) => {
replica_id = op.replica_id;
value = op.lamport_timestamp;
}
proto::operation::Variant::UpdateDiagnostics(op) => {
replica_id = op.replica_id;
value = op.lamport_timestamp;
}
proto::operation::Variant::UpdateSelections(op) => {
replica_id = op.replica_id;
value = op.lamport_timestamp;
}
proto::operation::Variant::UpdateCompletionTriggers(op) => {
replica_id = op.replica_id;
value = op.lamport_timestamp;
}
}
Some(clock::Lamport {
replica_id: replica_id as ReplicaId,
value,
})
}
pub fn serialize_completion(completion: &Completion) -> proto::Completion {
proto::Completion {
old_start: Some(serialize_anchor(&completion.old_range.start)),
old_end: Some(serialize_anchor(&completion.old_range.end)),
new_text: completion.new_text.clone(),
server_id: completion.server_id.0 as u64,
lsp_completion: serde_json::to_vec(&completion.lsp_completion).unwrap(),
}
}
pub async fn deserialize_completion(
completion: proto::Completion,
language: Option<Arc<Language>>,
) -> Result<Completion> {
let old_start = completion
.old_start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid old start"))?;
let old_end = completion
.old_end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid old end"))?;
let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
let mut label = None;
if let Some(language) = language {
label = language.label_for_completion(&lsp_completion).await;
}
Ok(Completion {
old_range: old_start..old_end,
new_text: completion.new_text,
label: label.unwrap_or_else(|| {
CodeLabel::plain(
lsp_completion.label.clone(),
lsp_completion.filter_text.as_deref(),
)
}),
server_id: LanguageServerId(completion.server_id as usize),
lsp_completion,
})
}
pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
proto::CodeAction {
server_id: action.server_id.0 as u64,
start: Some(serialize_anchor(&action.range.start)),
end: Some(serialize_anchor(&action.range.end)),
lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(),
}
}
pub fn deserialize_code_action(action: proto::CodeAction) -> Result<CodeAction> {
let start = action
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid start"))?;
let end = action
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid end"))?;
let lsp_action = serde_json::from_slice(&action.lsp_action)?;
Ok(CodeAction {
server_id: LanguageServerId(action.server_id as usize),
range: start..end,
lsp_action,
})
}
pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction {
proto::Transaction {
id: Some(serialize_timestamp(transaction.id)),
edit_ids: transaction
.edit_ids
.iter()
.copied()
.map(serialize_timestamp)
.collect(),
start: serialize_version(&transaction.start),
}
}
pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transaction> {
Ok(Transaction {
id: deserialize_timestamp(
transaction
.id
.ok_or_else(|| anyhow!("missing transaction id"))?,
),
edit_ids: transaction
.edit_ids
.into_iter()
.map(deserialize_timestamp)
.collect(),
start: deserialize_version(&transaction.start),
})
}
pub fn serialize_timestamp(timestamp: clock::Lamport) -> proto::LamportTimestamp {
proto::LamportTimestamp {
replica_id: timestamp.replica_id as u32,
value: timestamp.value,
}
}
pub fn deserialize_timestamp(timestamp: proto::LamportTimestamp) -> clock::Lamport {
clock::Lamport {
replica_id: timestamp.replica_id as ReplicaId,
value: timestamp.value,
}
}
pub fn serialize_range(range: &Range<FullOffset>) -> proto::Range {
proto::Range {
start: range.start.0 as u64,
end: range.end.0 as u64,
}
}
pub fn deserialize_range(range: proto::Range) -> Range<FullOffset> {
FullOffset(range.start as usize)..FullOffset(range.end as usize)
}
pub fn deserialize_version(message: &[proto::VectorClockEntry]) -> clock::Global {
let mut version = clock::Global::new();
for entry in message {
version.observe(clock::Lamport {
replica_id: entry.replica_id as ReplicaId,
value: entry.timestamp,
});
}
version
}
pub fn serialize_version(version: &clock::Global) -> Vec<proto::VectorClockEntry> {
version
.iter()
.map(|entry| proto::VectorClockEntry {
replica_id: entry.replica_id as u32,
timestamp: entry.value,
})
.collect()
}
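
A minimal hedged sketch of the intended symmetry between these helpers (assuming, as in the text and clock crates, that Anchor is Copy and both types implement PartialEq):

// Sketch: versions and anchors survive a round trip through their proto forms.
fn roundtrip(version: &clock::Global, anchor: &Anchor) {
    let wire = serialize_version(version);
    assert_eq!(deserialize_version(&wire), *version);
    assert_eq!(deserialize_anchor(serialize_anchor(anchor)), Some(*anchor));
}
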

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@ use gpui2::{
use gpui2::{
elements::div, interactive::Interactive, platform::MouseButton, style::StyleHelpers, ArcCow,
Element, EventContext, IntoElement, ParentElement, ViewContext,
div, ArcCow, Element, EventContext, Interactive, IntoElement, MouseButton, ParentElement,
StyleHelpers, ViewContext,
};
use std::{marker::PhantomData, rc::Rc};