Start work on API docs for the language crate (#3981)

This commit is contained in:
Max Brunsfeld 2024-01-19 10:04:45 -08:00 committed by GitHub
commit 9ec6855e6b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 648 additions and 101 deletions

File diff suppressed because it is too large

View File

@ -9,20 +9,36 @@ use std::{
use sum_tree::{self, Bias, SumTree};
use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
/// A set of diagnostics associated with a given buffer, provided
/// by a single language server.
///
/// The diagnostics are stored in a [SumTree], which allows this struct
/// to be cheaply copied, and allows for efficient retrieval of the
/// diagnostics that intersect a given range of the buffer.
#[derive(Clone, Debug, Default)]
pub struct DiagnosticSet {
diagnostics: SumTree<DiagnosticEntry<Anchor>>,
}
/// A single diagnostic in a set. Generic over its range type, because
/// the diagnostics are stored internally as [Anchor]s, but can be
/// resolved to different coordinate types like [usize] byte offsets or
/// [Point]s.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DiagnosticEntry<T> {
/// The range of the buffer where the diagnostic applies.
pub range: Range<T>,
/// The information about the diagnostic.
pub diagnostic: Diagnostic,
}
/// A group of related diagnostics, ordered by their start position
/// in the buffer.
#[derive(Debug)]
pub struct DiagnosticGroup<T> {
/// The diagnostics.
pub entries: Vec<DiagnosticEntry<T>>,
/// The index into `entries` where the primary diagnostic is stored.
pub primary_ix: usize,
}
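A group bundles the primary diagnostic with its related entries; the field docs above imply the following access pattern (a minimal sketch, assuming only the fields as declared):

fn primary_of<T>(group: &DiagnosticGroup<T>) -> &DiagnosticEntry<T> {
    // `primary_ix` indexes into `entries`, so the primary diagnostic is just:
    &group.entries[group.primary_ix]
}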
@ -36,7 +52,8 @@ pub struct Summary {
}
impl<T> DiagnosticEntry<T> {
// Used to provide diagnostic context to lsp codeAction request
/// Returns a raw LSP diagnostic, used to provide diagnostic context to the LSP
/// `codeAction` request.
pub fn to_lsp_diagnostic_stub(&self) -> lsp::Diagnostic {
let code = self
.diagnostic
@ -53,6 +70,8 @@ impl<T> DiagnosticEntry<T> {
}
impl DiagnosticSet {
/// Constructs a [DiagnosticSet] from a sequence of entries, ordered by
/// their position in the buffer.
pub fn from_sorted_entries<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
where
I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
@ -62,6 +81,7 @@ impl DiagnosticSet {
}
}
/// Constructs a [DiagnosticSet] from a sequence of entries in an arbitrary order.
pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
where
I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
@ -80,14 +100,18 @@ impl DiagnosticSet {
}
}
/// Returns the number of diagnostics in the set.
pub fn len(&self) -> usize {
self.diagnostics.summary().count
}
/// Returns an iterator over the diagnostic entries in the set.
pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
self.diagnostics.iter()
}
/// Returns an iterator over the diagnostic entries that intersect the
/// given range of the buffer.
pub fn range<'a, T, O>(
&'a self,
range: Range<T>,
@ -134,6 +158,7 @@ impl DiagnosticSet {
})
}
/// Adds all of this set's diagnostic groups to the given output vector.
pub fn groups(
&self,
language_server_id: LanguageServerId,
@ -173,6 +198,8 @@ impl DiagnosticSet {
});
}
/// Returns all of the diagnostics in a particular diagnostic group,
/// in order of their position in the buffer.
pub fn group<'a, O: FromAnchor>(
&'a self,
group_id: usize,
@ -183,6 +210,7 @@ impl DiagnosticSet {
.map(|entry| entry.resolve(buffer))
}
}
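Taken together, the constructors and accessors documented above suggest the following usage. This is a hedged sketch that only compiles inside Zed's workspace, and it assumes `PointUtf16` implements `FromAnchor` (which the `resolve` bound requires):

fn summarize(entries: Vec<DiagnosticEntry<PointUtf16>>, buffer: &text::BufferSnapshot) -> usize {
    // `new` accepts entries in arbitrary order and anchors them to the buffer.
    let set = DiagnosticSet::new(entries, buffer);
    // Entries are stored as `Anchor`s and can be resolved back to points on demand.
    for entry in set.iter() {
        let _resolved: DiagnosticEntry<PointUtf16> = entry.resolve(buffer);
    }
    set.len()
}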
impl sum_tree::Item for DiagnosticEntry<Anchor> {
type Summary = Summary;
@ -198,6 +226,7 @@ impl sum_tree::Item for DiagnosticEntry<Anchor> {
}
impl DiagnosticEntry<Anchor> {
/// Converts the [DiagnosticEntry] to a different buffer coordinate type.
pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticEntry<O> {
DiagnosticEntry {
range: O::from_anchor(&self.range.start, buffer)

View File

@ -11,7 +11,7 @@ pub struct HighlightId(pub u32);
const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);
impl HighlightMap {
pub fn new(capture_names: &[&str], theme: &SyntaxTheme) -> Self {
pub(crate) fn new(capture_names: &[&str], theme: &SyntaxTheme) -> Self {
// For each capture name in the highlight query, find the longest
// key in the theme's syntax styles that matches all of the
// dot-separated components of the capture name.
@ -51,7 +51,7 @@ impl HighlightMap {
}
impl HighlightId {
pub fn is_default(&self) -> bool {
pub(crate) fn is_default(&self) -> bool {
*self == DEFAULT_SYNTAX_HIGHLIGHT_ID
}
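The comment in `HighlightMap::new` describes a longest-match rule over dot-separated capture names. The sketch below is not Zed's implementation, just a standalone illustration of that rule:

fn best_theme_key<'a>(capture_name: &str, theme_keys: &[&'a str]) -> Option<&'a str> {
    let capture_parts: Vec<&str> = capture_name.split('.').collect();
    theme_keys
        .iter()
        .copied()
        // Keep keys whose dot-separated components all occur, in order, in the capture name.
        .filter(|key| {
            let mut parts = capture_parts.iter();
            key.split('.').all(|k| parts.any(|c| *c == k))
        })
        // Prefer the key that matches the most components.
        .max_by_key(|key| key.split('.').count())
}

fn main() {
    let keys = ["function", "function.method", "comment"];
    assert_eq!(best_theme_key("function.method.builtin", &keys), Some("function.method"));
}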

View File

@ -1,3 +1,11 @@
//! The `language` crate provides a large chunk of Zed's language-related
//! features (the other big contributors being the `project` and `lsp` crates, which revolve around LSP features).
//! Namely, this crate:
//! - Provides [`Language`], [`Grammar`] and [`LanguageRegistry`] types that
//! use Tree-sitter to provide syntax highlighting to the editor; note though that `language` doesn't perform the highlighting by itself. It only maps ranges in a buffer to colors. Tree-sitter is also used for buffer outlines (lists of symbols in a buffer).
//! - Exposes [`LanguageConfig`] that describes how constructs (like brackets or line comments) should be handled by the editor for a source file of a particular language.
//!
//! Notably, we do *not* assign a single language to a single file; in the real world a single file can consist of multiple programming languages - HTML is a good example of that - and the `language` crate tends to reflect that status quo in its API.
mod buffer;
mod diagnostic_set;
mod highlight_map;
@ -54,10 +62,13 @@ pub use buffer::*;
pub use diagnostic_set::DiagnosticEntry;
pub use lsp::LanguageServerId;
pub use outline::{Outline, OutlineItem};
pub use syntax_map::{OwnedSyntaxLayerInfo, SyntaxLayerInfo};
pub use syntax_map::{OwnedSyntaxLayer, SyntaxLayer};
pub use text::LineEnding;
pub use tree_sitter::{Parser, Tree};
/// Initializes the `language` crate.
///
/// This should be called before making use of items from the crate.
pub fn init(cx: &mut AppContext) {
language_settings::init(cx);
}
@ -90,7 +101,9 @@ thread_local! {
}
lazy_static! {
pub static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
pub(crate) static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
/// A shared grammar for plain text, exposed for reuse by downstream crates.
#[doc(hidden)]
pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
LanguageConfig {
name: "Plain Text".into(),
@ -100,10 +113,14 @@ lazy_static! {
));
}
/// Types that represent a position in a buffer, and can be converted into
/// an LSP position, to send to a language server.
pub trait ToLspPosition {
/// Converts the value into an LSP position.
fn to_lsp_position(self) -> lsp::Position;
}
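A hedged example of implementing the trait for a hypothetical row/column type (`MyPoint` is illustrative only; it assumes `lsp::Position` exposes the usual `lsp_types` constructor with zero-based UTF-16 coordinates):

struct MyPoint {
    row: u32,
    column: u32,
}

impl ToLspPosition for MyPoint {
    fn to_lsp_position(self) -> lsp::Position {
        // lsp::Position is (line, character), both zero-based.
        lsp::Position::new(self.row, self.column)
    }
}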
/// A name of a language server.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct LanguageServerName(pub Arc<str>);
@ -239,6 +256,8 @@ impl CachedLspAdapter {
}
}
/// [`LspAdapterDelegate`] allows [`LspAdapter`] implementations to interface with the application,
/// e.g. to display a notification or fetch data from the web.
pub trait LspAdapterDelegate: Send + Sync {
fn show_notification(&self, message: &str, cx: &mut AppContext);
fn http_client(&self) -> Arc<dyn HttpClient>;
@ -284,6 +303,10 @@ pub trait LspAdapter: 'static + Send + Sync {
delegate: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary>;
/// Returns true if a language server can be reinstalled.
/// If language server initialization fails, a reinstallation will be attempted unless this method returns false.
/// Implementations that rely on software already installed on the user's system
/// should have [`can_be_reinstalled`] return false.
fn can_be_reinstalled(&self) -> bool {
true
}
@ -295,6 +318,9 @@ pub trait LspAdapter: 'static + Send + Sync {
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
/// A callback called for each [`lsp_types::CompletionItem`] obtained from the LSP server.
/// Some [`LspAdapter`] implementations might want to modify the obtained item to
/// change how it's displayed.
async fn process_completion(&self, _: &mut lsp::CompletionItem) {}
async fn label_for_completion(
@ -314,6 +340,7 @@ pub trait LspAdapter: 'static + Send + Sync {
None
}
/// Returns initialization options that are going to be sent to an LSP server as part of [`lsp_types::InitializeParams`].
async fn initialization_options(&self) -> Option<Value> {
None
}
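For illustration, an adapter might override this hook roughly as below. This is an abridged sketch rather than a complete adapter: the other required `LspAdapter` items are elided, and the `provideFormatter` option is a made-up example value.

struct MyAdapter;

#[async_trait]
impl LspAdapter for MyAdapter {
    // ...all other required methods elided for brevity...

    async fn initialization_options(&self) -> Option<serde_json::Value> {
        // Sent to the server inside `InitializeParams::initialization_options`.
        Some(serde_json::json!({
            "provideFormatter": true,
        }))
    }
}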
@ -322,6 +349,7 @@ pub trait LspAdapter: 'static + Send + Sync {
futures::future::ready(serde_json::json!({})).boxed()
}
/// Returns a list of code actions supported by a given [`LspAdapter`].
fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
Some(vec![
CodeActionKind::EMPTY,
@ -358,36 +386,59 @@ pub struct CodeLabel {
#[derive(Clone, Deserialize)]
pub struct LanguageConfig {
/// Human-readable name of the language.
pub name: Arc<str>,
// The name of the grammar in a WASM bundle (experimental).
pub grammar_name: Option<Arc<str>>,
/// Given a list of `LanguageConfig`s, the language of a file is determined by matching its path extension against any of the `path_suffixes`.
pub path_suffixes: Vec<String>,
/// List of bracket types in a language.
pub brackets: BracketPairConfig,
/// A regex pattern that determines whether the language should be assigned to a file or not.
#[serde(default, deserialize_with = "deserialize_regex")]
pub first_line_pattern: Option<Regex>,
/// If set to true, auto indentation uses the last non-empty line to determine
/// the indentation level for a new line.
#[serde(default = "auto_indent_using_last_non_empty_line_default")]
pub auto_indent_using_last_non_empty_line: bool,
/// A regex that is used to determine whether the indentation level should be
/// increased in the following line.
#[serde(default, deserialize_with = "deserialize_regex")]
pub increase_indent_pattern: Option<Regex>,
/// A regex that is used to determine whether the indentation level should be
/// decreased in the following line.
#[serde(default, deserialize_with = "deserialize_regex")]
pub decrease_indent_pattern: Option<Regex>,
/// A list of characters that trigger the automatic insertion of a closing
/// bracket when they immediately precede the point where an opening
/// bracket is inserted.
#[serde(default)]
pub autoclose_before: String,
#[serde(default)]
pub line_comment: Option<Arc<str>>,
/// A placeholder used internally by Semantic Index.
#[serde(default)]
pub collapsed_placeholder: String,
/// A line comment string that is inserted, e.g., by the `toggle comments` action.
#[serde(default)]
pub line_comment: Option<Arc<str>>,
/// The opening and closing strings of a block comment.
#[serde(default)]
pub block_comment: Option<(Arc<str>, Arc<str>)>,
/// A list of language servers that are allowed to run on subranges of a given language.
#[serde(default)]
pub scope_opt_in_language_servers: Vec<String>,
#[serde(default)]
pub overrides: HashMap<String, LanguageConfigOverride>,
/// A list of characters that Zed should treat as word characters for the
/// purpose of features that operate on word boundaries, like 'move to next word end'
/// or a whole-word search in buffer search.
#[serde(default)]
pub word_characters: HashSet<char>,
/// The name of a Prettier parser that should be used for this language.
#[serde(default)]
pub prettier_parser_name: Option<String>,
}
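Since the struct implements `Default` (see below), a config can also be built in code with struct-update syntax. A minimal sketch with illustrative values (in practice these come from per-language configuration files):

fn plain_text_config() -> LanguageConfig {
    LanguageConfig {
        name: "Plain Text".into(),
        path_suffixes: vec!["txt".into()],
        // Everything else keeps its default value.
        ..Default::default()
    }
}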
/// Tree-sitter language queries for a given language.
#[derive(Debug, Default)]
pub struct LanguageQueries {
pub highlights: Option<Cow<'static, str>>,
@ -399,6 +450,9 @@ pub struct LanguageQueries {
pub overrides: Option<Cow<'static, str>>,
}
/// Represents a language for a given range of a buffer. Some languages (e.g. HTML)
/// interleave several languages together, thus a single buffer might actually contain
/// several nested scopes.
#[derive(Clone, Debug)]
pub struct LanguageScope {
language: Arc<Language>,
@ -458,9 +512,9 @@ impl Default for LanguageConfig {
block_comment: Default::default(),
scope_opt_in_language_servers: Default::default(),
overrides: Default::default(),
collapsed_placeholder: Default::default(),
word_characters: Default::default(),
prettier_parser_name: None,
collapsed_placeholder: Default::default(),
}
}
}
@ -478,6 +532,7 @@ fn deserialize_regex<'de, D: Deserializer<'de>>(d: D) -> Result<Option<Regex>, D
}
}
#[doc(hidden)]
#[cfg(any(test, feature = "test-support"))]
pub struct FakeLspAdapter {
pub name: &'static str,
@ -489,9 +544,16 @@ pub struct FakeLspAdapter {
pub prettier_plugins: Vec<&'static str>,
}
/// Configuration of handling bracket pairs for a given language.
///
/// This struct includes settings for defining which pairs of characters are considered brackets and
/// also specifies any language-specific scopes where these pairs should be ignored for bracket matching purposes.
#[derive(Clone, Debug, Default)]
pub struct BracketPairConfig {
/// A list of character pairs that should be treated as brackets in the context of a given language.
pub pairs: Vec<BracketPair>,
/// A list of tree-sitter scopes for which a given bracket should not be active.
/// The n-th entry in [`Self::disabled_scopes_by_bracket_ix`] contains a list of disabled scopes for the n-th entry in [`Self::pairs`].
pub disabled_scopes_by_bracket_ix: Vec<Vec<String>>,
}
@ -523,11 +585,18 @@ impl<'de> Deserialize<'de> for BracketPairConfig {
}
}
/// Describes a single bracket pair and how an editor should react to e.g. inserting
/// an opening bracket or to a newline inserted between the `start` and `end` characters.
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
pub struct BracketPair {
/// Starting substring for a bracket.
pub start: String,
/// Ending substring for a bracket.
pub end: String,
/// True if `end` should be automatically inserted right after `start` characters.
pub close: bool,
/// True if an extra newline should be inserted while the cursor is in the middle
/// of that bracket pair.
pub newline: bool,
}
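For example, a curly-brace pair that is auto-closed and gets an extra newline when the cursor sits between the braces could be described as (illustrative values):

let braces = BracketPair {
    start: "{".into(),
    end: "}".into(),
    close: true,
    newline: true,
};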
@ -1641,6 +1710,8 @@ impl LanguageScope {
self.language.config.collapsed_placeholder.as_ref()
}
/// Returns the line comment prefix that is inserted, e.g., for line continuations or
/// by the `toggle comments` action.
pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
Override::as_option(
self.config_override().map(|o| &o.line_comment),
@ -1656,6 +1727,11 @@ impl LanguageScope {
.map(|e| (&e.0, &e.1))
}
/// Returns a list of language-specific word characters.
///
/// By default, Zed treats alphanumeric characters (and '_') as word characters for
/// the purpose of actions like 'move to next word end' or whole-word search.
/// It additionally accounts for the language's extra word characters.
pub fn word_characters(&self) -> Option<&HashSet<char>> {
Override::as_option(
self.config_override().map(|o| &o.word_characters),
@ -1663,6 +1739,8 @@ impl LanguageScope {
)
}
/// Returns a list of bracket pairs for a given language, together with a flag
/// indicating whether each particular bracket pair is currently active in this scope.
pub fn brackets(&self) -> impl Iterator<Item = (&BracketPair, bool)> {
let mut disabled_ids = self
.config_override()

View File

@ -1,3 +1,5 @@
//! Provides `language`-related settings.
use crate::{File, Language};
use anyhow::Result;
use collections::{HashMap, HashSet};
@ -11,10 +13,12 @@ use serde::{Deserialize, Serialize};
use settings::Settings;
use std::{num::NonZeroU32, path::Path, sync::Arc};
/// Initializes the language settings.
pub fn init(cx: &mut AppContext) {
AllLanguageSettings::register(cx);
}
/// Returns the settings for the specified language from the provided file.
pub fn language_settings<'a>(
language: Option<&Arc<Language>>,
file: Option<&Arc<dyn File>>,
@ -24,6 +28,7 @@ pub fn language_settings<'a>(
all_language_settings(file, cx).language(language_name.as_deref())
}
/// Returns the settings for all languages from the provided file.
pub fn all_language_settings<'a>(
file: Option<&Arc<dyn File>>,
cx: &'a AppContext,
@ -32,51 +37,89 @@ pub fn all_language_settings<'a>(
AllLanguageSettings::get(location, cx)
}
/// The settings for all languages.
#[derive(Debug, Clone)]
pub struct AllLanguageSettings {
/// The settings for GitHub Copilot.
pub copilot: CopilotSettings,
defaults: LanguageSettings,
languages: HashMap<Arc<str>, LanguageSettings>,
}
/// The settings for a particular language.
#[derive(Debug, Clone, Deserialize)]
pub struct LanguageSettings {
/// How many columns a tab should occupy.
pub tab_size: NonZeroU32,
/// Whether to indent lines using tab characters, as opposed to multiple
/// spaces.
pub hard_tabs: bool,
/// How to soft-wrap long lines of text.
pub soft_wrap: SoftWrap,
/// The column at which to soft-wrap lines, for buffers where soft-wrap
/// is enabled.
pub preferred_line_length: u32,
/// Whether to show wrap guides in the editor. Setting this to true will
/// show a guide at the 'preferred_line_length' value if soft wrap is set to
/// 'preferred_line_length', and will show any additional guides as specified
/// by the 'wrap_guides' setting.
pub show_wrap_guides: bool,
/// Character counts at which to show wrap guides in the editor.
pub wrap_guides: Vec<usize>,
/// Whether or not to perform a buffer format before saving.
pub format_on_save: FormatOnSave,
/// Whether or not to remove any trailing whitespace from lines of a buffer
/// before saving it.
pub remove_trailing_whitespace_on_save: bool,
/// Whether or not to ensure there's a single newline at the end of a buffer
/// when saving it.
pub ensure_final_newline_on_save: bool,
/// How to perform a buffer format.
pub formatter: Formatter,
/// Zed's Prettier integration settings.
/// If Prettier is enabled, Zed will use its own Prettier instance for any applicable file, if
/// the project has no other Prettier installed.
pub prettier: HashMap<String, serde_json::Value>,
/// Whether to use language servers to provide code intelligence.
pub enable_language_server: bool,
/// Controls whether Copilot provides suggestions immediately (true)
/// or waits for a `copilot::Toggle` (false).
pub show_copilot_suggestions: bool,
/// Whether to show tabs and spaces in the editor.
pub show_whitespaces: ShowWhitespaceSetting,
/// Whether to start a new line with a comment when a previous line is a comment as well.
pub extend_comment_on_newline: bool,
/// Inlay hint related settings.
pub inlay_hints: InlayHintSettings,
}
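A hedged sketch of reading these settings through the `language_settings` helper defined above; `language`, `file`, and `cx` are assumed to come from the caller:

let settings = language_settings(Some(&language), Some(&file), cx);
// Per-language values fall back to the defaults when no override exists.
let tab_size = settings.tab_size;
let wraps_at = settings.preferred_line_length;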
/// The settings for [GitHub Copilot](https://github.com/features/copilot).
#[derive(Clone, Debug, Default)]
pub struct CopilotSettings {
/// Whether Copilot is enabled.
pub feature_enabled: bool,
/// A list of globs representing files that Copilot should be disabled for.
pub disabled_globs: Vec<GlobMatcher>,
}
/// The settings for all languages.
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct AllLanguageSettingsContent {
/// The settings for enabling/disabling features.
#[serde(default)]
pub features: Option<FeaturesContent>,
/// The settings for GitHub Copilot.
#[serde(default)]
pub copilot: Option<CopilotSettingsContent>,
/// The default language settings.
#[serde(flatten)]
pub defaults: LanguageSettingsContent,
/// The settings for individual languages.
#[serde(default, alias = "language_overrides")]
pub languages: HashMap<Arc<str>, LanguageSettingsContent>,
}
/// The settings for a particular language.
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct LanguageSettingsContent {
/// How many columns a tab should occupy.
@ -138,7 +181,7 @@ pub struct LanguageSettingsContent {
pub formatter: Option<Formatter>,
/// Zed's Prettier integration settings.
/// If Prettier is enabled, Zed will use its own Prettier instance for any applicable file, if
/// project has no other Prettier installed.
/// the project has no other Prettier installed.
///
/// Default: {}
#[serde(default)]
@ -148,7 +191,7 @@ pub struct LanguageSettingsContent {
/// Default: true
#[serde(default)]
pub enable_language_server: Option<bool>,
/// Controls whether copilot provides suggestion immediately (true)
/// Controls whether Copilot provides suggestions immediately (true)
/// or waits for a `copilot::Toggle` (false).
///
/// Default: true
@ -167,18 +210,23 @@ pub struct LanguageSettingsContent {
pub inlay_hints: Option<InlayHintSettings>,
}
/// The contents of the GitHub Copilot settings.
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
pub struct CopilotSettingsContent {
/// A list of globs representing files that Copilot should be disabled for.
#[serde(default)]
pub disabled_globs: Option<Vec<String>>,
}
/// The settings for enabling/disabling features.
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct FeaturesContent {
/// Whether the GitHub Copilot feature is enabled.
pub copilot: Option<bool>,
}
/// Controls the soft-wrapping behavior in the editor.
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum SoftWrap {
@ -190,29 +238,38 @@ pub enum SoftWrap {
PreferredLineLength,
}
/// Controls the behavior of formatting files when they are saved.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum FormatOnSave {
/// Files should be formatted on save.
On,
/// Files should not be formatted on save.
Off,
/// Files should be formatted using the current language server.
LanguageServer,
/// The external program to use to format the files on save.
External {
/// The external program to run.
command: Arc<str>,
/// The arguments to pass to the program.
arguments: Arc<[String]>,
},
}
/// Controls how whitespace should be displayed in the editor.
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum ShowWhitespaceSetting {
/// Draw tabs and spaces only for the selected text.
/// Draw whitespace only for the selected text.
Selection,
/// Do not draw any tabs or spaces
/// Do not draw any tabs or spaces.
None,
/// Draw all invisible symbols
/// Draw all invisible symbols.
All,
}
/// Controls which formatter should be used when formatting code.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum Formatter {
@ -226,11 +283,14 @@ pub enum Formatter {
Prettier,
/// Format code using an external command.
External {
/// The external program to run.
command: Arc<str>,
/// The arguments to pass to the program.
arguments: Arc<[String]>,
},
}
/// The settings for inlay hints.
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
pub struct InlayHintSettings {
/// Global switch to toggle hints on and off.
@ -238,10 +298,19 @@ pub struct InlayHintSettings {
/// Default: false
#[serde(default)]
pub enabled: bool,
/// Whether type hints should be shown.
///
/// Default: true
#[serde(default = "default_true")]
pub show_type_hints: bool,
/// Whether parameter hints should be shown.
///
/// Default: true
#[serde(default = "default_true")]
pub show_parameter_hints: bool,
/// Whether other hints should be shown.
///
/// Default: true
#[serde(default = "default_true")]
pub show_other_hints: bool,
}
@ -251,6 +320,7 @@ fn default_true() -> bool {
}
impl InlayHintSettings {
/// Returns the kinds of inlay hints that are enabled based on the settings.
pub fn enabled_inlay_hint_kinds(&self) -> HashSet<Option<InlayHintKind>> {
let mut kinds = HashSet::default();
if self.show_type_hints {
@ -267,6 +337,7 @@ impl InlayHintSettings {
}
impl AllLanguageSettings {
/// Returns the [`LanguageSettings`] for the language with the specified name.
pub fn language<'a>(&'a self, language_name: Option<&str>) -> &'a LanguageSettings {
if let Some(name) = language_name {
if let Some(overrides) = self.languages.get(name) {
@ -276,6 +347,7 @@ impl AllLanguageSettings {
&self.defaults
}
/// Returns whether GitHub Copilot is enabled for the given path.
pub fn copilot_enabled_for_path(&self, path: &Path) -> bool {
!self
.copilot
@ -284,6 +356,7 @@ impl AllLanguageSettings {
.any(|glob| glob.is_match(path))
}
/// Returns whether GitHub Copilot is enabled for the given language and path.
pub fn copilot_enabled(&self, language: Option<&Arc<Language>>, path: Option<&Path>) -> bool {
if !self.copilot.feature_enabled {
return false;
@ -300,13 +373,20 @@ impl AllLanguageSettings {
}
}
/// The kind of an inlay hint.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum InlayHintKind {
/// An inlay hint for a type.
Type,
/// An inlay hint for a parameter.
Parameter,
}
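The `from_name` and `name` methods below round-trip through the kind's string representation, e.g.:

assert_eq!(InlayHintKind::from_name("type"), Some(InlayHintKind::Type));
assert_eq!(InlayHintKind::Type.name(), "type");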
impl InlayHintKind {
/// Returns the [`InlayHintKind`] from the given name.
///
/// Returns `None` if `name` does not match any of the expected
/// string representations.
pub fn from_name(name: &str) -> Option<Self> {
match name {
"type" => Some(InlayHintKind::Type),
@ -315,6 +395,7 @@ impl InlayHintKind {
}
}
/// Returns the name of this [`InlayHintKind`].
pub fn name(&self) -> &'static str {
match self {
InlayHintKind::Type => "type",

View File

@ -1,3 +1,5 @@
//! Provides Markdown-related constructs.
use std::sync::Arc;
use std::{ops::Range, path::PathBuf};
@ -5,21 +7,30 @@ use crate::{HighlightId, Language, LanguageRegistry};
use gpui::{px, FontStyle, FontWeight, HighlightStyle, UnderlineStyle};
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
/// Parsed Markdown content.
#[derive(Debug, Clone)]
pub struct ParsedMarkdown {
/// The Markdown text.
pub text: String,
/// The list of highlights contained in the Markdown document.
pub highlights: Vec<(Range<usize>, MarkdownHighlight)>,
/// The ranges of the regions in the Markdown document.
pub region_ranges: Vec<Range<usize>>,
/// The regions of the Markdown document.
pub regions: Vec<ParsedRegion>,
}
/// A run of highlighted Markdown text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MarkdownHighlight {
/// A styled Markdown highlight.
Style(MarkdownHighlightStyle),
/// A highlighted code block.
Code(HighlightId),
}
impl MarkdownHighlight {
/// Converts this [`MarkdownHighlight`] to a [`HighlightStyle`].
pub fn to_highlight_style(&self, theme: &theme::SyntaxTheme) -> Option<HighlightStyle> {
match self {
MarkdownHighlight::Style(style) => {
@ -48,23 +59,39 @@ impl MarkdownHighlight {
}
}
/// The style for a Markdown highlight.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct MarkdownHighlightStyle {
/// Whether the text should be italicized.
pub italic: bool,
/// Whether the text should be underlined.
pub underline: bool,
/// The weight of the text.
pub weight: FontWeight,
}
/// A parsed region in a Markdown document.
#[derive(Debug, Clone)]
pub struct ParsedRegion {
/// Whether the region is a code block.
pub code: bool,
/// The link contained in this region, if it has one.
pub link: Option<Link>,
}
/// A Markdown link.
#[derive(Debug, Clone)]
pub enum Link {
Web { url: String },
Path { path: PathBuf },
/// A link to a webpage.
Web {
/// The URL of the webpage.
url: String,
},
/// A link to a path on the filesystem.
Path {
/// The path to the item.
path: PathBuf,
},
}
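A small sketch of consuming a parsed link, matching on the two variants defined above:

fn describe(link: &Link) -> String {
    match link {
        Link::Web { url } => format!("web link to {url}"),
        Link::Path { path } => format!("file link to {}", path.display()),
    }
}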
impl Link {
@ -82,6 +109,7 @@ impl Link {
}
}
/// Parses a string of Markdown.
pub async fn parse_markdown(
markdown: &str,
language_registry: &Arc<LanguageRegistry>,
@ -111,6 +139,7 @@ pub async fn parse_markdown(
}
}
/// Parses a Markdown block.
pub async fn parse_markdown_block(
markdown: &str,
language_registry: &Arc<LanguageRegistry>,
@ -261,6 +290,7 @@ pub async fn parse_markdown_block(
}
}
/// Appends a highlighted run of text to the provided `text` buffer.
pub fn highlight_code(
text: &mut String,
highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
@ -275,6 +305,7 @@ pub fn highlight_code(
}
}
/// Appends a new paragraph to the provided `text` buffer.
pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
let mut is_subsequent_paragraph_of_list = false;
if let Some((_, has_content)) = list_stack.last_mut() {

View File

@ -2,6 +2,7 @@ use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{BackgroundExecutor, HighlightStyle};
use std::ops::Range;
/// An outline of all the symbols contained in a buffer.
#[derive(Debug)]
pub struct Outline<T> {
pub items: Vec<OutlineItem<T>>,

View File

@ -1,3 +1,5 @@
//! Handles conversions of `language` items to and from the [`rpc`] protocol.
use crate::{
diagnostic_set::DiagnosticEntry, CodeAction, CodeLabel, Completion, CursorShape, Diagnostic,
Language,
@ -11,15 +13,18 @@ use text::*;
pub use proto::{BufferState, Operation};
/// Serializes a [`RopeFingerprint`] to be sent over RPC.
pub fn serialize_fingerprint(fingerprint: RopeFingerprint) -> String {
fingerprint.to_hex()
}
/// Deserializes a [`RopeFingerprint`] from the RPC representation.
pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
RopeFingerprint::from_hex(fingerprint)
.map_err(|error| anyhow!("invalid fingerprint: {}", error))
}
/// Deserializes a [`text::LineEnding`] from the RPC representation.
pub fn deserialize_line_ending(message: proto::LineEnding) -> text::LineEnding {
match message {
proto::LineEnding::Unix => text::LineEnding::Unix,
@ -27,6 +32,7 @@ pub fn deserialize_line_ending(message: proto::LineEnding) -> text::LineEnding {
}
}
/// Serializes a [`text::LineEnding`] to be sent over RPC.
pub fn serialize_line_ending(message: text::LineEnding) -> proto::LineEnding {
match message {
text::LineEnding::Unix => proto::LineEnding::Unix,
@ -34,6 +40,7 @@ pub fn serialize_line_ending(message: text::LineEnding) -> proto::LineEnding {
}
}
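The two line-ending helpers are simple inverses of each other; a round-trip sketch (only meaningful inside Zed's workspace):

let proto_ending = serialize_line_ending(text::LineEnding::Unix);
assert!(matches!(deserialize_line_ending(proto_ending), text::LineEnding::Unix));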
/// Serializes a [`crate::Operation`] to be sent over RPC.
pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
proto::Operation {
variant: Some(match operation {
@ -96,6 +103,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
}
}
/// Serializes an [`operation::EditOperation`] to be sent over RPC.
pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit {
proto::operation::Edit {
replica_id: operation.timestamp.replica_id as u32,
@ -110,6 +118,7 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::
}
}
/// Serializes an entry in the undo map to be sent over RPC.
pub fn serialize_undo_map_entry(
(edit_id, counts): (&clock::Lamport, &[(clock::Lamport, u32)]),
) -> proto::UndoMapEntry {
@ -127,6 +136,7 @@ pub fn serialize_undo_map_entry(
}
}
/// Splits the given list of operations into chunks.
pub fn split_operations(
mut operations: Vec<proto::Operation>,
) -> impl Iterator<Item = Vec<proto::Operation>> {
@ -152,10 +162,12 @@ pub fn split_operations(
})
}
/// Serializes selections to be sent over RPC.
pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
selections.iter().map(serialize_selection).collect()
}
/// Serializes a [`Selection`] to be sent over RPC.
pub fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
proto::Selection {
id: selection.id as u64,
@ -171,6 +183,7 @@ pub fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
}
}
/// Serializes a [`CursorShape`] to be sent over RPC.
pub fn serialize_cursor_shape(cursor_shape: &CursorShape) -> proto::CursorShape {
match cursor_shape {
CursorShape::Bar => proto::CursorShape::CursorBar,
@ -180,6 +193,7 @@ pub fn serialize_cursor_shape(cursor_shape: &CursorShape) -> proto::CursorShape
}
}
/// Deserializes a [`CursorShape`] from the RPC representation.
pub fn deserialize_cursor_shape(cursor_shape: proto::CursorShape) -> CursorShape {
match cursor_shape {
proto::CursorShape::CursorBar => CursorShape::Bar,
@ -189,6 +203,7 @@ pub fn deserialize_cursor_shape(cursor_shape: proto::CursorShape) -> CursorShape
}
}
/// Serializes a list of diagnostics to be sent over RPC.
pub fn serialize_diagnostics<'a>(
diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
) -> Vec<proto::Diagnostic> {
@ -208,7 +223,7 @@ pub fn serialize_diagnostics<'a>(
} as i32,
group_id: entry.diagnostic.group_id as u64,
is_primary: entry.diagnostic.is_primary,
is_valid: entry.diagnostic.is_valid,
is_valid: true,
code: entry.diagnostic.code.clone(),
is_disk_based: entry.diagnostic.is_disk_based,
is_unnecessary: entry.diagnostic.is_unnecessary,
@ -216,6 +231,7 @@ pub fn serialize_diagnostics<'a>(
.collect()
}
/// Serializes an [`Anchor`] to be sent over RPC.
pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
proto::Anchor {
replica_id: anchor.timestamp.replica_id as u32,
@ -230,6 +246,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
}
// This behavior is currently copied in the collab database, for snapshotting channel notes
/// Deserializes a [`crate::Operation`] from the RPC representation.
pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
Ok(
match message
@ -312,6 +329,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operati
)
}
/// Deserializes an [`EditOperation`] from the RPC representation.
pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation {
EditOperation {
timestamp: clock::Lamport {
@ -324,6 +342,7 @@ pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation
}
}
/// Deserializes an entry in the undo map from the RPC representation.
pub fn deserialize_undo_map_entry(
entry: proto::UndoMapEntry,
) -> (clock::Lamport, Vec<(clock::Lamport, u32)>) {
@ -348,6 +367,7 @@ pub fn deserialize_undo_map_entry(
)
}
/// Deserializes selections from the RPC representation.
pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selection<Anchor>]> {
Arc::from(
selections
@ -357,6 +377,7 @@ pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selecti
)
}
/// Deserializes a [`Selection`] from the RPC representation.
pub fn deserialize_selection(selection: proto::Selection) -> Option<Selection<Anchor>> {
Some(Selection {
id: selection.id as usize,
@ -367,6 +388,7 @@ pub fn deserialize_selection(selection: proto::Selection) -> Option<Selection<An
})
}
/// Deserializes a list of diagnostics from the RPC representation.
pub fn deserialize_diagnostics(
diagnostics: Vec<proto::Diagnostic>,
) -> Arc<[DiagnosticEntry<Anchor>]> {
@ -387,7 +409,6 @@ pub fn deserialize_diagnostics(
message: diagnostic.message,
group_id: diagnostic.group_id as usize,
code: diagnostic.code,
is_valid: diagnostic.is_valid,
is_primary: diagnostic.is_primary,
is_disk_based: diagnostic.is_disk_based,
is_unnecessary: diagnostic.is_unnecessary,
@ -397,6 +418,7 @@ pub fn deserialize_diagnostics(
.collect()
}
/// Deserializes an [`Anchor`] from the RPC representation.
pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
Some(Anchor {
timestamp: clock::Lamport {
@ -412,6 +434,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
})
}
/// Returns a [`clock::Lamport`] timestamp for the given [`proto::Operation`].
pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option<clock::Lamport> {
let replica_id;
let value;
@ -444,6 +467,7 @@ pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option<c
})
}
/// Serializes a [`Completion`] to be sent over RPC.
pub fn serialize_completion(completion: &Completion) -> proto::Completion {
proto::Completion {
old_start: Some(serialize_anchor(&completion.old_range.start)),
@ -454,6 +478,7 @@ pub fn serialize_completion(completion: &Completion) -> proto::Completion {
}
}
/// Deserializes a [`Completion`] from the RPC representation.
pub async fn deserialize_completion(
completion: proto::Completion,
language: Option<Arc<Language>>,
@ -488,6 +513,7 @@ pub async fn deserialize_completion(
})
}
/// Serializes a [`CodeAction`] to be sent over RPC.
pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
proto::CodeAction {
server_id: action.server_id.0 as u64,
@ -497,6 +523,7 @@ pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
}
}
/// Deserializes a [`CodeAction`] from the RPC representation.
pub fn deserialize_code_action(action: proto::CodeAction) -> Result<CodeAction> {
let start = action
.start
@ -514,6 +541,7 @@ pub fn deserialize_code_action(action: proto::CodeAction) -> Result<CodeAction>
})
}
/// Serializes a [`Transaction`] to be sent over RPC.
pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction {
proto::Transaction {
id: Some(serialize_timestamp(transaction.id)),
@ -527,6 +555,7 @@ pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction {
}
}
/// Deserializes a [`Transaction`] from the RPC representation.
pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transaction> {
Ok(Transaction {
id: deserialize_timestamp(
@ -543,6 +572,7 @@ pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transa
})
}
/// Serializes a [`clock::Lamport`] timestamp to be sent over RPC.
pub fn serialize_timestamp(timestamp: clock::Lamport) -> proto::LamportTimestamp {
proto::LamportTimestamp {
replica_id: timestamp.replica_id as u32,
@ -550,6 +580,7 @@ pub fn serialize_timestamp(timestamp: clock::Lamport) -> proto::LamportTimestamp
}
}
/// Deserializes a [`clock::Lamport`] timestamp from the RPC representation.
pub fn deserialize_timestamp(timestamp: proto::LamportTimestamp) -> clock::Lamport {
clock::Lamport {
replica_id: timestamp.replica_id as ReplicaId,
@ -557,6 +588,7 @@ pub fn deserialize_timestamp(timestamp: proto::LamportTimestamp) -> clock::Lampo
}
}
/// Serializes a range of [`FullOffset`]s to be sent over RPC.
pub fn serialize_range(range: &Range<FullOffset>) -> proto::Range {
proto::Range {
start: range.start.0 as u64,
@ -564,10 +596,12 @@ pub fn serialize_range(range: &Range<FullOffset>) -> proto::Range {
}
}
/// Deserializes a range of [`FullOffset`]s from the RPC representation.
pub fn deserialize_range(range: proto::Range) -> Range<FullOffset> {
FullOffset(range.start as usize)..FullOffset(range.end as usize)
}
/// Deserializes a clock version from the RPC representation.
pub fn deserialize_version(message: &[proto::VectorClockEntry]) -> clock::Global {
let mut version = clock::Global::new();
for entry in message {
@ -579,6 +613,7 @@ pub fn deserialize_version(message: &[proto::VectorClockEntry]) -> clock::Global
version
}
/// Serializes a clock version to be sent over RPC.
pub fn serialize_version(version: &clock::Global) -> Vec<proto::VectorClockEntry> {
version
.iter()

View File

@ -29,7 +29,7 @@ pub struct SyntaxMap {
#[derive(Clone, Default)]
pub struct SyntaxSnapshot {
layers: SumTree<SyntaxLayer>,
layers: SumTree<SyntaxLayerEntry>,
parsed_version: clock::Global,
interpolated_version: clock::Global,
language_registry_version: usize,
@ -84,7 +84,7 @@ struct SyntaxMapMatchesLayer<'a> {
}
#[derive(Clone)]
struct SyntaxLayer {
struct SyntaxLayerEntry {
depth: usize,
range: Range<Anchor>,
content: SyntaxLayerContent,
@ -117,17 +117,22 @@ impl SyntaxLayerContent {
}
}
/// A layer of syntax highlighting, corresponding to a single syntax
/// tree in a particular language.
#[derive(Debug)]
pub struct SyntaxLayerInfo<'a> {
pub depth: usize,
pub struct SyntaxLayer<'a> {
/// The language for this layer.
pub language: &'a Arc<Language>,
depth: usize,
tree: &'a Tree,
offset: (usize, tree_sitter::Point),
}
/// A layer of syntax highlighting. Like [SyntaxLayer], but holding
/// owned data instead of references.
#[derive(Clone)]
pub struct OwnedSyntaxLayerInfo {
pub depth: usize,
pub struct OwnedSyntaxLayer {
/// The language for this layer.
pub language: Arc<Language>,
tree: tree_sitter::Tree,
offset: (usize, tree_sitter::Point),
@ -691,7 +696,7 @@ impl SyntaxSnapshot {
};
layers.push(
SyntaxLayer {
SyntaxLayerEntry {
depth: step.depth,
range: step.range,
content,
@ -741,7 +746,7 @@ impl SyntaxSnapshot {
SyntaxMapCaptures::new(
range.clone(),
text,
[SyntaxLayerInfo {
[SyntaxLayer {
language,
tree,
depth: 0,
@ -781,7 +786,7 @@ impl SyntaxSnapshot {
}
#[cfg(test)]
pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayer> {
self.layers_for_range(0..buffer.len(), buffer).collect()
}
@ -789,7 +794,7 @@ impl SyntaxSnapshot {
&'a self,
range: Range<T>,
buffer: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = SyntaxLayerInfo> {
) -> impl 'a + Iterator<Item = SyntaxLayer> {
let start_offset = range.start.to_offset(buffer);
let end_offset = range.end.to_offset(buffer);
let start = buffer.anchor_before(start_offset);
@ -813,7 +818,7 @@ impl SyntaxSnapshot {
let layer_start_offset = layer.range.start.to_offset(buffer);
let layer_start_point = layer.range.start.to_point(buffer).to_ts_point();
info = Some(SyntaxLayerInfo {
info = Some(SyntaxLayer {
tree,
language,
depth: layer.depth,
@ -842,7 +847,7 @@ impl<'a> SyntaxMapCaptures<'a> {
fn new(
range: Range<usize>,
text: &'a Rope,
layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
layers: impl Iterator<Item = SyntaxLayer<'a>>,
query: fn(&Grammar) -> Option<&Query>,
) -> Self {
let mut result = Self {
@ -964,7 +969,7 @@ impl<'a> SyntaxMapMatches<'a> {
fn new(
range: Range<usize>,
text: &'a Rope,
layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
layers: impl Iterator<Item = SyntaxLayer<'a>>,
query: fn(&Grammar) -> Option<&Query>,
) -> Self {
let mut result = Self::default();
@ -1436,23 +1441,25 @@ fn insert_newlines_between_ranges(
}
}
impl OwnedSyntaxLayerInfo {
impl OwnedSyntaxLayer {
/// Returns the root syntax node for this layer.
pub fn node(&self) -> Node {
self.tree
.root_node_with_offset(self.offset.0, self.offset.1)
}
}
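A hedged sketch of how a caller might hold onto a layer beyond a snapshot borrow, using `layers_for_range` and `to_owned` from this file (it assumes both are visible to the caller):

fn first_layer_root(snapshot: &SyntaxSnapshot, buffer: &BufferSnapshot) -> Option<OwnedSyntaxLayer> {
    snapshot
        .layers_for_range(0..buffer.len(), buffer)
        .next()
        // `to_owned` clones the underlying tree so the layer outlives the borrow.
        .map(|layer| layer.to_owned())
}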
impl<'a> SyntaxLayerInfo<'a> {
pub fn to_owned(&self) -> OwnedSyntaxLayerInfo {
OwnedSyntaxLayerInfo {
impl<'a> SyntaxLayer<'a> {
/// Returns an owned version of this layer.
pub fn to_owned(&self) -> OwnedSyntaxLayer {
OwnedSyntaxLayer {
tree: self.tree.clone(),
offset: self.offset,
depth: self.depth,
language: self.language.clone(),
}
}
/// Returns the root node for this layer.
pub fn node(&self) -> Node<'a> {
self.tree
.root_node_with_offset(self.offset.0, self.offset.1)
@ -1564,7 +1571,7 @@ impl ChangeRegionSet {
)
}
fn intersects(&self, layer: &SyntaxLayer, text: &BufferSnapshot) -> bool {
fn intersects(&self, layer: &SyntaxLayerEntry, text: &BufferSnapshot) -> bool {
for region in &self.0 {
if region.depth < layer.depth {
continue;
@ -1675,7 +1682,7 @@ impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary>
}
}
impl sum_tree::Item for SyntaxLayer {
impl sum_tree::Item for SyntaxLayerEntry {
type Summary = SyntaxLayerSummary;
fn summary(&self) -> Self::Summary {
@ -1690,7 +1697,7 @@ impl sum_tree::Item for SyntaxLayer {
}
}
impl std::fmt::Debug for SyntaxLayer {
impl std::fmt::Debug for SyntaxLayerEntry {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SyntaxLayer")
.field("depth", &self.depth)

View File

@ -5,7 +5,7 @@ use gpui::{
MouseButton, MouseDownEvent, MouseMoveEvent, ParentElement, Render, Styled,
UniformListScrollHandle, View, ViewContext, VisualContext, WeakView, WindowContext,
};
use language::{Buffer, OwnedSyntaxLayerInfo};
use language::{Buffer, OwnedSyntaxLayer};
use std::{mem, ops::Range};
use theme::ActiveTheme;
use tree_sitter::{Node, TreeCursor};
@ -54,7 +54,7 @@ struct EditorState {
struct BufferState {
buffer: Model<Buffer>,
excerpt_id: ExcerptId,
active_layer: Option<OwnedSyntaxLayerInfo>,
active_layer: Option<OwnedSyntaxLayer>,
}
impl SyntaxTreeView {
@ -477,7 +477,7 @@ impl SyntaxTreeToolbarItemView {
})
}
fn render_header(active_layer: &OwnedSyntaxLayerInfo) -> ButtonLike {
fn render_header(active_layer: &OwnedSyntaxLayer) -> ButtonLike {
ButtonLike::new("syntax tree header")
.child(Label::new(active_layer.language.name()))
.child(Label::new(format_node_range(active_layer.node())))

View File

@ -3028,7 +3028,7 @@ impl MultiBufferSnapshot {
pub fn has_git_diffs(&self) -> bool {
for excerpt in self.excerpts.iter() {
if !excerpt.buffer.git_diff.is_empty() {
if excerpt.buffer.has_git_diff() {
return true;
}
}

View File

@ -3910,7 +3910,6 @@ impl Project {
message: diagnostic.message.clone(),
group_id,
is_primary: true,
is_valid: true,
is_disk_based,
is_unnecessary,
},
@ -3928,7 +3927,6 @@ impl Project {
message: info.message.clone(),
group_id,
is_primary: false,
is_valid: true,
is_disk_based,
is_unnecessary: false,
},

View File

@ -1472,7 +1472,10 @@ message Diagnostic {
optional string code = 6;
uint64 group_id = 7;
bool is_primary = 8;
// TODO: remove this field
bool is_valid = 9;
bool is_disk_based = 10;
bool is_unnecessary = 11;

View File

@ -11,6 +11,7 @@ use workspace::Workspace;
use crate::{
normal::normal_motion,
state::{Mode, Operator},
utils::coerce_punctuation,
visual::visual_motion,
Vim,
};
@ -680,8 +681,8 @@ pub(crate) fn next_word_start(
for _ in 0..times {
let mut crossed_newline = false;
point = movement::find_boundary(map, point, FindRange::MultiLine, |left, right| {
let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);
let left_kind = coerce_punctuation(char_kind(&scope, left), ignore_punctuation);
let right_kind = coerce_punctuation(char_kind(&scope, right), ignore_punctuation);
let at_newline = right == '\n';
let found = (left_kind != right_kind && right_kind != CharKind::Whitespace)
@ -710,8 +711,8 @@ fn next_word_end(
*point.column_mut() = 0;
}
point = movement::find_boundary(map, point, FindRange::MultiLine, |left, right| {
let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);
let left_kind = coerce_punctuation(char_kind(&scope, left), ignore_punctuation);
let right_kind = coerce_punctuation(char_kind(&scope, right), ignore_punctuation);
left_kind != right_kind && left_kind != CharKind::Whitespace
});
@ -743,8 +744,8 @@ fn previous_word_start(
// cursor because the newline is checked only once.
point =
movement::find_preceding_boundary(map, point, FindRange::MultiLine, |left, right| {
let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);
let left_kind = coerce_punctuation(char_kind(&scope, left), ignore_punctuation);
let right_kind = coerce_punctuation(char_kind(&scope, right), ignore_punctuation);
(left_kind != right_kind && !right.is_whitespace()) || left == '\n'
});

View File

@ -1,4 +1,10 @@
use crate::{motion::Motion, object::Object, state::Mode, utils::copy_selections_content, Vim};
use crate::{
motion::Motion,
object::Object,
state::Mode,
utils::{coerce_punctuation, copy_selections_content},
Vim,
};
use editor::{
display_map::DisplaySnapshot,
movement::{self, FindRange, TextLayoutDetails},
@ -102,9 +108,9 @@ fn expand_changed_word_selection(
if in_word {
selection.end =
movement::find_boundary(map, selection.end, FindRange::MultiLine, |left, right| {
let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
let left_kind = coerce_punctuation(char_kind(&scope, left), ignore_punctuation);
let right_kind =
char_kind(&scope, right).coerce_punctuation(ignore_punctuation);
coerce_punctuation(char_kind(&scope, right), ignore_punctuation);
left_kind != right_kind && left_kind != CharKind::Whitespace
});

View File

@ -10,7 +10,10 @@ use language::{char_kind, CharKind, Selection};
use serde::Deserialize;
use workspace::Workspace;
use crate::{motion::right, normal::normal_object, state::Mode, visual::visual_object, Vim};
use crate::{
motion::right, normal::normal_object, state::Mode, utils::coerce_punctuation,
visual::visual_object, Vim,
};
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Object {
@ -213,14 +216,14 @@ fn in_word(
right(map, relative_to, 1),
movement::FindRange::SingleLine,
|left, right| {
char_kind(&scope, left).coerce_punctuation(ignore_punctuation)
!= char_kind(&scope, right).coerce_punctuation(ignore_punctuation)
coerce_punctuation(char_kind(&scope, left), ignore_punctuation)
!= coerce_punctuation(char_kind(&scope, right), ignore_punctuation)
},
);
let end = movement::find_boundary(map, relative_to, FindRange::SingleLine, |left, right| {
char_kind(&scope, left).coerce_punctuation(ignore_punctuation)
!= char_kind(&scope, right).coerce_punctuation(ignore_punctuation)
coerce_punctuation(char_kind(&scope, left), ignore_punctuation)
!= coerce_punctuation(char_kind(&scope, right), ignore_punctuation)
});
Some(start..end)
@ -283,15 +286,15 @@ fn around_next_word(
right(map, relative_to, 1),
FindRange::SingleLine,
|left, right| {
char_kind(&scope, left).coerce_punctuation(ignore_punctuation)
!= char_kind(&scope, right).coerce_punctuation(ignore_punctuation)
coerce_punctuation(char_kind(&scope, left), ignore_punctuation)
!= coerce_punctuation(char_kind(&scope, right), ignore_punctuation)
},
);
let mut word_found = false;
let end = movement::find_boundary(map, relative_to, FindRange::MultiLine, |left, right| {
let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);
let left_kind = coerce_punctuation(char_kind(&scope, left), ignore_punctuation);
let right_kind = coerce_punctuation(char_kind(&scope, right), ignore_punctuation);
let found = (word_found && left_kind != right_kind) || right == '\n' && left == '\n';

View File

@ -1,6 +1,6 @@
use editor::{ClipboardSelection, Editor};
use gpui::{AppContext, ClipboardItem};
use language::Point;
use language::{CharKind, Point};
pub fn copy_selections_content(editor: &mut Editor, linewise: bool, cx: &mut AppContext) {
let selections = editor.selections.all_adjusted(cx);
@ -48,3 +48,11 @@ pub fn copy_selections_content(editor: &mut Editor, linewise: bool, cx: &mut App
cx.write_to_clipboard(ClipboardItem::new(text).with_metadata(clipboard_selections));
}
pub fn coerce_punctuation(kind: CharKind, treat_punctuation_as_word: bool) -> CharKind {
if treat_punctuation_as_word && kind == CharKind::Punctuation {
CharKind::Word
} else {
kind
}
}
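The behavior is small enough to check directly; with `treat_punctuation_as_word` set, punctuation counts as part of a word, which is exactly how the word-motion call sites above use it:

assert!(matches!(coerce_punctuation(CharKind::Punctuation, true), CharKind::Word));
assert!(matches!(coerce_punctuation(CharKind::Punctuation, false), CharKind::Punctuation));
assert!(matches!(coerce_punctuation(CharKind::Word, false), CharKind::Word));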