Mirror of https://github.com/zed-industries/zed.git (synced 2024-12-29 14:43:55 +03:00)

Commit d2e769027a: catchup with main

7   .github/pull_request_template.md (vendored)
@@ -2,11 +2,4 @@
Release Notes:

- N/A

or

- (Added|Fixed|Improved) ... ([#<public_issue_number_if_exists>](https://github.com/zed-industries/community/issues/<public_issue_number_if_exists>)).

If the release notes are intended for a specific release channel only, add `(<release_channel>-only)` to the end of the release note line.

These will be removed by the person making the release.
21  .github/workflows/release_actions.yml (vendored)

@@ -6,8 +6,8 @@ jobs:
   discord_release:
     runs-on: ubuntu-latest
     steps:
-      - name: Get appropriate URL
-        id: get-appropriate-url
+      - name: Get release URL
+        id: get-release-url
        run: |
          if [ "${{ github.event.release.prerelease }}" == "true" ]; then
            URL="https://zed.dev/releases/preview/latest"
@@ -15,14 +15,19 @@ jobs:
            URL="https://zed.dev/releases/stable/latest"
          fi
          echo "::set-output name=URL::$URL"
+      - name: Get content
+        uses: 2428392/gh-truncate-string-action@v1.2.0
+        id: get-content
+        with:
+          stringToTruncate: |
+            📣 Zed ${{ github.event.release.tag_name }} was just released!
+
+            Restart your Zed or head to ${{ steps.get-release-url.outputs.URL }} to grab it.
+
+            ${{ github.event.release.body }}
+          maxLength: 2000
      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v5.3.0
        with:
          webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
-          content: |
-            📣 Zed ${{ github.event.release.tag_name }} was just released!
-
-            Restart your Zed or head to ${{ steps.get-appropriate-url.outputs.URL }} to grab it.
-
-            ${{ github.event.release.body }}
+          content: ${{ steps.get-content.outputs.string }}
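The new `get-content` step exists because Discord rejects messages longer than 2,000 characters, so the release announcement is truncated before the webhook step posts it. As a rough illustration only (not the action's actual code), a character-boundary-safe cap like this is all that step needs to guarantee:

```rust
/// Truncate `s` to at most `max_chars` characters, appending "…" when content
/// was dropped. Hypothetical helper; the workflow delegates this work to the
/// gh-truncate-string-action step instead.
fn truncate_for_discord(s: &str, max_chars: usize) -> String {
    if s.chars().count() <= max_chars {
        return s.to_string();
    }
    let truncated: String = s.chars().take(max_chars.saturating_sub(1)).collect();
    format!("{truncated}…")
}

fn main() {
    let body = "📣 Zed was just released!\n".repeat(200);
    let message = truncate_for_discord(&body, 2000);
    assert!(message.chars().count() <= 2000);
    println!("{} bytes after truncation", message.len());
}
```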
109  Cargo.lock (generated)
@ -1083,7 +1083,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"async-broadcast",
|
||||
"audio",
|
||||
"channel",
|
||||
"client",
|
||||
"collections",
|
||||
"fs",
|
||||
@ -1468,7 +1467,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "collab"
|
||||
version = "0.23.2"
|
||||
version = "0.24.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
@ -1503,6 +1502,7 @@ dependencies = [
|
||||
"log",
|
||||
"lsp",
|
||||
"nanoid",
|
||||
"node_runtime",
|
||||
"parking_lot 0.11.2",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
@ -2080,9 +2080,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "curl-sys"
|
||||
version = "0.4.66+curl-8.3.0"
|
||||
version = "0.4.67+curl-8.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "70c44a72e830f0e40ad90dda8a6ab6ed6314d39776599a58a2e5e37fbc6db5b9"
|
||||
checksum = "3cc35d066510b197a0f72de863736641539957628c8a42e70e27c66849e77c34"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
@ -2405,7 +2405,6 @@ dependencies = [
|
||||
"parking_lot 0.11.2",
|
||||
"postage",
|
||||
"project",
|
||||
"pulldown-cmark",
|
||||
"rand 0.8.5",
|
||||
"rich_text",
|
||||
"rpc",
|
||||
@ -2833,7 +2832,6 @@ dependencies = [
|
||||
"parking_lot 0.11.2",
|
||||
"regex",
|
||||
"rope",
|
||||
"rpc",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
@ -3991,6 +3989,7 @@ dependencies = [
|
||||
"lsp",
|
||||
"parking_lot 0.11.2",
|
||||
"postage",
|
||||
"pulldown-cmark",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
"rpc",
|
||||
@ -5520,6 +5519,26 @@ version = "0.2.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
|
||||
|
||||
[[package]]
|
||||
name = "prettier"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"collections",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
"language",
|
||||
"log",
|
||||
"lsp",
|
||||
"node_runtime",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pretty_assertions"
|
||||
version = "1.4.0"
|
||||
@ -5632,8 +5651,10 @@ dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"lsp",
|
||||
"node_runtime",
|
||||
"parking_lot 0.11.2",
|
||||
"postage",
|
||||
"prettier",
|
||||
"pretty_assertions",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
@ -6603,12 +6624,6 @@ dependencies = [
|
||||
"untrusted",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustversion"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
|
||||
|
||||
[[package]]
|
||||
name = "rustybuzz"
|
||||
version = "0.3.0"
|
||||
@ -6942,7 +6957,6 @@ dependencies = [
|
||||
"unindent",
|
||||
"util",
|
||||
"workspace",
|
||||
"zed",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -7662,28 +7676,6 @@ version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
|
||||
|
||||
[[package]]
|
||||
name = "storybook"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"clap 4.4.4",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
"gpui2",
|
||||
"itertools 0.11.0",
|
||||
"log",
|
||||
"rust-embed",
|
||||
"serde",
|
||||
"settings",
|
||||
"simplelog",
|
||||
"strum",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stringprep"
|
||||
version = "0.1.4"
|
||||
@ -7706,22 +7698,6 @@ name = "strum"
|
||||
version = "0.25.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125"
|
||||
dependencies = [
|
||||
"strum_macros",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strum_macros"
|
||||
version = "0.25.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059"
|
||||
dependencies = [
|
||||
"heck 0.4.1",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
"syn 2.0.37",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "subtle"
|
||||
@ -8802,6 +8778,15 @@ dependencies = [
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-vue"
|
||||
version = "0.0.1"
|
||||
source = "git+https://github.com/zed-industries/tree-sitter-vue?rev=95b2890#95b28908d90e928c308866f7631e73ef6e1d4b5f"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-yaml"
|
||||
version = "0.0.1"
|
||||
@ -8873,21 +8858,6 @@ version = "1.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
|
||||
|
||||
[[package]]
|
||||
name = "ui"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"gpui2",
|
||||
"rand 0.8.5",
|
||||
"serde",
|
||||
"settings",
|
||||
"smallvec",
|
||||
"strum",
|
||||
"theme",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicase"
|
||||
version = "2.7.0"
|
||||
@ -9651,6 +9621,7 @@ dependencies = [
|
||||
"theme",
|
||||
"theme_selector",
|
||||
"util",
|
||||
"vim",
|
||||
"workspace",
|
||||
]
|
||||
|
||||
@ -9957,7 +9928,6 @@ dependencies = [
|
||||
"async-recursion 1.0.5",
|
||||
"bincode",
|
||||
"call",
|
||||
"channel",
|
||||
"client",
|
||||
"collections",
|
||||
"context_menu",
|
||||
@ -9974,6 +9944,7 @@ dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"menu",
|
||||
"node_runtime",
|
||||
"parking_lot 0.11.2",
|
||||
"postage",
|
||||
"project",
|
||||
@ -10069,9 +10040,10 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.108.0"
|
||||
version = "0.109.0"
|
||||
dependencies = [
|
||||
"activity_indicator",
|
||||
"ai",
|
||||
"anyhow",
|
||||
"assistant",
|
||||
"async-compression",
|
||||
@ -10184,6 +10156,7 @@ dependencies = [
|
||||
"tree-sitter-svelte",
|
||||
"tree-sitter-toml",
|
||||
"tree-sitter-typescript",
|
||||
"tree-sitter-vue",
|
||||
"tree-sitter-yaml",
|
||||
"unindent",
|
||||
"url",
|
||||
|
@@ -52,6 +52,7 @@ members = [
     "crates/plugin",
     "crates/plugin_macros",
     "crates/plugin_runtime",
+    "crates/prettier",
     "crates/project",
     "crates/project_panel",
     "crates/project_symbols",
@@ -65,13 +66,11 @@ members = [
     "crates/sqlez_macros",
     "crates/feature_flags",
     "crates/rich_text",
-    "crates/storybook",
     "crates/sum_tree",
     "crates/terminal",
     "crates/text",
     "crates/theme",
     "crates/theme_selector",
-    "crates/ui",
     "crates/util",
     "crates/semantic_index",
     "crates/vim",
@@ -150,7 +149,7 @@ tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml",
 tree-sitter-lua = "0.0.14"
 tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
 tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "786689b0562b9799ce53e824cb45a1a2a04dc673"}
-
+tree-sitter-vue = {git = "https://github.com/zed-industries/tree-sitter-vue", rev = "95b2890"}
 [patch.crates-io]
 tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "35a6052fbcafc5e5fc0f9415b8652be7dcaf7222" }
 async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
@@ -83,9 +83,7 @@ foreman start
 If you want to run Zed pointed at the local servers, you can run:

 ```
-script/zed-with-local-servers
-# or...
-script/zed-with-local-servers --release
+script/zed-local
 ```

 ### Dump element JSON
@@ -408,6 +408,7 @@
       "vim::PushOperator",
       "Yank"
     ],
+    "shift-y": "vim::YankLine",
     "i": "vim::InsertBefore",
     "shift-i": "vim::InsertFirstNonWhitespace",
     "a": "vim::InsertAfter",
@@ -50,6 +50,9 @@
   // Whether to pop the completions menu while typing in an editor without
   // explicitly requesting it.
   "show_completions_on_input": true,
+  // Whether to display inline and alongside documentation for items in the
+  // completions menu
+  "show_completion_documentation": true,
   // Whether to show wrap guides in the editor. Setting this to true will
   // show a guide at the 'preferred_line_length' value if softwrap is set to
   // 'preferred_line_length', and will show any additional guides as specified
@@ -76,7 +79,7 @@
   // Settings related to calls in Zed
   "calls": {
     // Join calls with the microphone muted by default
-    "mute_on_join": true
+    "mute_on_join": false
   },
   // Scrollbar related settings
   "scrollbar": {
@@ -199,7 +202,12 @@
   //     "arguments": ["--stdin-filepath", "{buffer_path}"]
   //   }
   // }
-  "formatter": "language_server",
+  // 3. Format code using Zed's Prettier integration:
+  //   "formatter": "prettier"
+  // 4. Default. Format files using Zed's Prettier integration (if applicable),
+  //    or falling back to formatting via language server:
+  //   "formatter": "auto"
+  "formatter": "auto",
   // How to soft-wrap long lines of text. This setting can take
   // three values:
   //
@@ -429,6 +437,16 @@
     "tab_size": 2
     }
   },
+  // Zed's Prettier integration settings.
+  // If Prettier is enabled, Zed will use its Prettier instance for any applicable file, if
+  // the project has no other Prettier installation.
+  "prettier": {
+    // Use regular Prettier JSON configuration here, for example:
+    //   "trailingComma": "es5",
+    //   "tabWidth": 4,
+    //   "semi": false,
+    //   "singleQuote": true
+  },
   // LSP Specific settings.
   "lsp": {
     // Specify the LSP name as a key here.
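For reference, the `formatter` setting now defaults to `"auto"` and can also be `"language_server"`, `"prettier"`, or an external command with arguments. A hedged sketch of how such a value could be modeled with serde; the enum and field names here are illustrative assumptions, not Zed's actual settings types:

```rust
use serde::Deserialize;

/// Hypothetical model of the `formatter` setting values described above;
/// Zed's real settings types may differ.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
enum Formatter {
    Auto,
    LanguageServer,
    Prettier,
    External { command: String, arguments: Vec<String> },
}

fn main() -> serde_json::Result<()> {
    // A bare string selects one of the built-in modes.
    let simple: Formatter = serde_json::from_str("\"auto\"")?;
    // An object selects an external command, as in the commented-out example above.
    let external: Formatter = serde_json::from_str(
        r#"{ "external": { "command": "prettier", "arguments": ["--stdin-filepath", "{buffer_path}"] } }"#,
    )?;
    println!("{simple:?}\n{external:?}");
    Ok(())
}
```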
@ -1,7 +1,7 @@
|
||||
use crate::{
|
||||
assistant_settings::{AssistantDockPosition, AssistantSettings, OpenAIModel},
|
||||
codegen::{self, Codegen, CodegenKind},
|
||||
prompts::generate_content_prompt,
|
||||
prompts::{generate_content_prompt, PromptCodeSnippet},
|
||||
MessageId, MessageMetadata, MessageStatus, Role, SavedConversation, SavedConversationMetadata,
|
||||
SavedMessage,
|
||||
};
|
||||
@ -17,7 +17,7 @@ use editor::{
|
||||
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint,
|
||||
},
|
||||
scroll::autoscroll::{Autoscroll, AutoscrollStrategy},
|
||||
Anchor, Editor, MoveDown, MoveUp, MultiBufferSnapshot, ToOffset,
|
||||
Anchor, Editor, MoveDown, MoveUp, MultiBufferSnapshot, ToOffset, ToPoint,
|
||||
};
|
||||
use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
@ -296,22 +296,36 @@ impl AssistantPanel {
|
||||
if selection.start.excerpt_id() != selection.end.excerpt_id() {
|
||||
return;
|
||||
}
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
|
||||
// Extend the selection to the start and the end of the line.
|
||||
let mut point_selection = selection.map(|selection| selection.to_point(&snapshot));
|
||||
if point_selection.end > point_selection.start {
|
||||
point_selection.start.column = 0;
|
||||
// If the selection ends at the start of the line, we don't want to include it.
|
||||
if point_selection.end.column == 0 {
|
||||
point_selection.end.row -= 1;
|
||||
}
|
||||
point_selection.end.column = snapshot.line_len(point_selection.end.row);
|
||||
}
|
||||
|
||||
let codegen_kind = if point_selection.start == point_selection.end {
|
||||
CodegenKind::Generate {
|
||||
position: snapshot.anchor_after(point_selection.start),
|
||||
}
|
||||
} else {
|
||||
CodegenKind::Transform {
|
||||
range: snapshot.anchor_before(point_selection.start)
|
||||
..snapshot.anchor_after(point_selection.end),
|
||||
}
|
||||
};
|
||||
|
||||
let inline_assist_id = post_inc(&mut self.next_inline_assist_id);
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
let provider = Arc::new(OpenAICompletionProvider::new(
|
||||
api_key,
|
||||
cx.background().clone(),
|
||||
));
|
||||
let codegen_kind = if editor.read(cx).selections.newest::<usize>(cx).is_empty() {
|
||||
CodegenKind::Generate {
|
||||
position: selection.start,
|
||||
}
|
||||
} else {
|
||||
CodegenKind::Transform {
|
||||
range: selection.start..selection.end,
|
||||
}
|
||||
};
|
||||
|
||||
let codegen = cx.add_model(|cx| {
|
||||
Codegen::new(editor.read(cx).buffer().clone(), codegen_kind, provider, cx)
|
||||
});
|
||||
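The hunk above replaces the old offset-based selection check with a point-based one: a non-empty selection is snapped to whole lines before choosing between `CodegenKind::Generate` and `CodegenKind::Transform`. A self-contained sketch of that snapping, using a plain `Point` struct and a line-length closure in place of Zed's buffer snapshot (names here are assumptions for illustration):

```rust
/// Simplified stand-in for the editor's Point type (row, column).
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug)]
struct Point {
    row: u32,
    column: u32,
}

/// Extend a selection to full lines: the start moves to column 0, and the end
/// moves to the end of its line (stepping back one row first when the selection
/// ends at column 0, so that line is not included). `line_len` maps a row to its length.
fn extend_to_lines(mut start: Point, mut end: Point, line_len: impl Fn(u32) -> u32) -> (Point, Point) {
    if end > start {
        start.column = 0;
        if end.column == 0 {
            end.row -= 1;
        }
        end.column = line_len(end.row);
    }
    (start, end)
}

fn main() {
    let lines = ["let x = 1;", "let y = 2;", ""];
    let line_len = |row: u32| lines[row as usize].len() as u32;
    // Selection from the middle of line 0 to the very start of line 2.
    let (start, end) = extend_to_lines(Point { row: 0, column: 4 }, Point { row: 2, column: 0 }, line_len);
    assert_eq!((start.row, start.column), (0, 0));
    assert_eq!((end.row, end.column), (1, 10));
}
```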
@ -361,7 +375,7 @@ impl AssistantPanel {
|
||||
editor.insert_blocks(
|
||||
[BlockProperties {
|
||||
style: BlockStyle::Flex,
|
||||
position: selection.head().bias_left(&snapshot),
|
||||
position: snapshot.anchor_before(point_selection.head()),
|
||||
height: 2,
|
||||
render: Arc::new({
|
||||
let inline_assistant = inline_assistant.clone();
|
||||
@ -654,12 +668,14 @@ impl AssistantPanel {
|
||||
let snippets = cx.spawn(|_, cx| async move {
|
||||
let mut snippets = Vec::new();
|
||||
for result in search_results.await {
|
||||
snippets.push(result.buffer.read_with(&cx, |buffer, _| {
|
||||
buffer
|
||||
.snapshot()
|
||||
.text_for_range(result.range)
|
||||
.collect::<String>()
|
||||
}));
|
||||
snippets.push(PromptCodeSnippet::new(result, &cx));
|
||||
|
||||
// snippets.push(result.buffer.read_with(&cx, |buffer, _| {
|
||||
// buffer
|
||||
// .snapshot()
|
||||
// .text_for_range(result.range)
|
||||
// .collect::<String>()
|
||||
// }));
|
||||
}
|
||||
snippets
|
||||
});
|
||||
|
@ -1,9 +1,7 @@
|
||||
use crate::streaming_diff::{Hunk, StreamingDiff};
|
||||
use ai::completion::{CompletionProvider, OpenAIRequest};
|
||||
use anyhow::Result;
|
||||
use editor::{
|
||||
multi_buffer, Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
|
||||
};
|
||||
use editor::{multi_buffer, Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
|
||||
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
|
||||
use gpui::{Entity, ModelContext, ModelHandle, Task};
|
||||
use language::{Rope, TransactionId};
|
||||
@ -40,26 +38,11 @@ impl Entity for Codegen {
|
||||
impl Codegen {
|
||||
pub fn new(
|
||||
buffer: ModelHandle<MultiBuffer>,
|
||||
mut kind: CodegenKind,
|
||||
kind: CodegenKind,
|
||||
provider: Arc<dyn CompletionProvider>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
let snapshot = buffer.read(cx).snapshot(cx);
|
||||
match &mut kind {
|
||||
CodegenKind::Transform { range } => {
|
||||
let mut point_range = range.to_point(&snapshot);
|
||||
point_range.start.column = 0;
|
||||
if point_range.end.column > 0 || point_range.start.row == point_range.end.row {
|
||||
point_range.end.column = snapshot.line_len(point_range.end.row);
|
||||
}
|
||||
range.start = snapshot.anchor_before(point_range.start);
|
||||
range.end = snapshot.anchor_after(point_range.end);
|
||||
}
|
||||
CodegenKind::Generate { position } => {
|
||||
*position = position.bias_right(&snapshot);
|
||||
}
|
||||
}
|
||||
|
||||
Self {
|
||||
provider,
|
||||
buffer: buffer.clone(),
|
||||
@ -386,7 +369,7 @@ mod tests {
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let range = buffer.read_with(cx, |buffer, cx| {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(1, 4))..snapshot.anchor_after(Point::new(4, 4))
|
||||
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
|
||||
});
|
||||
let provider = Arc::new(TestCompletionProvider::new());
|
||||
let codegen = cx.add_model(|cx| {
|
||||
|
@ -1,10 +1,65 @@
|
||||
use crate::codegen::CodegenKind;
|
||||
use language::{BufferSnapshot, OffsetRangeExt, ToOffset};
|
||||
use gpui::{AppContext, AsyncAppContext};
|
||||
use language::{BufferSnapshot, Language, OffsetRangeExt, ToOffset};
|
||||
use semantic_index::SearchResult;
|
||||
use std::borrow::Cow;
|
||||
use std::cmp::{self, Reverse};
|
||||
use std::fmt::Write;
|
||||
use std::ops::Range;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tiktoken_rs::ChatCompletionRequestMessage;
|
||||
|
||||
pub struct PromptCodeSnippet {
|
||||
path: Option<PathBuf>,
|
||||
language_name: Option<String>,
|
||||
content: String,
|
||||
}
|
||||
|
||||
impl PromptCodeSnippet {
|
||||
pub fn new(search_result: SearchResult, cx: &AsyncAppContext) -> Self {
|
||||
let (content, language_name, file_path) =
|
||||
search_result.buffer.read_with(cx, |buffer, cx| {
|
||||
let snapshot = buffer.snapshot();
|
||||
let content = snapshot
|
||||
.text_for_range(search_result.range.clone())
|
||||
.collect::<String>();
|
||||
|
||||
let language_name = buffer
|
||||
.language()
|
||||
.and_then(|language| Some(language.name().to_string()));
|
||||
|
||||
let language = buffer.language();
|
||||
let file_path = buffer
|
||||
.file()
|
||||
.and_then(|file| Some(file.path().to_path_buf()));
|
||||
|
||||
(content, language_name, file_path)
|
||||
});
|
||||
|
||||
PromptCodeSnippet {
|
||||
path: file_path,
|
||||
language_name,
|
||||
content,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for PromptCodeSnippet {
|
||||
fn to_string(&self) -> String {
|
||||
let path = self
|
||||
.path
|
||||
.as_ref()
|
||||
.and_then(|path| Some(path.to_string_lossy().to_string()))
|
||||
.unwrap_or("".to_string());
|
||||
let language_name = self.language_name.clone().unwrap_or("".to_string());
|
||||
let content = self.content.clone();
|
||||
|
||||
format!("The below code snippet may be relevant from file: {path}\n```{language_name}\n{content}\n```")
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn summarize(buffer: &BufferSnapshot, selected_range: Range<impl ToOffset>) -> String {
|
||||
#[derive(Debug)]
|
||||
struct Match {
|
||||
@ -121,13 +176,14 @@ pub fn generate_content_prompt(
|
||||
buffer: &BufferSnapshot,
|
||||
range: Range<impl ToOffset>,
|
||||
kind: CodegenKind,
|
||||
search_results: Vec<String>,
|
||||
search_results: Vec<PromptCodeSnippet>,
|
||||
model: &str,
|
||||
) -> String {
|
||||
const MAXIMUM_SNIPPET_TOKEN_COUNT: usize = 500;
|
||||
const RESERVED_TOKENS_FOR_GENERATION: usize = 1000;
|
||||
|
||||
let mut prompts = Vec::new();
|
||||
let range = range.to_offset(buffer);
|
||||
|
||||
// General Preamble
|
||||
if let Some(language_name) = language_name {
|
||||
@ -139,13 +195,26 @@ pub fn generate_content_prompt(
|
||||
// Snippets
|
||||
let mut snippet_position = prompts.len() - 1;
|
||||
|
||||
let outline = summarize(buffer, range);
|
||||
prompts.push("The file you are currently working on has the following outline:".to_string());
|
||||
let mut content = String::new();
|
||||
content.extend(buffer.text_for_range(0..range.start));
|
||||
if range.start == range.end {
|
||||
content.push_str("<|START|>");
|
||||
} else {
|
||||
content.push_str("<|START|");
|
||||
}
|
||||
content.extend(buffer.text_for_range(range.clone()));
|
||||
if range.start != range.end {
|
||||
content.push_str("|END|>");
|
||||
}
|
||||
content.extend(buffer.text_for_range(range.end..buffer.len()));
|
||||
|
||||
prompts.push("The file you are currently working on has the following content:\n".to_string());
|
||||
|
||||
if let Some(language_name) = language_name {
|
||||
let language_name = language_name.to_lowercase();
|
||||
prompts.push(format!("```{language_name}\n{outline}\n```"));
|
||||
prompts.push(format!("```{language_name}\n{content}\n```"));
|
||||
} else {
|
||||
prompts.push(format!("```\n{outline}\n```"));
|
||||
prompts.push(format!("```\n{content}\n```"));
|
||||
}
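Instead of an outline, the prompt now embeds the full buffer content with the selection wrapped in `<|START|`/`|END|>` markers, or a lone `<|START|>` marker for an empty selection. A minimal sketch of that marker insertion over a plain string and byte range (assuming the range lies on character boundaries); the real code streams chunks from a buffer snapshot:

```rust
use std::ops::Range;

/// Wrap `range` in the buffer text with selection markers, as described above.
/// Hypothetical standalone helper used only to illustrate the marker scheme.
fn mark_selection(text: &str, range: Range<usize>) -> String {
    let mut content = String::with_capacity(text.len() + 18);
    content.push_str(&text[..range.start]);
    if range.is_empty() {
        content.push_str("<|START|>");
    } else {
        content.push_str("<|START|");
        content.push_str(&text[range.clone()]);
        content.push_str("|END|>");
    }
    content.push_str(&text[range.end..]);
    content
}

fn main() {
    assert_eq!(mark_selection("fn main() {}", 3..7), "fn <|START|main|END|>() {}");
    assert_eq!(mark_selection("fn main() {}", 3..3), "fn <|START|>main() {}");
}
```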
|
||||
|
||||
match kind {
|
||||
@ -164,17 +233,20 @@ pub fn generate_content_prompt(
|
||||
CodegenKind::Transform { range: _ } => {
|
||||
prompts.push("In particular, the user has selected a section of the text between the '<|START|' and '|END|>' spans.".to_string());
|
||||
prompts.push(format!(
|
||||
"Modify the users code selected text based upon the users prompt: {user_prompt}"
|
||||
"Modify the users code selected text based upon the users prompt: '{user_prompt}'"
|
||||
));
|
||||
prompts.push("You MUST reply with only the adjusted code (within the '<|START|' and '|END|>' spans), not the entire file.".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(language_name) = language_name {
|
||||
prompts.push(format!("Your answer MUST always be valid {language_name}"));
|
||||
prompts.push(format!(
|
||||
"Your answer MUST always and only be valid {language_name}"
|
||||
));
|
||||
}
|
||||
prompts.push("Always wrap your response in a Markdown codeblock".to_string());
|
||||
prompts.push("Never make remarks about the output.".to_string());
|
||||
prompts.push("DO NOT return any text, except the generated code.".to_string());
|
||||
prompts.push("DO NOT wrap your text in a Markdown block".to_string());
|
||||
|
||||
let current_messages = [ChatCompletionRequestMessage {
|
||||
role: "user".to_string(),
|
||||
@ -208,7 +280,8 @@ pub fn generate_content_prompt(
|
||||
|
||||
for search_result in search_results {
|
||||
let mut snippet_prompt = template.to_string();
|
||||
writeln!(snippet_prompt, "```\n{search_result}\n```").unwrap();
|
||||
let snippet = search_result.to_string();
|
||||
writeln!(snippet_prompt, "```\n{snippet}\n```").unwrap();
|
||||
|
||||
let token_count = encoding
|
||||
.encode_with_special_tokens(snippet_prompt.as_str())
|
||||
|
@ -20,7 +20,6 @@ test-support = [
|
||||
|
||||
[dependencies]
|
||||
audio = { path = "../audio" }
|
||||
channel = { path = "../channel" }
|
||||
client = { path = "../client" }
|
||||
collections = { path = "../collections" }
|
||||
gpui = { path = "../gpui" }
|
||||
|
@ -5,7 +5,6 @@ pub mod room;
|
||||
use anyhow::{anyhow, Result};
|
||||
use audio::Audio;
|
||||
use call_settings::CallSettings;
|
||||
use channel::ChannelId;
|
||||
use client::{
|
||||
proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
|
||||
ZED_ALWAYS_ACTIVE,
|
||||
@ -79,7 +78,7 @@ impl ActiveCall {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn channel_id(&self, cx: &AppContext) -> Option<ChannelId> {
|
||||
pub fn channel_id(&self, cx: &AppContext) -> Option<u64> {
|
||||
self.room()?.read(cx).channel_id()
|
||||
}
|
||||
|
||||
|
@ -18,7 +18,7 @@ use live_kit_client::{
|
||||
LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, RemoteAudioTrackUpdate,
|
||||
RemoteVideoTrackUpdate,
|
||||
};
|
||||
use postage::stream::Stream;
|
||||
use postage::{sink::Sink, stream::Stream, watch};
|
||||
use project::Project;
|
||||
use std::{future::Future, mem, pin::Pin, sync::Arc, time::Duration};
|
||||
use util::{post_inc, ResultExt, TryFutureExt};
|
||||
@ -70,6 +70,8 @@ pub struct Room {
|
||||
user_store: ModelHandle<UserStore>,
|
||||
follows_by_leader_id_project_id: HashMap<(PeerId, u64), Vec<PeerId>>,
|
||||
subscriptions: Vec<client::Subscription>,
|
||||
room_update_completed_tx: watch::Sender<Option<()>>,
|
||||
room_update_completed_rx: watch::Receiver<Option<()>>,
|
||||
pending_room_update: Option<Task<()>>,
|
||||
maintain_connection: Option<Task<Option<()>>>,
|
||||
}
|
||||
@ -211,6 +213,8 @@ impl Room {
|
||||
|
||||
Audio::play_sound(Sound::Joined, cx);
|
||||
|
||||
let (room_update_completed_tx, room_update_completed_rx) = watch::channel();
|
||||
|
||||
Self {
|
||||
id,
|
||||
channel_id,
|
||||
@ -230,6 +234,8 @@ impl Room {
|
||||
user_store,
|
||||
follows_by_leader_id_project_id: Default::default(),
|
||||
maintain_connection: Some(maintain_connection),
|
||||
room_update_completed_tx,
|
||||
room_update_completed_rx,
|
||||
}
|
||||
}
|
||||
|
||||
@ -599,28 +605,40 @@ impl Room {
|
||||
}
|
||||
|
||||
/// Returns the most 'active' projects, defined as most people in the project
|
||||
pub fn most_active_project(&self) -> Option<(u64, u64)> {
|
||||
let mut projects = HashMap::default();
|
||||
let mut hosts = HashMap::default();
|
||||
pub fn most_active_project(&self, cx: &AppContext) -> Option<(u64, u64)> {
|
||||
let mut project_hosts_and_guest_counts = HashMap::<u64, (Option<u64>, u32)>::default();
|
||||
for participant in self.remote_participants.values() {
|
||||
match participant.location {
|
||||
ParticipantLocation::SharedProject { project_id } => {
|
||||
*projects.entry(project_id).or_insert(0) += 1;
|
||||
project_hosts_and_guest_counts
|
||||
.entry(project_id)
|
||||
.or_default()
|
||||
.1 += 1;
|
||||
}
|
||||
ParticipantLocation::External | ParticipantLocation::UnsharedProject => {}
|
||||
}
|
||||
for project in &participant.projects {
|
||||
*projects.entry(project.id).or_insert(0) += 1;
|
||||
hosts.insert(project.id, participant.user.id);
|
||||
project_hosts_and_guest_counts
|
||||
.entry(project.id)
|
||||
.or_default()
|
||||
.0 = Some(participant.user.id);
|
||||
}
|
||||
}
|
||||
|
||||
let mut pairs: Vec<(u64, usize)> = projects.into_iter().collect();
|
||||
pairs.sort_by_key(|(_, count)| *count as i32);
|
||||
if let Some(user) = self.user_store.read(cx).current_user() {
|
||||
for project in &self.local_participant.projects {
|
||||
project_hosts_and_guest_counts
|
||||
.entry(project.id)
|
||||
.or_default()
|
||||
.0 = Some(user.id);
|
||||
}
|
||||
}
|
||||
|
||||
pairs
|
||||
.first()
|
||||
.map(|(project_id, _)| (*project_id, hosts[&project_id]))
|
||||
project_hosts_and_guest_counts
|
||||
.into_iter()
|
||||
.filter_map(|(id, (host, guest_count))| Some((id, host?, guest_count)))
|
||||
.max_by_key(|(_, _, guest_count)| *guest_count)
|
||||
.map(|(id, host, _)| (id, host))
|
||||
}
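The rewritten `most_active_project` collects, per project id, an optional host and a guest count, then returns the project with the most guests whose host is known. The selection step in isolation, as a sketch over plain maps rather than the room's participant types:

```rust
use std::collections::HashMap;

/// Pick the project with the most guests whose host is known, returning
/// (project_id, host_user_id). Mirrors the selection logic described above.
fn most_active_project(hosts_and_guest_counts: &HashMap<u64, (Option<u64>, u32)>) -> Option<(u64, u64)> {
    hosts_and_guest_counts
        .iter()
        .filter_map(|(&id, &(host, guest_count))| Some((id, host?, guest_count)))
        .max_by_key(|&(_, _, guest_count)| guest_count)
        .map(|(id, host, _)| (id, host))
}

fn main() {
    let mut projects = HashMap::new();
    projects.insert(1, (Some(100), 3)); // project 1: host 100, 3 guests
    projects.insert(2, (Some(200), 5)); // project 2: host 200, 5 guests
    projects.insert(3, (None, 9));      // host unknown: filtered out
    assert_eq!(most_active_project(&projects), Some((2, 200)));
}
```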
|
||||
|
||||
async fn handle_room_updated(
|
||||
@ -686,6 +704,7 @@ impl Room {
|
||||
let Some(peer_id) = participant.peer_id else {
|
||||
continue;
|
||||
};
|
||||
let participant_index = ParticipantIndex(participant.participant_index);
|
||||
this.participant_user_ids.insert(participant.user_id);
|
||||
|
||||
let old_projects = this
|
||||
@ -736,8 +755,9 @@ impl Room {
|
||||
if let Some(remote_participant) =
|
||||
this.remote_participants.get_mut(&participant.user_id)
|
||||
{
|
||||
remote_participant.projects = participant.projects;
|
||||
remote_participant.peer_id = peer_id;
|
||||
remote_participant.projects = participant.projects;
|
||||
remote_participant.participant_index = participant_index;
|
||||
if location != remote_participant.location {
|
||||
remote_participant.location = location;
|
||||
cx.emit(Event::ParticipantLocationChanged {
|
||||
@ -749,9 +769,7 @@ impl Room {
|
||||
participant.user_id,
|
||||
RemoteParticipant {
|
||||
user: user.clone(),
|
||||
participant_index: ParticipantIndex(
|
||||
participant.participant_index,
|
||||
),
|
||||
participant_index,
|
||||
peer_id,
|
||||
projects: participant.projects,
|
||||
location,
|
||||
@ -855,6 +873,7 @@ impl Room {
|
||||
});
|
||||
|
||||
this.check_invariants();
|
||||
this.room_update_completed_tx.try_send(Some(())).ok();
|
||||
cx.notify();
|
||||
});
|
||||
}));
|
||||
@ -863,6 +882,17 @@ impl Room {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn room_update_completed(&mut self) -> impl Future<Output = ()> {
|
||||
let mut done_rx = self.room_update_completed_rx.clone();
|
||||
async move {
|
||||
while let Some(result) = done_rx.next().await {
|
||||
if result.is_some() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn remote_video_track_updated(
|
||||
&mut self,
|
||||
change: RemoteVideoTrackUpdate,
|
||||
|
@ -2,19 +2,21 @@ mod channel_buffer;
|
||||
mod channel_chat;
|
||||
mod channel_store;
|
||||
|
||||
use client::{Client, UserStore};
|
||||
use gpui::{AppContext, ModelHandle};
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use channel_buffer::{ChannelBuffer, ChannelBufferEvent, ACKNOWLEDGE_DEBOUNCE_INTERVAL};
|
||||
pub use channel_chat::{ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId};
|
||||
pub use channel_store::{
|
||||
Channel, ChannelData, ChannelEvent, ChannelId, ChannelMembership, ChannelPath, ChannelStore,
|
||||
};
|
||||
|
||||
use client::Client;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[cfg(test)]
|
||||
mod channel_store_tests;
|
||||
|
||||
pub fn init(client: &Arc<Client>) {
|
||||
pub fn init(client: &Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
|
||||
channel_store::init(client, user_store, cx);
|
||||
channel_buffer::init(client);
|
||||
channel_chat::init(client);
|
||||
}
|
||||
|
@ -99,6 +99,10 @@ impl ChannelBuffer {
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn remote_id(&self, cx: &AppContext) -> u64 {
|
||||
self.buffer.read(cx).remote_id()
|
||||
}
|
||||
|
||||
pub fn user_store(&self) -> &ModelHandle<UserStore> {
|
||||
&self.user_store
|
||||
}
|
||||
|
@ -2,8 +2,10 @@ mod channel_index;
|
||||
|
||||
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat};
|
||||
use anyhow::{anyhow, Result};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{Client, Subscription, User, UserId, UserStore};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use db::RELEASE_CHANNEL;
|
||||
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
|
||||
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
|
||||
use rpc::{
|
||||
@ -14,7 +16,11 @@ use serde_derive::{Deserialize, Serialize};
|
||||
use std::{borrow::Cow, hash::Hash, mem, ops::Deref, sync::Arc, time::Duration};
|
||||
use util::ResultExt;
|
||||
|
||||
use self::channel_index::ChannelIndex;
|
||||
pub fn init(client: &Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
|
||||
let channel_store =
|
||||
cx.add_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
|
||||
cx.set_global(channel_store);
|
||||
}
|
||||
|
||||
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
|
||||
|
||||
@ -47,6 +53,26 @@ pub struct Channel {
|
||||
pub unseen_message_id: Option<u64>,
|
||||
}
|
||||
|
||||
impl Channel {
|
||||
pub fn link(&self) -> String {
|
||||
RELEASE_CHANNEL.link_prefix().to_owned()
|
||||
+ "channel/"
|
||||
+ &self.slug()
|
||||
+ "-"
|
||||
+ &self.id.to_string()
|
||||
}
|
||||
|
||||
pub fn slug(&self) -> String {
|
||||
let slug: String = self
|
||||
.name
|
||||
.chars()
|
||||
.map(|c| if c.is_alphanumeric() { c } else { '-' })
|
||||
.collect();
|
||||
|
||||
slug.trim_matches(|c| c == '-').to_string()
|
||||
}
|
||||
}
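`Channel::link` builds a URL from a release-channel prefix, a slug of the channel name, and the channel id; the slug maps every non-alphanumeric character to `-` and trims surrounding dashes. A standalone sketch of that slugging (the URL prefix below is a placeholder assumption, not the real `link_prefix` value):

```rust
/// Slug a channel name as described above: non-alphanumeric characters become '-',
/// then leading and trailing dashes are trimmed.
fn slug(name: &str) -> String {
    let slug: String = name
        .chars()
        .map(|c| if c.is_alphanumeric() { c } else { '-' })
        .collect();
    slug.trim_matches('-').to_string()
}

fn main() {
    // Hypothetical prefix; the real code takes it from the release channel.
    let link = format!("https://zed.dev/channel/{}-{}", slug("#zed insiders!"), 42);
    assert_eq!(link, "https://zed.dev/channel/zed-insiders-42");
}
```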
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize)]
|
||||
pub struct ChannelPath(Arc<[ChannelId]>);
|
||||
|
||||
@ -71,6 +97,10 @@ enum OpenedModelHandle<E: Entity> {
|
||||
}
|
||||
|
||||
impl ChannelStore {
|
||||
pub fn global(cx: &AppContext) -> ModelHandle<Self> {
|
||||
cx.global::<ModelHandle<Self>>().clone()
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
client: Arc<Client>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
@ -84,12 +114,21 @@ impl ChannelStore {
|
||||
let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
|
||||
while let Some(status) = connection_status.next().await {
|
||||
let this = this.upgrade(&cx)?;
|
||||
match status {
|
||||
client::Status::Connected { .. } => {
|
||||
this.update(&mut cx, |this, cx| this.handle_connect(cx))
|
||||
.await
|
||||
.log_err()?;
|
||||
}
|
||||
client::Status::SignedOut | client::Status::UpgradeRequired => {
|
||||
this.update(&mut cx, |this, cx| this.handle_disconnect(false, cx));
|
||||
}
|
||||
_ => {
|
||||
this.update(&mut cx, |this, cx| this.handle_disconnect(true, cx));
|
||||
}
|
||||
}
|
||||
if status.is_connected() {
|
||||
this.update(&mut cx, |this, cx| this.handle_connect(cx))
|
||||
.await
|
||||
.log_err()?;
|
||||
} else {
|
||||
this.update(&mut cx, |this, cx| this.handle_disconnect(cx));
|
||||
}
|
||||
}
|
||||
Some(())
|
||||
@ -793,7 +832,7 @@ impl ChannelStore {
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_disconnect(&mut self, cx: &mut ModelContext<Self>) {
|
||||
fn handle_disconnect(&mut self, wait_for_reconnect: bool, cx: &mut ModelContext<Self>) {
|
||||
self.channel_index.clear();
|
||||
self.channel_invitations.clear();
|
||||
self.channel_participants.clear();
|
||||
@ -804,7 +843,10 @@ impl ChannelStore {
|
||||
|
||||
self.disconnect_channel_buffers_task.get_or_insert_with(|| {
|
||||
cx.spawn_weak(|this, mut cx| async move {
|
||||
cx.background().timer(RECONNECT_TIMEOUT).await;
|
||||
if wait_for_reconnect {
|
||||
cx.background().timer(RECONNECT_TIMEOUT).await;
|
||||
}
|
||||
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
for (_, buffer) in this.opened_buffers.drain() {
|
||||
|
@ -340,10 +340,10 @@ fn init_test(cx: &mut AppContext) -> ModelHandle<ChannelStore> {
|
||||
|
||||
cx.foreground().forbid_parking();
|
||||
cx.set_global(SettingsStore::test(cx));
|
||||
crate::init(&client);
|
||||
client::init(&client, cx);
|
||||
crate::init(&client, user_store, cx);
|
||||
|
||||
cx.add_model(|cx| ChannelStore::new(client, user_store, cx))
|
||||
ChannelStore::global(cx)
|
||||
}
|
||||
|
||||
fn update_channels(
|
||||
|
@ -182,6 +182,7 @@ impl Bundle {
|
||||
kCFStringEncodingUTF8,
|
||||
ptr::null(),
|
||||
));
|
||||
// equivalent to: open zed-cli:... -a /Applications/Zed\ Preview.app
|
||||
let urls_to_open = CFArray::from_copyable(&[url_to_open.as_concrete_TypeRef()]);
|
||||
LSOpenFromURLSpec(
|
||||
&LSLaunchURLSpec {
|
||||
|
@ -70,7 +70,7 @@ pub const ZED_SECRET_CLIENT_TOKEN: &str = "618033988749894";
|
||||
pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(100);
|
||||
pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
|
||||
actions!(client, [SignIn, SignOut]);
|
||||
actions!(client, [SignIn, SignOut, Reconnect]);
|
||||
|
||||
pub fn init_settings(cx: &mut AppContext) {
|
||||
settings::register::<TelemetrySettings>(cx);
|
||||
@ -102,6 +102,17 @@ pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
|
||||
}
|
||||
}
|
||||
});
|
||||
cx.add_global_action({
|
||||
let client = client.clone();
|
||||
move |_: &Reconnect, cx| {
|
||||
if let Some(client) = client.upgrade() {
|
||||
cx.spawn(|cx| async move {
|
||||
client.reconnect(&cx);
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub struct Client {
|
||||
@ -1212,6 +1223,11 @@ impl Client {
|
||||
self.set_status(Status::SignedOut, cx);
|
||||
}
|
||||
|
||||
pub fn reconnect(self: &Arc<Self>, cx: &AsyncAppContext) {
|
||||
self.peer.teardown();
|
||||
self.set_status(Status::ConnectionLost, cx);
|
||||
}
|
||||
|
||||
fn connection_id(&self) -> Result<ConnectionId> {
|
||||
if let Status::Connected { connection_id, .. } = *self.status().borrow() {
|
||||
Ok(connection_id)
|
||||
|
@ -8,7 +8,6 @@ use sysinfo::{Pid, PidExt, ProcessExt, System, SystemExt};
|
||||
use tempfile::NamedTempFile;
|
||||
use util::http::HttpClient;
|
||||
use util::{channel::ReleaseChannel, TryFutureExt};
|
||||
use uuid::Uuid;
|
||||
|
||||
pub struct Telemetry {
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
@ -20,7 +19,7 @@ pub struct Telemetry {
|
||||
struct TelemetryState {
|
||||
metrics_id: Option<Arc<str>>, // Per logged-in user
|
||||
installation_id: Option<Arc<str>>, // Per app installation (different for dev, preview, and stable)
|
||||
session_id: String, // Per app launch
|
||||
session_id: Option<Arc<str>>, // Per app launch
|
||||
app_version: Option<Arc<str>>,
|
||||
release_channel: Option<&'static str>,
|
||||
os_name: &'static str,
|
||||
@ -43,7 +42,7 @@ lazy_static! {
|
||||
struct ClickhouseEventRequestBody {
|
||||
token: &'static str,
|
||||
installation_id: Option<Arc<str>>,
|
||||
session_id: String,
|
||||
session_id: Option<Arc<str>>,
|
||||
is_staff: Option<bool>,
|
||||
app_version: Option<Arc<str>>,
|
||||
os_name: &'static str,
|
||||
@ -134,7 +133,7 @@ impl Telemetry {
|
||||
release_channel,
|
||||
installation_id: None,
|
||||
metrics_id: None,
|
||||
session_id: Uuid::new_v4().to_string(),
|
||||
session_id: None,
|
||||
clickhouse_events_queue: Default::default(),
|
||||
flush_clickhouse_events_task: Default::default(),
|
||||
log_file: None,
|
||||
@ -149,9 +148,15 @@ impl Telemetry {
|
||||
Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
|
||||
}
|
||||
|
||||
pub fn start(self: &Arc<Self>, installation_id: Option<String>, cx: &mut AppContext) {
|
||||
pub fn start(
|
||||
self: &Arc<Self>,
|
||||
installation_id: Option<String>,
|
||||
session_id: String,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
let mut state = self.state.lock();
|
||||
state.installation_id = installation_id.map(|id| id.into());
|
||||
state.session_id = Some(session_id.into());
|
||||
let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
|
||||
drop(state);
|
||||
|
||||
@ -283,23 +288,21 @@ impl Telemetry {
|
||||
|
||||
{
|
||||
let state = this.state.lock();
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(
|
||||
&mut json_bytes,
|
||||
&ClickhouseEventRequestBody {
|
||||
token: ZED_SECRET_CLIENT_TOKEN,
|
||||
installation_id: state.installation_id.clone(),
|
||||
session_id: state.session_id.clone(),
|
||||
is_staff: state.is_staff.clone(),
|
||||
app_version: state.app_version.clone(),
|
||||
os_name: state.os_name,
|
||||
os_version: state.os_version.clone(),
|
||||
architecture: state.architecture,
|
||||
let request_body = ClickhouseEventRequestBody {
|
||||
token: ZED_SECRET_CLIENT_TOKEN,
|
||||
installation_id: state.installation_id.clone(),
|
||||
session_id: state.session_id.clone(),
|
||||
is_staff: state.is_staff.clone(),
|
||||
app_version: state.app_version.clone(),
|
||||
os_name: state.os_name,
|
||||
os_version: state.os_version.clone(),
|
||||
architecture: state.architecture,
|
||||
|
||||
release_channel: state.release_channel,
|
||||
events,
|
||||
},
|
||||
)?;
|
||||
release_channel: state.release_channel,
|
||||
events,
|
||||
};
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, &request_body)?;
|
||||
}
|
||||
|
||||
this.http_client
|
||||
|
@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
|
||||
default-run = "collab"
|
||||
edition = "2021"
|
||||
name = "collab"
|
||||
version = "0.23.2"
|
||||
version = "0.24.0"
|
||||
publish = false
|
||||
|
||||
[[bin]]
|
||||
@ -72,6 +72,7 @@ fs = { path = "../fs", features = ["test-support"] }
|
||||
git = { path = "../git", features = ["test-support"] }
|
||||
live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
|
||||
lsp = { path = "../lsp", features = ["test-support"] }
|
||||
node_runtime = { path = "../node_runtime" }
|
||||
project = { path = "../project", features = ["test-support"] }
|
||||
rpc = { path = "../rpc", features = ["test-support"] }
|
||||
settings = { path = "../settings", features = ["test-support"] }
|
||||
|
@@ -37,8 +37,10 @@ CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
 CREATE TABLE "rooms" (
     "id" INTEGER PRIMARY KEY AUTOINCREMENT,
     "live_kit_room" VARCHAR NOT NULL,
+    "enviroment" VARCHAR,
     "channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE
 );
+CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");

 CREATE TABLE "projects" (
     "id" INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -0,0 +1 @@
+ALTER TABLE rooms ADD COLUMN enviroment TEXT;

@@ -0,0 +1 @@
+CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");
@ -19,21 +19,14 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn create_root_channel(
|
||||
&self,
|
||||
name: &str,
|
||||
live_kit_room: &str,
|
||||
creator_id: UserId,
|
||||
) -> Result<ChannelId> {
|
||||
self.create_channel(name, None, live_kit_room, creator_id)
|
||||
.await
|
||||
pub async fn create_root_channel(&self, name: &str, creator_id: UserId) -> Result<ChannelId> {
|
||||
self.create_channel(name, None, creator_id).await
|
||||
}
|
||||
|
||||
pub async fn create_channel(
|
||||
&self,
|
||||
name: &str,
|
||||
parent: Option<ChannelId>,
|
||||
live_kit_room: &str,
|
||||
creator_id: UserId,
|
||||
) -> Result<ChannelId> {
|
||||
let name = Self::sanitize_channel_name(name)?;
|
||||
@ -90,14 +83,6 @@ impl Database {
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
room::ActiveModel {
|
||||
channel_id: ActiveValue::Set(Some(channel.id)),
|
||||
live_kit_room: ActiveValue::Set(live_kit_room.to_string()),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(channel.id)
|
||||
})
|
||||
.await
|
||||
@ -797,18 +782,36 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn room_id_for_channel(&self, channel_id: ChannelId) -> Result<RoomId> {
|
||||
pub async fn get_or_create_channel_room(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
live_kit_room: &str,
|
||||
enviroment: &str,
|
||||
) -> Result<RoomId> {
|
||||
self.transaction(|tx| async move {
|
||||
let tx = tx;
|
||||
let room = channel::Model {
|
||||
id: channel_id,
|
||||
..Default::default()
|
||||
}
|
||||
.find_related(room::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("invalid channel"))?;
|
||||
Ok(room.id)
|
||||
|
||||
let room = room::Entity::find()
|
||||
.filter(room::Column::ChannelId.eq(channel_id))
|
||||
.one(&*tx)
|
||||
.await?;
|
||||
|
||||
let room_id = if let Some(room) = room {
|
||||
room.id
|
||||
} else {
|
||||
let result = room::Entity::insert(room::ActiveModel {
|
||||
channel_id: ActiveValue::Set(Some(channel_id)),
|
||||
live_kit_room: ActiveValue::Set(live_kit_room.to_string()),
|
||||
enviroment: ActiveValue::Set(Some(enviroment.to_string())),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
result.last_insert_id
|
||||
};
|
||||
|
||||
Ok(room_id)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
@ -89,7 +89,7 @@ impl Database {
|
||||
|
||||
let mut rows = channel_message::Entity::find()
|
||||
.filter(condition)
|
||||
.order_by_asc(channel_message::Column::Id)
|
||||
.order_by_desc(channel_message::Column::Id)
|
||||
.limit(count as u64)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
@ -110,6 +110,7 @@ impl Database {
|
||||
});
|
||||
}
|
||||
drop(rows);
|
||||
messages.reverse();
|
||||
Ok(messages)
|
||||
})
|
||||
.await
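The query change orders messages by id descending with the existing `LIMIT`, then reverses the collected rows, so callers get the newest `count` messages back in chronological order. The same windowing expressed over a plain vector, as a sketch:

```rust
/// Return the last `count` ids in ascending (chronological) order, mirroring
/// the "order descending, limit, then reverse" query pattern described above.
fn last_n_chronological(mut ids: Vec<u64>, count: usize) -> Vec<u64> {
    ids.sort_unstable_by(|a, b| b.cmp(a)); // newest first
    ids.truncate(count);                   // keep only the newest `count`
    ids.reverse();                         // oldest of that window first
    ids
}

fn main() {
    assert_eq!(last_n_chronological(vec![1, 2, 3, 4, 5], 3), vec![3, 4, 5]);
}
```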
|
||||
|
@ -107,10 +107,12 @@ impl Database {
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
live_kit_room: &str,
|
||||
release_channel: &str,
|
||||
) -> Result<proto::Room> {
|
||||
self.transaction(|tx| async move {
|
||||
let room = room::ActiveModel {
|
||||
live_kit_room: ActiveValue::set(live_kit_room.into()),
|
||||
enviroment: ActiveValue::set(Some(release_channel.to_string())),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
@ -270,20 +272,31 @@ impl Database {
|
||||
room_id: RoomId,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
enviroment: &str,
|
||||
) -> Result<RoomGuard<JoinRoom>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryChannelId {
|
||||
enum QueryChannelIdAndEnviroment {
|
||||
ChannelId,
|
||||
Enviroment,
|
||||
}
|
||||
|
||||
let (channel_id, release_channel): (Option<ChannelId>, Option<String>) =
|
||||
room::Entity::find()
|
||||
.select_only()
|
||||
.column(room::Column::ChannelId)
|
||||
.column(room::Column::Enviroment)
|
||||
.filter(room::Column::Id.eq(room_id))
|
||||
.into_values::<_, QueryChannelIdAndEnviroment>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such room"))?;
|
||||
|
||||
if let Some(release_channel) = release_channel {
|
||||
if &release_channel != enviroment {
|
||||
Err(anyhow!("must join using the {} release", release_channel))?;
|
||||
}
|
||||
}
|
||||
let channel_id: Option<ChannelId> = room::Entity::find()
|
||||
.select_only()
|
||||
.column(room::Column::ChannelId)
|
||||
.filter(room::Column::Id.eq(room_id))
|
||||
.into_values::<_, QueryChannelId>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such room"))?;
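`join_room` now selects the room's stored `enviroment` column (spelled that way in the schema) alongside its channel id, and refuses the join when it does not match the caller's release channel. The guard on its own, as a hedged sketch using only `anyhow`:

```rust
use anyhow::{ensure, Result};

/// Reject a join when the room was created from a different release channel.
/// Standalone sketch of the check shown above.
fn check_release_channel(room_release_channel: Option<&str>, client_release_channel: &str) -> Result<()> {
    if let Some(required) = room_release_channel {
        ensure!(
            required == client_release_channel,
            "must join using the {} release",
            required
        );
    }
    Ok(())
}

fn main() {
    assert!(check_release_channel(Some("stable"), "preview").is_err());
    assert!(check_release_channel(Some("stable"), "stable").is_ok());
    assert!(check_release_channel(None, "dev").is_ok());
}
```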
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryParticipantIndices {
|
||||
@ -300,6 +313,7 @@ impl Database {
|
||||
.into_values::<_, QueryParticipantIndices>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut participant_index = 0;
|
||||
while existing_participant_indices.contains(&participant_index) {
|
||||
participant_index += 1;
|
||||
@ -818,10 +832,7 @@ impl Database {
|
||||
|
||||
let (channel_id, room) = self.get_channel_room(room_id, &tx).await?;
|
||||
let deleted = if room.participants.is_empty() {
|
||||
let result = room::Entity::delete_by_id(room_id)
|
||||
.filter(room::Column::ChannelId.is_null())
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?;
|
||||
result.rows_affected > 0
|
||||
} else {
|
||||
false
|
||||
|
@ -8,6 +8,7 @@ pub struct Model {
|
||||
pub id: RoomId,
|
||||
pub live_kit_room: String,
|
||||
pub channel_id: Option<ChannelId>,
|
||||
pub enviroment: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
@ -12,6 +12,8 @@ use sea_orm::ConnectionTrait;
|
||||
use sqlx::migrate::MigrateDatabase;
|
||||
use std::sync::Arc;
|
||||
|
||||
const TEST_RELEASE_CHANNEL: &'static str = "test";
|
||||
|
||||
pub struct TestDb {
|
||||
pub db: Option<Arc<Database>>,
|
||||
pub connection: Option<sqlx::AnyConnection>,
|
||||
|
@ -54,7 +54,7 @@ async fn test_channel_buffers(db: &Arc<Database>) {
|
||||
|
||||
let owner_id = db.create_server("production").await.unwrap().0 as u32;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap();
|
||||
let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
|
||||
|
||||
db.invite_channel_member(zed_id, b_id, a_id, false)
|
||||
.await
|
||||
@ -141,7 +141,7 @@ async fn test_channel_buffers(db: &Arc<Database>) {
|
||||
|
||||
assert_eq!(left_buffer.connections, &[connection_id_a],);
|
||||
|
||||
let cargo_id = db.create_root_channel("cargo", "2", a_id).await.unwrap();
|
||||
let cargo_id = db.create_root_channel("cargo", a_id).await.unwrap();
|
||||
let _ = db
|
||||
.join_channel_buffer(cargo_id, a_id, connection_id_a)
|
||||
.await
|
||||
@ -207,7 +207,7 @@ async fn test_channel_buffers_last_operations(db: &Database) {
|
||||
let mut text_buffers = Vec::new();
|
||||
for i in 0..3 {
|
||||
let channel = db
|
||||
.create_root_channel(&format!("channel-{i}"), &format!("room-{i}"), user_id)
|
||||
.create_root_channel(&format!("channel-{i}"), user_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
@ -5,7 +5,11 @@ use rpc::{
|
||||
};
|
||||
|
||||
use crate::{
|
||||
db::{queries::channels::ChannelGraph, tests::graph, ChannelId, Database, NewUserParams},
|
||||
db::{
|
||||
queries::channels::ChannelGraph,
|
||||
tests::{graph, TEST_RELEASE_CHANNEL},
|
||||
ChannelId, Database, NewUserParams,
|
||||
},
|
||||
test_both_dbs,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
@ -41,7 +45,7 @@ async fn test_channels(db: &Arc<Database>) {
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap();
|
||||
let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
|
||||
|
||||
// Make sure that people cannot read channels they haven't been invited to
|
||||
assert!(db.get_channel(zed_id, b_id).await.unwrap().is_none());
|
||||
@ -54,16 +58,13 @@ async fn test_channels(db: &Arc<Database>) {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let crdb_id = db
|
||||
.create_channel("crdb", Some(zed_id), "2", a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
let crdb_id = db.create_channel("crdb", Some(zed_id), a_id).await.unwrap();
|
||||
let livestreaming_id = db
|
||||
.create_channel("livestreaming", Some(zed_id), "3", a_id)
|
||||
.create_channel("livestreaming", Some(zed_id), a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
let replace_id = db
|
||||
.create_channel("replace", Some(zed_id), "4", a_id)
|
||||
.create_channel("replace", Some(zed_id), a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@ -71,14 +72,14 @@ async fn test_channels(db: &Arc<Database>) {
|
||||
members.sort();
|
||||
assert_eq!(members, &[a_id, b_id]);
|
||||
|
||||
let rust_id = db.create_root_channel("rust", "5", a_id).await.unwrap();
|
||||
let rust_id = db.create_root_channel("rust", a_id).await.unwrap();
|
||||
let cargo_id = db
|
||||
.create_channel("cargo", Some(rust_id), "6", a_id)
|
||||
.create_channel("cargo", Some(rust_id), a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let cargo_ra_id = db
|
||||
.create_channel("cargo-ra", Some(cargo_id), "7", a_id)
|
||||
.create_channel("cargo-ra", Some(cargo_id), a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@ -198,15 +199,20 @@ async fn test_joining_channels(db: &Arc<Database>) {
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let channel_1 = db
|
||||
.create_root_channel("channel_1", "1", user_1)
|
||||
let channel_1 = db.create_root_channel("channel_1", user_1).await.unwrap();
|
||||
let room_1 = db
|
||||
.get_or_create_channel_room(channel_1, "1", TEST_RELEASE_CHANNEL)
|
||||
.await
|
||||
.unwrap();
|
||||
let room_1 = db.room_id_for_channel(channel_1).await.unwrap();
|
||||
|
||||
// can join a room with membership to its channel
|
||||
let joined_room = db
|
||||
.join_room(room_1, user_1, ConnectionId { owner_id, id: 1 })
|
||||
.join_room(
|
||||
room_1,
|
||||
user_1,
|
||||
ConnectionId { owner_id, id: 1 },
|
||||
TEST_RELEASE_CHANNEL,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(joined_room.room.participants.len(), 1);
|
||||
@ -214,7 +220,12 @@ async fn test_joining_channels(db: &Arc<Database>) {
|
||||
drop(joined_room);
|
||||
// cannot join a room without membership to its channel
|
||||
assert!(db
|
||||
.join_room(room_1, user_2, ConnectionId { owner_id, id: 1 })
|
||||
.join_room(
|
||||
room_1,
|
||||
user_2,
|
||||
ConnectionId { owner_id, id: 1 },
|
||||
TEST_RELEASE_CHANNEL
|
||||
)
|
||||
.await
|
||||
.is_err());
|
||||
}
|
||||
@ -269,15 +280,9 @@ async fn test_channel_invites(db: &Arc<Database>) {
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let channel_1_1 = db
|
||||
.create_root_channel("channel_1", "1", user_1)
|
||||
.await
|
||||
.unwrap();
|
||||
let channel_1_1 = db.create_root_channel("channel_1", user_1).await.unwrap();
|
||||
|
||||
let channel_1_2 = db
|
||||
.create_root_channel("channel_2", "2", user_1)
|
||||
.await
|
||||
.unwrap();
|
||||
let channel_1_2 = db.create_root_channel("channel_2", user_1).await.unwrap();
|
||||
|
||||
db.invite_channel_member(channel_1_1, user_2, user_1, false)
|
||||
.await
|
||||
@ -339,7 +344,7 @@ async fn test_channel_invites(db: &Arc<Database>) {
|
||||
.unwrap();
|
||||
|
||||
let channel_1_3 = db
|
||||
.create_channel("channel_3", Some(channel_1_1), "1", user_1)
|
||||
.create_channel("channel_3", Some(channel_1_1), user_1)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@ -401,7 +406,7 @@ async fn test_channel_renames(db: &Arc<Database>) {
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", "1", user_1).await.unwrap();
|
||||
let zed_id = db.create_root_channel("zed", user_1).await.unwrap();
|
||||
|
||||
db.rename_channel(zed_id, user_1, "#zed-archive")
|
||||
.await
|
||||
@ -446,25 +451,22 @@ async fn test_db_channel_moving(db: &Arc<Database>) {
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap();
|
||||
let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
|
||||
|
||||
let crdb_id = db
|
||||
.create_channel("crdb", Some(zed_id), "2", a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
let crdb_id = db.create_channel("crdb", Some(zed_id), a_id).await.unwrap();
|
||||
|
||||
let gpui2_id = db
|
||||
.create_channel("gpui2", Some(zed_id), "3", a_id)
|
||||
.create_channel("gpui2", Some(zed_id), a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let livestreaming_id = db
|
||||
.create_channel("livestreaming", Some(crdb_id), "4", a_id)
|
||||
.create_channel("livestreaming", Some(crdb_id), a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let livestreaming_dag_id = db
|
||||
.create_channel("livestreaming_dag", Some(livestreaming_id), "5", a_id)
|
||||
.create_channel("livestreaming_dag", Some(livestreaming_id), a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@ -517,12 +519,7 @@ async fn test_db_channel_moving(db: &Arc<Database>) {
|
||||
// ========================================================================
|
||||
// Create a new channel below a channel with multiple parents
|
||||
let livestreaming_dag_sub_id = db
|
||||
.create_channel(
|
||||
"livestreaming_dag_sub",
|
||||
Some(livestreaming_dag_id),
|
||||
"6",
|
||||
a_id,
|
||||
)
|
||||
.create_channel("livestreaming_dag_sub", Some(livestreaming_dag_id), a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@ -812,15 +809,15 @@ async fn test_db_channel_moving_bugs(db: &Arc<Database>) {
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", "1", user_id).await.unwrap();
|
||||
let zed_id = db.create_root_channel("zed", user_id).await.unwrap();
|
||||
|
||||
let projects_id = db
|
||||
.create_channel("projects", Some(zed_id), "2", user_id)
|
||||
.create_channel("projects", Some(zed_id), user_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let livestreaming_id = db
|
||||
.create_channel("livestreaming", Some(projects_id), "3", user_id)
|
||||
.create_channel("livestreaming", Some(projects_id), user_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
@ -479,7 +479,7 @@ async fn test_project_count(db: &Arc<Database>) {
.unwrap();

let room_id = RoomId::from_proto(
db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "")
db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "", "dev")
.await
.unwrap()
.id,
@ -493,9 +493,14 @@ async fn test_project_count(db: &Arc<Database>) {
)
.await
.unwrap();
db.join_room(room_id, user2.user_id, ConnectionId { owner_id, id: 1 })
.await
.unwrap();
db.join_room(
room_id,
user2.user_id,
ConnectionId { owner_id, id: 1 },
"dev",
)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);

db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
@ -575,6 +580,85 @@ async fn test_fuzzy_search_users() {
}
}

test_both_dbs!(
test_non_matching_release_channels,
test_non_matching_release_channels_postgres,
test_non_matching_release_channels_sqlite
);

async fn test_non_matching_release_channels(db: &Arc<Database>) {
let owner_id = db.create_server("test").await.unwrap().0 as u32;

let user1 = db
.create_user(
&format!("admin@example.com"),
true,
NewUserParams {
github_login: "admin".into(),
github_user_id: 0,
invite_count: 0,
},
)
.await
.unwrap();
let user2 = db
.create_user(
&format!("user@example.com"),
false,
NewUserParams {
github_login: "user".into(),
github_user_id: 1,
invite_count: 0,
},
)
.await
.unwrap();

let room = db
.create_room(
user1.user_id,
ConnectionId { owner_id, id: 0 },
"",
"stable",
)
.await
.unwrap();

db.call(
RoomId::from_proto(room.id),
user1.user_id,
ConnectionId { owner_id, id: 0 },
user2.user_id,
None,
)
.await
.unwrap();

// User attempts to join from preview
let result = db
.join_room(
RoomId::from_proto(room.id),
user2.user_id,
ConnectionId { owner_id, id: 1 },
"preview",
)
.await;

assert!(result.is_err());

// User switches to stable
let result = db
.join_room(
RoomId::from_proto(room.id),
user2.user_id,
ConnectionId { owner_id, id: 1 },
"stable",
)
.await;

assert!(result.is_ok())
}

fn build_background_executor() -> Arc<Background> {
Deterministic::new(0).build_background()
}

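The new `test_non_matching_release_channels` test above pins down the rule this commit introduces: rooms now record the release channel they were created from, and `join_room` rejects callers connected on a different channel. A minimal standalone sketch of that matching rule, assuming a plain string comparison (the real check lives inside the collab database layer and may differ in detail):

```rust
// Illustrative sketch only, not the collab server's actual implementation.
#[derive(Debug)]
struct Room {
    release_channel: String,
}

// Joining fails when the caller's release channel differs from the room's.
fn join_room(room: &Room, client_release_channel: &str) -> Result<(), String> {
    if room.release_channel == client_release_channel {
        Ok(())
    } else {
        Err(format!(
            "client on '{client_release_channel}' cannot join a '{}' room",
            room.release_channel
        ))
    }
}

fn main() {
    let room = Room {
        release_channel: "stable".to_string(),
    };
    // Mirrors the assertions in the test above.
    assert!(join_room(&room, "preview").is_err());
    assert!(join_room(&room, "stable").is_ok());
}
```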
@ -1,10 +1,72 @@
use crate::{
db::{Database, NewUserParams},
db::{Database, MessageId, NewUserParams},
test_both_dbs,
};
use std::sync::Arc;
use time::OffsetDateTime;

test_both_dbs!(
test_channel_message_retrieval,
test_channel_message_retrieval_postgres,
test_channel_message_retrieval_sqlite
);

async fn test_channel_message_retrieval(db: &Arc<Database>) {
let user = db
.create_user(
"user@example.com",
false,
NewUserParams {
github_login: "user".into(),
github_user_id: 1,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
let channel = db.create_channel("channel", None, user).await.unwrap();

let owner_id = db.create_server("test").await.unwrap().0 as u32;
db.join_channel_chat(channel, rpc::ConnectionId { owner_id, id: 0 }, user)
.await
.unwrap();

let mut all_messages = Vec::new();
for i in 0..10 {
all_messages.push(
db.create_channel_message(channel, user, &i.to_string(), OffsetDateTime::now_utc(), i)
.await
.unwrap()
.0
.to_proto(),
);
}

let messages = db
.get_channel_messages(channel, user, 3, None)
.await
.unwrap()
.into_iter()
.map(|message| message.id)
.collect::<Vec<_>>();
assert_eq!(messages, &all_messages[7..10]);

let messages = db
.get_channel_messages(
channel,
user,
4,
Some(MessageId::from_proto(all_messages[6])),
)
.await
.unwrap()
.into_iter()
.map(|message| message.id)
.collect::<Vec<_>>();
assert_eq!(messages, &all_messages[2..6]);
}

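The retrieval test above pages backwards through channel history with `get_channel_messages(channel, user, count, before_message_id)`: the first call takes the last three messages, the second takes the four messages that precede message 6. A pure-logic sketch of that windowing, using plain `u64` ids in place of real message rows (hypothetical helper, not part of the crate):

```rust
// Fetch `count` ids ending just before `before`, or at the end of history when
// `before` is None. This mirrors the slices asserted in the test above.
fn page_before(history: &[u64], count: usize, before: Option<u64>) -> Vec<u64> {
    let end = match before {
        Some(id) => history.iter().position(|&m| m == id).unwrap_or(history.len()),
        None => history.len(),
    };
    let start = end.saturating_sub(count);
    history[start..end].to_vec()
}

fn main() {
    let all: Vec<u64> = (0..10).collect();
    assert_eq!(page_before(&all, 3, None), &all[7..10]);
    assert_eq!(page_before(&all, 4, Some(all[6])), &all[2..6]);
}
```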
test_both_dbs!(
|
||||
test_channel_message_nonces,
|
||||
test_channel_message_nonces_postgres,
|
||||
@ -25,10 +87,7 @@ async fn test_channel_message_nonces(db: &Arc<Database>) {
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
let channel = db
|
||||
.create_channel("channel", None, "room", user)
|
||||
.await
|
||||
.unwrap();
|
||||
let channel = db.create_channel("channel", None, user).await.unwrap();
|
||||
|
||||
let owner_id = db.create_server("test").await.unwrap().0 as u32;
|
||||
|
||||
@ -92,15 +151,9 @@ async fn test_channel_message_new_notification(db: &Arc<Database>) {
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let channel_1 = db
|
||||
.create_channel("channel", None, "room", user)
|
||||
.await
|
||||
.unwrap();
|
||||
let channel_1 = db.create_channel("channel", None, user).await.unwrap();
|
||||
|
||||
let channel_2 = db
|
||||
.create_channel("channel-2", None, "room", user)
|
||||
.await
|
||||
.unwrap();
|
||||
let channel_2 = db.create_channel("channel-2", None, user).await.unwrap();
|
||||
|
||||
db.invite_channel_member(channel_1, observer, user, false)
|
||||
.await
|
||||
|
@ -63,6 +63,7 @@ use time::OffsetDateTime;
|
||||
use tokio::sync::{watch, Semaphore};
|
||||
use tower::ServiceBuilder;
|
||||
use tracing::{info_span, instrument, Instrument};
|
||||
use util::channel::RELEASE_CHANNEL_NAME;
|
||||
|
||||
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
|
||||
pub const CLEANUP_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
@ -224,6 +225,7 @@ impl Server {
|
||||
.add_request_handler(forward_project_request::<proto::OpenBufferByPath>)
|
||||
.add_request_handler(forward_project_request::<proto::GetCompletions>)
|
||||
.add_request_handler(forward_project_request::<proto::ApplyCompletionAdditionalEdits>)
|
||||
.add_request_handler(forward_project_request::<proto::ResolveCompletionDocumentation>)
|
||||
.add_request_handler(forward_project_request::<proto::GetCodeActions>)
|
||||
.add_request_handler(forward_project_request::<proto::ApplyCodeAction>)
|
||||
.add_request_handler(forward_project_request::<proto::PrepareRename>)
|
||||
@ -937,11 +939,6 @@ async fn create_room(
|
||||
util::async_iife!({
|
||||
let live_kit = live_kit?;
|
||||
|
||||
live_kit
|
||||
.create_room(live_kit_room.clone())
|
||||
.await
|
||||
.trace_err()?;
|
||||
|
||||
let token = live_kit
|
||||
.room_token(&live_kit_room, &session.user_id.to_string())
|
||||
.trace_err()?;
|
||||
@ -957,7 +954,12 @@ async fn create_room(
|
||||
let room = session
|
||||
.db()
|
||||
.await
|
||||
.create_room(session.user_id, session.connection_id, &live_kit_room)
|
||||
.create_room(
|
||||
session.user_id,
|
||||
session.connection_id,
|
||||
&live_kit_room,
|
||||
RELEASE_CHANNEL_NAME.as_str(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
response.send(proto::CreateRoomResponse {
|
||||
@ -979,7 +981,12 @@ async fn join_room(
|
||||
let room = session
|
||||
.db()
|
||||
.await
|
||||
.join_room(room_id, session.user_id, session.connection_id)
|
||||
.join_room(
|
||||
room_id,
|
||||
session.user_id,
|
||||
session.connection_id,
|
||||
RELEASE_CHANNEL_NAME.as_str(),
|
||||
)
|
||||
.await?;
|
||||
room_updated(&room.room, &session.peer);
|
||||
room.into_inner()
|
||||
@ -2195,15 +2202,10 @@ async fn create_channel(
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
let db = session.db().await;
|
||||
let live_kit_room = format!("channel-{}", nanoid::nanoid!(30));
|
||||
|
||||
if let Some(live_kit) = session.live_kit_client.as_ref() {
|
||||
live_kit.create_room(live_kit_room.clone()).await?;
|
||||
}
|
||||
|
||||
let parent_id = request.parent_id.map(|id| ChannelId::from_proto(id));
|
||||
let id = db
|
||||
.create_channel(&request.name, parent_id, &live_kit_room, session.user_id)
|
||||
.create_channel(&request.name, parent_id, session.user_id)
|
||||
.await?;
|
||||
|
||||
let channel = proto::Channel {
|
||||
@ -2608,15 +2610,23 @@ async fn join_channel(
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
let channel_id = ChannelId::from_proto(request.channel_id);
|
||||
let live_kit_room = format!("channel-{}", nanoid::nanoid!(30));
|
||||
|
||||
let joined_room = {
|
||||
leave_room_for_session(&session).await?;
|
||||
let db = session.db().await;
|
||||
|
||||
let room_id = db.room_id_for_channel(channel_id).await?;
|
||||
let room_id = db
|
||||
.get_or_create_channel_room(channel_id, &live_kit_room, &*RELEASE_CHANNEL_NAME)
|
||||
.await?;
|
||||
|
||||
let joined_room = db
|
||||
.join_room(room_id, session.user_id, session.connection_id)
|
||||
.join_room(
|
||||
room_id,
|
||||
session.user_id,
|
||||
session.connection_id,
|
||||
RELEASE_CHANNEL_NAME.as_str(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let live_kit_connection_info = session.live_kit_client.as_ref().and_then(|live_kit| {
|
||||
|
@ -380,6 +380,8 @@ async fn test_channel_room(
|
||||
|
||||
// Give everyone a chance to observe user A joining
|
||||
deterministic.run_until_parked();
|
||||
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
|
||||
room_a.read_with(cx_a, |room, _| assert!(room.is_connected()));
|
||||
|
||||
client_a.channel_store().read_with(cx_a, |channels, _| {
|
||||
assert_participants_eq(
|
||||
|
@ -184,20 +184,12 @@ async fn test_basic_following(
|
||||
|
||||
// All clients see that clients B and C are following client A.
|
||||
cx_c.foreground().run_until_parked();
|
||||
for (name, active_call, cx) in [
|
||||
("A", &active_call_a, &cx_a),
|
||||
("B", &active_call_b, &cx_b),
|
||||
("C", &active_call_c, &cx_c),
|
||||
("D", &active_call_d, &cx_d),
|
||||
] {
|
||||
active_call.read_with(*cx, |call, cx| {
|
||||
let room = call.room().unwrap().read(cx);
|
||||
assert_eq!(
|
||||
room.followers_for(peer_id_a, project_id),
|
||||
&[peer_id_b, peer_id_c],
|
||||
"checking followers for A as {name}"
|
||||
);
|
||||
});
|
||||
for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
|
||||
assert_eq!(
|
||||
followers_by_leader(project_id, cx),
|
||||
&[(peer_id_a, vec![peer_id_b, peer_id_c])],
|
||||
"followers seen by {name}"
|
||||
);
|
||||
}
|
||||
|
||||
// Client C unfollows client A.
|
||||
@ -207,46 +199,39 @@ async fn test_basic_following(
|
||||
|
||||
// All clients see that clients B is following client A.
|
||||
cx_c.foreground().run_until_parked();
|
||||
for (name, active_call, cx) in [
|
||||
("A", &active_call_a, &cx_a),
|
||||
("B", &active_call_b, &cx_b),
|
||||
("C", &active_call_c, &cx_c),
|
||||
("D", &active_call_d, &cx_d),
|
||||
] {
|
||||
active_call.read_with(*cx, |call, cx| {
|
||||
let room = call.room().unwrap().read(cx);
|
||||
assert_eq!(
|
||||
room.followers_for(peer_id_a, project_id),
|
||||
&[peer_id_b],
|
||||
"checking followers for A as {name}"
|
||||
);
|
||||
});
|
||||
for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
|
||||
assert_eq!(
|
||||
followers_by_leader(project_id, cx),
|
||||
&[(peer_id_a, vec![peer_id_b])],
|
||||
"followers seen by {name}"
|
||||
);
|
||||
}
|
||||
|
||||
// Client C re-follows client A.
|
||||
workspace_c.update(cx_c, |workspace, cx| {
|
||||
workspace.follow(peer_id_a, cx);
|
||||
});
|
||||
workspace_c
|
||||
.update(cx_c, |workspace, cx| {
|
||||
workspace.follow(peer_id_a, cx).unwrap()
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// All clients see that clients B and C are following client A.
|
||||
cx_c.foreground().run_until_parked();
|
||||
for (name, active_call, cx) in [
|
||||
("A", &active_call_a, &cx_a),
|
||||
("B", &active_call_b, &cx_b),
|
||||
("C", &active_call_c, &cx_c),
|
||||
("D", &active_call_d, &cx_d),
|
||||
] {
|
||||
active_call.read_with(*cx, |call, cx| {
|
||||
let room = call.room().unwrap().read(cx);
|
||||
assert_eq!(
|
||||
room.followers_for(peer_id_a, project_id),
|
||||
&[peer_id_b, peer_id_c],
|
||||
"checking followers for A as {name}"
|
||||
);
|
||||
});
|
||||
for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
|
||||
assert_eq!(
|
||||
followers_by_leader(project_id, cx),
|
||||
&[(peer_id_a, vec![peer_id_b, peer_id_c])],
|
||||
"followers seen by {name}"
|
||||
);
|
||||
}
|
||||
|
||||
// Client D follows client C.
|
||||
// Client D follows client B, then switches to following client C.
|
||||
workspace_d
|
||||
.update(cx_d, |workspace, cx| {
|
||||
workspace.follow(peer_id_b, cx).unwrap()
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
workspace_d
|
||||
.update(cx_d, |workspace, cx| {
|
||||
workspace.follow(peer_id_c, cx).unwrap()
|
||||
@ -256,20 +241,15 @@ async fn test_basic_following(
|
||||
|
||||
// All clients see that D is following C
|
||||
cx_d.foreground().run_until_parked();
|
||||
for (name, active_call, cx) in [
|
||||
("A", &active_call_a, &cx_a),
|
||||
("B", &active_call_b, &cx_b),
|
||||
("C", &active_call_c, &cx_c),
|
||||
("D", &active_call_d, &cx_d),
|
||||
] {
|
||||
active_call.read_with(*cx, |call, cx| {
|
||||
let room = call.room().unwrap().read(cx);
|
||||
assert_eq!(
|
||||
room.followers_for(peer_id_c, project_id),
|
||||
&[peer_id_d],
|
||||
"checking followers for C as {name}"
|
||||
);
|
||||
});
|
||||
for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
|
||||
assert_eq!(
|
||||
followers_by_leader(project_id, cx),
|
||||
&[
|
||||
(peer_id_a, vec![peer_id_b, peer_id_c]),
|
||||
(peer_id_c, vec![peer_id_d])
|
||||
],
|
||||
"followers seen by {name}"
|
||||
);
|
||||
}
|
||||
|
||||
// Client C closes the project.
|
||||
@ -278,32 +258,12 @@ async fn test_basic_following(
|
||||
|
||||
// Clients A and B see that client B is following A, and client C is not present in the followers.
|
||||
cx_c.foreground().run_until_parked();
|
||||
for (name, active_call, cx) in [("A", &active_call_a, &cx_a), ("B", &active_call_b, &cx_b)] {
|
||||
active_call.read_with(*cx, |call, cx| {
|
||||
let room = call.room().unwrap().read(cx);
|
||||
assert_eq!(
|
||||
room.followers_for(peer_id_a, project_id),
|
||||
&[peer_id_b],
|
||||
"checking followers for A as {name}"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// All clients see that no-one is following C
|
||||
for (name, active_call, cx) in [
|
||||
("A", &active_call_a, &cx_a),
|
||||
("B", &active_call_b, &cx_b),
|
||||
("C", &active_call_c, &cx_c),
|
||||
("D", &active_call_d, &cx_d),
|
||||
] {
|
||||
active_call.read_with(*cx, |call, cx| {
|
||||
let room = call.room().unwrap().read(cx);
|
||||
assert_eq!(
|
||||
room.followers_for(peer_id_c, project_id),
|
||||
&[],
|
||||
"checking followers for C as {name}"
|
||||
);
|
||||
});
|
||||
for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
|
||||
assert_eq!(
|
||||
followers_by_leader(project_id, cx),
|
||||
&[(peer_id_a, vec![peer_id_b]),],
|
||||
"followers seen by {name}"
|
||||
);
|
||||
}
|
||||
|
||||
// When client A activates a different editor, client B does so as well.
|
||||
@ -1667,6 +1627,30 @@ struct PaneSummary {
|
||||
items: Vec<(bool, String)>,
|
||||
}
|
||||
|
||||
fn followers_by_leader(project_id: u64, cx: &TestAppContext) -> Vec<(PeerId, Vec<PeerId>)> {
|
||||
cx.read(|cx| {
|
||||
let active_call = ActiveCall::global(cx).read(cx);
|
||||
let peer_id = active_call.client().peer_id();
|
||||
let room = active_call.room().unwrap().read(cx);
|
||||
let mut result = room
|
||||
.remote_participants()
|
||||
.values()
|
||||
.map(|participant| participant.peer_id)
|
||||
.chain(peer_id)
|
||||
.filter_map(|peer_id| {
|
||||
let followers = room.followers_for(peer_id, project_id);
|
||||
if followers.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some((peer_id, followers.to_vec()))
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
result.sort_by_key(|e| e.0);
|
||||
result
|
||||
})
|
||||
}
|
||||
|
||||
fn pane_summaries(workspace: &ViewHandle<Workspace>, cx: &mut TestAppContext) -> Vec<PaneSummary> {
|
||||
workspace.read_with(cx, |workspace, cx| {
|
||||
let active_pane = workspace.active_pane();
|
||||
|
@ -15,12 +15,14 @@ use gpui::{executor::Deterministic, test::EmptyView, AppContext, ModelHandle, Te
|
||||
use indoc::indoc;
|
||||
use language::{
|
||||
language_settings::{AllLanguageSettings, Formatter, InlayHintSettings},
|
||||
tree_sitter_rust, Anchor, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language,
|
||||
LanguageConfig, LineEnding, OffsetRangeExt, Point, Rope,
|
||||
tree_sitter_rust, Anchor, BundledFormatter, Diagnostic, DiagnosticEntry, FakeLspAdapter,
|
||||
Language, LanguageConfig, LineEnding, OffsetRangeExt, Point, Rope,
|
||||
};
|
||||
use live_kit_client::MacOSDisplay;
|
||||
use lsp::LanguageServerId;
|
||||
use project::{search::SearchQuery, DiagnosticSummary, HoverBlockKind, Project, ProjectPath};
|
||||
use project::{
|
||||
search::SearchQuery, DiagnosticSummary, FormatTrigger, HoverBlockKind, Project, ProjectPath,
|
||||
};
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
@ -4407,8 +4409,6 @@ async fn test_formatting_buffer(
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
use project::FormatTrigger;
|
||||
|
||||
let mut server = TestServer::start(&deterministic).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
@ -4511,6 +4511,134 @@ async fn test_formatting_buffer(
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_prettier_formatting_buffer(
|
||||
deterministic: Arc<Deterministic>,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
let mut server = TestServer::start(&deterministic).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
server
|
||||
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
.await;
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
|
||||
// Set up a fake language server.
|
||||
let mut language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
let test_plugin = "test_plugin";
|
||||
let mut fake_language_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
enabled_formatters: vec![BundledFormatter::Prettier {
|
||||
parser_name: Some("test_parser"),
|
||||
plugin_names: vec![test_plugin],
|
||||
}],
|
||||
..Default::default()
|
||||
}))
|
||||
.await;
|
||||
let language = Arc::new(language);
|
||||
client_a.language_registry().add(Arc::clone(&language));
|
||||
|
||||
// Here we insert a fake tree with a directory that exists on disk. This is needed
|
||||
// because later we'll invoke a command, which requires passing a working directory
|
||||
// that points to a valid location on disk.
|
||||
let directory = env::current_dir().unwrap();
|
||||
let buffer_text = "let one = \"two\"";
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(&directory, json!({ "a.rs": buffer_text }))
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(&directory, cx_a).await;
|
||||
let prettier_format_suffix = project_a.update(cx_a, |project, _| {
|
||||
let suffix = project.enable_test_prettier(&[test_plugin]);
|
||||
project.languages().add(language);
|
||||
suffix
|
||||
});
|
||||
let buffer_a = cx_a
|
||||
.background()
|
||||
.spawn(project_a.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
let buffer_b = cx_b
|
||||
.background()
|
||||
.spawn(project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx_a.update(|cx| {
|
||||
cx.update_global(|store: &mut SettingsStore, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(Formatter::Auto);
|
||||
});
|
||||
});
|
||||
});
|
||||
cx_b.update(|cx| {
|
||||
cx.update_global(|store: &mut SettingsStore, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(Formatter::LanguageServer);
|
||||
});
|
||||
});
|
||||
});
|
||||
let fake_language_server = fake_language_servers.next().await.unwrap();
|
||||
fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
|
||||
panic!(
|
||||
"Unexpected: prettier should be preferred since it's enabled and language supports it"
|
||||
)
|
||||
});
|
||||
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project.format(
|
||||
HashSet::from_iter([buffer_b.clone()]),
|
||||
true,
|
||||
FormatTrigger::Save,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx_a.foreground().run_until_parked();
|
||||
cx_b.foreground().run_until_parked();
|
||||
assert_eq!(
|
||||
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
|
||||
buffer_text.to_string() + "\n" + prettier_format_suffix,
|
||||
"Prettier formatting was not applied to client buffer after client's request"
|
||||
);
|
||||
|
||||
project_a
|
||||
.update(cx_a, |project, cx| {
|
||||
project.format(
|
||||
HashSet::from_iter([buffer_a.clone()]),
|
||||
true,
|
||||
FormatTrigger::Manual,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx_a.foreground().run_until_parked();
|
||||
cx_b.foreground().run_until_parked();
|
||||
assert_eq!(
|
||||
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
|
||||
buffer_text.to_string() + "\n" + prettier_format_suffix + "\n" + prettier_format_suffix,
|
||||
"Prettier formatting was not applied to client buffer after host's request"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_definition(
|
||||
deterministic: Arc<Deterministic>,
|
||||
|
@ -46,12 +46,7 @@ impl RandomizedTest for RandomChannelBufferTest {
|
||||
let db = &server.app_state.db;
|
||||
for ix in 0..CHANNEL_COUNT {
|
||||
let id = db
|
||||
.create_channel(
|
||||
&format!("channel-{ix}"),
|
||||
None,
|
||||
&format!("livekit-room-{ix}"),
|
||||
users[0].user_id,
|
||||
)
|
||||
.create_channel(&format!("channel-{ix}"), None, users[0].user_id)
|
||||
.await
|
||||
.unwrap();
|
||||
for user in &users[1..] {
|
||||
|
@ -15,6 +15,7 @@ use fs::FakeFs;
|
||||
use futures::{channel::oneshot, StreamExt as _};
|
||||
use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext, WindowHandle};
|
||||
use language::LanguageRegistry;
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use parking_lot::Mutex;
|
||||
use project::{Project, WorktreeId};
|
||||
use rpc::RECEIVE_TIMEOUT;
|
||||
@ -44,6 +45,7 @@ pub struct TestServer {
|
||||
pub struct TestClient {
|
||||
pub username: String,
|
||||
pub app_state: Arc<workspace::AppState>,
|
||||
channel_store: ModelHandle<ChannelStore>,
|
||||
state: RefCell<TestClientState>,
|
||||
}
|
||||
|
||||
@ -206,20 +208,18 @@ impl TestServer {
|
||||
let fs = FakeFs::new(cx.background());
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
|
||||
let workspace_store = cx.add_model(|cx| WorkspaceStore::new(client.clone(), cx));
|
||||
let channel_store =
|
||||
cx.add_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
|
||||
let mut language_registry = LanguageRegistry::test();
|
||||
language_registry.set_executor(cx.background());
|
||||
let app_state = Arc::new(workspace::AppState {
|
||||
client: client.clone(),
|
||||
user_store: user_store.clone(),
|
||||
workspace_store,
|
||||
channel_store: channel_store.clone(),
|
||||
languages: Arc::new(language_registry),
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _, _| Default::default(),
|
||||
initialize_workspace: |_, _, _, _| Task::ready(Ok(())),
|
||||
background_actions: || &[],
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
@ -231,7 +231,7 @@ impl TestServer {
|
||||
workspace::init(app_state.clone(), cx);
|
||||
audio::init((), cx);
|
||||
call::init(client.clone(), user_store.clone(), cx);
|
||||
channel::init(&client);
|
||||
channel::init(&client, user_store, cx);
|
||||
});
|
||||
|
||||
client
|
||||
@ -242,6 +242,7 @@ impl TestServer {
|
||||
let client = TestClient {
|
||||
app_state,
|
||||
username: name.to_string(),
|
||||
channel_store: cx.read(ChannelStore::global).clone(),
|
||||
state: Default::default(),
|
||||
};
|
||||
client.wait_for_current_user(cx).await;
|
||||
@ -310,10 +311,9 @@ impl TestServer {
|
||||
admin: (&TestClient, &mut TestAppContext),
|
||||
members: &mut [(&TestClient, &mut TestAppContext)],
|
||||
) -> u64 {
|
||||
let (admin_client, admin_cx) = admin;
|
||||
let channel_id = admin_client
|
||||
.app_state
|
||||
.channel_store
|
||||
let (_, admin_cx) = admin;
|
||||
let channel_id = admin_cx
|
||||
.read(ChannelStore::global)
|
||||
.update(admin_cx, |channel_store, cx| {
|
||||
channel_store.create_channel(channel, parent, cx)
|
||||
})
|
||||
@ -321,9 +321,8 @@ impl TestServer {
|
||||
.unwrap();
|
||||
|
||||
for (member_client, member_cx) in members {
|
||||
admin_client
|
||||
.app_state
|
||||
.channel_store
|
||||
admin_cx
|
||||
.read(ChannelStore::global)
|
||||
.update(admin_cx, |channel_store, cx| {
|
||||
channel_store.invite_member(
|
||||
channel_id,
|
||||
@ -337,9 +336,8 @@ impl TestServer {
|
||||
|
||||
admin_cx.foreground().run_until_parked();
|
||||
|
||||
member_client
|
||||
.app_state
|
||||
.channel_store
|
||||
member_cx
|
||||
.read(ChannelStore::global)
|
||||
.update(*member_cx, |channels, _| {
|
||||
channels.respond_to_channel_invite(channel_id, true)
|
||||
})
|
||||
@ -447,7 +445,7 @@ impl TestClient {
|
||||
}
|
||||
|
||||
pub fn channel_store(&self) -> &ModelHandle<ChannelStore> {
|
||||
&self.app_state.channel_store
|
||||
&self.channel_store
|
||||
}
|
||||
|
||||
pub fn user_store(&self) -> &ModelHandle<UserStore> {
|
||||
@ -571,6 +569,7 @@ impl TestClient {
|
||||
cx.update(|cx| {
|
||||
Project::local(
|
||||
self.client().clone(),
|
||||
self.app_state.node_runtime.clone(),
|
||||
self.app_state.user_store.clone(),
|
||||
self.app_state.languages.clone(),
|
||||
self.app_state.fs.clone(),
|
||||
@ -614,8 +613,8 @@ impl TestClient {
|
||||
) {
|
||||
let (other_client, other_cx) = user;
|
||||
|
||||
self.app_state
|
||||
.channel_store
|
||||
cx_self
|
||||
.read(ChannelStore::global)
|
||||
.update(cx_self, |channel_store, cx| {
|
||||
channel_store.invite_member(channel, other_client.user_id().unwrap(), true, cx)
|
||||
})
|
||||
@ -624,11 +623,10 @@ impl TestClient {
|
||||
|
||||
cx_self.foreground().run_until_parked();
|
||||
|
||||
other_client
|
||||
.app_state
|
||||
.channel_store
|
||||
.update(other_cx, |channels, _| {
|
||||
channels.respond_to_channel_invite(channel, true)
|
||||
other_cx
|
||||
.read(ChannelStore::global)
|
||||
.update(other_cx, |channel_store, _| {
|
||||
channel_store.respond_to_channel_invite(channel, true)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
@ -24,7 +24,7 @@ use workspace::{
|
||||
item::{FollowableItem, Item, ItemHandle},
|
||||
register_followable_item,
|
||||
searchable::SearchableItemHandle,
|
||||
ItemNavHistory, Pane, ViewId, Workspace, WorkspaceId,
|
||||
ItemNavHistory, Pane, SaveIntent, ViewId, Workspace, WorkspaceId,
|
||||
};
|
||||
|
||||
actions!(channel_view, [Deploy]);
|
||||
@ -73,7 +73,7 @@ impl ChannelView {
|
||||
) -> Task<Result<ViewHandle<Self>>> {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().to_owned();
|
||||
let channel_store = workspace.app_state().channel_store.clone();
|
||||
let channel_store = ChannelStore::global(cx);
|
||||
let markdown = workspace
|
||||
.app_state()
|
||||
.languages
|
||||
@ -93,15 +93,36 @@ impl ChannelView {
|
||||
}
|
||||
|
||||
pane.update(&mut cx, |pane, cx| {
|
||||
pane.items_of_type::<Self>()
|
||||
.find(|channel_view| channel_view.read(cx).channel_buffer == channel_buffer)
|
||||
.unwrap_or_else(|| {
|
||||
cx.add_view(|cx| {
|
||||
let mut this = Self::new(project, channel_store, channel_buffer, cx);
|
||||
this.acknowledge_buffer_version(cx);
|
||||
this
|
||||
})
|
||||
})
|
||||
let buffer_id = channel_buffer.read(cx).remote_id(cx);
|
||||
|
||||
let existing_view = pane
|
||||
.items_of_type::<Self>()
|
||||
.find(|view| view.read(cx).channel_buffer.read(cx).remote_id(cx) == buffer_id);
|
||||
|
||||
// If this channel buffer is already open in this pane, just return it.
|
||||
if let Some(existing_view) = existing_view.clone() {
|
||||
if existing_view.read(cx).channel_buffer == channel_buffer {
|
||||
return existing_view;
|
||||
}
|
||||
}
|
||||
|
||||
let view = cx.add_view(|cx| {
|
||||
let mut this = Self::new(project, channel_store, channel_buffer, cx);
|
||||
this.acknowledge_buffer_version(cx);
|
||||
this
|
||||
});
|
||||
|
||||
// If the pane contained a disconnected view for this channel buffer,
|
||||
// replace that.
|
||||
if let Some(existing_item) = existing_view {
|
||||
if let Some(ix) = pane.index_for_item(&existing_item) {
|
||||
pane.close_item_by_id(existing_item.id(), SaveIntent::Skip, cx)
|
||||
.detach();
|
||||
pane.add_item(Box::new(view.clone()), true, true, Some(ix), cx);
|
||||
}
|
||||
}
|
||||
|
||||
view
|
||||
})
|
||||
.ok_or_else(|| anyhow!("pane was dropped"))
|
||||
})
|
||||
@ -285,10 +306,14 @@ impl FollowableItem for ChannelView {
|
||||
}
|
||||
|
||||
fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
|
||||
let channel = self.channel_buffer.read(cx).channel();
|
||||
let channel_buffer = self.channel_buffer.read(cx);
|
||||
if !channel_buffer.is_connected() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(proto::view::Variant::ChannelView(
|
||||
proto::view::ChannelView {
|
||||
channel_id: channel.id,
|
||||
channel_id: channel_buffer.channel().id,
|
||||
editor: if let Some(proto::view::Variant::Editor(proto)) =
|
||||
self.editor.read(cx).to_state_proto(cx)
|
||||
{
|
||||
|
@ -81,7 +81,7 @@ impl ChatPanel {
|
||||
pub fn new(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) -> ViewHandle<Self> {
|
||||
let fs = workspace.app_state().fs.clone();
|
||||
let client = workspace.app_state().client.clone();
|
||||
let channel_store = workspace.app_state().channel_store.clone();
|
||||
let channel_store = ChannelStore::global(cx);
|
||||
let languages = workspace.app_state().languages.clone();
|
||||
|
||||
let input_editor = cx.add_view(|cx| {
|
||||
|
@ -34,8 +34,8 @@ use gpui::{
|
||||
},
|
||||
impl_actions,
|
||||
platform::{CursorStyle, MouseButton, PromptLevel},
|
||||
serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, FontCache, ModelHandle,
|
||||
Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
serde_json, AnyElement, AppContext, AsyncAppContext, ClipboardItem, Element, Entity, FontCache,
|
||||
ModelHandle, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use menu::{Confirm, SelectNext, SelectPrev};
|
||||
use project::{Fs, Project};
|
||||
@ -100,6 +100,11 @@ pub struct JoinChannelChat {
|
||||
pub channel_id: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct CopyChannelLink {
|
||||
pub channel_id: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
struct StartMoveChannelFor {
|
||||
channel_id: ChannelId,
|
||||
@ -157,6 +162,7 @@ impl_actions!(
|
||||
OpenChannelNotes,
|
||||
JoinChannelCall,
|
||||
JoinChannelChat,
|
||||
CopyChannelLink,
|
||||
LinkChannel,
|
||||
StartMoveChannelFor,
|
||||
StartLinkChannelFor,
|
||||
@ -205,6 +211,7 @@ pub fn init(cx: &mut AppContext) {
|
||||
cx.add_action(CollabPanel::expand_selected_channel);
|
||||
cx.add_action(CollabPanel::open_channel_notes);
|
||||
cx.add_action(CollabPanel::join_channel_chat);
|
||||
cx.add_action(CollabPanel::copy_channel_link);
|
||||
|
||||
cx.add_action(
|
||||
|panel: &mut CollabPanel, action: &ToggleSelectedIx, cx: &mut ViewContext<CollabPanel>| {
|
||||
@ -648,7 +655,7 @@ impl CollabPanel {
|
||||
channel_editing_state: None,
|
||||
selection: None,
|
||||
user_store: workspace.user_store().clone(),
|
||||
channel_store: workspace.app_state().channel_store.clone(),
|
||||
channel_store: ChannelStore::global(cx),
|
||||
project: workspace.project().clone(),
|
||||
subscriptions: Vec::default(),
|
||||
match_candidates: Vec::default(),
|
||||
@ -2568,6 +2575,13 @@ impl CollabPanel {
|
||||
},
|
||||
));
|
||||
|
||||
items.push(ContextMenuItem::action(
|
||||
"Copy Channel Link",
|
||||
CopyChannelLink {
|
||||
channel_id: path.channel_id(),
|
||||
},
|
||||
));
|
||||
|
||||
if self.channel_store.read(cx).is_user_admin(path.channel_id()) {
|
||||
let parent_id = path.parent_id();
|
||||
|
||||
@ -3187,49 +3201,19 @@ impl CollabPanel {
|
||||
}
|
||||
|
||||
fn join_channel(&self, channel_id: u64, cx: &mut ViewContext<Self>) {
|
||||
let workspace = self.workspace.clone();
|
||||
let window = cx.window();
|
||||
let active_call = ActiveCall::global(cx);
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
if active_call.read_with(&mut cx, |active_call, cx| {
|
||||
if let Some(room) = active_call.room() {
|
||||
let room = room.read(cx);
|
||||
room.is_sharing_project() && room.remote_participants().len() > 0
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}) {
|
||||
let answer = window.prompt(
|
||||
PromptLevel::Warning,
|
||||
"Leaving this call will unshare your current project.\nDo you want to switch channels?",
|
||||
&["Yes, Join Channel", "Cancel"],
|
||||
&mut cx,
|
||||
);
|
||||
|
||||
if let Some(mut answer) = answer {
|
||||
if answer.next().await == Some(1) {
|
||||
return anyhow::Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let room = active_call
|
||||
.update(&mut cx, |call, cx| call.join_channel(channel_id, cx))
|
||||
.await?;
|
||||
|
||||
let task = room.update(&mut cx, |room, cx| {
|
||||
let workspace = workspace.upgrade(cx)?;
|
||||
let (project, host) = room.most_active_project()?;
|
||||
let app_state = workspace.read(cx).app_state().clone();
|
||||
Some(workspace::join_remote_project(project, host, app_state, cx))
|
||||
});
|
||||
if let Some(task) = task {
|
||||
task.await?;
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
let Some(workspace) = self.workspace.upgrade(cx) else {
|
||||
return;
|
||||
};
|
||||
let Some(handle) = cx.window().downcast::<Workspace>() else {
|
||||
return;
|
||||
};
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
workspace.read(cx).app_state().clone(),
|
||||
Some(handle),
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx)
|
||||
}
|
||||
|
||||
fn join_channel_chat(&mut self, action: &JoinChannelChat, cx: &mut ViewContext<Self>) {
|
||||
@ -3246,6 +3230,15 @@ impl CollabPanel {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn copy_channel_link(&mut self, action: &CopyChannelLink, cx: &mut ViewContext<Self>) {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let Some(channel) = channel_store.channel_for_id(action.channel_id) else {
|
||||
return;
|
||||
};
|
||||
let item = ClipboardItem::new(channel.link());
|
||||
cx.write_to_clipboard(item)
|
||||
}
|
||||
}
|
||||
|
||||
fn render_tree_branch(
|
||||
|
@ -2,6 +2,7 @@ use crate::{
|
||||
contact_notification::ContactNotification, face_pile::FacePile, toggle_deafen, toggle_mute,
|
||||
toggle_screen_sharing, LeaveCall, ToggleDeafen, ToggleMute, ToggleScreenSharing,
|
||||
};
|
||||
use auto_update::AutoUpdateStatus;
|
||||
use call::{ActiveCall, ParticipantLocation, Room};
|
||||
use client::{proto::PeerId, Client, ContactEventKind, SignIn, SignOut, User, UserStore};
|
||||
use clock::ReplicaId;
|
||||
@ -1177,22 +1178,38 @@ impl CollabTitlebarItem {
|
||||
.with_style(theme.titlebar.offline_icon.container)
|
||||
.into_any(),
|
||||
),
|
||||
client::Status::UpgradeRequired => Some(
|
||||
MouseEventHandler::new::<ConnectionStatusButton, _>(0, cx, |_, _| {
|
||||
Label::new(
|
||||
"Please update Zed to collaborate",
|
||||
theme.titlebar.outdated_warning.text.clone(),
|
||||
)
|
||||
.contained()
|
||||
.with_style(theme.titlebar.outdated_warning.container)
|
||||
.aligned()
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.on_click(MouseButton::Left, |_, _, cx| {
|
||||
auto_update::check(&Default::default(), cx);
|
||||
})
|
||||
.into_any(),
|
||||
),
|
||||
client::Status::UpgradeRequired => {
|
||||
let auto_updater = auto_update::AutoUpdater::get(cx);
|
||||
let label = match auto_updater.map(|auto_update| auto_update.read(cx).status()) {
|
||||
Some(AutoUpdateStatus::Updated) => "Please restart Zed to Collaborate",
|
||||
Some(AutoUpdateStatus::Installing)
|
||||
| Some(AutoUpdateStatus::Downloading)
|
||||
| Some(AutoUpdateStatus::Checking) => "Updating...",
|
||||
Some(AutoUpdateStatus::Idle) | Some(AutoUpdateStatus::Errored) | None => {
|
||||
"Please update Zed to Collaborate"
|
||||
}
|
||||
};
|
||||
|
||||
Some(
|
||||
MouseEventHandler::new::<ConnectionStatusButton, _>(0, cx, |_, _| {
|
||||
Label::new(label, theme.titlebar.outdated_warning.text.clone())
|
||||
.contained()
|
||||
.with_style(theme.titlebar.outdated_warning.container)
|
||||
.aligned()
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.on_click(MouseButton::Left, |_, _, cx| {
|
||||
if let Some(auto_updater) = auto_update::AutoUpdater::get(cx) {
|
||||
if auto_updater.read(cx).status() == AutoUpdateStatus::Updated {
|
||||
workspace::restart(&Default::default(), cx);
|
||||
return;
|
||||
}
|
||||
}
|
||||
auto_update::check(&Default::default(), cx);
|
||||
})
|
||||
.into_any(),
|
||||
)
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -38,6 +38,10 @@ impl DiagnosticIndicator {
|
||||
this.in_progress_checks.remove(language_server_id);
|
||||
cx.notify();
|
||||
}
|
||||
project::Event::DiagnosticsUpdated { .. } => {
|
||||
this.summary = project.read(cx).diagnostic_summary(cx);
|
||||
cx.notify();
|
||||
}
|
||||
_ => {}
|
||||
})
|
||||
.detach();
|
||||
|
@ -57,7 +57,6 @@ log.workspace = true
|
||||
ordered-float.workspace = true
|
||||
parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
pulldown-cmark = { version = "0.9.2", default-features = false }
|
||||
rand.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
|
File diff suppressed because it is too large
@ -7,6 +7,7 @@ pub struct EditorSettings {
|
||||
pub cursor_blink: bool,
|
||||
pub hover_popover_enabled: bool,
|
||||
pub show_completions_on_input: bool,
|
||||
pub show_completion_documentation: bool,
|
||||
pub use_on_type_format: bool,
|
||||
pub scrollbar: Scrollbar,
|
||||
pub relative_line_numbers: bool,
|
||||
@ -33,6 +34,7 @@ pub struct EditorSettingsContent {
|
||||
pub cursor_blink: Option<bool>,
|
||||
pub hover_popover_enabled: Option<bool>,
|
||||
pub show_completions_on_input: Option<bool>,
|
||||
pub show_completion_documentation: Option<bool>,
|
||||
pub use_on_type_format: Option<bool>,
|
||||
pub scrollbar: Option<ScrollbarContent>,
|
||||
pub relative_line_numbers: Option<bool>,
|
||||
|
@ -19,8 +19,8 @@ use gpui::{
|
||||
use indoc::indoc;
|
||||
use language::{
|
||||
language_settings::{AllLanguageSettings, AllLanguageSettingsContent, LanguageSettingsContent},
|
||||
BracketPairConfig, FakeLspAdapter, LanguageConfig, LanguageConfigOverride, LanguageRegistry,
|
||||
Override, Point,
|
||||
BracketPairConfig, BundledFormatter, FakeLspAdapter, LanguageConfig, LanguageConfigOverride,
|
||||
LanguageRegistry, Override, Point,
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
use project::project_settings::{LspSettings, ProjectSettings};
|
||||
@ -1333,7 +1333,7 @@ async fn test_move_start_of_paragraph_end_of_paragraph(cx: &mut gpui::TestAppCon
|
||||
|
||||
cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx));
|
||||
cx.assert_editor_state(
|
||||
&r#"ˇone
|
||||
&r#"one
|
||||
two
|
||||
|
||||
three
|
||||
@ -1344,9 +1344,22 @@ async fn test_move_start_of_paragraph_end_of_paragraph(cx: &mut gpui::TestAppCon
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx));
|
||||
cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
|
||||
cx.assert_editor_state(
|
||||
&r#"ˇone
|
||||
&r#"one
|
||||
two
|
||||
|
||||
three
|
||||
four
|
||||
five
|
||||
ˇ
|
||||
six"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
|
||||
cx.assert_editor_state(
|
||||
&r#"one
|
||||
two
|
||||
ˇ
|
||||
three
|
||||
@ -1366,32 +1379,6 @@ async fn test_move_start_of_paragraph_end_of_paragraph(cx: &mut gpui::TestAppCon
|
||||
four
|
||||
five
|
||||
|
||||
sixˇ"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
|
||||
cx.assert_editor_state(
|
||||
&r#"one
|
||||
two
|
||||
|
||||
three
|
||||
four
|
||||
five
|
||||
ˇ
|
||||
sixˇ"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
|
||||
cx.assert_editor_state(
|
||||
&r#"one
|
||||
two
|
||||
ˇ
|
||||
three
|
||||
four
|
||||
five
|
||||
ˇ
|
||||
six"#
|
||||
.unindent(),
|
||||
);
|
||||
@ -5089,7 +5076,9 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(language_settings::Formatter::LanguageServer)
|
||||
});
|
||||
|
||||
let mut language = Language::new(
|
||||
LanguageConfig {
|
||||
@ -5105,6 +5094,12 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
|
||||
document_formatting_provider: Some(lsp::OneOf::Left(true)),
|
||||
..Default::default()
|
||||
},
|
||||
// Enable Prettier formatting for the same buffer, and ensure
|
||||
// LSP is called instead of Prettier.
|
||||
enabled_formatters: vec![BundledFormatter::Prettier {
|
||||
parser_name: Some("test_parser"),
|
||||
plugin_names: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
}))
|
||||
.await;
|
||||
@ -5113,7 +5108,10 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
|
||||
fs.insert_file("/file.rs", Default::default()).await;
|
||||
|
||||
let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
|
||||
project.update(cx, |project, _| project.languages().add(Arc::new(language)));
|
||||
project.update(cx, |project, _| {
|
||||
project.enable_test_prettier(&[]);
|
||||
project.languages().add(Arc::new(language));
|
||||
});
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
|
||||
.await
|
||||
@ -5231,7 +5229,9 @@ async fn test_concurrent_format_requests(cx: &mut gpui::TestAppContext) {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(language_settings::Formatter::Auto)
|
||||
});
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
@ -5430,9 +5430,9 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
|
||||
additional edit
|
||||
"});
|
||||
cx.simulate_keystroke(" ");
|
||||
assert!(cx.editor(|e, _| e.context_menu.is_none()));
|
||||
assert!(cx.editor(|e, _| e.context_menu.read().is_none()));
|
||||
cx.simulate_keystroke("s");
|
||||
assert!(cx.editor(|e, _| e.context_menu.is_none()));
|
||||
assert!(cx.editor(|e, _| e.context_menu.read().is_none()));
|
||||
|
||||
cx.assert_editor_state(indoc! {"
|
||||
one.second_completion
|
||||
@ -5494,12 +5494,12 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
|
||||
});
|
||||
cx.set_state("editorˇ");
|
||||
cx.simulate_keystroke(".");
|
||||
assert!(cx.editor(|e, _| e.context_menu.is_none()));
|
||||
assert!(cx.editor(|e, _| e.context_menu.read().is_none()));
|
||||
cx.simulate_keystroke("c");
|
||||
cx.simulate_keystroke("l");
|
||||
cx.simulate_keystroke("o");
|
||||
cx.assert_editor_state("editor.cloˇ");
|
||||
assert!(cx.editor(|e, _| e.context_menu.is_none()));
|
||||
assert!(cx.editor(|e, _| e.context_menu.read().is_none()));
|
||||
cx.update_editor(|editor, cx| {
|
||||
editor.show_completions(&ShowCompletions, cx);
|
||||
});
|
||||
@ -7788,7 +7788,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut gpui:
|
||||
cx.simulate_keystroke("-");
|
||||
cx.foreground().run_until_parked();
|
||||
cx.update_editor(|editor, _| {
|
||||
if let Some(ContextMenu::Completions(menu)) = &editor.context_menu {
|
||||
if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() {
|
||||
assert_eq!(
|
||||
menu.matches.iter().map(|m| &m.string).collect::<Vec<_>>(),
|
||||
&["bg-red", "bg-blue", "bg-yellow"]
|
||||
@ -7801,7 +7801,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut gpui:
|
||||
cx.simulate_keystroke("l");
|
||||
cx.foreground().run_until_parked();
|
||||
cx.update_editor(|editor, _| {
|
||||
if let Some(ContextMenu::Completions(menu)) = &editor.context_menu {
|
||||
if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() {
|
||||
assert_eq!(
|
||||
menu.matches.iter().map(|m| &m.string).collect::<Vec<_>>(),
|
||||
&["bg-blue", "bg-yellow"]
|
||||
@ -7817,7 +7817,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut gpui:
|
||||
cx.simulate_keystroke("l");
|
||||
cx.foreground().run_until_parked();
|
||||
cx.update_editor(|editor, _| {
|
||||
if let Some(ContextMenu::Completions(menu)) = &editor.context_menu {
|
||||
if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() {
|
||||
assert_eq!(
|
||||
menu.matches.iter().map(|m| &m.string).collect::<Vec<_>>(),
|
||||
&["bg-yellow"]
|
||||
@ -7828,6 +7828,75 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut gpui:
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(language_settings::Formatter::Prettier)
|
||||
});
|
||||
|
||||
let mut language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
);
|
||||
|
||||
let test_plugin = "test_plugin";
|
||||
let _ = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
enabled_formatters: vec![BundledFormatter::Prettier {
|
||||
parser_name: Some("test_parser"),
|
||||
plugin_names: vec![test_plugin],
|
||||
}],
|
||||
..Default::default()
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_file("/file.rs", Default::default()).await;
|
||||
|
||||
let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
|
||||
let prettier_format_suffix = project.update(cx, |project, _| {
|
||||
let suffix = project.enable_test_prettier(&[test_plugin]);
|
||||
project.languages().add(Arc::new(language));
|
||||
suffix
|
||||
});
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let buffer_text = "one\ntwo\nthree\n";
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
|
||||
editor.update(cx, |editor, cx| editor.set_text(buffer_text, cx));
|
||||
|
||||
let format = editor.update(cx, |editor, cx| {
|
||||
editor.perform_format(project.clone(), FormatTrigger::Manual, cx)
|
||||
});
|
||||
format.await.unwrap();
|
||||
assert_eq!(
|
||||
editor.read_with(cx, |editor, cx| editor.text(cx)),
|
||||
buffer_text.to_string() + prettier_format_suffix,
|
||||
"Test prettier formatting was not applied to the original buffer text",
|
||||
);
|
||||
|
||||
update_test_language_settings(cx, |settings| {
|
||||
settings.defaults.formatter = Some(language_settings::Formatter::Auto)
|
||||
});
|
||||
let format = editor.update(cx, |editor, cx| {
|
||||
editor.perform_format(project.clone(), FormatTrigger::Manual, cx)
|
||||
});
|
||||
format.await.unwrap();
|
||||
assert_eq!(
|
||||
editor.read_with(cx, |editor, cx| editor.text(cx)),
|
||||
buffer_text.to_string() + prettier_format_suffix + "\n" + prettier_format_suffix,
|
||||
"Autoformatting (via test prettier) was not applied to the original buffer text",
|
||||
);
|
||||
}
|
||||
|
||||
fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
|
||||
let point = DisplayPoint::new(row as u32, column as u32);
|
||||
point..point
|
||||
|
@ -2428,7 +2428,7 @@ impl Element<Editor> for EditorElement {
|
||||
}
|
||||
|
||||
let active = matches!(
|
||||
editor.context_menu,
|
||||
editor.context_menu.read().as_ref(),
|
||||
Some(crate::ContextMenu::CodeActions(_))
|
||||
);
|
||||
|
||||
@ -2439,9 +2439,13 @@ impl Element<Editor> for EditorElement {
|
||||
}
|
||||
|
||||
let visible_rows = start_row..start_row + line_layouts.len() as u32;
|
||||
let mut hover = editor
|
||||
.hover_state
|
||||
.render(&snapshot, &style, visible_rows, cx);
|
||||
let mut hover = editor.hover_state.render(
|
||||
&snapshot,
|
||||
&style,
|
||||
visible_rows,
|
||||
editor.workspace.as_ref().map(|(w, _)| w.clone()),
|
||||
cx,
|
||||
);
|
||||
let mode = editor.mode;
|
||||
|
||||
let mut fold_indicators = editor.render_fold_indicators(
|
||||
|
@ -9,13 +9,15 @@ use gpui::{
|
||||
actions,
|
||||
elements::{Flex, MouseEventHandler, Padding, ParentElement, Text},
|
||||
platform::{CursorStyle, MouseButton},
|
||||
AnyElement, AppContext, Element, ModelHandle, Task, ViewContext,
|
||||
AnyElement, AppContext, Element, ModelHandle, Task, ViewContext, WeakViewHandle,
|
||||
};
|
||||
use language::{
|
||||
markdown, Bias, DiagnosticEntry, DiagnosticSeverity, Language, LanguageRegistry, ParsedMarkdown,
|
||||
};
|
||||
use language::{Bias, DiagnosticEntry, DiagnosticSeverity, Language, LanguageRegistry};
|
||||
use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart, Project};
|
||||
use rich_text::{new_paragraph, render_code, render_markdown_mut, RichText};
|
||||
use std::{ops::Range, sync::Arc, time::Duration};
|
||||
use util::TryFutureExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub const HOVER_DELAY_MILLIS: u64 = 350;
|
||||
pub const HOVER_REQUEST_DELAY_MILLIS: u64 = 200;
|
||||
@ -105,12 +107,15 @@ pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut Vie
|
||||
this.hover_state.diagnostic_popover = None;
|
||||
})?;
|
||||
|
||||
let language_registry = project.update(&mut cx, |p, _| p.languages().clone());
|
||||
let blocks = vec![inlay_hover.tooltip];
|
||||
let parsed_content = parse_blocks(&blocks, &language_registry, None).await;
|
||||
|
||||
let hover_popover = InfoPopover {
|
||||
project: project.clone(),
|
||||
symbol_range: RangeInEditor::Inlay(inlay_hover.range.clone()),
|
||||
blocks: vec![inlay_hover.tooltip],
|
||||
language: None,
|
||||
rendered_content: None,
|
||||
blocks,
|
||||
parsed_content,
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
@ -288,35 +293,38 @@ fn show_hover(
|
||||
});
|
||||
})?;
|
||||
|
||||
// Construct new hover popover from hover request
|
||||
let hover_popover = hover_request.await.ok().flatten().and_then(|hover_result| {
|
||||
if hover_result.is_empty() {
|
||||
return None;
|
||||
let hover_result = hover_request.await.ok().flatten();
|
||||
let hover_popover = match hover_result {
|
||||
Some(hover_result) if !hover_result.is_empty() => {
|
||||
// Create symbol range of anchors for highlighting and filtering of future requests.
|
||||
let range = if let Some(range) = hover_result.range {
|
||||
let start = snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_in_excerpt(excerpt_id.clone(), range.start);
|
||||
let end = snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_in_excerpt(excerpt_id.clone(), range.end);
|
||||
|
||||
start..end
|
||||
} else {
|
||||
anchor..anchor
|
||||
};
|
||||
|
||||
let language_registry = project.update(&mut cx, |p, _| p.languages().clone());
|
||||
let blocks = hover_result.contents;
|
||||
let language = hover_result.language;
|
||||
let parsed_content = parse_blocks(&blocks, &language_registry, language).await;
|
||||
|
||||
Some(InfoPopover {
|
||||
project: project.clone(),
|
||||
symbol_range: RangeInEditor::Text(range),
|
||||
blocks,
|
||||
parsed_content,
|
||||
})
|
||||
}
|
||||
|
||||
// Create symbol range of anchors for highlighting and filtering
|
||||
// of future requests.
|
||||
let range = if let Some(range) = hover_result.range {
|
||||
let start = snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_in_excerpt(excerpt_id.clone(), range.start);
|
||||
let end = snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_in_excerpt(excerpt_id.clone(), range.end);
|
||||
|
||||
start..end
|
||||
} else {
|
||||
anchor..anchor
|
||||
};
|
||||
|
||||
Some(InfoPopover {
|
||||
project: project.clone(),
|
||||
symbol_range: RangeInEditor::Text(range),
|
||||
blocks: hover_result.contents,
|
||||
language: hover_result.language,
|
||||
rendered_content: None,
|
||||
})
|
||||
});
|
||||
_ => None,
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(symbol_range) = hover_popover
|
||||
@ -345,44 +353,56 @@ fn show_hover(
|
||||
editor.hover_state.info_task = Some(task);
|
||||
}
|
||||
|
||||
fn render_blocks(
|
||||
async fn parse_blocks(
|
||||
blocks: &[HoverBlock],
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
language: Option<&Arc<Language>>,
|
||||
) -> RichText {
|
||||
let mut data = RichText {
|
||||
text: Default::default(),
|
||||
highlights: Default::default(),
|
||||
region_ranges: Default::default(),
|
||||
regions: Default::default(),
|
||||
};
|
||||
language: Option<Arc<Language>>,
|
||||
) -> markdown::ParsedMarkdown {
|
||||
let mut text = String::new();
|
||||
let mut highlights = Vec::new();
|
||||
let mut region_ranges = Vec::new();
|
||||
let mut regions = Vec::new();
|
||||
|
||||
for block in blocks {
|
||||
match &block.kind {
|
||||
HoverBlockKind::PlainText => {
|
||||
new_paragraph(&mut data.text, &mut Vec::new());
|
||||
data.text.push_str(&block.text);
|
||||
markdown::new_paragraph(&mut text, &mut Vec::new());
|
||||
text.push_str(&block.text);
|
||||
}
|
||||
|
||||
HoverBlockKind::Markdown => {
|
||||
render_markdown_mut(&block.text, language_registry, language, &mut data)
|
||||
markdown::parse_markdown_block(
|
||||
&block.text,
|
||||
language_registry,
|
||||
language.clone(),
|
||||
&mut text,
|
||||
&mut highlights,
|
||||
&mut region_ranges,
|
||||
&mut regions,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
HoverBlockKind::Code { language } => {
|
||||
if let Some(language) = language_registry
|
||||
.language_for_name(language)
|
||||
.now_or_never()
|
||||
.and_then(Result::ok)
|
||||
{
|
||||
render_code(&mut data.text, &mut data.highlights, &block.text, &language);
|
||||
markdown::highlight_code(&mut text, &mut highlights, &block.text, &language);
|
||||
} else {
|
||||
data.text.push_str(&block.text);
|
||||
text.push_str(&block.text);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data.text = data.text.trim().to_string();
|
||||
|
||||
data
|
||||
ParsedMarkdown {
|
||||
text: text.trim().to_string(),
|
||||
highlights,
|
||||
region_ranges,
|
||||
regions,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@ -403,6 +423,7 @@ impl HoverState {
|
||||
snapshot: &EditorSnapshot,
|
||||
style: &EditorStyle,
|
||||
visible_rows: Range<u32>,
|
||||
workspace: Option<WeakViewHandle<Workspace>>,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) -> Option<(DisplayPoint, Vec<AnyElement<Editor>>)> {
|
||||
// If there is a diagnostic, position the popovers based on that.
|
||||
@ -432,7 +453,7 @@ impl HoverState {
|
||||
elements.push(diagnostic_popover.render(style, cx));
|
||||
}
|
||||
if let Some(info_popover) = self.info_popover.as_mut() {
|
||||
elements.push(info_popover.render(style, cx));
|
||||
elements.push(info_popover.render(style, workspace, cx));
|
||||
}
|
||||
|
||||
Some((point, elements))
|
||||
@ -444,32 +465,23 @@ pub struct InfoPopover {
|
||||
pub project: ModelHandle<Project>,
|
||||
symbol_range: RangeInEditor,
|
||||
pub blocks: Vec<HoverBlock>,
|
||||
language: Option<Arc<Language>>,
|
||||
rendered_content: Option<RichText>,
|
||||
parsed_content: ParsedMarkdown,
|
||||
}
|
||||
|
||||
impl InfoPopover {
|
||||
pub fn render(
|
||||
&mut self,
|
||||
style: &EditorStyle,
|
||||
workspace: Option<WeakViewHandle<Workspace>>,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) -> AnyElement<Editor> {
|
||||
let rendered_content = self.rendered_content.get_or_insert_with(|| {
|
||||
render_blocks(
|
||||
&self.blocks,
|
||||
self.project.read(cx).languages(),
|
||||
self.language.as_ref(),
|
||||
)
|
||||
});
|
||||
|
||||
MouseEventHandler::new::<InfoPopover, _>(0, cx, move |_, cx| {
|
||||
let code_span_background_color = style.document_highlight_read_background;
|
||||
MouseEventHandler::new::<InfoPopover, _>(0, cx, |_, cx| {
|
||||
Flex::column()
|
||||
.scrollable::<HoverBlock>(1, None, cx)
|
||||
.with_child(rendered_content.element(
|
||||
style.syntax.clone(),
|
||||
style.text.clone(),
|
||||
code_span_background_color,
|
||||
.scrollable::<HoverBlock>(0, None, cx)
|
||||
.with_child(crate::render_parsed_markdown::<HoverBlock>(
|
||||
&self.parsed_content,
|
||||
style,
|
||||
workspace,
|
||||
cx,
|
||||
))
|
||||
.contained()
|
||||
@ -572,7 +584,6 @@ mod tests {
|
||||
use language::{language_settings::InlayHintSettings, Diagnostic, DiagnosticSet};
|
||||
use lsp::LanguageServerId;
|
||||
use project::{HoverBlock, HoverBlockKind};
|
||||
use rich_text::Highlight;
|
||||
use smol::stream::StreamExt;
|
||||
use unindent::Unindent;
|
||||
use util::test::marked_text_ranges;
|
||||
@ -793,7 +804,7 @@ mod tests {
|
||||
}],
|
||||
);
|
||||
|
||||
let rendered = render_blocks(&blocks, &Default::default(), None);
|
||||
let rendered = smol::block_on(parse_blocks(&blocks, &Default::default(), None));
|
||||
assert_eq!(
|
||||
rendered.text,
|
||||
code_str.trim(),
|
||||
@ -900,7 +911,7 @@ mod tests {
|
||||
// Links
|
||||
Row {
|
||||
blocks: vec![HoverBlock {
|
||||
text: "one [two](the-url) three".to_string(),
|
||||
text: "one [two](https://the-url) three".to_string(),
|
||||
kind: HoverBlockKind::Markdown,
|
||||
}],
|
||||
expected_marked_text: "one «two» three".to_string(),
|
||||
@ -921,7 +932,7 @@ mod tests {
|
||||
- a
|
||||
- b
|
||||
* two
|
||||
- [c](the-url)
|
||||
- [c](https://the-url)
|
||||
- d"
|
||||
.unindent(),
|
||||
kind: HoverBlockKind::Markdown,
|
||||
@ -985,7 +996,7 @@ mod tests {
|
||||
expected_styles,
|
||||
} in &rows[0..]
|
||||
{
|
||||
let rendered = render_blocks(&blocks, &Default::default(), None);
|
||||
let rendered = smol::block_on(parse_blocks(&blocks, &Default::default(), None));
|
||||
|
||||
let (expected_text, ranges) = marked_text_ranges(expected_marked_text, false);
|
||||
let expected_highlights = ranges
|
||||
@ -1001,11 +1012,8 @@ mod tests {
|
||||
.highlights
|
||||
.iter()
|
||||
.filter_map(|(range, highlight)| {
|
||||
let style = match highlight {
|
||||
Highlight::Id(id) => id.style(&style.syntax)?,
|
||||
Highlight::Highlight(style) => style.clone(),
|
||||
};
|
||||
Some((range.clone(), style))
|
||||
let highlight = highlight.to_highlight_style(&style.syntax)?;
|
||||
Some((range.clone(), highlight))
|
||||
})
|
||||
.collect();
|
||||
|
||||
@ -1258,11 +1266,7 @@ mod tests {
|
||||
"Popover range should match the new type label part"
|
||||
);
|
||||
assert_eq!(
|
||||
popover
|
||||
.rendered_content
|
||||
.as_ref()
|
||||
.expect("should have label text for new type hint")
|
||||
.text,
|
||||
popover.parsed_content.text,
|
||||
format!("A tooltip for `{new_type_label}`"),
|
||||
"Rendered text should not anyhow alter backticks"
|
||||
);
|
||||
@ -1316,11 +1320,7 @@ mod tests {
|
||||
"Popover range should match the struct label part"
|
||||
);
|
||||
assert_eq!(
|
||||
popover
|
||||
.rendered_content
|
||||
.as_ref()
|
||||
.expect("should have label text for struct hint")
|
||||
.text,
|
||||
popover.parsed_content.text,
|
||||
format!("A tooltip for {struct_label}"),
|
||||
"Rendered markdown element should remove backticks from text"
|
||||
);
|
||||
|
@ -234,7 +234,7 @@ pub fn start_of_paragraph(
) -> DisplayPoint {
    let point = display_point.to_point(map);
    if point.row == 0 {
        return map.max_point();
        return DisplayPoint::zero();
    }

    let mut found_non_blank_line = false;
@ -261,7 +261,7 @@ pub fn end_of_paragraph(
) -> DisplayPoint {
    let point = display_point.to_point(map);
    if point.row == map.max_buffer_row() {
        return DisplayPoint::zero();
        return map.max_point();
    }

    let mut found_non_blank_line = false;
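
The two hunks above swap the early returns: paragraph motion from the first row now clamps to the start of the buffer, and motion from the last row clamps to the end, instead of jumping to the opposite extreme. A standalone sketch of the corrected boundary behaviour over plain line slices; this is a simplified model, not the editor's DisplayMap logic:

// Simplified model of the corrected behaviour: `row` indexes into `lines`,
// blank lines separate paragraphs, and the early returns clamp to the buffer
// edges the way the fixed code does.
fn start_of_paragraph(lines: &[&str], row: usize) -> usize {
    if row == 0 {
        return 0; // previously jumped to the last row
    }
    let mut found_non_blank_line = false;
    for r in (0..=row).rev() {
        let blank = lines[r].trim().is_empty();
        if blank && found_non_blank_line {
            return r;
        }
        found_non_blank_line |= !blank;
    }
    0
}

fn end_of_paragraph(lines: &[&str], row: usize) -> usize {
    let max_row = lines.len().saturating_sub(1);
    if row == max_row {
        return max_row; // previously jumped back to row 0
    }
    let mut found_non_blank_line = false;
    for r in row..=max_row {
        let blank = lines[r].trim().is_empty();
        if blank && found_non_blank_line {
            return r;
        }
        found_non_blank_line |= !blank;
    }
    max_row
}

fn main() {
    let lines = ["first paragraph", "", "second paragraph", "still second"];
    assert_eq!(start_of_paragraph(&lines, 0), 0); // fixed case: clamp to start
    assert_eq!(end_of_paragraph(&lines, 3), 3); // fixed case: clamp to end
    assert_eq!(end_of_paragraph(&lines, 0), 1); // stop at the separating blank
    assert_eq!(start_of_paragraph(&lines, 3), 1);
}
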
|
||||
|
@ -498,77 +498,91 @@ impl MultiBuffer {
|
||||
}
|
||||
}
|
||||
|
||||
for (buffer_id, mut edits) in buffer_edits {
|
||||
edits.sort_unstable_by_key(|edit| edit.range.start);
|
||||
self.buffers.borrow()[&buffer_id]
|
||||
.buffer
|
||||
.update(cx, |buffer, cx| {
|
||||
let mut edits = edits.into_iter().peekable();
|
||||
let mut insertions = Vec::new();
|
||||
let mut original_indent_columns = Vec::new();
|
||||
let mut deletions = Vec::new();
|
||||
let empty_str: Arc<str> = "".into();
|
||||
while let Some(BufferEdit {
|
||||
mut range,
|
||||
new_text,
|
||||
mut is_insertion,
|
||||
original_indent_column,
|
||||
}) = edits.next()
|
||||
{
|
||||
drop(cursor);
|
||||
drop(snapshot);
|
||||
// Non-generic part of edit, hoisted out to avoid blowing up LLVM IR.
|
||||
fn tail(
|
||||
this: &mut MultiBuffer,
|
||||
buffer_edits: HashMap<u64, Vec<BufferEdit>>,
|
||||
autoindent_mode: Option<AutoindentMode>,
|
||||
edited_excerpt_ids: Vec<ExcerptId>,
|
||||
cx: &mut ModelContext<MultiBuffer>,
|
||||
) {
|
||||
for (buffer_id, mut edits) in buffer_edits {
|
||||
edits.sort_unstable_by_key(|edit| edit.range.start);
|
||||
this.buffers.borrow()[&buffer_id]
|
||||
.buffer
|
||||
.update(cx, |buffer, cx| {
|
||||
let mut edits = edits.into_iter().peekable();
|
||||
let mut insertions = Vec::new();
|
||||
let mut original_indent_columns = Vec::new();
|
||||
let mut deletions = Vec::new();
|
||||
let empty_str: Arc<str> = "".into();
|
||||
while let Some(BufferEdit {
|
||||
range: next_range,
|
||||
is_insertion: next_is_insertion,
|
||||
..
|
||||
}) = edits.peek()
|
||||
mut range,
|
||||
new_text,
|
||||
mut is_insertion,
|
||||
original_indent_column,
|
||||
}) = edits.next()
|
||||
{
|
||||
if range.end >= next_range.start {
|
||||
range.end = cmp::max(next_range.end, range.end);
|
||||
is_insertion |= *next_is_insertion;
|
||||
edits.next();
|
||||
} else {
|
||||
break;
|
||||
while let Some(BufferEdit {
|
||||
range: next_range,
|
||||
is_insertion: next_is_insertion,
|
||||
..
|
||||
}) = edits.peek()
|
||||
{
|
||||
if range.end >= next_range.start {
|
||||
range.end = cmp::max(next_range.end, range.end);
|
||||
is_insertion |= *next_is_insertion;
|
||||
edits.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if is_insertion {
|
||||
original_indent_columns.push(original_indent_column);
|
||||
insertions.push((
|
||||
buffer.anchor_before(range.start)
|
||||
..buffer.anchor_before(range.end),
|
||||
new_text.clone(),
|
||||
));
|
||||
} else if !range.is_empty() {
|
||||
deletions.push((
|
||||
buffer.anchor_before(range.start)
|
||||
..buffer.anchor_before(range.end),
|
||||
empty_str.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if is_insertion {
|
||||
original_indent_columns.push(original_indent_column);
|
||||
insertions.push((
|
||||
buffer.anchor_before(range.start)..buffer.anchor_before(range.end),
|
||||
new_text.clone(),
|
||||
));
|
||||
} else if !range.is_empty() {
|
||||
deletions.push((
|
||||
buffer.anchor_before(range.start)..buffer.anchor_before(range.end),
|
||||
empty_str.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
let deletion_autoindent_mode =
|
||||
if let Some(AutoindentMode::Block { .. }) = autoindent_mode {
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Default::default(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let insertion_autoindent_mode =
|
||||
if let Some(AutoindentMode::Block { .. }) = autoindent_mode {
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let deletion_autoindent_mode =
|
||||
if let Some(AutoindentMode::Block { .. }) = autoindent_mode {
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Default::default(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let insertion_autoindent_mode =
|
||||
if let Some(AutoindentMode::Block { .. }) = autoindent_mode {
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
buffer.edit(deletions, deletion_autoindent_mode, cx);
|
||||
buffer.edit(insertions, insertion_autoindent_mode, cx);
|
||||
})
|
||||
}
|
||||
|
||||
buffer.edit(deletions, deletion_autoindent_mode, cx);
|
||||
buffer.edit(insertions, insertion_autoindent_mode, cx);
|
||||
})
|
||||
cx.emit(Event::ExcerptsEdited {
|
||||
ids: edited_excerpt_ids,
|
||||
});
|
||||
}
|
||||
|
||||
cx.emit(Event::ExcerptsEdited {
|
||||
ids: edited_excerpt_ids,
|
||||
});
|
||||
tail(self, buffer_edits, autoindent_mode, edited_excerpt_ids, cx);
|
||||
}
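
The hunk above moves the per-buffer edit loop into a nested `tail` function, described in the comment as a way to avoid blowing up LLVM IR: only the thin generic wrapper is monomorphized per call site, while the heavy body is compiled once. A hedged, self-contained sketch of that pattern on a toy function:

// Sketch of the "hoist the non-generic tail" pattern: the outer function is
// generic and gets monomorphized per caller, so only the small conversion
// layer is duplicated while the real work is compiled a single time.
fn store_all<I>(items: I) -> usize
where
    I: IntoIterator,
    I::Item: Into<String>,
{
    // Generic part: normalize the input into a concrete type...
    let items: Vec<String> = items.into_iter().map(Into::into).collect();

    // ...then delegate to a single non-generic function for the heavy part.
    fn tail(items: Vec<String>) -> usize {
        items.iter().map(|item| item.len()).sum()
    }
    tail(items)
}

fn main() {
    // Both calls reuse the same compiled `tail`, despite different item types.
    assert_eq!(store_all(vec!["ab", "cd"]), 4);
    assert_eq!(store_all(vec![String::from("xyz")]), 3);
}
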
|
||||
|
||||
pub fn start_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
|
||||
|
@ -107,13 +107,23 @@ fn matching_history_item_paths(
|
||||
) -> HashMap<Arc<Path>, PathMatch> {
|
||||
let history_items_by_worktrees = history_items
|
||||
.iter()
|
||||
.map(|found_path| {
|
||||
let path = &found_path.project.path;
|
||||
.filter_map(|found_path| {
|
||||
let candidate = PathMatchCandidate {
|
||||
path,
|
||||
char_bag: CharBag::from_iter(path.to_string_lossy().to_lowercase().chars()),
|
||||
path: &found_path.project.path,
|
||||
// Only match history items names, otherwise their paths may match too many queries, producing false positives.
|
||||
// E.g. `foo` would match both `something/foo/bar.rs` and `something/foo/foo.rs` and if the former is a history item,
|
||||
// it would be shown first always, despite the latter being a better match.
|
||||
char_bag: CharBag::from_iter(
|
||||
found_path
|
||||
.project
|
||||
.path
|
||||
.file_name()?
|
||||
.to_string_lossy()
|
||||
.to_lowercase()
|
||||
.chars(),
|
||||
),
|
||||
};
|
||||
(found_path.project.worktree_id, candidate)
|
||||
Some((found_path.project.worktree_id, candidate))
|
||||
})
|
||||
.fold(
|
||||
HashMap::default(),
|
||||
@ -212,6 +222,10 @@ fn toggle_or_cycle_file_finder(
|
||||
.as_ref()
|
||||
.and_then(|found_path| found_path.absolute.as_ref())
|
||||
})
|
||||
.filter(|(_, history_abs_path)| match history_abs_path {
|
||||
Some(abs_path) => history_file_exists(abs_path),
|
||||
None => true,
|
||||
})
|
||||
.map(|(history_path, abs_path)| FoundPath::new(history_path, abs_path)),
|
||||
)
|
||||
.collect();
|
||||
@ -236,6 +250,16 @@ fn toggle_or_cycle_file_finder(
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(test))]
fn history_file_exists(abs_path: &PathBuf) -> bool {
    abs_path.exists()
}

#[cfg(test)]
fn history_file_exists(abs_path: &PathBuf) -> bool {
    !abs_path.ends_with("nonexistent.rs")
}
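
`history_file_exists` is the new test seam: release builds ask the real filesystem, while test builds deterministically treat `nonexistent.rs` as missing so `test_nonexistent_history_items_not_shown` can run without touching the disk. The same seam in a standalone form, using the more general `&Path` parameter as an illustrative choice rather than the project's exact signature:

use std::path::Path;

// Production builds consult the real filesystem; test builds treat one
// well-known file name as missing so history filtering can be exercised
// deterministically.
#[cfg(not(test))]
fn history_file_exists(abs_path: &Path) -> bool {
    abs_path.exists()
}

#[cfg(test)]
fn history_file_exists(abs_path: &Path) -> bool {
    !abs_path.ends_with("nonexistent.rs")
}

fn main() {
    println!(
        "history entry kept: {}",
        history_file_exists(Path::new("/tmp/definitely-made-up/nonexistent.rs"))
    );
}
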
|
||||
|
||||
pub enum Event {
|
||||
Selected(ProjectPath),
|
||||
Dismissed,
|
||||
@ -505,12 +529,7 @@ impl PickerDelegate for FileFinderDelegate {
|
||||
project
|
||||
.worktree_for_id(history_item.project.worktree_id, cx)
|
||||
.is_some()
|
||||
|| (project.is_local()
|
||||
&& history_item
|
||||
.absolute
|
||||
.as_ref()
|
||||
.filter(|abs_path| abs_path.exists())
|
||||
.is_some())
|
||||
|| (project.is_local() && history_item.absolute.is_some())
|
||||
})
|
||||
.cloned()
|
||||
.map(|p| (p, None))
|
||||
@ -1803,6 +1822,202 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_history_items_vs_very_good_external_match(
|
||||
deterministic: Arc<gpui::executor::Deterministic>,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let app_state = init_test(cx);
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/src",
|
||||
json!({
|
||||
"collab_ui": {
|
||||
"first.rs": "// First Rust file",
|
||||
"second.rs": "// Second Rust file",
|
||||
"third.rs": "// Third Rust file",
|
||||
"collab_ui.rs": "// Fourth Rust file",
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
|
||||
let window = cx.add_window(|cx| Workspace::test_new(project, cx));
|
||||
let workspace = window.root(cx);
|
||||
// generate some history to select from
|
||||
open_close_queried_buffer(
|
||||
"fir",
|
||||
1,
|
||||
"first.rs",
|
||||
window.into(),
|
||||
&workspace,
|
||||
&deterministic,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
open_close_queried_buffer(
|
||||
"sec",
|
||||
1,
|
||||
"second.rs",
|
||||
window.into(),
|
||||
&workspace,
|
||||
&deterministic,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
open_close_queried_buffer(
|
||||
"thi",
|
||||
1,
|
||||
"third.rs",
|
||||
window.into(),
|
||||
&workspace,
|
||||
&deterministic,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
open_close_queried_buffer(
|
||||
"sec",
|
||||
1,
|
||||
"second.rs",
|
||||
window.into(),
|
||||
&workspace,
|
||||
&deterministic,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
cx.dispatch_action(window.into(), Toggle);
|
||||
let query = "collab_ui";
|
||||
let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
|
||||
finder
|
||||
.update(cx, |finder, cx| {
|
||||
finder.delegate_mut().update_matches(query.to_string(), cx)
|
||||
})
|
||||
.await;
|
||||
finder.read_with(cx, |finder, _| {
|
||||
let delegate = finder.delegate();
|
||||
assert!(
|
||||
delegate.matches.history.is_empty(),
|
||||
"History items should not math query {query}, they should be matched by name only"
|
||||
);
|
||||
|
||||
let search_entries = delegate
|
||||
.matches
|
||||
.search
|
||||
.iter()
|
||||
.map(|path_match| path_match.path.to_path_buf())
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
search_entries,
|
||||
vec![
|
||||
PathBuf::from("collab_ui/collab_ui.rs"),
|
||||
PathBuf::from("collab_ui/third.rs"),
|
||||
PathBuf::from("collab_ui/first.rs"),
|
||||
PathBuf::from("collab_ui/second.rs"),
|
||||
],
|
||||
"Despite all search results having the same directory name, the most matching one should be on top"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_nonexistent_history_items_not_shown(
|
||||
deterministic: Arc<gpui::executor::Deterministic>,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let app_state = init_test(cx);
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/src",
|
||||
json!({
|
||||
"test": {
|
||||
"first.rs": "// First Rust file",
|
||||
"nonexistent.rs": "// Second Rust file",
|
||||
"third.rs": "// Third Rust file",
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
|
||||
let window = cx.add_window(|cx| Workspace::test_new(project, cx));
|
||||
let workspace = window.root(cx);
|
||||
// generate some history to select from
|
||||
open_close_queried_buffer(
|
||||
"fir",
|
||||
1,
|
||||
"first.rs",
|
||||
window.into(),
|
||||
&workspace,
|
||||
&deterministic,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
open_close_queried_buffer(
|
||||
"non",
|
||||
1,
|
||||
"nonexistent.rs",
|
||||
window.into(),
|
||||
&workspace,
|
||||
&deterministic,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
open_close_queried_buffer(
|
||||
"thi",
|
||||
1,
|
||||
"third.rs",
|
||||
window.into(),
|
||||
&workspace,
|
||||
&deterministic,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
open_close_queried_buffer(
|
||||
"fir",
|
||||
1,
|
||||
"first.rs",
|
||||
window.into(),
|
||||
&workspace,
|
||||
&deterministic,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
cx.dispatch_action(window.into(), Toggle);
|
||||
let query = "rs";
|
||||
let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
|
||||
finder
|
||||
.update(cx, |finder, cx| {
|
||||
finder.delegate_mut().update_matches(query.to_string(), cx)
|
||||
})
|
||||
.await;
|
||||
finder.read_with(cx, |finder, _| {
|
||||
let delegate = finder.delegate();
|
||||
let history_entries = delegate
|
||||
.matches
|
||||
.history
|
||||
.iter()
|
||||
.map(|(_, path_match)| path_match.as_ref().expect("should have a path match").path.to_path_buf())
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
history_entries,
|
||||
vec![
|
||||
PathBuf::from("test/first.rs"),
|
||||
PathBuf::from("test/third.rs"),
|
||||
],
|
||||
"Should have all opened files in the history, except the ones that do not exist on disk"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
async fn open_close_queried_buffer(
|
||||
input: &str,
|
||||
expected_matches: usize,
|
||||
|
@ -13,7 +13,6 @@ rope = { path = "../rope" }
|
||||
text = { path = "../text" }
|
||||
util = { path = "../util" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
rpc = { path = "../rpc" }
|
||||
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
|
@ -85,7 +85,7 @@ pub struct RemoveOptions {
|
||||
pub ignore_if_not_exists: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct Metadata {
|
||||
pub inode: u64,
|
||||
pub mtime: SystemTime,
|
||||
|
@ -2,7 +2,6 @@ use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use git2::{BranchType, StatusShow};
|
||||
use parking_lot::Mutex;
|
||||
use rpc::proto;
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
@ -23,6 +22,7 @@ pub struct Branch {
|
||||
/// Timestamp of most recent commit, normalized to Unix Epoch format.
|
||||
pub unix_timestamp: Option<i64>,
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
pub trait GitRepository: Send {
|
||||
fn reload_index(&self);
|
||||
@ -358,24 +358,6 @@ impl GitFileStatus {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    pub fn from_proto(git_status: Option<i32>) -> Option<GitFileStatus> {
        git_status.and_then(|status| {
            proto::GitStatus::from_i32(status).map(|status| match status {
                proto::GitStatus::Added => GitFileStatus::Added,
                proto::GitStatus::Modified => GitFileStatus::Modified,
                proto::GitStatus::Conflict => GitFileStatus::Conflict,
            })
        })
    }

    pub fn to_proto(self) -> i32 {
        match self {
            GitFileStatus::Added => proto::GitStatus::Added as i32,
            GitFileStatus::Modified => proto::GitStatus::Modified as i32,
            GitFileStatus::Conflict => proto::GitStatus::Conflict as i32,
        }
    }
}
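
`GitFileStatus` now converts to and from its wire representation in both directions. A standalone sketch of the same `i32` round-trip with a local enum; the numeric values below are placeholders, not the generated `proto::GitStatus` discriminants:

// Standalone sketch of the enum <-> i32 round-trip above, using a local enum
// instead of the generated proto type.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum FileStatus {
    Added,
    Modified,
    Conflict,
}

impl FileStatus {
    fn from_proto(status: Option<i32>) -> Option<FileStatus> {
        match status? {
            0 => Some(FileStatus::Added),
            1 => Some(FileStatus::Modified),
            2 => Some(FileStatus::Conflict),
            _ => None, // unknown values from newer peers are ignored, not errors
        }
    }

    fn to_proto(self) -> i32 {
        match self {
            FileStatus::Added => 0,
            FileStatus::Modified => 1,
            FileStatus::Conflict => 2,
        }
    }
}

fn main() {
    for status in [FileStatus::Added, FileStatus::Modified, FileStatus::Conflict] {
        assert_eq!(FileStatus::from_proto(Some(status.to_proto())), Some(status));
    }
    assert_eq!(FileStatus::from_proto(Some(42)), None);
    assert_eq!(FileStatus::from_proto(None), None);
}
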
|
||||
|
||||
#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
|
||||
|
@ -441,7 +441,7 @@ mod tests {
|
||||
score,
|
||||
worktree_id: 0,
|
||||
positions: Vec::new(),
|
||||
path: candidate.path.clone(),
|
||||
path: Arc::from(candidate.path),
|
||||
path_prefix: "".into(),
|
||||
distance_to_relative_ancestor: usize::MAX,
|
||||
},
|
||||
|
@ -14,7 +14,7 @@ use crate::{
|
||||
|
||||
#[derive(Clone, Debug)]
pub struct PathMatchCandidate<'a> {
    pub path: &'a Arc<Path>,
    pub path: &'a Path,
    pub char_bag: CharBag,
}
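
`PathMatchCandidate` now borrows a plain `&Path` and keeps a `CharBag` so hopeless candidates can be rejected before fuzzy scoring, and the file-finder change earlier in this diff builds that bag from the file name only. A standalone sketch of the prefilter idea, with a `HashSet` standing in for the real bitset-based `CharBag`:

use std::collections::HashSet;

// A candidate can only match a query if it contains every character of the
// query, so build a cheap character set per candidate and skip the expensive
// fuzzy scoring otherwise. The real `CharBag` is a compact bitset.
fn char_bag(text: &str) -> HashSet<char> {
    text.to_lowercase().chars().collect()
}

fn may_match(candidate_bag: &HashSet<char>, query: &str) -> bool {
    query
        .to_lowercase()
        .chars()
        .all(|c| candidate_bag.contains(&c))
}

fn main() {
    // Building the bag from the file name only (as the file finder change does)
    // keeps `foo` from matching every path that merely contains a `foo/` folder.
    let bag = char_bag("collab_ui.rs");
    assert!(may_match(&bag, "collab"));
    assert!(!may_match(&bag, "xyz"));
}
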
|
||||
|
||||
@ -120,7 +120,7 @@ pub fn match_fixed_path_set(
|
||||
score,
|
||||
worktree_id,
|
||||
positions: Vec::new(),
|
||||
path: candidate.path.clone(),
|
||||
path: Arc::from(candidate.path),
|
||||
path_prefix: Arc::from(""),
|
||||
distance_to_relative_ancestor: usize::MAX,
|
||||
},
|
||||
@ -195,7 +195,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
|
||||
score,
|
||||
worktree_id,
|
||||
positions: Vec::new(),
|
||||
path: candidate.path.clone(),
|
||||
path: Arc::from(candidate.path),
|
||||
path_prefix: candidate_set.prefix(),
|
||||
distance_to_relative_ancestor: relative_to.as_ref().map_or(
|
||||
usize::MAX,
|
||||
|
@ -71,7 +71,7 @@ pub struct Window {
|
||||
pub(crate) hovered_region_ids: Vec<MouseRegionId>,
|
||||
pub(crate) clicked_region_ids: Vec<MouseRegionId>,
|
||||
pub(crate) clicked_region: Option<(MouseRegionId, MouseButton)>,
|
||||
text_layout_cache: TextLayoutCache,
|
||||
text_layout_cache: Arc<TextLayoutCache>,
|
||||
refreshing: bool,
|
||||
}
|
||||
|
||||
@ -107,7 +107,7 @@ impl Window {
|
||||
cursor_regions: Default::default(),
|
||||
mouse_regions: Default::default(),
|
||||
event_handlers: Default::default(),
|
||||
text_layout_cache: TextLayoutCache::new(cx.font_system.clone()),
|
||||
text_layout_cache: Arc::new(TextLayoutCache::new(cx.font_system.clone())),
|
||||
last_mouse_moved_event: None,
|
||||
last_mouse_position: Vector2F::zero(),
|
||||
pressed_buttons: Default::default(),
|
||||
@ -303,7 +303,7 @@ impl<'a> WindowContext<'a> {
|
||||
self.window.refreshing
|
||||
}
|
||||
|
||||
    pub fn text_layout_cache(&self) -> &TextLayoutCache {
    pub fn text_layout_cache(&self) -> &Arc<TextLayoutCache> {
        &self.window.text_layout_cache
    }
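
The window now stores the text layout cache behind an `Arc`, so element code (like the measured layout closure in the Text element later in this change) can clone a handle instead of borrowing the window for the lifetime of the closure. A minimal, hypothetical sketch of that sharing pattern:

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// A layout closure may outlive the borrow of the window, so it clones a cheap
// Arc handle to the cache instead. The cache type here is a stand-in.
struct LayoutCache {
    widths: Mutex<HashMap<String, f32>>,
}

impl LayoutCache {
    fn width(&self, text: &str) -> f32 {
        let mut widths = self.widths.lock().unwrap();
        *widths
            .entry(text.to_string())
            .or_insert_with(|| text.chars().count() as f32 * 8.0)
    }
}

fn main() {
    let cache = Arc::new(LayoutCache {
        widths: Mutex::new(HashMap::new()),
    });

    // The closure owns its own handle, like the `layout_cache` clone captured
    // by `add_measured_layout_node` in the Text element change.
    let cache_for_measure = Arc::clone(&cache);
    let measure = move |text: &str| cache_for_measure.width(text);

    assert_eq!(measure("hello"), 40.0);
    assert_eq!(cache.width("hello"), 40.0); // same cached entry
}
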
|
||||
|
||||
|
@ -2,7 +2,8 @@ use std::{any::Any, cell::Cell, f32::INFINITY, ops::Range, rc::Rc};
|
||||
|
||||
use crate::{
|
||||
json::{self, ToJson, Value},
|
||||
AnyElement, Axis, Element, ElementStateHandle, SizeConstraint, Vector2FExt, ViewContext,
|
||||
AnyElement, Axis, Element, ElementStateHandle, SizeConstraint, TypeTag, Vector2FExt,
|
||||
ViewContext,
|
||||
};
|
||||
use pathfinder_geometry::{
|
||||
rect::RectF,
|
||||
@ -10,10 +11,10 @@ use pathfinder_geometry::{
|
||||
};
|
||||
use serde_json::json;
|
||||
|
||||
#[derive(Default)]
|
||||
struct ScrollState {
|
||||
scroll_to: Cell<Option<usize>>,
|
||||
scroll_position: Cell<f32>,
|
||||
type_tag: TypeTag,
|
||||
}
|
||||
|
||||
pub struct Flex<V> {
|
||||
@ -66,8 +67,14 @@ impl<V: 'static> Flex<V> {
|
||||
where
|
||||
Tag: 'static,
|
||||
{
|
||||
let scroll_state = cx.default_element_state::<Tag, Rc<ScrollState>>(element_id);
|
||||
scroll_state.read(cx).scroll_to.set(scroll_to);
|
||||
let scroll_state = cx.element_state::<Tag, Rc<ScrollState>>(
|
||||
element_id,
|
||||
Rc::new(ScrollState {
|
||||
scroll_to: Cell::new(scroll_to),
|
||||
scroll_position: Default::default(),
|
||||
type_tag: TypeTag::new::<Tag>(),
|
||||
}),
|
||||
);
|
||||
self.scroll_state = Some((scroll_state, cx.handle().id()));
|
||||
self
|
||||
}
|
||||
@ -276,38 +283,44 @@ impl<V: 'static> Element<V> for Flex<V> {
|
||||
if let Some((scroll_state, id)) = &self.scroll_state {
|
||||
let scroll_state = scroll_state.read(cx).clone();
|
||||
cx.scene().push_mouse_region(
|
||||
crate::MouseRegion::new::<Self>(*id, 0, bounds)
|
||||
.on_scroll({
|
||||
let axis = self.axis;
|
||||
move |e, _: &mut V, cx| {
|
||||
if remaining_space < 0. {
|
||||
let scroll_delta = e.delta.raw();
|
||||
crate::MouseRegion::from_handlers(
|
||||
scroll_state.type_tag,
|
||||
*id,
|
||||
0,
|
||||
bounds,
|
||||
Default::default(),
|
||||
)
|
||||
.on_scroll({
|
||||
let axis = self.axis;
|
||||
move |e, _: &mut V, cx| {
|
||||
if remaining_space < 0. {
|
||||
let scroll_delta = e.delta.raw();
|
||||
|
||||
let mut delta = match axis {
|
||||
Axis::Horizontal => {
|
||||
if scroll_delta.x().abs() >= scroll_delta.y().abs() {
|
||||
scroll_delta.x()
|
||||
} else {
|
||||
scroll_delta.y()
|
||||
}
|
||||
let mut delta = match axis {
|
||||
Axis::Horizontal => {
|
||||
if scroll_delta.x().abs() >= scroll_delta.y().abs() {
|
||||
scroll_delta.x()
|
||||
} else {
|
||||
scroll_delta.y()
|
||||
}
|
||||
Axis::Vertical => scroll_delta.y(),
|
||||
};
|
||||
if !e.delta.precise() {
|
||||
delta *= 20.;
|
||||
}
|
||||
|
||||
scroll_state
|
||||
.scroll_position
|
||||
.set(scroll_state.scroll_position.get() - delta);
|
||||
|
||||
cx.notify();
|
||||
} else {
|
||||
cx.propagate_event();
|
||||
Axis::Vertical => scroll_delta.y(),
|
||||
};
|
||||
if !e.delta.precise() {
|
||||
delta *= 20.;
|
||||
}
|
||||
|
||||
scroll_state
|
||||
.scroll_position
|
||||
.set(scroll_state.scroll_position.get() - delta);
|
||||
|
||||
cx.notify();
|
||||
} else {
|
||||
cx.propagate_event();
|
||||
}
|
||||
})
|
||||
.on_move(|_, _: &mut V, _| { /* Capture move events */ }),
|
||||
}
|
||||
})
|
||||
.on_move(|_, _: &mut V, _| { /* Capture move events */ }),
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -140,6 +140,10 @@ unsafe fn build_classes() {
|
||||
sel!(application:openURLs:),
|
||||
open_urls as extern "C" fn(&mut Object, Sel, id, id),
|
||||
);
|
||||
decl.add_method(
|
||||
sel!(application:continueUserActivity:restorationHandler:),
|
||||
continue_user_activity as extern "C" fn(&mut Object, Sel, id, id, id),
|
||||
);
|
||||
decl.register()
|
||||
}
|
||||
}
|
||||
@ -1009,6 +1013,26 @@ extern "C" fn open_urls(this: &mut Object, _: Sel, _: id, urls: id) {
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" fn continue_user_activity(this: &mut Object, _: Sel, _: id, user_activity: id, _: id) {
|
||||
let url = unsafe {
|
||||
let url: id = msg_send!(user_activity, webpageURL);
|
||||
if url == nil {
|
||||
log::error!("got unexpected user activity");
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
CStr::from_ptr(url.absoluteString().UTF8String())
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
)
|
||||
}
|
||||
};
|
||||
let platform = unsafe { get_foreground_platform(this) };
|
||||
if let Some(callback) = platform.0.borrow_mut().open_urls.as_mut() {
|
||||
callback(url.into_iter().collect());
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" fn handle_menu_item(this: &mut Object, _: Sel, item: id) {
|
||||
unsafe {
|
||||
let platform = get_foreground_platform(this);
|
||||
|
@ -5,7 +5,7 @@ use crate::{
|
||||
use anyhow::Result;
|
||||
use gpui::{
|
||||
geometry::{vector::Vector2F, Size},
|
||||
text_layout::LineLayout,
|
||||
text_layout::Line,
|
||||
LayoutId,
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
@ -32,7 +32,7 @@ impl<V: 'static> Element<V> for Text {
|
||||
_view: &mut V,
|
||||
cx: &mut ViewContext<V>,
|
||||
) -> Result<(LayoutId, Self::PaintState)> {
|
||||
let fonts = cx.platform().fonts();
|
||||
let layout_cache = cx.text_layout_cache().clone();
|
||||
let text_style = cx.text_style();
|
||||
let line_height = cx.font_cache().line_height(text_style.font_size);
|
||||
let text = self.text.clone();
|
||||
@ -41,14 +41,14 @@ impl<V: 'static> Element<V> for Text {
|
||||
let layout_id = cx.add_measured_layout_node(Default::default(), {
|
||||
let paint_state = paint_state.clone();
|
||||
move |_params| {
|
||||
let line_layout = fonts.layout_line(
|
||||
let line_layout = layout_cache.layout_str(
|
||||
text.as_ref(),
|
||||
text_style.font_size,
|
||||
&[(text.len(), text_style.to_run())],
|
||||
);
|
||||
|
||||
let size = Size {
|
||||
width: line_layout.width,
|
||||
width: line_layout.width(),
|
||||
height: line_height,
|
||||
};
|
||||
|
||||
@ -85,13 +85,9 @@ impl<V: 'static> Element<V> for Text {
|
||||
line_height = paint_state.line_height;
|
||||
}
|
||||
|
||||
let text_style = cx.text_style();
|
||||
let line =
|
||||
gpui::text_layout::Line::new(line_layout, &[(self.text.len(), text_style.to_run())]);
|
||||
|
||||
// TODO: We haven't added visible bounds to the new element system yet, so this is a placeholder.
|
||||
let visible_bounds = bounds;
|
||||
line.paint(bounds.origin(), visible_bounds, line_height, cx.legacy_cx);
|
||||
line_layout.paint(bounds.origin(), visible_bounds, line_height, cx.legacy_cx);
|
||||
}
|
||||
}
|
||||
|
||||
@ -104,6 +100,6 @@ impl<V: 'static> IntoElement<V> for Text {
|
||||
}
|
||||
|
||||
pub struct TextLayout {
|
||||
line_layout: Arc<LineLayout>,
|
||||
line_layout: Arc<Line>,
|
||||
line_height: f32,
|
||||
}
|
||||
|
@ -22,7 +22,6 @@ test-support = [
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
client = { path = "../client" }
|
||||
clock = { path = "../clock" }
|
||||
collections = { path = "../collections" }
|
||||
fuzzy = { path = "../fuzzy" }
|
||||
@ -46,6 +45,7 @@ lazy_static.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
pulldown-cmark = { version = "0.9.2", default-features = false }
|
||||
regex.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
|
@ -1,11 +1,13 @@
|
||||
pub use crate::{
|
||||
diagnostic_set::DiagnosticSet,
|
||||
highlight_map::{HighlightId, HighlightMap},
|
||||
markdown::ParsedMarkdown,
|
||||
proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
|
||||
};
|
||||
use crate::{
|
||||
diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
|
||||
language_settings::{language_settings, LanguageSettings},
|
||||
markdown::parse_markdown,
|
||||
outline::OutlineItem,
|
||||
syntax_map::{
|
||||
SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
|
||||
@ -143,11 +145,51 @@ pub struct Diagnostic {
|
||||
pub is_unnecessary: bool,
|
||||
}
|
||||
|
||||
pub async fn prepare_completion_documentation(
|
||||
documentation: &lsp::Documentation,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
language: Option<Arc<Language>>,
|
||||
) -> Documentation {
|
||||
match documentation {
|
||||
lsp::Documentation::String(text) => {
|
||||
if text.lines().count() <= 1 {
|
||||
Documentation::SingleLine(text.clone())
|
||||
} else {
|
||||
Documentation::MultiLinePlainText(text.clone())
|
||||
}
|
||||
}
|
||||
|
||||
lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
|
||||
lsp::MarkupKind::PlainText => {
|
||||
if value.lines().count() <= 1 {
|
||||
Documentation::SingleLine(value.clone())
|
||||
} else {
|
||||
Documentation::MultiLinePlainText(value.clone())
|
||||
}
|
||||
}
|
||||
|
||||
lsp::MarkupKind::Markdown => {
|
||||
let parsed = parse_markdown(value, language_registry, language).await;
|
||||
Documentation::MultiLineMarkdown(parsed)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum Documentation {
|
||||
Undocumented,
|
||||
SingleLine(String),
|
||||
MultiLinePlainText(String),
|
||||
MultiLineMarkdown(ParsedMarkdown),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Completion {
|
||||
pub old_range: Range<Anchor>,
|
||||
pub new_text: String,
|
||||
pub label: CodeLabel,
|
||||
pub documentation: Option<Documentation>,
|
||||
pub server_id: LanguageServerId,
|
||||
pub lsp_completion: lsp::CompletionItem,
|
||||
}
|
||||
@ -1406,82 +1448,95 @@ impl Buffer {
|
||||
return None;
|
||||
}
|
||||
|
||||
self.start_transaction();
|
||||
self.pending_autoindent.take();
|
||||
let autoindent_request = autoindent_mode
|
||||
.and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
|
||||
// Non-generic part hoisted out to reduce LLVM IR size.
|
||||
fn tail(
|
||||
this: &mut Buffer,
|
||||
edits: Vec<(Range<usize>, Arc<str>)>,
|
||||
autoindent_mode: Option<AutoindentMode>,
|
||||
cx: &mut ModelContext<Buffer>,
|
||||
) -> Option<clock::Lamport> {
|
||||
this.start_transaction();
|
||||
this.pending_autoindent.take();
|
||||
let autoindent_request = autoindent_mode
|
||||
.and_then(|mode| this.language.as_ref().map(|_| (this.snapshot(), mode)));
|
||||
|
||||
let edit_operation = self.text.edit(edits.iter().cloned());
|
||||
let edit_id = edit_operation.timestamp();
|
||||
let edit_operation = this.text.edit(edits.iter().cloned());
|
||||
let edit_id = edit_operation.timestamp();
|
||||
|
||||
if let Some((before_edit, mode)) = autoindent_request {
|
||||
let mut delta = 0isize;
|
||||
let entries = edits
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.zip(&edit_operation.as_edit().unwrap().new_text)
|
||||
.map(|((ix, (range, _)), new_text)| {
|
||||
let new_text_length = new_text.len();
|
||||
let old_start = range.start.to_point(&before_edit);
|
||||
let new_start = (delta + range.start as isize) as usize;
|
||||
delta += new_text_length as isize - (range.end as isize - range.start as isize);
|
||||
if let Some((before_edit, mode)) = autoindent_request {
|
||||
let mut delta = 0isize;
|
||||
let entries = edits
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.zip(&edit_operation.as_edit().unwrap().new_text)
|
||||
.map(|((ix, (range, _)), new_text)| {
|
||||
let new_text_length = new_text.len();
|
||||
let old_start = range.start.to_point(&before_edit);
|
||||
let new_start = (delta + range.start as isize) as usize;
|
||||
delta +=
|
||||
new_text_length as isize - (range.end as isize - range.start as isize);
|
||||
|
||||
let mut range_of_insertion_to_indent = 0..new_text_length;
|
||||
let mut first_line_is_new = false;
|
||||
let mut original_indent_column = None;
|
||||
let mut range_of_insertion_to_indent = 0..new_text_length;
|
||||
let mut first_line_is_new = false;
|
||||
let mut original_indent_column = None;
|
||||
|
||||
// When inserting an entire line at the beginning of an existing line,
|
||||
// treat the insertion as new.
|
||||
if new_text.contains('\n')
|
||||
&& old_start.column <= before_edit.indent_size_for_line(old_start.row).len
|
||||
{
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
// When inserting text starting with a newline, avoid auto-indenting the
|
||||
// previous line.
|
||||
if new_text.starts_with('\n') {
|
||||
range_of_insertion_to_indent.start += 1;
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
// Avoid auto-indenting after the insertion.
|
||||
if let AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
} = &mode
|
||||
{
|
||||
original_indent_column =
|
||||
Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
|
||||
indent_size_for_text(
|
||||
new_text[range_of_insertion_to_indent.clone()].chars(),
|
||||
)
|
||||
.len
|
||||
}));
|
||||
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
||||
range_of_insertion_to_indent.end -= 1;
|
||||
// When inserting an entire line at the beginning of an existing line,
|
||||
// treat the insertion as new.
|
||||
if new_text.contains('\n')
|
||||
&& old_start.column
|
||||
<= before_edit.indent_size_for_line(old_start.row).len
|
||||
{
|
||||
first_line_is_new = true;
|
||||
}
|
||||
}
|
||||
|
||||
AutoindentRequestEntry {
|
||||
first_line_is_new,
|
||||
original_indent_column,
|
||||
indent_size: before_edit.language_indent_size_at(range.start, cx),
|
||||
range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
|
||||
..self.anchor_after(new_start + range_of_insertion_to_indent.end),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
// When inserting text starting with a newline, avoid auto-indenting the
|
||||
// previous line.
|
||||
if new_text.starts_with('\n') {
|
||||
range_of_insertion_to_indent.start += 1;
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
self.autoindent_requests.push(Arc::new(AutoindentRequest {
|
||||
before_edit,
|
||||
entries,
|
||||
is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
|
||||
}));
|
||||
// Avoid auto-indenting after the insertion.
|
||||
if let AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
} = &mode
|
||||
{
|
||||
original_indent_column = Some(
|
||||
original_indent_columns.get(ix).copied().unwrap_or_else(|| {
|
||||
indent_size_for_text(
|
||||
new_text[range_of_insertion_to_indent.clone()].chars(),
|
||||
)
|
||||
.len
|
||||
}),
|
||||
);
|
||||
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
||||
range_of_insertion_to_indent.end -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
AutoindentRequestEntry {
|
||||
first_line_is_new,
|
||||
original_indent_column,
|
||||
indent_size: before_edit.language_indent_size_at(range.start, cx),
|
||||
range: this
|
||||
.anchor_before(new_start + range_of_insertion_to_indent.start)
|
||||
..this.anchor_after(new_start + range_of_insertion_to_indent.end),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
this.autoindent_requests.push(Arc::new(AutoindentRequest {
|
||||
before_edit,
|
||||
entries,
|
||||
is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
|
||||
}));
|
||||
}
|
||||
|
||||
this.end_transaction(cx);
|
||||
this.send_operation(Operation::Buffer(edit_operation), cx);
|
||||
Some(edit_id)
|
||||
}
|
||||
|
||||
self.end_transaction(cx);
|
||||
self.send_operation(Operation::Buffer(edit_operation), cx);
|
||||
Some(edit_id)
|
||||
tail(self, edits, autoindent_mode, cx)
|
||||
}
|
||||
|
||||
fn did_edit(
|
||||
|
@ -2,6 +2,7 @@ mod buffer;
|
||||
mod diagnostic_set;
|
||||
mod highlight_map;
|
||||
pub mod language_settings;
|
||||
pub mod markdown;
|
||||
mod outline;
|
||||
pub mod proto;
|
||||
mod syntax_map;
|
||||
@ -110,7 +111,6 @@ pub struct LanguageServerName(pub Arc<str>);
|
||||
pub struct CachedLspAdapter {
|
||||
pub name: LanguageServerName,
|
||||
pub short_name: &'static str,
|
||||
pub initialization_options: Option<Value>,
|
||||
pub disk_based_diagnostic_sources: Vec<String>,
|
||||
pub disk_based_diagnostics_progress_token: Option<String>,
|
||||
pub language_ids: HashMap<String, String>,
|
||||
@ -121,7 +121,6 @@ impl CachedLspAdapter {
|
||||
pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
|
||||
let name = adapter.name().await;
|
||||
let short_name = adapter.short_name();
|
||||
let initialization_options = adapter.initialization_options().await;
|
||||
let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
|
||||
let disk_based_diagnostics_progress_token =
|
||||
adapter.disk_based_diagnostics_progress_token().await;
|
||||
@ -130,7 +129,6 @@ impl CachedLspAdapter {
|
||||
Arc::new(CachedLspAdapter {
|
||||
name,
|
||||
short_name,
|
||||
initialization_options,
|
||||
disk_based_diagnostic_sources,
|
||||
disk_based_diagnostics_progress_token,
|
||||
language_ids,
|
||||
@ -227,6 +225,10 @@ impl CachedLspAdapter {
|
||||
) -> Option<CodeLabel> {
|
||||
self.adapter.label_for_symbol(name, kind, language).await
|
||||
}
|
||||
|
||||
pub fn enabled_formatters(&self) -> Vec<BundledFormatter> {
|
||||
self.adapter.enabled_formatters()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait LspAdapterDelegate: Send + Sync {
|
||||
@ -333,6 +335,33 @@ pub trait LspAdapter: 'static + Send + Sync {
|
||||
async fn language_ids(&self) -> HashMap<String, String> {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum BundledFormatter {
|
||||
Prettier {
|
||||
// See https://prettier.io/docs/en/options.html#parser for a list of valid values.
|
||||
// Usually, every language has a single parser (standard or plugin-provided), hence `Some("parser_name")` can be used.
|
||||
// There can not be multiple parsers for a single language, in case of a conflict, we would attempt to select the one with most plugins.
|
||||
//
|
||||
// But exceptions like Tailwind CSS exist, which uses standard parsers for CSS/JS/HTML/etc. but require an extra plugin to be installed.
|
||||
// For those cases, `None` will install the plugin but apply other, regular parser defined for the language, and this would not be a conflict.
|
||||
parser_name: Option<&'static str>,
|
||||
plugin_names: Vec<&'static str>,
|
||||
},
|
||||
}
|
||||
|
||||
impl BundledFormatter {
|
||||
pub fn prettier(parser_name: &'static str) -> Self {
|
||||
Self::Prettier {
|
||||
parser_name: Some(parser_name),
|
||||
plugin_names: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
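
`BundledFormatter::Prettier` carries an optional parser name plus any extra plugin packages, per the comment above. A self-contained example that mirrors the enum locally; the "typescript" parser name is only an illustration, while the plugin name matches the Tailwind constant used by the prettier crate later in this change:

// The enum mirrors the `BundledFormatter` definition above; values show a
// plain prettier setup versus a plugin-based one such as Tailwind.
#[derive(Clone, Debug, PartialEq, Eq)]
enum BundledFormatter {
    Prettier {
        parser_name: Option<&'static str>,
        plugin_names: Vec<&'static str>,
    },
}

impl BundledFormatter {
    fn prettier(parser_name: &'static str) -> Self {
        Self::Prettier {
            parser_name: Some(parser_name),
            plugin_names: Vec::new(),
        }
    }
}

fn main() {
    // Typical language adapter: one parser, no extra plugins.
    let typescript = BundledFormatter::prettier("typescript");

    // Tailwind-style adapter: rely on the language's default parser
    // (`parser_name: None`) but require an extra prettier plugin.
    let tailwind = BundledFormatter::Prettier {
        parser_name: None,
        plugin_names: vec!["prettier-plugin-tailwindcss"],
    };

    println!("{typescript:?}\n{tailwind:?}");
}
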
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
@ -467,6 +496,7 @@ pub struct FakeLspAdapter {
|
||||
pub initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
|
||||
pub disk_based_diagnostics_progress_token: Option<String>,
|
||||
pub disk_based_diagnostics_sources: Vec<String>,
|
||||
pub enabled_formatters: Vec<BundledFormatter>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
@ -1729,6 +1759,7 @@ impl Default for FakeLspAdapter {
|
||||
disk_based_diagnostics_progress_token: None,
|
||||
initialization_options: None,
|
||||
disk_based_diagnostics_sources: Vec::new(),
|
||||
enabled_formatters: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1785,6 +1816,10 @@ impl LspAdapter for Arc<FakeLspAdapter> {
|
||||
async fn initialization_options(&self) -> Option<Value> {
|
||||
self.initialization_options.clone()
|
||||
}
|
||||
|
||||
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
|
||||
self.enabled_formatters.clone()
|
||||
}
|
||||
}
|
||||
|
||||
fn get_capture_indices(query: &Query, captures: &mut [(&str, &mut Option<u32>)]) {
|
||||
|
@ -50,6 +50,7 @@ pub struct LanguageSettings {
|
||||
pub remove_trailing_whitespace_on_save: bool,
|
||||
pub ensure_final_newline_on_save: bool,
|
||||
pub formatter: Formatter,
|
||||
pub prettier: HashMap<String, serde_json::Value>,
|
||||
pub enable_language_server: bool,
|
||||
pub show_copilot_suggestions: bool,
|
||||
pub show_whitespaces: ShowWhitespaceSetting,
|
||||
@ -98,6 +99,8 @@ pub struct LanguageSettingsContent {
|
||||
#[serde(default)]
|
||||
pub formatter: Option<Formatter>,
|
||||
#[serde(default)]
|
||||
pub prettier: Option<HashMap<String, serde_json::Value>>,
|
||||
#[serde(default)]
|
||||
pub enable_language_server: Option<bool>,
|
||||
#[serde(default)]
|
||||
pub show_copilot_suggestions: Option<bool>,
|
||||
@ -149,10 +152,13 @@ pub enum ShowWhitespaceSetting {
|
||||
All,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum Formatter {
|
||||
#[default]
|
||||
Auto,
|
||||
LanguageServer,
|
||||
Prettier,
|
||||
External {
|
||||
command: Arc<str>,
|
||||
arguments: Arc<[String]>,
|
||||
@ -392,6 +398,7 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent
|
||||
src.preferred_line_length,
|
||||
);
|
||||
merge(&mut settings.formatter, src.formatter.clone());
|
||||
merge(&mut settings.prettier, src.prettier.clone());
|
||||
merge(&mut settings.format_on_save, src.format_on_save.clone());
|
||||
merge(
|
||||
&mut settings.remove_trailing_whitespace_on_save,
|
||||
|
301
crates/language/src/markdown.rs
Normal file
@ -0,0 +1,301 @@
|
||||
use std::sync::Arc;
|
||||
use std::{ops::Range, path::PathBuf};
|
||||
|
||||
use crate::{HighlightId, Language, LanguageRegistry};
|
||||
use gpui::fonts::{self, HighlightStyle, Weight};
|
||||
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ParsedMarkdown {
|
||||
pub text: String,
|
||||
pub highlights: Vec<(Range<usize>, MarkdownHighlight)>,
|
||||
pub region_ranges: Vec<Range<usize>>,
|
||||
pub regions: Vec<ParsedRegion>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum MarkdownHighlight {
|
||||
Style(MarkdownHighlightStyle),
|
||||
Code(HighlightId),
|
||||
}
|
||||
|
||||
impl MarkdownHighlight {
|
||||
pub fn to_highlight_style(&self, theme: &theme::SyntaxTheme) -> Option<HighlightStyle> {
|
||||
match self {
|
||||
MarkdownHighlight::Style(style) => {
|
||||
let mut highlight = HighlightStyle::default();
|
||||
|
||||
if style.italic {
|
||||
highlight.italic = Some(true);
|
||||
}
|
||||
|
||||
if style.underline {
|
||||
highlight.underline = Some(fonts::Underline {
|
||||
thickness: 1.0.into(),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
if style.weight != fonts::Weight::default() {
|
||||
highlight.weight = Some(style.weight);
|
||||
}
|
||||
|
||||
Some(highlight)
|
||||
}
|
||||
|
||||
MarkdownHighlight::Code(id) => id.style(theme),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq)]
|
||||
pub struct MarkdownHighlightStyle {
|
||||
pub italic: bool,
|
||||
pub underline: bool,
|
||||
pub weight: Weight,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ParsedRegion {
|
||||
pub code: bool,
|
||||
pub link: Option<Link>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Link {
|
||||
Web { url: String },
|
||||
Path { path: PathBuf },
|
||||
}
|
||||
|
||||
impl Link {
|
||||
fn identify(text: String) -> Option<Link> {
|
||||
if text.starts_with("http") {
|
||||
return Some(Link::Web { url: text });
|
||||
}
|
||||
|
||||
let path = PathBuf::from(text);
|
||||
if path.is_absolute() {
|
||||
return Some(Link::Path { path });
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn parse_markdown(
|
||||
markdown: &str,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
language: Option<Arc<Language>>,
|
||||
) -> ParsedMarkdown {
|
||||
let mut text = String::new();
|
||||
let mut highlights = Vec::new();
|
||||
let mut region_ranges = Vec::new();
|
||||
let mut regions = Vec::new();
|
||||
|
||||
parse_markdown_block(
|
||||
markdown,
|
||||
language_registry,
|
||||
language,
|
||||
&mut text,
|
||||
&mut highlights,
|
||||
&mut region_ranges,
|
||||
&mut regions,
|
||||
)
|
||||
.await;
|
||||
|
||||
ParsedMarkdown {
|
||||
text,
|
||||
highlights,
|
||||
region_ranges,
|
||||
regions,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn parse_markdown_block(
|
||||
markdown: &str,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
language: Option<Arc<Language>>,
|
||||
text: &mut String,
|
||||
highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
|
||||
region_ranges: &mut Vec<Range<usize>>,
|
||||
regions: &mut Vec<ParsedRegion>,
|
||||
) {
|
||||
let mut bold_depth = 0;
|
||||
let mut italic_depth = 0;
|
||||
let mut link_url = None;
|
||||
let mut current_language = None;
|
||||
let mut list_stack = Vec::new();
|
||||
|
||||
for event in Parser::new_ext(&markdown, Options::all()) {
|
||||
let prev_len = text.len();
|
||||
match event {
|
||||
Event::Text(t) => {
|
||||
if let Some(language) = ¤t_language {
|
||||
highlight_code(text, highlights, t.as_ref(), language);
|
||||
} else {
|
||||
text.push_str(t.as_ref());
|
||||
|
||||
let mut style = MarkdownHighlightStyle::default();
|
||||
|
||||
if bold_depth > 0 {
|
||||
style.weight = Weight::BOLD;
|
||||
}
|
||||
|
||||
if italic_depth > 0 {
|
||||
style.italic = true;
|
||||
}
|
||||
|
||||
if let Some(link) = link_url.clone().and_then(|u| Link::identify(u)) {
|
||||
region_ranges.push(prev_len..text.len());
|
||||
regions.push(ParsedRegion {
|
||||
code: false,
|
||||
link: Some(link),
|
||||
});
|
||||
style.underline = true;
|
||||
}
|
||||
|
||||
if style != MarkdownHighlightStyle::default() {
|
||||
let mut new_highlight = true;
|
||||
if let Some((last_range, MarkdownHighlight::Style(last_style))) =
|
||||
highlights.last_mut()
|
||||
{
|
||||
if last_range.end == prev_len && last_style == &style {
|
||||
last_range.end = text.len();
|
||||
new_highlight = false;
|
||||
}
|
||||
}
|
||||
if new_highlight {
|
||||
let range = prev_len..text.len();
|
||||
highlights.push((range, MarkdownHighlight::Style(style)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Event::Code(t) => {
|
||||
text.push_str(t.as_ref());
|
||||
region_ranges.push(prev_len..text.len());
|
||||
|
||||
let link = link_url.clone().and_then(|u| Link::identify(u));
|
||||
if link.is_some() {
|
||||
highlights.push((
|
||||
prev_len..text.len(),
|
||||
MarkdownHighlight::Style(MarkdownHighlightStyle {
|
||||
underline: true,
|
||||
..Default::default()
|
||||
}),
|
||||
));
|
||||
}
|
||||
regions.push(ParsedRegion { code: true, link });
|
||||
}
|
||||
|
||||
Event::Start(tag) => match tag {
|
||||
Tag::Paragraph => new_paragraph(text, &mut list_stack),
|
||||
|
||||
Tag::Heading(_, _, _) => {
|
||||
new_paragraph(text, &mut list_stack);
|
||||
bold_depth += 1;
|
||||
}
|
||||
|
||||
Tag::CodeBlock(kind) => {
|
||||
new_paragraph(text, &mut list_stack);
|
||||
current_language = if let CodeBlockKind::Fenced(language) = kind {
|
||||
language_registry
|
||||
.language_for_name(language.as_ref())
|
||||
.await
|
||||
.ok()
|
||||
} else {
|
||||
language.clone()
|
||||
}
|
||||
}
|
||||
|
||||
Tag::Emphasis => italic_depth += 1,
|
||||
|
||||
Tag::Strong => bold_depth += 1,
|
||||
|
||||
Tag::Link(_, url, _) => link_url = Some(url.to_string()),
|
||||
|
||||
Tag::List(number) => {
|
||||
list_stack.push((number, false));
|
||||
}
|
||||
|
||||
Tag::Item => {
|
||||
let len = list_stack.len();
|
||||
if let Some((list_number, has_content)) = list_stack.last_mut() {
|
||||
*has_content = false;
|
||||
if !text.is_empty() && !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
for _ in 0..len - 1 {
|
||||
text.push_str(" ");
|
||||
}
|
||||
if let Some(number) = list_number {
|
||||
text.push_str(&format!("{}. ", number));
|
||||
*number += 1;
|
||||
*has_content = false;
|
||||
} else {
|
||||
text.push_str("- ");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
},
|
||||
|
||||
Event::End(tag) => match tag {
|
||||
Tag::Heading(_, _, _) => bold_depth -= 1,
|
||||
Tag::CodeBlock(_) => current_language = None,
|
||||
Tag::Emphasis => italic_depth -= 1,
|
||||
Tag::Strong => bold_depth -= 1,
|
||||
Tag::Link(_, _, _) => link_url = None,
|
||||
Tag::List(_) => drop(list_stack.pop()),
|
||||
_ => {}
|
||||
},
|
||||
|
||||
Event::HardBreak => text.push('\n'),
|
||||
|
||||
Event::SoftBreak => text.push(' '),
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn highlight_code(
|
||||
text: &mut String,
|
||||
highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
|
||||
content: &str,
|
||||
language: &Arc<Language>,
|
||||
) {
|
||||
let prev_len = text.len();
|
||||
text.push_str(content);
|
||||
for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) {
|
||||
let highlight = MarkdownHighlight::Code(highlight_id);
|
||||
highlights.push((prev_len + range.start..prev_len + range.end, highlight));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
|
||||
let mut is_subsequent_paragraph_of_list = false;
|
||||
if let Some((_, has_content)) = list_stack.last_mut() {
|
||||
if *has_content {
|
||||
is_subsequent_paragraph_of_list = true;
|
||||
} else {
|
||||
*has_content = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if !text.is_empty() {
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
text.push('\n');
|
||||
}
|
||||
for _ in 0..list_stack.len().saturating_sub(1) {
|
||||
text.push_str(" ");
|
||||
}
|
||||
if is_subsequent_paragraph_of_list {
|
||||
text.push_str(" ");
|
||||
}
|
||||
}
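
`parse_markdown_block` is a single pass over pulldown-cmark events, pushing plain text and recording highlight and region ranges as it goes. Below is a stripped-down, standalone walk over the same event stream (pulldown-cmark 0.9 API) that only collects text and bold ranges, to show the shape of the loop without the language-registry plumbing:

use std::ops::Range;

use pulldown_cmark::{Event, Options, Parser, Tag};

// Accumulate plain text and remember which byte ranges were inside **strong**
// spans. Code blocks, links, lists, and syntax highlighting are omitted.
fn bold_ranges(markdown: &str) -> (String, Vec<Range<usize>>) {
    let mut text = String::new();
    let mut bold = Vec::new();
    let mut bold_depth = 0usize;

    for event in Parser::new_ext(markdown, Options::all()) {
        match event {
            Event::Start(Tag::Strong) => bold_depth += 1,
            Event::End(Tag::Strong) => bold_depth -= 1,
            Event::Text(t) => {
                let start = text.len();
                text.push_str(t.as_ref());
                if bold_depth > 0 {
                    bold.push(start..text.len());
                }
            }
            Event::SoftBreak => text.push(' '),
            Event::HardBreak => text.push('\n'),
            _ => {}
        }
    }
    (text, bold)
}

fn main() {
    let (text, bold) = bold_ranges("one **two** three");
    assert_eq!(text, "one two three");
    assert_eq!(bold, vec![4..7]);
}
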
|
@ -482,6 +482,7 @@ pub async fn deserialize_completion(
|
||||
lsp_completion.filter_text.as_deref(),
|
||||
)
|
||||
}),
|
||||
documentation: None,
|
||||
server_id: LanguageServerId(completion.server_id as usize),
|
||||
lsp_completion,
|
||||
})
|
||||
|
@ -91,9 +91,8 @@ impl TestServer {
|
||||
let identity = claims.sub.unwrap().to_string();
|
||||
let room_name = claims.video.room.unwrap();
|
||||
let mut server_rooms = self.rooms.lock();
|
||||
let room = server_rooms
|
||||
.get_mut(&*room_name)
|
||||
.ok_or_else(|| anyhow!("room {:?} does not exist", room_name))?;
|
||||
let room = (*server_rooms).entry(room_name.to_string()).or_default();
|
||||
|
||||
if room.client_rooms.contains_key(&identity) {
|
||||
Err(anyhow!(
|
||||
"{:?} attempted to join room {:?} twice",
|
||||
|
@ -466,7 +466,10 @@ impl LanguageServer {
|
||||
completion_item: Some(CompletionItemCapability {
|
||||
snippet_support: Some(true),
|
||||
resolve_support: Some(CompletionItemCapabilityResolveSupport {
|
||||
properties: vec!["additionalTextEdits".to_string()],
|
||||
properties: vec![
|
||||
"documentation".to_string(),
|
||||
"additionalTextEdits".to_string(),
|
||||
],
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
@ -748,6 +751,15 @@ impl LanguageServer {
|
||||
)
|
||||
}
|
||||
|
||||
// some child of string literal (be it "" or ``) which is the child of an attribute
|
||||
|
||||
// <Foo className="bar" />
|
||||
// <Foo className={`bar`} />
|
||||
// <Foo className={something + "bar"} />
|
||||
// <Foo className={something + "bar"} />
|
||||
// const classes = "awesome ";
|
||||
// <Foo className={classes} />
|
||||
|
||||
fn request_internal<T: request::Request>(
|
||||
next_id: &AtomicUsize,
|
||||
response_handlers: &Mutex<Option<HashMap<usize, ResponseHandler>>>,
|
||||
|
@ -220,29 +220,129 @@ impl NodeRuntime for RealNodeRuntime {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FakeNodeRuntime;
|
||||
pub struct FakeNodeRuntime(Option<PrettierSupport>);
|
||||
|
||||
struct PrettierSupport {
|
||||
plugins: Vec<&'static str>,
|
||||
}
|
||||
|
||||
impl FakeNodeRuntime {
|
||||
pub fn new() -> Arc<dyn NodeRuntime> {
|
||||
Arc::new(FakeNodeRuntime)
|
||||
Arc::new(FakeNodeRuntime(None))
|
||||
}
|
||||
|
||||
pub fn with_prettier_support(plugins: &[&'static str]) -> Arc<dyn NodeRuntime> {
|
||||
Arc::new(FakeNodeRuntime(Some(PrettierSupport::new(plugins))))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl NodeRuntime for FakeNodeRuntime {
|
||||
async fn binary_path(&self) -> Result<PathBuf> {
|
||||
unreachable!()
|
||||
async fn binary_path(&self) -> anyhow::Result<PathBuf> {
|
||||
if let Some(prettier_support) = &self.0 {
|
||||
prettier_support.binary_path().await
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
async fn run_npm_subcommand(
|
||||
&self,
|
||||
directory: Option<&Path>,
|
||||
subcommand: &str,
|
||||
args: &[&str],
|
||||
) -> anyhow::Result<Output> {
|
||||
if let Some(prettier_support) = &self.0 {
|
||||
prettier_support
|
||||
.run_npm_subcommand(directory, subcommand, args)
|
||||
.await
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
async fn npm_package_latest_version(&self, name: &str) -> anyhow::Result<String> {
|
||||
if let Some(prettier_support) = &self.0 {
|
||||
prettier_support.npm_package_latest_version(name).await
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
async fn npm_install_packages(
|
||||
&self,
|
||||
directory: &Path,
|
||||
packages: &[(&str, &str)],
|
||||
) -> anyhow::Result<()> {
|
||||
if let Some(prettier_support) = &self.0 {
|
||||
prettier_support
|
||||
.npm_install_packages(directory, packages)
|
||||
.await
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettierSupport {
|
||||
const PACKAGE_VERSION: &str = "0.0.1";
|
||||
|
||||
fn new(plugins: &[&'static str]) -> Self {
|
||||
Self {
|
||||
plugins: plugins.to_vec(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl NodeRuntime for PrettierSupport {
|
||||
async fn binary_path(&self) -> anyhow::Result<PathBuf> {
|
||||
Ok(PathBuf::from("prettier_fake_node"))
|
||||
}
|
||||
|
||||
async fn run_npm_subcommand(&self, _: Option<&Path>, _: &str, _: &[&str]) -> Result<Output> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
async fn npm_package_latest_version(&self, _: &str) -> Result<String> {
|
||||
unreachable!()
|
||||
async fn npm_package_latest_version(&self, name: &str) -> anyhow::Result<String> {
|
||||
if name == "prettier" || self.plugins.contains(&name) {
|
||||
Ok(Self::PACKAGE_VERSION.to_string())
|
||||
} else {
|
||||
panic!("Unexpected package name: {name}")
|
||||
}
|
||||
}
|
||||
|
||||
async fn npm_install_packages(&self, _: &Path, _: &[(&str, &str)]) -> Result<()> {
|
||||
unreachable!()
|
||||
async fn npm_install_packages(
|
||||
&self,
|
||||
_: &Path,
|
||||
packages: &[(&str, &str)],
|
||||
) -> anyhow::Result<()> {
|
||||
assert_eq!(
|
||||
packages.len(),
|
||||
self.plugins.len() + 1,
|
||||
"Unexpected packages length to install: {:?}, expected `prettier` + {:?}",
|
||||
packages,
|
||||
self.plugins
|
||||
);
|
||||
for (name, version) in packages {
|
||||
assert!(
|
||||
name == &"prettier" || self.plugins.contains(name),
|
||||
"Unexpected package `{}` to install in packages {:?}, expected {} for `prettier` + {:?}",
|
||||
name,
|
||||
packages,
|
||||
Self::PACKAGE_VERSION,
|
||||
self.plugins
|
||||
);
|
||||
assert_eq!(
|
||||
version,
|
||||
&Self::PACKAGE_VERSION,
|
||||
"Unexpected package version `{}` to install in packages {:?}, expected {} for `prettier` + {:?}",
|
||||
version,
|
||||
packages,
|
||||
Self::PACKAGE_VERSION,
|
||||
self.plugins
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
34
crates/prettier/Cargo.toml
Normal file
34
crates/prettier/Cargo.toml
Normal file
@ -0,0 +1,34 @@
|
||||
[package]
|
||||
name = "prettier"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/prettier.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = []
|
||||
|
||||
[dependencies]
|
||||
client = { path = "../client" }
|
||||
collections = { path = "../collections"}
|
||||
language = { path = "../language" }
|
||||
gpui = { path = "../gpui" }
|
||||
fs = { path = "../fs" }
|
||||
lsp = { path = "../lsp" }
|
||||
node_runtime = { path = "../node_runtime"}
|
||||
util = { path = "../util" }
|
||||
|
||||
log.workspace = true
|
||||
serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
serde_json.workspace = true
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
language = { path = "../language", features = ["test-support"] }
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
fs = { path = "../fs", features = ["test-support"] }
|
513
crates/prettier/src/prettier.rs
Normal file
@ -0,0 +1,513 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Context;
|
||||
use collections::{HashMap, HashSet};
|
||||
use fs::Fs;
|
||||
use gpui::{AsyncAppContext, ModelHandle};
|
||||
use language::language_settings::language_settings;
|
||||
use language::{Buffer, BundledFormatter, Diff};
|
||||
use lsp::{LanguageServer, LanguageServerId};
|
||||
use node_runtime::NodeRuntime;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::paths::DEFAULT_PRETTIER_DIR;
|
||||
|
||||
pub enum Prettier {
|
||||
Real(RealPrettier),
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Test(TestPrettier),
|
||||
}
|
||||
|
||||
pub struct RealPrettier {
|
||||
worktree_id: Option<usize>,
|
||||
default: bool,
|
||||
prettier_dir: PathBuf,
|
||||
server: Arc<LanguageServer>,
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub struct TestPrettier {
|
||||
worktree_id: Option<usize>,
|
||||
prettier_dir: PathBuf,
|
||||
default: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LocateStart {
|
||||
pub worktree_root_path: Arc<Path>,
|
||||
pub starting_path: Arc<Path>,
|
||||
}
|
||||
|
||||
pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js";
|
||||
pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js");
|
||||
const PRETTIER_PACKAGE_NAME: &str = "prettier";
|
||||
const TAILWIND_PRETTIER_PLUGIN_PACKAGE_NAME: &str = "prettier-plugin-tailwindcss";
|
||||
|
||||
impl Prettier {
|
||||
pub const CONFIG_FILE_NAMES: &'static [&'static str] = &[
|
||||
".prettierrc",
|
||||
".prettierrc.json",
|
||||
".prettierrc.json5",
|
||||
".prettierrc.yaml",
|
||||
".prettierrc.yml",
|
||||
".prettierrc.toml",
|
||||
".prettierrc.js",
|
||||
".prettierrc.cjs",
|
||||
"package.json",
|
||||
"prettier.config.js",
|
||||
"prettier.config.cjs",
|
||||
".editorconfig",
|
||||
];
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub const FORMAT_SUFFIX: &str = "\nformatted by test prettier";
|
||||
|
||||
pub async fn locate(
|
||||
starting_path: Option<LocateStart>,
|
||||
fs: Arc<dyn Fs>,
|
||||
) -> anyhow::Result<PathBuf> {
|
||||
let paths_to_check = match starting_path.as_ref() {
|
||||
Some(starting_path) => {
|
||||
let worktree_root = starting_path
|
||||
.worktree_root_path
|
||||
.components()
|
||||
.into_iter()
|
||||
.take_while(|path_component| {
|
||||
path_component.as_os_str().to_string_lossy() != "node_modules"
|
||||
})
|
||||
.collect::<PathBuf>();
|
||||
|
||||
if worktree_root != starting_path.worktree_root_path.as_ref() {
|
||||
vec![worktree_root]
|
||||
} else {
|
||||
let (worktree_root_metadata, start_path_metadata) = if starting_path
|
||||
.starting_path
|
||||
.as_ref()
|
||||
== Path::new("")
|
||||
{
|
||||
let worktree_root_data =
|
||||
fs.metadata(&worktree_root).await.with_context(|| {
|
||||
format!(
|
||||
"FS metadata fetch for worktree root path {worktree_root:?}",
|
||||
)
|
||||
})?;
|
||||
(worktree_root_data.unwrap_or_else(|| {
|
||||
panic!("cannot query prettier for non existing worktree root at {worktree_root_data:?}")
|
||||
}), None)
|
||||
} else {
|
||||
let full_starting_path = worktree_root.join(&starting_path.starting_path);
|
||||
let (worktree_root_data, start_path_data) = futures::try_join!(
|
||||
fs.metadata(&worktree_root),
|
||||
fs.metadata(&full_starting_path),
|
||||
)
|
||||
.with_context(|| {
|
||||
format!("FS metadata fetch for starting path {full_starting_path:?}",)
|
||||
})?;
|
||||
(
|
||||
worktree_root_data.unwrap_or_else(|| {
|
||||
panic!("cannot query prettier for non existing worktree root at {worktree_root_data:?}")
|
||||
}),
|
||||
start_path_data,
|
||||
)
|
||||
};
|
||||
|
||||
match start_path_metadata {
|
||||
Some(start_path_metadata) => {
|
||||
anyhow::ensure!(worktree_root_metadata.is_dir,
|
||||
"For non-empty start path, worktree root {starting_path:?} should be a directory");
|
||||
anyhow::ensure!(
|
||||
!start_path_metadata.is_dir,
|
||||
"For non-empty start path, it should not be a directory {starting_path:?}"
|
||||
);
|
||||
anyhow::ensure!(
|
||||
!start_path_metadata.is_symlink,
|
||||
"For non-empty start path, it should not be a symlink {starting_path:?}"
|
||||
);
|
||||
|
||||
let file_to_format = starting_path.starting_path.as_ref();
|
||||
let mut paths_to_check = VecDeque::from(vec![worktree_root.clone()]);
|
||||
let mut current_path = worktree_root;
|
||||
for path_component in file_to_format.components().into_iter() {
|
||||
current_path = current_path.join(path_component);
|
||||
paths_to_check.push_front(current_path.clone());
|
||||
if path_component.as_os_str().to_string_lossy() == "node_modules" {
|
||||
break;
|
||||
}
|
||||
}
|
||||
paths_to_check.pop_front(); // last one is the file itself or node_modules, skip it
|
||||
Vec::from(paths_to_check)
|
||||
}
|
||||
None => {
|
||||
anyhow::ensure!(
|
||||
!worktree_root_metadata.is_dir,
|
||||
"For empty start path, worktree root should not be a directory {starting_path:?}"
|
||||
);
|
||||
anyhow::ensure!(
|
||||
!worktree_root_metadata.is_symlink,
|
||||
"For empty start path, worktree root should not be a symlink {starting_path:?}"
|
||||
);
|
||||
worktree_root
|
||||
.parent()
|
||||
.map(|path| vec![path.to_path_buf()])
|
||||
.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None => Vec::new(),
|
||||
};
|
||||
|
||||
match find_closest_prettier_dir(paths_to_check, fs.as_ref())
|
||||
.await
|
||||
.with_context(|| format!("finding prettier starting with {starting_path:?}"))?
|
||||
{
|
||||
Some(prettier_dir) => Ok(prettier_dir),
|
||||
None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub async fn start(
|
||||
worktree_id: Option<usize>,
|
||||
_: LanguageServerId,
|
||||
prettier_dir: PathBuf,
|
||||
_: Arc<dyn NodeRuntime>,
|
||||
_: AsyncAppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Ok(
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Self::Test(TestPrettier {
|
||||
worktree_id,
|
||||
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
|
||||
prettier_dir,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(not(any(test, feature = "test-support")))]
|
||||
pub async fn start(
|
||||
worktree_id: Option<usize>,
|
||||
server_id: LanguageServerId,
|
||||
prettier_dir: PathBuf,
|
||||
node: Arc<dyn NodeRuntime>,
|
||||
cx: AsyncAppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
use lsp::LanguageServerBinary;
|
||||
|
||||
let background = cx.background();
|
||||
anyhow::ensure!(
|
||||
prettier_dir.is_dir(),
|
||||
"Prettier dir {prettier_dir:?} is not a directory"
|
||||
);
|
||||
let prettier_server = DEFAULT_PRETTIER_DIR.join(PRETTIER_SERVER_FILE);
|
||||
anyhow::ensure!(
|
||||
prettier_server.is_file(),
|
||||
"no prettier server package found at {prettier_server:?}"
|
||||
);
|
||||
|
||||
let node_path = background
|
||||
.spawn(async move { node.binary_path().await })
|
||||
.await?;
|
||||
let server = LanguageServer::new(
|
||||
server_id,
|
||||
LanguageServerBinary {
|
||||
path: node_path,
|
||||
arguments: vec![prettier_server.into(), prettier_dir.as_path().into()],
|
||||
},
|
||||
Path::new("/"),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
.context("prettier server creation")?;
|
||||
let server = background
|
||||
.spawn(server.initialize(None))
|
||||
.await
|
||||
.context("prettier server initialization")?;
|
||||
Ok(Self::Real(RealPrettier {
|
||||
worktree_id,
|
||||
server,
|
||||
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
|
||||
prettier_dir,
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn format(
|
||||
&self,
|
||||
buffer: &ModelHandle<Buffer>,
|
||||
buffer_path: Option<PathBuf>,
|
||||
cx: &AsyncAppContext,
|
||||
) -> anyhow::Result<Diff> {
|
||||
match self {
|
||||
Self::Real(local) => {
|
||||
let params = buffer.read_with(cx, |buffer, cx| {
|
||||
let buffer_language = buffer.language();
|
||||
let parsers_with_plugins = buffer_language
|
||||
.into_iter()
|
||||
.flat_map(|language| {
|
||||
language
|
||||
.lsp_adapters()
|
||||
.iter()
|
||||
.flat_map(|adapter| adapter.enabled_formatters())
|
||||
.filter_map(|formatter| match formatter {
|
||||
BundledFormatter::Prettier {
|
||||
parser_name,
|
||||
plugin_names,
|
||||
} => Some((parser_name, plugin_names)),
|
||||
})
|
||||
})
|
||||
.fold(
|
||||
HashMap::default(),
|
||||
|mut parsers_with_plugins, (parser_name, plugins)| {
|
||||
match parser_name {
|
||||
Some(parser_name) => parsers_with_plugins
|
||||
.entry(parser_name)
|
||||
.or_insert_with(HashSet::default)
|
||||
.extend(plugins),
|
||||
None => parsers_with_plugins.values_mut().for_each(|existing_plugins| {
|
||||
existing_plugins.extend(plugins.iter());
|
||||
}),
|
||||
}
|
||||
parsers_with_plugins
|
||||
},
|
||||
);
|
||||
|
||||
let selected_parser_with_plugins = parsers_with_plugins.iter().max_by_key(|(_, plugins)| plugins.len());
|
||||
if parsers_with_plugins.len() > 1 {
|
||||
log::warn!("Found multiple parsers with plugins {parsers_with_plugins:?}, will select only one: {selected_parser_with_plugins:?}");
|
||||
}
|
||||
|
||||
let prettier_node_modules = self.prettier_dir().join("node_modules");
|
||||
anyhow::ensure!(prettier_node_modules.is_dir(), "Prettier node_modules dir does not exist: {prettier_node_modules:?}");
|
||||
let plugin_name_into_path = |plugin_name: &str| {
|
||||
let prettier_plugin_dir = prettier_node_modules.join(plugin_name);
|
||||
for possible_plugin_path in [
|
||||
prettier_plugin_dir.join("dist").join("index.mjs"),
|
||||
prettier_plugin_dir.join("dist").join("index.js"),
|
||||
prettier_plugin_dir.join("dist").join("plugin.js"),
|
||||
prettier_plugin_dir.join("index.mjs"),
|
||||
prettier_plugin_dir.join("index.js"),
|
||||
prettier_plugin_dir.join("plugin.js"),
|
||||
prettier_plugin_dir,
|
||||
] {
|
||||
if possible_plugin_path.is_file() {
|
||||
return Some(possible_plugin_path);
|
||||
}
|
||||
}
|
||||
None
|
||||
};
|
||||
let (parser, located_plugins) = match selected_parser_with_plugins {
|
||||
Some((parser, plugins)) => {
|
||||
// Tailwind plugin requires being added last
|
||||
// https://github.com/tailwindlabs/prettier-plugin-tailwindcss#compatibility-with-other-prettier-plugins
|
||||
let mut add_tailwind_back = false;
|
||||
|
||||
let mut plugins = plugins.into_iter().filter(|&&plugin_name| {
|
||||
if plugin_name == TAILWIND_PRETTIER_PLUGIN_PACKAGE_NAME {
|
||||
add_tailwind_back = true;
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}).map(|plugin_name| (plugin_name, plugin_name_into_path(plugin_name))).collect::<Vec<_>>();
|
||||
if add_tailwind_back {
|
||||
plugins.push((&TAILWIND_PRETTIER_PLUGIN_PACKAGE_NAME, plugin_name_into_path(TAILWIND_PRETTIER_PLUGIN_PACKAGE_NAME)));
|
||||
}
|
||||
(Some(parser.to_string()), plugins)
|
||||
},
|
||||
None => (None, Vec::new()),
|
||||
};
|
||||
|
||||
let prettier_options = if self.is_default() {
|
||||
let language_settings = language_settings(buffer_language, buffer.file(), cx);
|
||||
let mut options = language_settings.prettier.clone();
|
||||
if !options.contains_key("tabWidth") {
|
||||
options.insert(
|
||||
"tabWidth".to_string(),
|
||||
serde_json::Value::Number(serde_json::Number::from(
|
||||
language_settings.tab_size.get(),
|
||||
)),
|
||||
);
|
||||
}
|
||||
if !options.contains_key("printWidth") {
|
||||
options.insert(
|
||||
"printWidth".to_string(),
|
||||
serde_json::Value::Number(serde_json::Number::from(
|
||||
language_settings.preferred_line_length,
|
||||
)),
|
||||
);
|
||||
}
|
||||
Some(options)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let plugins = located_plugins.into_iter().filter_map(|(plugin_name, located_plugin_path)| {
|
||||
match located_plugin_path {
|
||||
Some(path) => Some(path),
|
||||
None => {
|
||||
log::error!("Have not found plugin path for {plugin_name:?} inside {prettier_node_modules:?}");
|
||||
None},
|
||||
}
|
||||
}).collect();
|
||||
log::debug!("Formatting file {:?} with prettier, plugins :{plugins:?}, options: {prettier_options:?}", buffer.file().map(|f| f.full_path(cx)));
|
||||
|
||||
anyhow::Ok(FormatParams {
|
||||
text: buffer.text(),
|
||||
options: FormatOptions {
|
||||
parser,
|
||||
plugins,
|
||||
path: buffer_path,
|
||||
prettier_options,
|
||||
},
|
||||
})
|
||||
}).context("prettier params calculation")?;
|
||||
let response = local
|
||||
.server
|
||||
.request::<Format>(params)
|
||||
.await
|
||||
.context("prettier format request")?;
|
||||
let diff_task = buffer.read_with(cx, |buffer, cx| buffer.diff(response.text, cx));
|
||||
Ok(diff_task.await)
|
||||
}
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Self::Test(_) => Ok(buffer
|
||||
.read_with(cx, |buffer, cx| {
|
||||
let formatted_text = buffer.text() + Self::FORMAT_SUFFIX;
|
||||
buffer.diff(formatted_text, cx)
|
||||
})
|
||||
.await),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn clear_cache(&self) -> anyhow::Result<()> {
|
||||
match self {
|
||||
Self::Real(local) => local
|
||||
.server
|
||||
.request::<ClearCache>(())
|
||||
.await
|
||||
.context("prettier clear cache"),
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Self::Test(_) => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn server(&self) -> Option<&Arc<LanguageServer>> {
|
||||
match self {
|
||||
Self::Real(local) => Some(&local.server),
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Self::Test(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_default(&self) -> bool {
|
||||
match self {
|
||||
Self::Real(local) => local.default,
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Self::Test(test_prettier) => test_prettier.default,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn prettier_dir(&self) -> &Path {
|
||||
match self {
|
||||
Self::Real(local) => &local.prettier_dir,
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Self::Test(test_prettier) => &test_prettier.prettier_dir,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn worktree_id(&self) -> Option<usize> {
|
||||
match self {
|
||||
Self::Real(local) => local.worktree_id,
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Self::Test(test_prettier) => test_prettier.worktree_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn find_closest_prettier_dir(
|
||||
paths_to_check: Vec<PathBuf>,
|
||||
fs: &dyn Fs,
|
||||
) -> anyhow::Result<Option<PathBuf>> {
|
||||
for path in paths_to_check {
|
||||
let possible_package_json = path.join("package.json");
|
||||
if let Some(package_json_metadata) = fs
|
||||
.metadata(&possible_package_json)
|
||||
.await
|
||||
.with_context(|| format!("Fetching metadata for {possible_package_json:?}"))?
|
||||
{
|
||||
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
|
||||
let package_json_contents = fs
|
||||
.load(&possible_package_json)
|
||||
.await
|
||||
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
|
||||
if let Ok(json_contents) = serde_json::from_str::<HashMap<String, serde_json::Value>>(
|
||||
&package_json_contents,
|
||||
) {
|
||||
if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") {
|
||||
if o.contains_key(PRETTIER_PACKAGE_NAME) {
|
||||
return Ok(Some(path));
|
||||
}
|
||||
}
|
||||
if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies")
|
||||
{
|
||||
if o.contains_key(PRETTIER_PACKAGE_NAME) {
|
||||
return Ok(Some(path));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
|
||||
if let Some(node_modules_location_metadata) = fs
|
||||
.metadata(&possible_node_modules_location)
|
||||
.await
|
||||
.with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
|
||||
{
|
||||
if node_modules_location_metadata.is_dir {
|
||||
return Ok(Some(path));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
enum Format {}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct FormatParams {
|
||||
text: String,
|
||||
options: FormatOptions,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct FormatOptions {
|
||||
plugins: Vec<PathBuf>,
|
||||
parser: Option<String>,
|
||||
#[serde(rename = "filepath")]
|
||||
path: Option<PathBuf>,
|
||||
prettier_options: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct FormatResult {
|
||||
text: String,
|
||||
}
|
||||
|
||||
impl lsp::request::Request for Format {
|
||||
type Params = FormatParams;
|
||||
type Result = FormatResult;
|
||||
const METHOD: &'static str = "prettier/format";
|
||||
}
|
||||
|
||||
enum ClearCache {}
|
||||
|
||||
impl lsp::request::Request for ClearCache {
|
||||
type Params = ();
|
||||
type Result = ();
|
||||
const METHOD: &'static str = "prettier/clear_cache";
|
||||
}
|
217
crates/prettier/src/prettier_server.js
Normal file
@ -0,0 +1,217 @@
|
||||
const { Buffer } = require('buffer');
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const { once } = require('events');
|
||||
|
||||
const prettierContainerPath = process.argv[2];
|
||||
if (prettierContainerPath == null || prettierContainerPath.length == 0) {
|
||||
process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
fs.stat(prettierContainerPath, (err, stats) => {
|
||||
if (err) {
|
||||
process.stderr.write(`Path '${prettierContainerPath}' does not exist\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
process.stderr.write(`Path '${prettierContainerPath}' exists but is not a directory\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier');
|
||||
|
||||
class Prettier {
|
||||
constructor(path, prettier, config) {
|
||||
this.path = path;
|
||||
this.prettier = prettier;
|
||||
this.config = config;
|
||||
}
|
||||
}
|
||||
|
||||
(async () => {
|
||||
let prettier;
|
||||
let config;
|
||||
try {
|
||||
prettier = await loadPrettier(prettierPath);
|
||||
config = await prettier.resolveConfig(prettierPath) || {};
|
||||
} catch (e) {
|
||||
process.stderr.write(`Failed to load prettier: ${e}\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`);
|
||||
process.stdin.resume();
|
||||
handleBuffer(new Prettier(prettierPath, prettier, config));
|
||||
})()
|
||||
|
||||
async function handleBuffer(prettier) {
|
||||
for await (const messageText of readStdin()) {
|
||||
let message;
|
||||
try {
|
||||
message = JSON.parse(messageText);
|
||||
} catch (e) {
|
||||
sendResponse(makeError(`Failed to parse message '${messageText}': ${e}`));
|
||||
continue;
|
||||
}
|
||||
// allow concurrent request handling by not `await`ing the message handling promise (async function)
|
||||
handleMessage(message, prettier).catch(e => {
|
||||
sendResponse({ id: message.id, ...makeError(`error during message handling: ${e}`) });
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const headerSeparator = "\r\n";
|
||||
const contentLengthHeaderName = 'Content-Length';
|
||||
|
||||
async function* readStdin() {
|
||||
let buffer = Buffer.alloc(0);
|
||||
let streamEnded = false;
|
||||
process.stdin.on('end', () => {
|
||||
streamEnded = true;
|
||||
});
|
||||
process.stdin.on('data', (data) => {
|
||||
buffer = Buffer.concat([buffer, data]);
|
||||
});
|
||||
|
||||
async function handleStreamEnded(errorMessage) {
|
||||
sendResponse(makeError(errorMessage));
|
||||
buffer = Buffer.alloc(0);
|
||||
messageLength = null;
|
||||
await once(process.stdin, 'readable');
|
||||
streamEnded = false;
|
||||
}
|
||||
|
||||
try {
|
||||
let headersLength = null;
|
||||
let messageLength = null;
|
||||
main_loop: while (true) {
|
||||
if (messageLength === null) {
|
||||
while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) {
|
||||
if (streamEnded) {
|
||||
await handleStreamEnded('Unexpected end of stream: headers not found');
|
||||
continue main_loop;
|
||||
} else if (buffer.length > contentLengthHeaderName.length * 10) {
|
||||
await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`);
|
||||
continue main_loop;
|
||||
}
|
||||
await once(process.stdin, 'readable');
|
||||
}
|
||||
const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii');
|
||||
const contentLengthHeader = headers.split(headerSeparator)
|
||||
.map(header => header.split(':'))
|
||||
.filter(header => header[2] === undefined)
|
||||
.filter(header => (header[1] || '').length > 0)
|
||||
.find(header => (header[0] || '').trim() === contentLengthHeaderName);
|
||||
const contentLength = (contentLengthHeader || [])[1];
|
||||
if (contentLength === undefined) {
|
||||
await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`);
|
||||
continue main_loop;
|
||||
}
|
||||
headersLength = headers.length + headerSeparator.length * 2;
|
||||
messageLength = parseInt(contentLength, 10);
|
||||
}
|
||||
|
||||
while (buffer.length < (headersLength + messageLength)) {
|
||||
if (streamEnded) {
|
||||
await handleStreamEnded(
|
||||
`Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`);
|
||||
continue main_loop;
|
||||
}
|
||||
await once(process.stdin, 'readable');
|
||||
}
|
||||
|
||||
const messageEnd = headersLength + messageLength;
|
||||
const message = buffer.subarray(headersLength, messageEnd);
|
||||
buffer = buffer.subarray(messageEnd);
|
||||
headersLength = null;
|
||||
messageLength = null;
|
||||
yield message.toString('utf8');
|
||||
}
|
||||
} catch (e) {
|
||||
sendResponse(makeError(`Error reading stdin: ${e}`));
|
||||
} finally {
|
||||
process.stdin.off('data', () => { });
|
||||
}
|
||||
}
|
||||
|
||||
async function handleMessage(message, prettier) {
|
||||
const { method, id, params } = message;
|
||||
if (method === undefined) {
|
||||
throw new Error(`Message method is undefined: ${JSON.stringify(message)}`);
|
||||
}
|
||||
if (id === undefined) {
|
||||
throw new Error(`Message id is undefined: ${JSON.stringify(message)}`);
|
||||
}
|
||||
|
||||
if (method === 'prettier/format') {
|
||||
if (params === undefined || params.text === undefined) {
|
||||
throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`);
|
||||
}
|
||||
if (params.options === undefined) {
|
||||
throw new Error(`Message params.options is undefined: ${JSON.stringify(message)}`);
|
||||
}
|
||||
|
||||
let resolvedConfig = {};
|
||||
if (params.options.filepath !== undefined) {
|
||||
resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {};
|
||||
}
|
||||
|
||||
const options = {
|
||||
...(params.options.prettierOptions || prettier.config),
|
||||
...resolvedConfig,
|
||||
parser: params.options.parser,
|
||||
plugins: params.options.plugins,
|
||||
path: params.options.filepath
|
||||
};
|
||||
process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`);
|
||||
const formattedText = await prettier.prettier.format(params.text, options);
|
||||
sendResponse({ id, result: { text: formattedText } });
|
||||
} else if (method === 'prettier/clear_cache') {
|
||||
prettier.prettier.clearConfigCache();
|
||||
prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {};
|
||||
sendResponse({ id, result: null });
|
||||
} else if (method === 'initialize') {
|
||||
sendResponse({
|
||||
id,
|
||||
result: {
|
||||
"capabilities": {}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
throw new Error(`Unknown method: ${method}`);
|
||||
}
|
||||
}
|
||||
|
||||
function makeError(message) {
|
||||
return {
|
||||
error: {
|
||||
"code": -32600, // invalid request code
|
||||
message,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function sendResponse(response) {
|
||||
const responsePayloadString = JSON.stringify({
|
||||
jsonrpc: "2.0",
|
||||
...response
|
||||
});
|
||||
const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`;
|
||||
process.stdout.write(headers + responsePayloadString);
|
||||
}
|
||||
|
||||
function loadPrettier(prettierPath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.access(prettierPath, fs.constants.F_OK, (err) => {
|
||||
if (err) {
|
||||
reject(`Path '${prettierPath}' does not exist. Error: ${err}`);
|
||||
} else {
|
||||
try {
|
||||
resolve(require(prettierPath));
|
||||
} catch (err) {
|
||||
reject(`Error requiring prettier module from path '${prettierPath}'. Error: ${err}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
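The framing this server reads from stdin mirrors what `sendResponse` writes back: a `Content-Length` header, a blank line (`\r\n\r\n`), then a JSON-RPC body whose `method`, `id`, and `params` match the handlers in `handleMessage`. A minimal Node.js sketch of a well-formed request follows; it is not part of the diff, and the `text` and `parser` values are only illustrative.

// Sketch only: builds one framed `prettier/format` request for prettier_server.js.
// The `text` and `parser` values are made-up examples, not taken from the diff.
const body = JSON.stringify({
    jsonrpc: "2.0",
    id: 1,
    method: "prettier/format",
    params: { text: "const x=1", options: { parser: "babel", plugins: [] } },
});
// Content-Length is the byte count of the JSON body; headers end with CRLF CRLF.
process.stdout.write(`Content-Length: ${Buffer.byteLength(body)}\r\n\r\n${body}`);

Piping this output into `node prettier_server.js <prettier-dir>` would exercise the `prettier/format` branch above, assuming that directory contains a `node_modules/prettier` install.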
@ -15,6 +15,7 @@ test-support = [
|
||||
"language/test-support",
|
||||
"settings/test-support",
|
||||
"text/test-support",
|
||||
"prettier/test-support",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
@ -31,6 +32,8 @@ git = { path = "../git" }
|
||||
gpui = { path = "../gpui" }
|
||||
language = { path = "../language" }
|
||||
lsp = { path = "../lsp" }
|
||||
node_runtime = { path = "../node_runtime" }
|
||||
prettier = { path = "../prettier" }
|
||||
rpc = { path = "../rpc" }
|
||||
settings = { path = "../settings" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
@ -73,6 +76,7 @@ gpui = { path = "../gpui", features = ["test-support"] }
|
||||
language = { path = "../language", features = ["test-support"] }
|
||||
lsp = { path = "../lsp", features = ["test-support"] }
|
||||
settings = { path = "../settings", features = ["test-support"] }
|
||||
prettier = { path = "../prettier", features = ["test-support"] }
|
||||
util = { path = "../util", features = ["test-support"] }
|
||||
rpc = { path = "../rpc", features = ["test-support"] }
|
||||
git2.workspace = true
|
||||
|
@ -10,7 +10,7 @@ use futures::future;
|
||||
use gpui::{AppContext, AsyncAppContext, ModelHandle};
|
||||
use language::{
|
||||
language_settings::{language_settings, InlayHintKind},
|
||||
point_from_lsp, point_to_lsp,
|
||||
point_from_lsp, point_to_lsp, prepare_completion_documentation,
|
||||
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
|
||||
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind,
|
||||
CodeAction, Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction,
|
||||
@ -1341,7 +1341,7 @@ impl LspCommand for GetCompletions {
|
||||
async fn response_from_lsp(
|
||||
self,
|
||||
completions: Option<lsp::CompletionResponse>,
|
||||
_: ModelHandle<Project>,
|
||||
project: ModelHandle<Project>,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
server_id: LanguageServerId,
|
||||
cx: AsyncAppContext,
|
||||
@ -1358,10 +1358,11 @@ impl LspCommand for GetCompletions {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Default::default()
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let completions = buffer.read_with(&cx, |buffer, _| {
|
||||
let completions = buffer.read_with(&cx, |buffer, cx| {
|
||||
let language_registry = project.read(cx).languages().clone();
|
||||
let language = buffer.language().cloned();
|
||||
let snapshot = buffer.snapshot();
|
||||
let clipped_position = buffer.clip_point_utf16(Unclipped(self.position), Bias::Left);
|
||||
@ -1370,6 +1371,14 @@ impl LspCommand for GetCompletions {
|
||||
completions
|
||||
.into_iter()
|
||||
.filter_map(move |mut lsp_completion| {
|
||||
if let Some(response_list) = &response_list {
|
||||
if let Some(item_defaults) = &response_list.item_defaults {
|
||||
if let Some(data) = &item_defaults.data {
|
||||
lsp_completion.data = Some(data.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
|
||||
// If the language server provides a range to overwrite, then
|
||||
// check that the range is valid.
|
||||
@ -1445,14 +1454,30 @@ impl LspCommand for GetCompletions {
|
||||
}
|
||||
};
|
||||
|
||||
let language = language.clone();
|
||||
LineEnding::normalize(&mut new_text);
|
||||
let language_registry = language_registry.clone();
|
||||
let language = language.clone();
|
||||
|
||||
Some(async move {
|
||||
let mut label = None;
|
||||
if let Some(language) = language {
|
||||
if let Some(language) = language.as_ref() {
|
||||
language.process_completion(&mut lsp_completion).await;
|
||||
label = language.label_for_completion(&lsp_completion).await;
|
||||
}
|
||||
|
||||
let documentation = if let Some(lsp_docs) = &lsp_completion.documentation {
|
||||
Some(
|
||||
prepare_completion_documentation(
|
||||
lsp_docs,
|
||||
&language_registry,
|
||||
language.clone(),
|
||||
)
|
||||
.await,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Completion {
|
||||
old_range,
|
||||
new_text,
|
||||
@ -1462,6 +1487,7 @@ impl LspCommand for GetCompletions {
|
||||
lsp_completion.filter_text.as_deref(),
|
||||
)
|
||||
}),
|
||||
documentation,
|
||||
server_id,
|
||||
lsp_completion,
|
||||
}
|
||||
|
@ -20,7 +20,7 @@ use futures::{
|
||||
mpsc::{self, UnboundedReceiver},
|
||||
oneshot,
|
||||
},
|
||||
future::{try_join_all, Shared},
|
||||
future::{self, try_join_all, Shared},
|
||||
stream::FuturesUnordered,
|
||||
AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
|
||||
};
|
||||
@ -31,17 +31,19 @@ use gpui::{
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
|
||||
language_settings::{
|
||||
language_settings, FormatOnSave, Formatter, InlayHintKind, LanguageSettings,
|
||||
},
|
||||
point_to_lsp,
|
||||
proto::{
|
||||
deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
|
||||
serialize_anchor, serialize_version, split_operations,
|
||||
},
|
||||
range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
|
||||
CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
|
||||
File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
|
||||
OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
|
||||
ToOffset, ToPointUtf16, Transaction, Unclipped,
|
||||
range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, BundledFormatter, CachedLspAdapter,
|
||||
CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff,
|
||||
Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
|
||||
LspAdapterDelegate, OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16,
|
||||
TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
|
||||
};
|
||||
use log::error;
|
||||
use lsp::{
|
||||
@ -49,7 +51,9 @@ use lsp::{
|
||||
DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
|
||||
};
|
||||
use lsp_command::*;
|
||||
use node_runtime::NodeRuntime;
|
||||
use postage::watch;
|
||||
use prettier::{LocateStart, Prettier, PRETTIER_SERVER_FILE, PRETTIER_SERVER_JS};
|
||||
use project_settings::{LspSettings, ProjectSettings};
|
||||
use rand::prelude::*;
|
||||
use search::SearchQuery;
|
||||
@ -75,10 +79,13 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use terminals::Terminals;
|
||||
use text::Anchor;
|
||||
use text::{Anchor, LineEnding, Rope};
|
||||
use util::{
|
||||
debug_panic, defer, http::HttpClient, merge_json_value_into,
|
||||
paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
|
||||
debug_panic, defer,
|
||||
http::HttpClient,
|
||||
merge_json_value_into,
|
||||
paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
|
||||
post_inc, ResultExt, TryFutureExt as _,
|
||||
};
|
||||
|
||||
pub use fs::*;
|
||||
@ -152,6 +159,11 @@ pub struct Project {
|
||||
copilot_lsp_subscription: Option<gpui::Subscription>,
|
||||
copilot_log_subscription: Option<lsp::Subscription>,
|
||||
current_lsp_settings: HashMap<Arc<str>, LspSettings>,
|
||||
node: Option<Arc<dyn NodeRuntime>>,
|
||||
prettier_instances: HashMap<
|
||||
(Option<WorktreeId>, PathBuf),
|
||||
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
|
||||
>,
|
||||
}
|
||||
|
||||
struct DelayedDebounced {
|
||||
@ -580,6 +592,7 @@ impl Project {
|
||||
client.add_model_request_handler(Self::handle_apply_code_action);
|
||||
client.add_model_request_handler(Self::handle_on_type_formatting);
|
||||
client.add_model_request_handler(Self::handle_inlay_hints);
|
||||
client.add_model_request_handler(Self::handle_resolve_completion_documentation);
|
||||
client.add_model_request_handler(Self::handle_resolve_inlay_hint);
|
||||
client.add_model_request_handler(Self::handle_refresh_inlay_hints);
|
||||
client.add_model_request_handler(Self::handle_reload_buffers);
|
||||
@ -605,6 +618,7 @@ impl Project {
|
||||
|
||||
pub fn local(
|
||||
client: Arc<Client>,
|
||||
node: Arc<dyn NodeRuntime>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
languages: Arc<LanguageRegistry>,
|
||||
fs: Arc<dyn Fs>,
|
||||
@ -660,6 +674,8 @@ impl Project {
|
||||
copilot_lsp_subscription,
|
||||
copilot_log_subscription: None,
|
||||
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
|
||||
node: Some(node),
|
||||
prettier_instances: HashMap::default(),
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -757,6 +773,8 @@ impl Project {
|
||||
copilot_lsp_subscription,
|
||||
copilot_log_subscription: None,
|
||||
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
|
||||
node: None,
|
||||
prettier_instances: HashMap::default(),
|
||||
};
|
||||
for worktree in worktrees {
|
||||
let _ = this.add_worktree(&worktree, cx);
|
||||
@ -795,8 +813,16 @@ impl Project {
|
||||
let http_client = util::http::FakeHttpClient::with_404_response();
|
||||
let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
|
||||
let project =
|
||||
cx.update(|cx| Project::local(client, user_store, Arc::new(languages), fs, cx));
|
||||
let project = cx.update(|cx| {
|
||||
Project::local(
|
||||
client,
|
||||
node_runtime::FakeNodeRuntime::new(),
|
||||
user_store,
|
||||
Arc::new(languages),
|
||||
fs,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
for path in root_paths {
|
||||
let (tree, _) = project
|
||||
.update(cx, |project, cx| {
|
||||
@ -810,19 +836,37 @@ impl Project {
|
||||
project
|
||||
}
|
||||
|
||||
/// Enables a prettier mock that avoids interacting with node runtime, prettier LSP wrapper, or any real file changes.
|
||||
/// Instead, it appends a fixed suffix to every input; this suffix is returned by this method.
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn enable_test_prettier(&mut self, plugins: &[&'static str]) -> &'static str {
|
||||
self.node = Some(node_runtime::FakeNodeRuntime::with_prettier_support(
|
||||
plugins,
|
||||
));
|
||||
Prettier::FORMAT_SUFFIX
|
||||
}
|
||||
|
||||
fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
|
||||
let mut language_servers_to_start = Vec::new();
|
||||
let mut language_formatters_to_check = Vec::new();
|
||||
for buffer in self.opened_buffers.values() {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
let buffer = buffer.read(cx);
|
||||
if let Some((file, language)) = buffer.file().zip(buffer.language()) {
|
||||
let settings = language_settings(Some(language), Some(file), cx);
|
||||
let buffer_file = File::from_dyn(buffer.file());
|
||||
let buffer_language = buffer.language();
|
||||
let settings = language_settings(buffer_language, buffer.file(), cx);
|
||||
if let Some(language) = buffer_language {
|
||||
if settings.enable_language_server {
|
||||
if let Some(file) = File::from_dyn(Some(file)) {
|
||||
if let Some(file) = buffer_file {
|
||||
language_servers_to_start
|
||||
.push((file.worktree.clone(), language.clone()));
|
||||
.push((file.worktree.clone(), Arc::clone(language)));
|
||||
}
|
||||
}
|
||||
language_formatters_to_check.push((
|
||||
buffer_file.map(|f| f.worktree_id(cx)),
|
||||
Arc::clone(language),
|
||||
settings.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -875,6 +919,11 @@ impl Project {
|
||||
.detach();
|
||||
}
|
||||
|
||||
for (worktree, language, settings) in language_formatters_to_check {
|
||||
self.install_default_formatters(worktree, &language, &settings, cx)
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
// Start all the newly-enabled language servers.
|
||||
for (worktree, language) in language_servers_to_start {
|
||||
let worktree_path = worktree.read(cx).abs_path();
|
||||
@ -2623,7 +2672,26 @@ impl Project {
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
|
||||
let buffer_file = buffer.read(cx).file().cloned();
|
||||
let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
|
||||
let buffer_file = File::from_dyn(buffer_file.as_ref());
|
||||
let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
|
||||
|
||||
let task_buffer = buffer.clone();
|
||||
let prettier_installation_task =
|
||||
self.install_default_formatters(worktree, &new_language, &settings, cx);
|
||||
cx.spawn(|project, mut cx| async move {
|
||||
prettier_installation_task.await?;
|
||||
let _ = project
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.prettier_instance_for_buffer(&task_buffer, cx)
|
||||
})
|
||||
.await;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
if let Some(file) = buffer_file {
|
||||
let worktree = file.worktree.clone();
|
||||
if let Some(tree) = worktree.read(cx).as_local() {
|
||||
self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
|
||||
@ -2684,15 +2752,6 @@ impl Project {
|
||||
let lsp = project_settings.lsp.get(&adapter.name.0);
|
||||
let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
|
||||
|
||||
let mut initialization_options = adapter.initialization_options.clone();
|
||||
match (&mut initialization_options, override_options) {
|
||||
(Some(initialization_options), Some(override_options)) => {
|
||||
merge_json_value_into(override_options, initialization_options);
|
||||
}
|
||||
(None, override_options) => initialization_options = override_options,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let server_id = pending_server.server_id;
|
||||
let container_dir = pending_server.container_dir.clone();
|
||||
let state = LanguageServerState::Starting({
|
||||
@ -2704,7 +2763,7 @@ impl Project {
|
||||
cx.spawn_weak(|this, mut cx| async move {
|
||||
let result = Self::setup_and_insert_language_server(
|
||||
this,
|
||||
initialization_options,
|
||||
override_options,
|
||||
pending_server,
|
||||
adapter.clone(),
|
||||
language.clone(),
|
||||
@ -2807,7 +2866,7 @@ impl Project {
|
||||
|
||||
async fn setup_and_insert_language_server(
|
||||
this: WeakModelHandle<Self>,
|
||||
initialization_options: Option<serde_json::Value>,
|
||||
override_initialization_options: Option<serde_json::Value>,
|
||||
pending_server: PendingLanguageServer,
|
||||
adapter: Arc<CachedLspAdapter>,
|
||||
language: Arc<Language>,
|
||||
@ -2817,7 +2876,7 @@ impl Project {
|
||||
) -> Result<Option<Arc<LanguageServer>>> {
|
||||
let setup = Self::setup_pending_language_server(
|
||||
this,
|
||||
initialization_options,
|
||||
override_initialization_options,
|
||||
pending_server,
|
||||
adapter.clone(),
|
||||
server_id,
|
||||
@ -2849,7 +2908,7 @@ impl Project {
|
||||
|
||||
async fn setup_pending_language_server(
|
||||
this: WeakModelHandle<Self>,
|
||||
initialization_options: Option<serde_json::Value>,
|
||||
override_options: Option<serde_json::Value>,
|
||||
pending_server: PendingLanguageServer,
|
||||
adapter: Arc<CachedLspAdapter>,
|
||||
server_id: LanguageServerId,
|
||||
@ -2867,8 +2926,8 @@ impl Project {
|
||||
move |mut params, mut cx| {
|
||||
let this = this;
|
||||
let adapter = adapter.clone();
|
||||
adapter.process_diagnostics(&mut params);
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
adapter.process_diagnostics(&mut params);
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update_diagnostics(
|
||||
server_id,
|
||||
@ -2995,6 +3054,14 @@ impl Project {
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
let mut initialization_options = adapter.adapter.initialization_options().await;
|
||||
match (&mut initialization_options, override_options) {
|
||||
(Some(initialization_options), Some(override_options)) => {
|
||||
merge_json_value_into(override_options, initialization_options);
|
||||
}
|
||||
(None, override_options) => initialization_options = override_options,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let language_server = language_server.initialize(initialization_options).await?;
|
||||
|
||||
@ -3949,7 +4016,7 @@ impl Project {
|
||||
push_to_history: bool,
|
||||
trigger: FormatTrigger,
|
||||
cx: &mut ModelContext<Project>,
|
||||
) -> Task<Result<ProjectTransaction>> {
|
||||
) -> Task<anyhow::Result<ProjectTransaction>> {
|
||||
if self.is_local() {
|
||||
let mut buffers_with_paths_and_servers = buffers
|
||||
.into_iter()
|
||||
@ -4027,6 +4094,7 @@ impl Project {
|
||||
enum FormatOperation {
|
||||
Lsp(Vec<(Range<Anchor>, String)>),
|
||||
External(Diff),
|
||||
Prettier(Diff),
|
||||
}
|
||||
|
||||
// Apply language-specific formatting using either a language server
|
||||
@ -4062,8 +4130,8 @@ impl Project {
|
||||
| (_, FormatOnSave::External { command, arguments }) => {
|
||||
if let Some(buffer_abs_path) = buffer_abs_path {
|
||||
format_operation = Self::format_via_external_command(
|
||||
&buffer,
|
||||
&buffer_abs_path,
|
||||
buffer,
|
||||
buffer_abs_path,
|
||||
&command,
|
||||
&arguments,
|
||||
&mut cx,
|
||||
@ -4076,6 +4144,69 @@ impl Project {
|
||||
.map(FormatOperation::External);
|
||||
}
|
||||
}
|
||||
(Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
|
||||
if let Some(prettier_task) = this
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.prettier_instance_for_buffer(buffer, cx)
|
||||
}).await {
|
||||
match prettier_task.await
|
||||
{
|
||||
Ok(prettier) => {
|
||||
let buffer_path = buffer.read_with(&cx, |buffer, cx| {
|
||||
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
|
||||
});
|
||||
format_operation = Some(FormatOperation::Prettier(
|
||||
prettier
|
||||
.format(buffer, buffer_path, &cx)
|
||||
.await
|
||||
.context("formatting via prettier")?,
|
||||
));
|
||||
}
|
||||
Err(e) => anyhow::bail!(
|
||||
"Failed to create prettier instance for buffer during autoformatting: {e:#}"
|
||||
),
|
||||
}
|
||||
} else if let Some((language_server, buffer_abs_path)) =
|
||||
language_server.as_ref().zip(buffer_abs_path.as_ref())
|
||||
{
|
||||
format_operation = Some(FormatOperation::Lsp(
|
||||
Self::format_via_lsp(
|
||||
&this,
|
||||
&buffer,
|
||||
buffer_abs_path,
|
||||
&language_server,
|
||||
tab_size,
|
||||
&mut cx,
|
||||
)
|
||||
.await
|
||||
.context("failed to format via language server")?,
|
||||
));
|
||||
}
|
||||
}
|
||||
(Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
|
||||
if let Some(prettier_task) = this
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.prettier_instance_for_buffer(buffer, cx)
|
||||
}).await {
|
||||
match prettier_task.await
|
||||
{
|
||||
Ok(prettier) => {
|
||||
let buffer_path = buffer.read_with(&cx, |buffer, cx| {
|
||||
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
|
||||
});
|
||||
format_operation = Some(FormatOperation::Prettier(
|
||||
prettier
|
||||
.format(buffer, buffer_path, &cx)
|
||||
.await
|
||||
.context("formatting via prettier")?,
|
||||
));
|
||||
}
|
||||
Err(e) => anyhow::bail!(
|
||||
"Failed to create prettier instance for buffer during formatting: {e:#}"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
buffer.update(&mut cx, |b, cx| {
|
||||
@ -4100,6 +4231,9 @@ impl Project {
|
||||
FormatOperation::External(diff) => {
|
||||
b.apply_diff(diff, cx);
|
||||
}
|
||||
FormatOperation::Prettier(diff) => {
|
||||
b.apply_diff(diff, cx);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(transaction_id) = whitespace_transaction_id {
|
||||
@ -5873,6 +6007,7 @@ impl Project {
|
||||
this.update_local_worktree_buffers(&worktree, changes, cx);
|
||||
this.update_local_worktree_language_servers(&worktree, changes, cx);
|
||||
this.update_local_worktree_settings(&worktree, changes, cx);
|
||||
this.update_prettier_settings(&worktree, changes, cx);
|
||||
cx.emit(Event::WorktreeUpdatedEntries(
|
||||
worktree.read(cx).id(),
|
||||
changes.clone(),
|
||||
@ -6252,6 +6387,69 @@ impl Project {
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn update_prettier_settings(
|
||||
&self,
|
||||
worktree: &ModelHandle<Worktree>,
|
||||
changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
|
||||
cx: &mut ModelContext<'_, Project>,
|
||||
) {
|
||||
let prettier_config_files = Prettier::CONFIG_FILE_NAMES
|
||||
.iter()
|
||||
.map(Path::new)
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
let prettier_config_file_changed = changes
|
||||
.iter()
|
||||
.filter(|(_, _, change)| !matches!(change, PathChange::Loaded))
|
||||
.filter(|(path, _, _)| {
|
||||
!path
|
||||
.components()
|
||||
.any(|component| component.as_os_str().to_string_lossy() == "node_modules")
|
||||
})
|
||||
.find(|(path, _, _)| prettier_config_files.contains(path.as_ref()));
|
||||
let current_worktree_id = worktree.read(cx).id();
|
||||
if let Some((config_path, _, _)) = prettier_config_file_changed {
|
||||
log::info!(
|
||||
"Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
|
||||
);
|
||||
let prettiers_to_reload = self
|
||||
.prettier_instances
|
||||
.iter()
|
||||
.filter_map(|((worktree_id, prettier_path), prettier_task)| {
|
||||
if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) {
|
||||
Some((*worktree_id, prettier_path.clone(), prettier_task.clone()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.background()
|
||||
.spawn(async move {
|
||||
for task_result in future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_task)| {
|
||||
async move {
|
||||
prettier_task.await?
|
||||
.clear_cache()
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
|
||||
)
|
||||
})
|
||||
.map_err(Arc::new)
|
||||
}
|
||||
}))
|
||||
.await
|
||||
{
|
||||
if let Err(e) = task_result {
|
||||
log::error!("Failed to clear cache for prettier: {e:#}");
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
|
||||
let new_active_entry = entry.and_then(|project_path| {
|
||||
let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
|
||||
@ -7155,6 +7353,40 @@ impl Project {
|
||||
})
|
||||
}
|
||||
|
||||
async fn handle_resolve_completion_documentation(
|
||||
this: ModelHandle<Self>,
|
||||
envelope: TypedEnvelope<proto::ResolveCompletionDocumentation>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<proto::ResolveCompletionDocumentationResponse> {
|
||||
let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?;
|
||||
|
||||
let completion = this
|
||||
.read_with(&mut cx, |this, _| {
|
||||
let id = LanguageServerId(envelope.payload.language_server_id as usize);
|
||||
let Some(server) = this.language_server_for_id(id) else {
|
||||
return Err(anyhow!("No language server {id}"));
|
||||
};
|
||||
|
||||
Ok(server.request::<lsp::request::ResolveCompletionItem>(lsp_completion))
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let mut is_markdown = false;
|
||||
let text = match completion.documentation {
|
||||
Some(lsp::Documentation::String(text)) => text,
|
||||
|
||||
Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value })) => {
|
||||
is_markdown = kind == lsp::MarkupKind::Markdown;
|
||||
value
|
||||
}
|
||||
|
||||
_ => String::new(),
|
||||
};
|
||||
|
||||
Ok(proto::ResolveCompletionDocumentationResponse { text, is_markdown })
|
||||
}
|
||||
|
||||
async fn handle_apply_code_action(
|
||||
this: ModelHandle<Self>,
|
||||
envelope: TypedEnvelope<proto::ApplyCodeAction>,
|
||||
@ -8109,6 +8341,236 @@ impl Project {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
fn prettier_instance_for_buffer(
|
||||
&mut self,
|
||||
buffer: &ModelHandle<Buffer>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> {
|
||||
let buffer = buffer.read(cx);
|
||||
let buffer_file = buffer.file();
|
||||
let Some(buffer_language) = buffer.language() else {
|
||||
return Task::ready(None);
|
||||
};
|
||||
if !buffer_language
|
||||
.lsp_adapters()
|
||||
.iter()
|
||||
.flat_map(|adapter| adapter.enabled_formatters())
|
||||
.any(|formatter| matches!(formatter, BundledFormatter::Prettier { .. }))
|
||||
{
|
||||
return Task::ready(None);
|
||||
}
|
||||
|
||||
let buffer_file = File::from_dyn(buffer_file);
|
||||
let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
|
||||
let worktree_path = buffer_file
|
||||
.as_ref()
|
||||
.and_then(|file| Some(file.worktree.read(cx).abs_path()));
|
||||
let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
|
||||
if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
|
||||
let Some(node) = self.node.as_ref().map(Arc::clone) else {
|
||||
return Task::ready(None);
|
||||
};
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs));
|
||||
let prettier_dir = match cx
|
||||
.background()
|
||||
.spawn(Prettier::locate(
|
||||
worktree_path.zip(buffer_path).map(
|
||||
|(worktree_root_path, starting_path)| LocateStart {
|
||||
worktree_root_path,
|
||||
starting_path,
|
||||
},
|
||||
),
|
||||
fs,
|
||||
))
|
||||
.await
|
||||
{
|
||||
Ok(path) => path,
|
||||
Err(e) => {
|
||||
return Some(
|
||||
Task::ready(Err(Arc::new(e.context(
|
||||
"determining prettier path for worktree {worktree_path:?}",
|
||||
))))
|
||||
.shared(),
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(existing_prettier) = this.update(&mut cx, |project, _| {
|
||||
project
|
||||
.prettier_instances
|
||||
.get(&(worktree_id, prettier_dir.clone()))
|
||||
.cloned()
|
||||
}) {
|
||||
return Some(existing_prettier);
|
||||
}
|
||||
|
||||
log::info!("Found prettier in {prettier_dir:?}, starting.");
|
||||
let task_prettier_dir = prettier_dir.clone();
|
||||
let weak_project = this.downgrade();
|
||||
let new_server_id =
|
||||
this.update(&mut cx, |this, _| this.languages.next_language_server_id());
|
||||
let new_prettier_task = cx
|
||||
.spawn(|mut cx| async move {
|
||||
let prettier = Prettier::start(
|
||||
worktree_id.map(|id| id.to_usize()),
|
||||
new_server_id,
|
||||
task_prettier_dir,
|
||||
node,
|
||||
cx.clone(),
|
||||
)
|
||||
.await
|
||||
.context("prettier start")
|
||||
.map_err(Arc::new)?;
|
||||
log::info!("Started prettier in {:?}", prettier.prettier_dir());
|
||||
|
||||
if let Some((project, prettier_server)) =
|
||||
weak_project.upgrade(&mut cx).zip(prettier.server())
|
||||
{
|
||||
project.update(&mut cx, |project, cx| {
|
||||
let name = if prettier.is_default() {
|
||||
LanguageServerName(Arc::from("prettier (default)"))
|
||||
} else {
|
||||
let prettier_dir = prettier.prettier_dir();
|
||||
let worktree_path = prettier
|
||||
.worktree_id()
|
||||
.map(WorktreeId::from_usize)
|
.and_then(|id| project.worktree_for_id(id, cx))
.map(|worktree| worktree.read(cx).abs_path());
match worktree_path {
Some(worktree_path) => {
if worktree_path.as_ref() == prettier_dir {
LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir
.file_name()
.and_then(|name| name.to_str())
.unwrap_or_default()
)))
} else {
let dir_to_display = match prettier_dir
.strip_prefix(&worktree_path)
.ok()
{
Some(relative_path) => relative_path,
None => prettier_dir,
};
LanguageServerName(Arc::from(format!(
"prettier ({})",
dir_to_display.display(),
)))
}
}
None => LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir.display(),
))),
}
};

project
.supplementary_language_servers
.insert(new_server_id, (name, Arc::clone(prettier_server)));
cx.emit(Event::LanguageServerAdded(new_server_id));
});
}
Ok(Arc::new(prettier)).map_err(Arc::new)
})
.shared();
this.update(&mut cx, |project, _| {
project
.prettier_instances
.insert((worktree_id, prettier_dir), new_prettier_task.clone());
});
Some(new_prettier_task)
})
} else if self.remote_id().is_some() {
return Task::ready(None);
} else {
Task::ready(Some(
Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
))
}
}

fn install_default_formatters(
&self,
worktree: Option<WorktreeId>,
new_language: &Language,
language_settings: &LanguageSettings,
cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> {
match &language_settings.formatter {
Formatter::Prettier { .. } | Formatter::Auto => {}
Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())),
};
let Some(node) = self.node.as_ref().cloned() else {
return Task::ready(Ok(()));
};

let mut prettier_plugins = None;
for formatter in new_language
.lsp_adapters()
.into_iter()
.flat_map(|adapter| adapter.enabled_formatters())
{
match formatter {
BundledFormatter::Prettier { plugin_names, .. } => prettier_plugins
.get_or_insert_with(|| HashSet::default())
.extend(plugin_names),
}
}
let Some(prettier_plugins) = prettier_plugins else {
return Task::ready(Ok(()));
};

let default_prettier_dir = DEFAULT_PRETTIER_DIR.as_path();
let already_running_prettier = self
.prettier_instances
.get(&(worktree, default_prettier_dir.to_path_buf()))
.cloned();

let fs = Arc::clone(&self.fs);
cx.background()
.spawn(async move {
let prettier_wrapper_path = default_prettier_dir.join(PRETTIER_SERVER_FILE);
// method creates parent directory if it doesn't exist
fs.save(&prettier_wrapper_path, &Rope::from(PRETTIER_SERVER_JS), LineEnding::Unix).await
.with_context(|| format!("writing {PRETTIER_SERVER_FILE} file at {prettier_wrapper_path:?}"))?;

let packages_to_versions = future::try_join_all(
prettier_plugins
.iter()
.chain(Some(&"prettier"))
.map(|package_name| async {
let returned_package_name = package_name.to_string();
let latest_version = node.npm_package_latest_version(package_name)
.await
.with_context(|| {
format!("fetching latest npm version for package {returned_package_name}")
})?;
anyhow::Ok((returned_package_name, latest_version))
}),
)
.await
.context("fetching latest npm versions")?;

log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
let borrowed_packages = packages_to_versions.iter().map(|(package, version)| {
(package.as_str(), version.as_str())
}).collect::<Vec<_>>();
node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?;

if !prettier_plugins.is_empty() {
if let Some(prettier) = already_running_prettier {
prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?;
}
}

anyhow::Ok(())
})
}
}

fn subscribe_for_copilot_events(
@ -4310,7 +4310,7 @@ impl<'a> From<&'a Entry> for proto::Entry {
is_symlink: entry.is_symlink,
is_ignored: entry.is_ignored,
is_external: entry.is_external,
git_status: entry.git_status.map(|status| status.to_proto()),
git_status: entry.git_status.map(git_status_to_proto),
}
}
}
@ -4337,7 +4337,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
is_symlink: entry.is_symlink,
is_ignored: entry.is_ignored,
is_external: entry.is_external,
git_status: GitFileStatus::from_proto(entry.git_status),
git_status: git_status_from_proto(entry.git_status),
})
} else {
Err(anyhow!(
@ -4366,3 +4366,21 @@ fn combine_git_statuses(
unstaged
}
}

fn git_status_from_proto(git_status: Option<i32>) -> Option<GitFileStatus> {
git_status.and_then(|status| {
proto::GitStatus::from_i32(status).map(|status| match status {
proto::GitStatus::Added => GitFileStatus::Added,
proto::GitStatus::Modified => GitFileStatus::Modified,
proto::GitStatus::Conflict => GitFileStatus::Conflict,
})
})
}

fn git_status_to_proto(status: GitFileStatus) -> i32 {
match status {
GitFileStatus::Added => proto::GitStatus::Added as i32,
GitFileStatus::Modified => proto::GitStatus::Modified as i32,
GitFileStatus::Conflict => proto::GitStatus::Conflict as i32,
}
}
@ -89,88 +89,90 @@ message Envelope {
FormatBuffersResponse format_buffers_response = 70;
GetCompletions get_completions = 71;
GetCompletionsResponse get_completions_response = 72;
ApplyCompletionAdditionalEdits apply_completion_additional_edits = 73;
ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 74;
GetCodeActions get_code_actions = 75;
GetCodeActionsResponse get_code_actions_response = 76;
GetHover get_hover = 77;
GetHoverResponse get_hover_response = 78;
ApplyCodeAction apply_code_action = 79;
ApplyCodeActionResponse apply_code_action_response = 80;
PrepareRename prepare_rename = 81;
PrepareRenameResponse prepare_rename_response = 82;
PerformRename perform_rename = 83;
PerformRenameResponse perform_rename_response = 84;
SearchProject search_project = 85;
SearchProjectResponse search_project_response = 86;
ResolveCompletionDocumentation resolve_completion_documentation = 73;
ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74;
ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75;
ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 76;
GetCodeActions get_code_actions = 77;
GetCodeActionsResponse get_code_actions_response = 78;
GetHover get_hover = 79;
GetHoverResponse get_hover_response = 80;
ApplyCodeAction apply_code_action = 81;
ApplyCodeActionResponse apply_code_action_response = 82;
PrepareRename prepare_rename = 83;
PrepareRenameResponse prepare_rename_response = 84;
PerformRename perform_rename = 85;
PerformRenameResponse perform_rename_response = 86;
SearchProject search_project = 87;
SearchProjectResponse search_project_response = 88;

UpdateContacts update_contacts = 87;
UpdateInviteInfo update_invite_info = 88;
ShowContacts show_contacts = 89;
UpdateContacts update_contacts = 89;
UpdateInviteInfo update_invite_info = 90;
ShowContacts show_contacts = 91;

GetUsers get_users = 90;
FuzzySearchUsers fuzzy_search_users = 91;
UsersResponse users_response = 92;
RequestContact request_contact = 93;
RespondToContactRequest respond_to_contact_request = 94;
RemoveContact remove_contact = 95;
GetUsers get_users = 92;
FuzzySearchUsers fuzzy_search_users = 93;
UsersResponse users_response = 94;
RequestContact request_contact = 95;
RespondToContactRequest respond_to_contact_request = 96;
RemoveContact remove_contact = 97;

Follow follow = 96;
FollowResponse follow_response = 97;
UpdateFollowers update_followers = 98;
Unfollow unfollow = 99;
GetPrivateUserInfo get_private_user_info = 100;
GetPrivateUserInfoResponse get_private_user_info_response = 101;
UpdateDiffBase update_diff_base = 102;
Follow follow = 98;
FollowResponse follow_response = 99;
UpdateFollowers update_followers = 100;
Unfollow unfollow = 101;
GetPrivateUserInfo get_private_user_info = 102;
GetPrivateUserInfoResponse get_private_user_info_response = 103;
UpdateDiffBase update_diff_base = 104;

OnTypeFormatting on_type_formatting = 103;
OnTypeFormattingResponse on_type_formatting_response = 104;
OnTypeFormatting on_type_formatting = 105;
OnTypeFormattingResponse on_type_formatting_response = 106;

UpdateWorktreeSettings update_worktree_settings = 105;
UpdateWorktreeSettings update_worktree_settings = 107;

InlayHints inlay_hints = 106;
InlayHintsResponse inlay_hints_response = 107;
ResolveInlayHint resolve_inlay_hint = 108;
ResolveInlayHintResponse resolve_inlay_hint_response = 109;
RefreshInlayHints refresh_inlay_hints = 110;
InlayHints inlay_hints = 108;
InlayHintsResponse inlay_hints_response = 109;
ResolveInlayHint resolve_inlay_hint = 110;
ResolveInlayHintResponse resolve_inlay_hint_response = 111;
RefreshInlayHints refresh_inlay_hints = 112;

CreateChannel create_channel = 111;
CreateChannelResponse create_channel_response = 112;
InviteChannelMember invite_channel_member = 113;
RemoveChannelMember remove_channel_member = 114;
RespondToChannelInvite respond_to_channel_invite = 115;
UpdateChannels update_channels = 116;
JoinChannel join_channel = 117;
DeleteChannel delete_channel = 118;
GetChannelMembers get_channel_members = 119;
GetChannelMembersResponse get_channel_members_response = 120;
SetChannelMemberAdmin set_channel_member_admin = 121;
RenameChannel rename_channel = 122;
RenameChannelResponse rename_channel_response = 123;
CreateChannel create_channel = 113;
CreateChannelResponse create_channel_response = 114;
InviteChannelMember invite_channel_member = 115;
RemoveChannelMember remove_channel_member = 116;
RespondToChannelInvite respond_to_channel_invite = 117;
UpdateChannels update_channels = 118;
JoinChannel join_channel = 119;
DeleteChannel delete_channel = 120;
GetChannelMembers get_channel_members = 121;
GetChannelMembersResponse get_channel_members_response = 122;
SetChannelMemberAdmin set_channel_member_admin = 123;
RenameChannel rename_channel = 124;
RenameChannelResponse rename_channel_response = 125;

JoinChannelBuffer join_channel_buffer = 124;
JoinChannelBufferResponse join_channel_buffer_response = 125;
UpdateChannelBuffer update_channel_buffer = 126;
LeaveChannelBuffer leave_channel_buffer = 127;
UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 128;
RejoinChannelBuffers rejoin_channel_buffers = 129;
RejoinChannelBuffersResponse rejoin_channel_buffers_response = 130;
AckBufferOperation ack_buffer_operation = 143;
JoinChannelBuffer join_channel_buffer = 126;
JoinChannelBufferResponse join_channel_buffer_response = 127;
UpdateChannelBuffer update_channel_buffer = 128;
LeaveChannelBuffer leave_channel_buffer = 129;
UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130;
RejoinChannelBuffers rejoin_channel_buffers = 131;
RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132;
AckBufferOperation ack_buffer_operation = 145;

JoinChannelChat join_channel_chat = 131;
JoinChannelChatResponse join_channel_chat_response = 132;
LeaveChannelChat leave_channel_chat = 133;
SendChannelMessage send_channel_message = 134;
SendChannelMessageResponse send_channel_message_response = 135;
ChannelMessageSent channel_message_sent = 136;
GetChannelMessages get_channel_messages = 137;
GetChannelMessagesResponse get_channel_messages_response = 138;
RemoveChannelMessage remove_channel_message = 139;
AckChannelMessage ack_channel_message = 144;
JoinChannelChat join_channel_chat = 133;
JoinChannelChatResponse join_channel_chat_response = 134;
LeaveChannelChat leave_channel_chat = 135;
SendChannelMessage send_channel_message = 136;
SendChannelMessageResponse send_channel_message_response = 137;
ChannelMessageSent channel_message_sent = 138;
GetChannelMessages get_channel_messages = 139;
GetChannelMessagesResponse get_channel_messages_response = 140;
RemoveChannelMessage remove_channel_message = 141;
AckChannelMessage ack_channel_message = 146;

LinkChannel link_channel = 140;
UnlinkChannel unlink_channel = 141;
MoveChannel move_channel = 142; // current max: 144
LinkChannel link_channel = 142;
UnlinkChannel unlink_channel = 143;
MoveChannel move_channel = 144; // current max: 146
}
}

@ -832,6 +834,17 @@ message ResolveState {
}
}

message ResolveCompletionDocumentation {
uint64 project_id = 1;
uint64 language_server_id = 2;
bytes lsp_completion = 3;
}

message ResolveCompletionDocumentationResponse {
string text = 1;
bool is_markdown = 2;
}

message ResolveInlayHint {
uint64 project_id = 1;
uint64 buffer_id = 2;
@ -205,6 +205,8 @@ messages!(
(OnTypeFormattingResponse, Background),
(InlayHints, Background),
(InlayHintsResponse, Background),
(ResolveCompletionDocumentation, Background),
(ResolveCompletionDocumentationResponse, Background),
(ResolveInlayHint, Background),
(ResolveInlayHintResponse, Background),
(RefreshInlayHints, Foreground),
@ -318,6 +320,10 @@ request_messages!(
(PrepareRename, PrepareRenameResponse),
(OnTypeFormatting, OnTypeFormattingResponse),
(InlayHints, InlayHintsResponse),
(
ResolveCompletionDocumentation,
ResolveCompletionDocumentationResponse
),
(ResolveInlayHint, ResolveInlayHintResponse),
(RefreshInlayHints, Ack),
(ReloadBuffers, ReloadBuffersResponse),
@ -381,6 +387,7 @@ entity_messages!(
PerformRename,
OnTypeFormatting,
InlayHints,
ResolveCompletionDocumentation,
ResolveInlayHint,
RefreshInlayHints,
PrepareRename,

@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*;
mod macros;

pub const PROTOCOL_VERSION: u32 = 64;
pub const PROTOCOL_VERSION: u32 = 65;
@ -51,7 +51,6 @@ workspace = { path = "../workspace", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"]}
rust-embed = { version = "8.0", features = ["include-exclude"] }
client = { path = "../client" }
zed = { path = "../zed"}
node_runtime = { path = "../node_runtime"}

pretty_assertions.workspace = true
@ -70,6 +69,3 @@ tree-sitter-elixir.workspace = true
tree-sitter-lua.workspace = true
tree-sitter-ruby.workspace = true
tree-sitter-php.workspace = true

[[example]]
name = "eval"
2919
crates/storybook/Cargo.lock
generated
File diff suppressed because it is too large
@ -1,30 +0,0 @@
[package]
name = "storybook"
version = "0.1.0"
edition = "2021"
publish = false

[[bin]]
name = "storybook"
path = "src/storybook.rs"

[dependencies]
anyhow.workspace = true
clap = { version = "4.4", features = ["derive", "string"] }
chrono = "0.4"
fs = { path = "../fs" }
futures.workspace = true
gpui2 = { path = "../gpui2" }
itertools = "0.11.0"
log.workspace = true
rust-embed.workspace = true
serde.workspace = true
settings = { path = "../settings" }
simplelog = "0.9"
strum = { version = "0.25.0", features = ["derive"] }
theme = { path = "../theme" }
ui = { path = "../ui" }
util = { path = "../util" }

[dev-dependencies]
gpui2 = { path = "../gpui2", features = ["test-support"] }
@ -1,72 +0,0 @@
Much of element styling is now handled by an external engine.


How do I make an element hover?

There's a hover style.

Hoverable needs to wrap another element. That element can be styled.

```rs
struct Hoverable<E: Element> {

}

impl<V> Element<V> for Hoverable {

}
```
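As a minimal, self-contained sketch of that wrapping idea (all names here, including `Style`, `Element`, and the `hovered` field, are illustrative stand-ins and not gpui2's actual API), the wrapper can keep the child, a hover-only style override, and the hover state, and refine the child's style only while hovered:

```rs
// Illustrative sketch only: `Style`, `Element`, and `Hoverable` here are
// hypothetical stand-ins for the real gpui2 types discussed above.

#[derive(Clone, Default)]
struct Style {
    background: Option<&'static str>,
}

trait Element {
    fn computed_style(&self) -> Style;
}

// Wraps another element and overlays `hover_style` while the pointer is over it.
struct Hoverable<E: Element> {
    child: E,
    hover_style: Style,
    hovered: bool,
}

impl<E: Element> Hoverable<E> {
    fn new(child: E, hover_style: Style) -> Self {
        Self { child, hover_style, hovered: false }
    }

    // Would be driven by mouse-move events in a real element tree.
    fn set_hovered(&mut self, hovered: bool) {
        self.hovered = hovered;
    }
}

impl<E: Element> Element for Hoverable<E> {
    fn computed_style(&self) -> Style {
        let base = self.child.computed_style();
        if self.hovered {
            // Refine the child's base style with the hover overrides.
            Style {
                background: self.hover_style.background.or(base.background),
            }
        } else {
            base
        }
    }
}
```

The point of the wrapper shape is that hover state and the hover refinement live entirely in `Hoverable`, so the wrapped element never needs to know it can be hovered.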
```rs
#[derive(Styled, Interactive)]
pub struct Div {
declared_style: StyleRefinement,
interactions: Interactions
}

pub trait Styled {
fn declared_style(&mut self) -> &mut StyleRefinement;
fn compute_style(&mut self) -> Style {
Style::default().refine(self.declared_style())
}

// All the tailwind classes, modifying self.declared_style()
}

impl Style {
pub fn paint_background<V>(layout: Layout, cx: &mut PaintContext<V>);
pub fn paint_foreground<V>(layout: Layout, cx: &mut PaintContext<V>);
}

pub trait Interactive<V> {
fn interactions(&mut self) -> &mut Interactions<V>;

fn on_click(self, )
}

struct Interactions<V> {
click: SmallVec<[<Rc<dyn Fn(&mut V, &dyn Any, )>; 1]>,
}
```

```rs
trait Stylable {
type Style;

fn with_style(self, style: Self::Style) -> Self;
}
```
@ -1,3 +0,0 @@
pub mod components;
pub mod elements;
pub mod kitchen_sink;
@ -1,18 +0,0 @@
pub mod assistant_panel;
pub mod breadcrumb;
pub mod buffer;
pub mod chat_panel;
pub mod collab_panel;
pub mod context_menu;
pub mod facepile;
pub mod keybinding;
pub mod palette;
pub mod panel;
pub mod project_panel;
pub mod status_bar;
pub mod tab;
pub mod tab_bar;
pub mod terminal;
pub mod title_bar;
pub mod toolbar;
pub mod traffic_lights;
@ -1,16 +0,0 @@
use ui::prelude::*;
use ui::AssistantPanel;

use crate::story::Story;

#[derive(Element, Default)]
pub struct AssistantPanelStory {}

impl AssistantPanelStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
Story::container(cx)
.child(Story::title_for::<_, AssistantPanel<V>>(cx))
.child(Story::label(cx, "Default"))
.child(AssistantPanel::new())
}
}
@ -1,45 +0,0 @@
use std::path::PathBuf;
use std::str::FromStr;

use ui::prelude::*;
use ui::{Breadcrumb, HighlightedText, Symbol};

use crate::story::Story;

#[derive(Element, Default)]
pub struct BreadcrumbStory {}

impl BreadcrumbStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
let theme = theme(cx);

Story::container(cx)
.child(Story::title_for::<_, Breadcrumb>(cx))
.child(Story::label(cx, "Default"))
.child(Breadcrumb::new(
PathBuf::from_str("crates/ui/src/components/toolbar.rs").unwrap(),
vec![
Symbol(vec![
HighlightedText {
text: "impl ".to_string(),
color: HighlightColor::Keyword.hsla(&theme),
},
HighlightedText {
text: "BreadcrumbStory".to_string(),
color: HighlightColor::Function.hsla(&theme),
},
]),
Symbol(vec![
HighlightedText {
text: "fn ".to_string(),
color: HighlightColor::Keyword.hsla(&theme),
},
HighlightedText {
text: "render".to_string(),
color: HighlightColor::Function.hsla(&theme),
},
]),
],
))
}
}
@ -1,36 +0,0 @@
use gpui2::geometry::rems;
use ui::prelude::*;
use ui::{
empty_buffer_example, hello_world_rust_buffer_example,
hello_world_rust_buffer_with_status_example, Buffer,
};

use crate::story::Story;

#[derive(Element, Default)]
pub struct BufferStory {}

impl BufferStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
let theme = theme(cx);

Story::container(cx)
.child(Story::title_for::<_, Buffer>(cx))
.child(Story::label(cx, "Default"))
.child(div().w(rems(64.)).h_96().child(empty_buffer_example()))
.child(Story::label(cx, "Hello World (Rust)"))
.child(
div()
.w(rems(64.))
.h_96()
.child(hello_world_rust_buffer_example(&theme)),
)
.child(Story::label(cx, "Hello World (Rust) with Status"))
.child(
div()
.w(rems(64.))
.h_96()
.child(hello_world_rust_buffer_with_status_example(&theme)),
)
}
}
@ -1,46 +0,0 @@
use chrono::DateTime;
use ui::prelude::*;
use ui::{ChatMessage, ChatPanel, Panel};

use crate::story::Story;

#[derive(Element, Default)]
pub struct ChatPanelStory {}

impl ChatPanelStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
Story::container(cx)
.child(Story::title_for::<_, ChatPanel<V>>(cx))
.child(Story::label(cx, "Default"))
.child(Panel::new(
ScrollState::default(),
|_, _| vec![ChatPanel::new(ScrollState::default()).into_any()],
Box::new(()),
))
.child(Story::label(cx, "With Mesages"))
.child(Panel::new(
ScrollState::default(),
|_, _| {
vec![ChatPanel::new(ScrollState::default())
.with_messages(vec![
ChatMessage::new(
"osiewicz".to_string(),
"is this thing on?".to_string(),
DateTime::parse_from_rfc3339("2023-09-27T15:40:52.707Z")
.unwrap()
.naive_local(),
),
ChatMessage::new(
"maxdeviant".to_string(),
"Reading you loud and clear!".to_string(),
DateTime::parse_from_rfc3339("2023-09-28T15:40:52.707Z")
.unwrap()
.naive_local(),
),
])
.into_any()]
},
Box::new(()),
))
}
}
@ -1,16 +0,0 @@
use ui::prelude::*;
use ui::CollabPanel;

use crate::story::Story;

#[derive(Element, Default)]
pub struct CollabPanelStory {}

impl CollabPanelStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
Story::container(cx)
.child(Story::title_for::<_, CollabPanel<V>>(cx))
.child(Story::label(cx, "Default"))
.child(CollabPanel::new(ScrollState::default()))
}
}
@ -1,21 +0,0 @@
use ui::prelude::*;
use ui::{ContextMenu, ContextMenuItem, Label};

use crate::story::Story;

#[derive(Element, Default)]
pub struct ContextMenuStory {}

impl ContextMenuStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
Story::container(cx)
//.fill(theme.middle.base.default.background)
.child(Story::title_for::<_, ContextMenu>(cx))
.child(Story::label(cx, "Default"))
.child(ContextMenu::new([
ContextMenuItem::header("Section header"),
ContextMenuItem::Separator,
ContextMenuItem::entry(Label::new("Some entry")),
]))
}
}
@ -1,25 +0,0 @@
use ui::prelude::*;
use ui::{static_players, Facepile};

use crate::story::Story;

#[derive(Element, Default)]
pub struct FacepileStory {}

impl FacepileStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
let players = static_players();

Story::container(cx)
.child(Story::title_for::<_, Facepile>(cx))
.child(Story::label(cx, "Default"))
.child(
div()
.flex()
.gap_3()
.child(Facepile::new(players.clone().into_iter().take(1)))
.child(Facepile::new(players.clone().into_iter().take(2)))
.child(Facepile::new(players.clone().into_iter().take(3))),
)
}
}
@ -1,64 +0,0 @@
use itertools::Itertools;
use strum::IntoEnumIterator;
use ui::prelude::*;
use ui::{Keybinding, ModifierKey, ModifierKeys};

use crate::story::Story;

#[derive(Element, Default)]
pub struct KeybindingStory {}

impl KeybindingStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
let all_modifier_permutations = ModifierKey::iter().permutations(2);

Story::container(cx)
.child(Story::title_for::<_, Keybinding>(cx))
.child(Story::label(cx, "Single Key"))
.child(Keybinding::new("Z".to_string(), ModifierKeys::new()))
.child(Story::label(cx, "Single Key with Modifier"))
.child(
div()
.flex()
.gap_3()
.children(ModifierKey::iter().map(|modifier| {
Keybinding::new("C".to_string(), ModifierKeys::new().add(modifier))
})),
)
.child(Story::label(cx, "Single Key with Modifier (Permuted)"))
.child(
div().flex().flex_col().children(
all_modifier_permutations
.chunks(4)
.into_iter()
.map(|chunk| {
div()
.flex()
.gap_4()
.py_3()
.children(chunk.map(|permutation| {
let mut modifiers = ModifierKeys::new();

for modifier in permutation {
modifiers = modifiers.add(modifier);
}

Keybinding::new("X".to_string(), modifiers)
}))
}),
),
)
.child(Story::label(cx, "Single Key with All Modifiers"))
.child(Keybinding::new("Z".to_string(), ModifierKeys::all()))
.child(Story::label(cx, "Chord"))
.child(Keybinding::new_chord(
("A".to_string(), ModifierKeys::new()),
("Z".to_string(), ModifierKeys::new()),
))
.child(Story::label(cx, "Chord with Modifier"))
.child(Keybinding::new_chord(
("A".to_string(), ModifierKeys::new().control(true)),
("Z".to_string(), ModifierKeys::new().shift(true)),
))
}
}
@ -1,53 +0,0 @@
use ui::prelude::*;
use ui::{Keybinding, ModifierKeys, Palette, PaletteItem};

use crate::story::Story;

#[derive(Element, Default)]
pub struct PaletteStory {}

impl PaletteStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
Story::container(cx)
.child(Story::title_for::<_, Palette<V>>(cx))
.child(Story::label(cx, "Default"))
.child(Palette::new(ScrollState::default()))
.child(Story::label(cx, "With Items"))
.child(
Palette::new(ScrollState::default())
.placeholder("Execute a command...")
.items(vec![
PaletteItem::new("theme selector: toggle").keybinding(
Keybinding::new_chord(
("k".to_string(), ModifierKeys::new().command(true)),
("t".to_string(), ModifierKeys::new().command(true)),
),
),
PaletteItem::new("assistant: inline assist").keybinding(Keybinding::new(
"enter".to_string(),
ModifierKeys::new().command(true),
)),
PaletteItem::new("assistant: quote selection").keybinding(Keybinding::new(
">".to_string(),
ModifierKeys::new().command(true),
)),
PaletteItem::new("assistant: toggle focus").keybinding(Keybinding::new(
"?".to_string(),
ModifierKeys::new().command(true),
)),
PaletteItem::new("auto update: check"),
PaletteItem::new("auto update: view release notes"),
PaletteItem::new("branches: open recent").keybinding(Keybinding::new(
"b".to_string(),
ModifierKeys::new().command(true).alt(true),
)),
PaletteItem::new("chat panel: toggle focus"),
PaletteItem::new("cli: install"),
PaletteItem::new("client: sign in"),
PaletteItem::new("client: sign out"),
PaletteItem::new("editor: cancel")
.keybinding(Keybinding::new("escape".to_string(), ModifierKeys::new())),
]),
)
}
}
@ -1,25 +0,0 @@
use ui::prelude::*;
use ui::{Label, Panel};

use crate::story::Story;

#[derive(Element, Default)]
pub struct PanelStory {}

impl PanelStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
Story::container(cx)
.child(Story::title_for::<_, Panel<V>>(cx))
.child(Story::label(cx, "Default"))
.child(Panel::new(
ScrollState::default(),
|_, _| {
vec![div()
.overflow_y_scroll(ScrollState::default())
.children((0..100).map(|ix| Label::new(format!("Item {}", ix + 1))))
.into_any()]
},
Box::new(()),
))
}
}
@ -1,20 +0,0 @@
use ui::prelude::*;
use ui::{Panel, ProjectPanel};

use crate::story::Story;

#[derive(Element, Default)]
pub struct ProjectPanelStory {}

impl ProjectPanelStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
Story::container(cx)
.child(Story::title_for::<_, ProjectPanel<V>>(cx))
.child(Story::label(cx, "Default"))
.child(Panel::new(
ScrollState::default(),
|_, _| vec![ProjectPanel::new(ScrollState::default()).into_any()],
Box::new(()),
))
}
}
@ -1,16 +0,0 @@
use ui::prelude::*;
use ui::StatusBar;

use crate::story::Story;

#[derive(Element, Default)]
pub struct StatusBarStory {}

impl StatusBarStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
Story::container(cx)
.child(Story::title_for::<_, StatusBar<V>>(cx))
.child(Story::label(cx, "Default"))
.child(StatusBar::new())
}
}
@ -1,91 +0,0 @@
use strum::IntoEnumIterator;
use ui::prelude::*;
use ui::{h_stack, v_stack, Tab};

use crate::story::Story;

#[derive(Element, Default)]
pub struct TabStory {}

impl TabStory {
fn render<V: 'static>(&mut self, _: &mut V, cx: &mut ViewContext<V>) -> impl IntoElement<V> {
let git_statuses = GitStatus::iter();
let fs_statuses = FileSystemStatus::iter();

Story::container(cx)
.child(Story::title_for::<_, Tab>(cx))
.child(
h_stack().child(
v_stack()
.gap_2()
.child(Story::label(cx, "Default"))
.child(Tab::new()),
),
)
.child(
h_stack().child(
v_stack().gap_2().child(Story::label(cx, "Current")).child(
h_stack()
.gap_4()
.child(Tab::new().title("Current".to_string()).current(true))
.child(Tab::new().title("Not Current".to_string()).current(false)),
),
),
)
.child(
h_stack().child(
v_stack()
.gap_2()
.child(Story::label(cx, "Titled"))
.child(Tab::new().title("label".to_string())),
),
)
.child(
h_stack().child(
v_stack()
.gap_2()
.child(Story::label(cx, "With Icon"))
.child(
Tab::new()
.title("label".to_string())
.icon(Some(ui::Icon::Envelope)),
),
),
)
.child(
h_stack().child(
v_stack()
.gap_2()
.child(Story::label(cx, "Close Side"))
.child(
h_stack()
.gap_4()
.child(
Tab::new()
.title("Left".to_string())
.close_side(IconSide::Left),
)
.child(Tab::new().title("Right".to_string())),
),
),
)
.child(
v_stack()
.gap_2()
.child(Story::label(cx, "Git Status"))
.child(h_stack().gap_4().children(git_statuses.map(|git_status| {
Tab::new()
.title(git_status.to_string())
.git_status(git_status)
}))),
)
.child(
v_stack()
.gap_2()
.child(Story::label(cx, "File System Status"))
.child(h_stack().gap_4().children(fs_statuses.map(|fs_status| {
Tab::new().title(fs_status.to_string()).fs_status(fs_status)
}))),
)
}
}
Some files were not shown because too many files have changed in this diff