mirror of https://github.com/zed-industries/zed.git
synced 2024-11-07 20:39:04 +03:00

Merge remote-tracking branch 'origin/main' into ai-refactoring

This commit is contained in:
commit 27c90f12f6

7  .github/workflows/publish_collab_image.yml  (vendored)
@@ -11,7 +11,7 @@ env:

jobs:
  publish:
    name: Publish collab server image
    name: Publish collab server image
    runs-on:
      - self-hosted
      - deploy
@@ -22,6 +22,9 @@ jobs:
      - name: Sign into DigitalOcean docker registry
        run: doctl registry login

      - name: Prune Docker system
        run: docker system prune

      - name: Checkout repo
        uses: actions/checkout@v3
        with:
@@ -41,6 +44,6 @@ jobs:

      - name: Build docker image
        run: docker build . --tag registry.digitalocean.com/zed/collab:v${COLLAB_VERSION}

      - name: Publish docker image
        run: docker push registry.digitalocean.com/zed/collab:v${COLLAB_VERSION}

715  Cargo.lock  (generated)
File diff suppressed because it is too large
12  Cargo.toml
@@ -6,6 +6,7 @@ members = [
    "crates/auto_update",
    "crates/breadcrumbs",
    "crates/call",
    "crates/channel",
    "crates/cli",
    "crates/client",
    "crates/clock",
@@ -13,10 +14,13 @@ members = [
    "crates/collab_ui",
    "crates/collections",
    "crates/command_palette",
    "crates/component_test",
    "crates/context_menu",
    "crates/copilot",
    "crates/copilot_button",
    "crates/db",
    "crates/refineable",
    "crates/refineable/derive_refineable",
    "crates/diagnostics",
    "crates/drag_and_drop",
    "crates/editor",
@@ -28,6 +32,8 @@ members = [
    "crates/git",
    "crates/go_to_line",
    "crates/gpui",
    "crates/gpui/playground",
    "crates/gpui/playground_macros",
    "crates/gpui_macros",
    "crates/install_cli",
    "crates/journal",
@@ -91,9 +97,11 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] }
ordered-float = { version = "2.1.1" }
parking_lot = { version = "0.11.1" }
postage = { version = "0.5", features = ["futures-traits"] }
prost = { version = "0.8" }
rand = { version = "0.8.5" }
refineable = { path = "./crates/refineable" }
regex = { version = "1.5" }
rust-embed = { version = "6.3", features = ["include-exclude"] }
rust-embed = { version = "8.0", features = ["include-exclude"] }
schemars = { version = "0.8" }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
@@ -135,7 +143,7 @@ tree-sitter-lua = "0.0.14"
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }

[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" }
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "35a6052fbcafc5e5fc0f9415b8652be7dcaf7222" }
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }

# TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457
3  assets/icons/file_icons/elixir.svg  (Normal file)
@@ -0,0 +1,3 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8 2L8.6165 2.10275C8.65805 1.8534 8.54532 1.60357 8.33085 1.46975C8.11639 1.33594 7.84243 1.34449 7.63673 1.49142L8 2ZM9.88714 8.62257C10.1098 9.73604 9.86526 10.3554 9.4569 10.7229C9.00367 11.1308 8.19498 11.375 7 11.375V12.625C8.30502 12.625 9.49633 12.3692 10.2931 11.6521C11.1347 10.8946 11.3902 9.76396 11.1129 8.37743L9.88714 8.62257ZM7 11.375C5.87824 11.375 5.17563 11.0417 4.75444 10.6206C4.32847 10.1946 4.125 9.61372 4.125 9H2.875C2.875 9.88628 3.17153 10.8054 3.87056 11.5044C4.57437 12.2083 5.62176 12.625 7 12.625V11.375ZM4.125 9C4.125 7.72699 5.00594 4.90668 8.36327 2.50858L7.63673 1.49142C3.99406 4.09332 2.875 7.27301 2.875 9H4.125ZM7.3835 1.89725C7.09577 3.62363 7.69108 4.78835 8.35497 5.78419C9.03189 6.79957 9.66859 7.52983 9.88714 8.62257L11.1129 8.37743C10.8314 6.97017 9.96811 5.95043 9.39503 5.09081C8.80892 4.21165 8.40423 3.37637 8.6165 2.10275L7.3835 1.89725Z" fill="black"/>
</svg>
After  |  Width: 14  |  Height: 14  |  Size: 1017 B
@@ -21,23 +21,27 @@
    "dll": "storage",
    "doc": "document",
    "docx": "document",
    "eex": "elixir",
    "eslintrc": "eslint",
    "eslintrc.js": "eslint",
    "eslintrc.json": "eslint",
    "ex": "elixir",
    "exs": "elixir",
    "fish": "terminal",
    "flac": "audio",
    "fmp": "storage",
    "fp7": "storage",
    "flac": "audio",
    "fish": "terminal",
    "frm": "storage",
    "gdb": "storage",
    "gif": "image",
    "gitattributes": "vcs",
    "gitignore": "vcs",
    "gitmodules": "vcs",
    "gif": "image",
    "go": "code",
    "h": "code",
    "handlebars": "code",
    "hbs": "template",
    "heex": "elixir",
    "htm": "template",
    "html": "template",
    "ib": "storage",
@@ -51,16 +55,16 @@
    "ldf": "storage",
    "lock": "lock",
    "log": "log",
    "mdb": "storage",
    "md": "document",
    "mdb": "storage",
    "mdf": "storage",
    "mdx": "document",
    "mp3": "audio",
    "mp4": "video",
    "myd": "storage",
    "myi": "storage",
    "ods": "document",
    "odp": "document",
    "ods": "document",
    "odt": "document",
    "ogg": "video",
    "pdb": "storage",
@@ -74,24 +78,24 @@
    "profile": "terminal",
    "ps1": "terminal",
    "psd": "image",
    "py": "code",
    "py": "python",
    "rb": "code",
    "rkt": "code",
    "rs": "rust",
    "rtf": "document",
    "sav": "storage",
    "scm": "code",
    "sdf": "storage",
    "sh": "terminal",
    "sqlite": "storage",
    "sdf": "storage",
    "svelte": "template",
    "svg": "image",
    "swift": "code",
    "ts": "typescript",
    "tsx": "code",
    "tiff": "image",
    "toml": "toml",
    "ts": "typescript",
    "tsv": "storage",
    "tsx": "code",
    "txt": "document",
    "wav": "audio",
    "webm": "video",
@@ -103,9 +107,9 @@
    "zlogin": "terminal",
    "zsh": "terminal",
    "zsh_aliases": "terminal",
    "zshenv": "terminal",
    "zsh_histfile": "terminal",
    "zsh_profile": "terminal",
    "zshenv": "terminal",
    "zshrc": "terminal"
},
"types": {
@@ -127,6 +131,9 @@
    "document": {
        "icon": "icons/file_icons/book.svg"
    },
    "elixir": {
        "icon": "icons/file_icons/elixir.svg"
    },
    "eslint": {
        "icon": "icons/file_icons/eslint.svg"
    },
@@ -145,9 +152,15 @@
    "log": {
        "icon": "icons/file_icons/info.svg"
    },
    "phoenix": {
        "icon": "icons/file_icons/phoenix.svg"
    },
    "prettier": {
        "icon": "icons/file_icons/prettier.svg"
    },
    "python": {
        "icon": "icons/file_icons/python.svg"
    },
    "rust": {
        "icon": "icons/file_icons/rust.svg"
    },
4  assets/icons/file_icons/phoenix.svg  (Normal file)
@@ -0,0 +1,4 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M12 8C12 7.32138 11.9375 6.5 11.7188 5.75C11.0625 6.53125 9.875 7.1875 9 7.5C9.75 4.90625 8.5625 2.1875 7 2C7 3.96875 6.625 4.90625 5.5 6.5C4 4 2.5 5.5 2 6C2.5 6.5 3.21832 7.24064 3.34375 8.3125C3.6875 11.25 5.75 12 7.5 12C9.25 12 9.5 10 11.5 11" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<circle cx="4.03125" cy="6.625" r="1.53125" fill="black"/>
</svg>
After  |  Width: 14  |  Height: 14  |  Size: 502 B

6  assets/icons/file_icons/python.svg  (Normal file)
@@ -0,0 +1,6 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.18452 1.9164C5.01625 1.9164 3.98489 2.77625 3.91991 3.9468H3.72024C2.81569 3.9468 2 4.63733 2 5.587V7.1098C2 8.05947 2.81569 8.75 3.72024 8.75H4.33631C4.67376 8.75 5.02976 8.48561 5.02976 8.06155C5.02976 7.46058 5.51694 6.9734 6.11791 6.9734H7.27976C8.18431 6.9734 9 6.28288 9 5.3332V4.0642C9 2.83419 7.93913 1.9164 6.73214 1.9164H6.18452Z" stroke="black" stroke-width="1.25"/>
<path d="M7.79613 12.0836C8.97889 12.0836 10.0103 11.2025 10.0702 10.0191H10.2738C11.1885 10.0191 12 9.31459 12 8.36187V6.8135C12 5.86077 11.1885 5.15625 10.2738 5.15625H9.65439C9.30991 5.15625 8.96057 5.42749 8.96057 5.84577C8.96057 6.46262 8.46051 6.96268 7.84365 6.96268H6.69494C5.78027 6.96268 4.96875 7.6672 4.96875 8.61993V9.91023C4.96875 11.148 6.02678 12.0836 7.24554 12.0836H7.79613Z" stroke="black" stroke-width="1.25"/>
<circle cx="6.03975" cy="3.9167" r="0.633501" fill="black"/>
<circle cx="7.92285" cy="10.0793" r="0.670898" fill="black"/>
</svg>
After  |  Width: 14  |  Height: 14  |  Size: 1.0 KiB
@@ -173,6 +173,7 @@
    "context": "Editor && mode == full",
    "bindings": {
        "enter": "editor::Newline",
        "shift-enter": "editor::Newline",
        "cmd-shift-enter": "editor::NewlineAbove",
        "cmd-enter": "editor::NewlineBelow",
        "alt-z": "editor::ToggleSoftWrap",
@@ -543,6 +544,8 @@
    "bindings": {
        "left": "project_panel::CollapseSelectedEntry",
        "right": "project_panel::ExpandSelectedEntry",
        "cmd-n": "project_panel::NewFile",
        "alt-cmd-n": "project_panel::NewDirectory",
        "cmd-x": "project_panel::Cut",
        "cmd-c": "project_panel::Copy",
        "cmd-v": "project_panel::Paste",
@@ -2,7 +2,6 @@
{
    "bindings": {
        "cmd-shift-o": "projects::OpenRecent",
        "cmd-shift-b": "branches::OpenRecent",
        "cmd-alt-tab": "project_panel::ToggleFocus"
    }
},
@@ -12,8 +11,9 @@
        "cmd-l": "go_to_line::Toggle",
        "ctrl-shift-d": "editor::DuplicateLine",
        "cmd-b": "editor::GoToDefinition",
        "alt-cmd-b": "editor::GoToDefinition",
        "cmd-j": "editor::ScrollCursorCenter",
        "cmd-enter": "editor::NewlineBelow",
        "cmd-alt-enter": "editor::NewLineAbove",
        "cmd-shift-l": "editor::SelectLine",
        "cmd-shift-t": "outline::Toggle",
        "alt-backspace": "editor::DeleteToPreviousWordStart",
@@ -51,14 +51,17 @@
            }
        ],
        "ctrl-shift-left": "editor::SelectToPreviousSubwordStart",
        "ctrl-shift-right": "editor::SelectToNextSubwordEnd"
        "ctrl-shift-right": "editor::SelectToNextSubwordEnd",
        "ctrl-w": "editor::SelectNext",
        "ctrl-u": "editor::ConvertToUpperCase",
        "ctrl-shift-u": "editor::ConvertToLowerCase",
        "ctrl-alt-u": "editor::ConvertToUpperCamelCase",
        "ctrl-_": "editor::ConvertToSnakeCase"
    }
},
{
    "context": "Editor && mode == full",
    "bindings": {
        "cmd-alt-enter": "editor::NewlineAbove"
    }
    "bindings": {}
},
{
    "context": "BufferSearchBar",
@@ -85,5 +88,9 @@
{
    "context": "ProjectPanel",
    "bindings": {}
},
{
    "context": "Dock",
    "bindings": {}
}
]
@@ -103,9 +103,19 @@
    ],
    "v": "vim::ToggleVisual",
    "shift-v": "vim::ToggleVisualLine",
    "ctrl-v": "vim::ToggleVisualBlock",
    "ctrl-q": "vim::ToggleVisualBlock",
    "*": "vim::MoveToNext",
    "#": "vim::MoveToPrev",
    "0": "vim::StartOfLine", // When no number operator present, use start of line motion
    "ctrl-f": "vim::PageDown",
    "pagedown": "vim::PageDown",
    "ctrl-b": "vim::PageUp",
    "pageup": "vim::PageUp",
    "ctrl-d": "vim::ScrollDown",
    "ctrl-u": "vim::ScrollUp",
    "ctrl-e": "vim::LineDown",
    "ctrl-y": "vim::LineUp",
    // "g" commands
    "g g": "vim::StartOfDocument",
    "g h": "editor::Hover",
@@ -277,6 +287,12 @@
    "shift-o": "vim::InsertLineAbove",
    "~": "vim::ChangeCase",
    "p": "vim::Paste",
    "shift-p": [
        "vim::Paste",
        {
            "before": true
        }
    ],
    "u": "editor::Undo",
    "ctrl-r": "editor::Redo",
    "/": "vim::Search",
@@ -293,14 +309,6 @@
            "backwards": true
        }
    ],
    "ctrl-f": "vim::PageDown",
    "pagedown": "vim::PageDown",
    "ctrl-b": "vim::PageUp",
    "pageup": "vim::PageUp",
    "ctrl-d": "vim::ScrollDown",
    "ctrl-u": "vim::ScrollUp",
    "ctrl-e": "vim::LineDown",
    "ctrl-y": "vim::LineUp",
    "r": [
        "vim::PushOperator",
        "Replace"
@@ -365,7 +373,7 @@
    }
},
{
    "context": "Editor && vim_mode == visual && !VimWaiting",
    "context": "Editor && vim_mode == visual && !VimWaiting && !VimObject",
    "bindings": {
        "u": "editor::Undo",
        "o": "vim::OtherEnd",
@@ -373,10 +381,21 @@
        "d": "vim::VisualDelete",
        "x": "vim::VisualDelete",
        "y": "vim::VisualYank",
        "p": "vim::VisualPaste",
        "p": "vim::Paste",
        "shift-p": [
            "vim::Paste",
            {
                "preserveClipboard": true
            }
        ],
        "s": "vim::Substitute",
        "c": "vim::Substitute",
        "~": "vim::ChangeCase",
        "shift-i": [
            "vim::SwitchMode",
            "Insert"
        ],
        "shift-a": "vim::InsertAfter",
        "r": [
            "vim::PushOperator",
            "Replace"
@@ -394,11 +413,27 @@
            "Normal"
        ],
        ">": "editor::Indent",
        "<": "editor::Outdent"
        "<": "editor::Outdent",
        "i": [
            "vim::PushOperator",
            {
                "Object": {
                    "around": false
                }
            }
        ],
        "a": [
            "vim::PushOperator",
            {
                "Object": {
                    "around": true
                }
            }
        ],
    }
},
{
    "context": "Editor && vim_mode == insert",
    "context": "Editor && vim_mode == insert && !menu",
    "bindings": {
        "escape": "vim::NormalBefore",
        "ctrl-c": "vim::NormalBefore",
@@ -126,7 +126,7 @@
    // Whether to show the collaboration panel button in the status bar.
    "button": true,
    // Where to dock channels panel. Can be 'left' or 'right'.
    "dock": "right",
    "dock": "left",
    // Default width of the channels panel.
    "default_width": 240
},
@@ -138,7 +138,13 @@
    // Default width when the assistant is docked to the left or right.
    "default_width": 640,
    // Default height when the assistant is docked to the bottom.
    "default_height": 320
    "default_height": 320,
    // The default OpenAI model to use when starting new conversations. This
    // setting can take two values:
    //
    // 1. "gpt-3.5-turbo-0613"
    // 2. "gpt-4-0613"
    "default_open_ai_model": "gpt-4-0613"
},
// Whether the screen sharing icon is shown in the os status bar.
"show_call_status_icon": true,
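Editor's sketch (not part of the commit): the strings accepted by "default_open_ai_model" map onto the OpenAIModel enum added to crates/ai/src/assistant_settings.rs later in this diff via serde renames. A minimal standalone illustration, assuming serde and serde_json are available and omitting the JsonSchema derive the real settings types use:

use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
enum OpenAIModel {
    #[serde(rename = "gpt-3.5-turbo-0613")]
    ThreePointFiveTurbo,
    #[serde(rename = "gpt-4-0613")]
    Four,
}

fn main() {
    // The settings value "gpt-4-0613" deserializes to OpenAIModel::Four...
    let model: OpenAIModel = serde_json::from_str("\"gpt-4-0613\"").unwrap();
    assert_eq!(model, OpenAIModel::Four);
    // ...and serializes back to the same string.
    assert_eq!(serde_json::to_string(&model).unwrap(), "\"gpt-4-0613\"");
}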
@@ -4,6 +4,7 @@ mod streaming_diff;

use anyhow::{anyhow, Result};
pub use assistant::AssistantPanel;
use assistant_settings::OpenAIModel;
use chrono::{DateTime, Local};
use collections::HashMap;
use fs::Fs;
@@ -65,7 +66,7 @@ struct SavedConversation {
    messages: Vec<SavedMessage>,
    message_metadata: HashMap<MessageId, MessageMetadata>,
    summary: String,
    model: String,
    model: OpenAIModel,
}

impl SavedConversation {
@@ -1,5 +1,5 @@
use crate::{
    assistant_settings::{AssistantDockPosition, AssistantSettings},
    assistant_settings::{AssistantDockPosition, AssistantSettings, OpenAIModel},
    stream_completion,
    streaming_diff::{Hunk, StreamingDiff},
    MessageId, MessageMetadata, MessageStatus, OpenAIRequest, RequestMessage, Role,
@@ -1314,7 +1314,7 @@ struct Conversation {
    pending_summary: Task<Option<()>>,
    completion_count: usize,
    pending_completions: Vec<PendingCompletion>,
    model: String,
    model: OpenAIModel,
    token_count: Option<usize>,
    max_token_count: usize,
    pending_token_count: Task<Option<()>>,
@@ -1334,7 +1334,6 @@ impl Conversation {
        language_registry: Arc<LanguageRegistry>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        let model = "gpt-3.5-turbo-0613";
        let markdown = language_registry.language_for_name("Markdown");
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(0, "", cx);
@@ -1353,6 +1352,9 @@ impl Conversation {
            buffer
        });

        let settings = settings::get::<AssistantSettings>(cx);
        let model = settings.default_open_ai_model.clone();

        let mut this = Self {
            message_anchors: Default::default(),
            messages_metadata: Default::default(),
@@ -1362,9 +1364,9 @@ impl Conversation {
            completion_count: Default::default(),
            pending_completions: Default::default(),
            token_count: None,
            max_token_count: tiktoken_rs::model::get_context_size(model),
            max_token_count: tiktoken_rs::model::get_context_size(&model.full_name()),
            pending_token_count: Task::ready(None),
            model: model.into(),
            model: model.clone(),
            _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
            pending_save: Task::ready(Ok(())),
            path: None,
@@ -1458,7 +1460,7 @@ impl Conversation {
            completion_count: Default::default(),
            pending_completions: Default::default(),
            token_count: None,
            max_token_count: tiktoken_rs::model::get_context_size(&model),
            max_token_count: tiktoken_rs::model::get_context_size(&model.full_name()),
            pending_token_count: Task::ready(None),
            model,
            _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
@@ -1512,13 +1514,16 @@ impl Conversation {
            cx.background().timer(Duration::from_millis(200)).await;
            let token_count = cx
                .background()
                .spawn(async move { tiktoken_rs::num_tokens_from_messages(&model, &messages) })
                .spawn(async move {
                    tiktoken_rs::num_tokens_from_messages(&model.full_name(), &messages)
                })
                .await?;

            this.upgrade(&cx)
                .ok_or_else(|| anyhow!("conversation was dropped"))?
                .update(&mut cx, |this, cx| {
                    this.max_token_count = tiktoken_rs::model::get_context_size(&this.model);
                    this.max_token_count =
                        tiktoken_rs::model::get_context_size(&this.model.full_name());
                    this.token_count = Some(token_count);
                    cx.notify()
                });
@@ -1532,7 +1537,7 @@ impl Conversation {
        Some(self.max_token_count as isize - self.token_count? as isize)
    }

    fn set_model(&mut self, model: String, cx: &mut ModelContext<Self>) {
    fn set_model(&mut self, model: OpenAIModel, cx: &mut ModelContext<Self>) {
        self.model = model;
        self.count_remaining_tokens(cx);
        cx.notify();
@@ -1574,7 +1579,7 @@ impl Conversation {
            }
        } else {
            let request = OpenAIRequest {
                model: self.model.clone(),
                model: self.model.full_name().to_string(),
                messages: self
                    .messages(cx)
                    .filter(|message| matches!(message.status, MessageStatus::Done))
@@ -1900,7 +1905,7 @@ impl Conversation {
                .into(),
        }));
        let request = OpenAIRequest {
            model: self.model.clone(),
            model: self.model.full_name().to_string(),
            messages: messages.collect(),
            stream: true,
        };
@@ -2504,11 +2509,8 @@ impl ConversationEditor {

    fn cycle_model(&mut self, cx: &mut ViewContext<Self>) {
        self.conversation.update(cx, |conversation, cx| {
            let new_model = match conversation.model.as_str() {
                "gpt-4-0613" => "gpt-3.5-turbo-0613",
                _ => "gpt-4-0613",
            };
            conversation.set_model(new_model.into(), cx);
            let new_model = conversation.model.cycle();
            conversation.set_model(new_model, cx);
        });
    }

@@ -2530,7 +2532,8 @@ impl ConversationEditor {

        MouseEventHandler::new::<Model, _>(0, cx, |state, cx| {
            let style = style.model.style_for(state);
            Label::new(self.conversation.read(cx).model.clone(), style.text.clone())
            let model_display_name = self.conversation.read(cx).model.short_name();
            Label::new(model_display_name, style.text.clone())
                .contained()
                .with_style(style.container)
        })
@@ -2712,6 +2715,8 @@ mod tests {

    #[gpui::test]
    fn test_inserting_and_removing_messages(cx: &mut AppContext) {
        cx.set_global(SettingsStore::test(cx));
        init(cx);
        let registry = Arc::new(LanguageRegistry::test());
        let conversation = cx.add_model(|cx| Conversation::new(Default::default(), registry, cx));
        let buffer = conversation.read(cx).buffer.clone();
@@ -2838,6 +2843,8 @@ mod tests {

    #[gpui::test]
    fn test_message_splitting(cx: &mut AppContext) {
        cx.set_global(SettingsStore::test(cx));
        init(cx);
        let registry = Arc::new(LanguageRegistry::test());
        let conversation = cx.add_model(|cx| Conversation::new(Default::default(), registry, cx));
        let buffer = conversation.read(cx).buffer.clone();
@@ -2932,6 +2939,8 @@ mod tests {

    #[gpui::test]
    fn test_messages_for_offsets(cx: &mut AppContext) {
        cx.set_global(SettingsStore::test(cx));
        init(cx);
        let registry = Arc::new(LanguageRegistry::test());
        let conversation = cx.add_model(|cx| Conversation::new(Default::default(), registry, cx));
        let buffer = conversation.read(cx).buffer.clone();
@@ -3012,6 +3021,8 @@ mod tests {

    #[gpui::test]
    fn test_serialization(cx: &mut AppContext) {
        cx.set_global(SettingsStore::test(cx));
        init(cx);
        let registry = Arc::new(LanguageRegistry::test());
        let conversation =
            cx.add_model(|cx| Conversation::new(Default::default(), registry.clone(), cx));
@@ -3,6 +3,37 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Setting;

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub enum OpenAIModel {
    #[serde(rename = "gpt-3.5-turbo-0613")]
    ThreePointFiveTurbo,
    #[serde(rename = "gpt-4-0613")]
    Four,
}

impl OpenAIModel {
    pub fn full_name(&self) -> &'static str {
        match self {
            OpenAIModel::ThreePointFiveTurbo => "gpt-3.5-turbo-0613",
            OpenAIModel::Four => "gpt-4-0613",
        }
    }

    pub fn short_name(&self) -> &'static str {
        match self {
            OpenAIModel::ThreePointFiveTurbo => "gpt-3.5-turbo",
            OpenAIModel::Four => "gpt-4",
        }
    }

    pub fn cycle(&self) -> Self {
        match self {
            OpenAIModel::ThreePointFiveTurbo => OpenAIModel::Four,
            OpenAIModel::Four => OpenAIModel::ThreePointFiveTurbo,
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
@@ -17,6 +48,7 @@ pub struct AssistantSettings {
    pub dock: AssistantDockPosition,
    pub default_width: f32,
    pub default_height: f32,
    pub default_open_ai_model: OpenAIModel,
}

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
@@ -25,6 +57,7 @@ pub struct AssistantSettingsContent {
    pub dock: Option<AssistantDockPosition>,
    pub default_width: Option<f32>,
    pub default_height: Option<f32>,
    pub default_open_ai_model: Option<OpenAIModel>,
}

impl Setting for AssistantSettings {
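Editor's sketch (not part of the commit): expected behavior of the new OpenAIModel helpers. full_name() is the identifier sent to the OpenAI API and to tiktoken_rs, short_name() is the label shown in the conversation editor, and cycle() backs the model-toggle button. The test module below is hypothetical:

#[cfg(test)]
mod open_ai_model_sketch {
    use super::OpenAIModel;

    #[test]
    fn names_and_cycling() {
        assert_eq!(OpenAIModel::Four.full_name(), "gpt-4-0613");
        assert_eq!(OpenAIModel::Four.short_name(), "gpt-4");
        // cycle() flips between the two supported models.
        assert_eq!(OpenAIModel::ThreePointFiveTurbo.cycle(), OpenAIModel::Four);
        assert_eq!(OpenAIModel::Four.cycle(), OpenAIModel::ThreePointFiveTurbo);
    }
}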
@@ -13,7 +13,7 @@ gpui = { path = "../gpui" }
collections = { path = "../collections" }
util = { path = "../util" }

rodio = "0.17.1"
rodio = { version = "0.17.1", default-features = false, features = ["wav"] }

log.workspace = true

@@ -20,6 +20,7 @@ test-support = [

[dependencies]
audio = { path = "../audio" }
channel = { path = "../channel" }
client = { path = "../client" }
collections = { path = "../collections" }
gpui = { path = "../gpui" }
@@ -7,9 +7,8 @@ use std::sync::Arc;
use anyhow::{anyhow, Result};
use audio::Audio;
use call_settings::CallSettings;
use client::{
    proto, ChannelId, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
};
use channel::ChannelId;
use client::{proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore};
use collections::HashSet;
use futures::{future::Shared, FutureExt};
use postage::watch;
@@ -274,7 +273,7 @@ impl ActiveCall {
            .borrow_mut()
            .take()
            .ok_or_else(|| anyhow!("no incoming call"))?;
        Self::report_call_event_for_room("decline incoming", call.room_id, &self.client, cx);
        Self::report_call_event_for_room("decline incoming", call.room_id, None, &self.client, cx);
        self.client.send(proto::DeclineCall {
            room_id: call.room_id,
        })?;
@@ -406,19 +405,31 @@ impl ActiveCall {

    fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
        if let Some(room) = self.room() {
            Self::report_call_event_for_room(operation, room.read(cx).id(), &self.client, cx)
            let room = room.read(cx);
            Self::report_call_event_for_room(
                operation,
                room.id(),
                room.channel_id(),
                &self.client,
                cx,
            )
        }
    }

    pub fn report_call_event_for_room(
        operation: &'static str,
        room_id: u64,
        channel_id: Option<u64>,
        client: &Arc<Client>,
        cx: &AppContext,
    ) {
        let telemetry = client.telemetry();
        let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
        let event = ClickhouseEvent::Call { operation, room_id };
        let event = ClickhouseEvent::Call {
            operation,
            room_id,
            channel_id,
        };
        telemetry.report_clickhouse_event(event, telemetry_settings);
    }
}
51  crates/channel/Cargo.toml  (Normal file)
@ -0,0 +1,51 @@
|
||||
[package]
|
||||
name = "channel"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/channel.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"]
|
||||
|
||||
[dependencies]
|
||||
client = { path = "../client" }
|
||||
collections = { path = "../collections" }
|
||||
db = { path = "../db" }
|
||||
gpui = { path = "../gpui" }
|
||||
util = { path = "../util" }
|
||||
rpc = { path = "../rpc" }
|
||||
text = { path = "../text" }
|
||||
language = { path = "../language" }
|
||||
settings = { path = "../settings" }
|
||||
staff_mode = { path = "../staff_mode" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
image = "0.23"
|
||||
lazy_static.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
schemars.workspace = true
|
||||
smol.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
uuid = { version = "1.1.2", features = ["v4"] }
|
||||
url = "2.2"
|
||||
serde.workspace = true
|
||||
serde_derive.workspace = true
|
||||
tempfile = "3"
|
||||
|
||||
[dev-dependencies]
|
||||
collections = { path = "../collections", features = ["test-support"] }
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
rpc = { path = "../rpc", features = ["test-support"] }
|
||||
settings = { path = "../settings", features = ["test-support"] }
|
||||
util = { path = "../util", features = ["test-support"] }
|
14  crates/channel/src/channel.rs  (Normal file)
@ -0,0 +1,14 @@
|
||||
mod channel_store;
|
||||
|
||||
pub mod channel_buffer;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use channel_store::*;
|
||||
use client::Client;
|
||||
|
||||
#[cfg(test)]
|
||||
mod channel_store_tests;
|
||||
|
||||
pub fn init(client: &Arc<Client>) {
|
||||
channel_buffer::init(client);
|
||||
}
|
197  crates/channel/src/channel_buffer.rs  (Normal file)
@ -0,0 +1,197 @@
|
||||
use crate::Channel;
|
||||
use anyhow::Result;
|
||||
use client::Client;
|
||||
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle};
|
||||
use rpc::{proto, TypedEnvelope};
|
||||
use std::sync::Arc;
|
||||
use util::ResultExt;
|
||||
|
||||
pub(crate) fn init(client: &Arc<Client>) {
|
||||
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
|
||||
client.add_model_message_handler(ChannelBuffer::handle_add_channel_buffer_collaborator);
|
||||
client.add_model_message_handler(ChannelBuffer::handle_remove_channel_buffer_collaborator);
|
||||
}
|
||||
|
||||
pub struct ChannelBuffer {
|
||||
pub(crate) channel: Arc<Channel>,
|
||||
connected: bool,
|
||||
collaborators: Vec<proto::Collaborator>,
|
||||
buffer: ModelHandle<language::Buffer>,
|
||||
client: Arc<Client>,
|
||||
subscription: Option<client::Subscription>,
|
||||
}
|
||||
|
||||
pub enum Event {
|
||||
CollaboratorsChanged,
|
||||
Disconnected,
|
||||
}
|
||||
|
||||
impl Entity for ChannelBuffer {
|
||||
type Event = Event;
|
||||
|
||||
fn release(&mut self, _: &mut AppContext) {
|
||||
if self.connected {
|
||||
self.client
|
||||
.send(proto::LeaveChannelBuffer {
|
||||
channel_id: self.channel.id,
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ChannelBuffer {
|
||||
pub(crate) async fn new(
|
||||
channel: Arc<Channel>,
|
||||
client: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<ModelHandle<Self>> {
|
||||
let response = client
|
||||
.request(proto::JoinChannelBuffer {
|
||||
channel_id: channel.id,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let base_text = response.base_text;
|
||||
let operations = response
|
||||
.operations
|
||||
.into_iter()
|
||||
.map(language::proto::deserialize_operation)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let collaborators = response.collaborators;
|
||||
|
||||
let buffer = cx.add_model(|_| {
|
||||
language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text)
|
||||
});
|
||||
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
|
||||
|
||||
let subscription = client.subscribe_to_entity(channel.id)?;
|
||||
|
||||
anyhow::Ok(cx.add_model(|cx| {
|
||||
cx.subscribe(&buffer, Self::on_buffer_update).detach();
|
||||
|
||||
Self {
|
||||
buffer,
|
||||
client,
|
||||
connected: true,
|
||||
collaborators,
|
||||
channel,
|
||||
subscription: Some(subscription.set_model(&cx.handle(), &mut cx.to_async())),
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
async fn handle_update_channel_buffer(
|
||||
this: ModelHandle<Self>,
|
||||
update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
let ops = update_channel_buffer
|
||||
.payload
|
||||
.operations
|
||||
.into_iter()
|
||||
.map(language::proto::deserialize_operation)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
cx.notify();
|
||||
this.buffer
|
||||
.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_add_channel_buffer_collaborator(
|
||||
this: ModelHandle<Self>,
|
||||
envelope: TypedEnvelope<proto::AddChannelBufferCollaborator>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
let collaborator = envelope.payload.collaborator.ok_or_else(|| {
|
||||
anyhow::anyhow!(
|
||||
"Should have gotten a collaborator in the AddChannelBufferCollaborator message"
|
||||
)
|
||||
})?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.collaborators.push(collaborator);
|
||||
cx.emit(Event::CollaboratorsChanged);
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_remove_channel_buffer_collaborator(
|
||||
this: ModelHandle<Self>,
|
||||
message: TypedEnvelope<proto::RemoveChannelBufferCollaborator>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.collaborators.retain(|collaborator| {
|
||||
if collaborator.peer_id == message.payload.peer_id {
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.remove_peer(collaborator.replica_id as u16, cx)
|
||||
});
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
cx.emit(Event::CollaboratorsChanged);
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn on_buffer_update(
|
||||
&mut self,
|
||||
_: ModelHandle<language::Buffer>,
|
||||
event: &language::Event,
|
||||
_: &mut ModelContext<Self>,
|
||||
) {
|
||||
if let language::Event::Operation(operation) = event {
|
||||
let operation = language::proto::serialize_operation(operation);
|
||||
self.client
|
||||
.send(proto::UpdateChannelBuffer {
|
||||
channel_id: self.channel.id,
|
||||
operations: vec![operation],
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn buffer(&self) -> ModelHandle<language::Buffer> {
|
||||
self.buffer.clone()
|
||||
}
|
||||
|
||||
pub fn collaborators(&self) -> &[proto::Collaborator] {
|
||||
&self.collaborators
|
||||
}
|
||||
|
||||
pub fn channel(&self) -> Arc<Channel> {
|
||||
self.channel.clone()
|
||||
}
|
||||
|
||||
pub(crate) fn disconnect(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if self.connected {
|
||||
self.connected = false;
|
||||
self.subscription.take();
|
||||
cx.emit(Event::Disconnected);
|
||||
cx.notify()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_connected(&self) -> bool {
|
||||
self.connected
|
||||
}
|
||||
|
||||
pub fn replica_id(&self, cx: &AppContext) -> u16 {
|
||||
self.buffer.read(cx).replica_id()
|
||||
}
|
||||
}
|
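Editor's sketch (not part of the commit): a rough usage example for the new channel buffer API, assuming an async gpui context. The function name open_and_read and the channel_store/channel_id values are placeholders the caller would already have; error handling is elided.

use anyhow::Result;
use channel::{ChannelId, ChannelStore};
use gpui::{AsyncAppContext, ModelHandle};

async fn open_and_read(
    channel_store: ModelHandle<ChannelStore>,
    channel_id: ChannelId,
    mut cx: AsyncAppContext,
) -> Result<String> {
    // ChannelStore::open_channel_buffer deduplicates concurrent opens and
    // hands back a shared ChannelBuffer handle.
    let channel_buffer = channel_store
        .update(&mut cx, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await?;
    // The wrapped language::Buffer is kept up to date by the
    // UpdateChannelBuffer messages handled in channel_buffer.rs above.
    Ok(channel_buffer.read_with(&cx, |channel_buffer, cx| {
        channel_buffer.buffer().read(cx).text()
    }))
}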
@ -1,19 +1,14 @@
|
||||
use crate::Status;
|
||||
use crate::{Client, Subscription, User, UserStore};
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use collections::HashSet;
|
||||
use futures::channel::mpsc;
|
||||
use futures::Future;
|
||||
use futures::StreamExt;
|
||||
use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
|
||||
use crate::channel_buffer::ChannelBuffer;
|
||||
use anyhow::{anyhow, Result};
|
||||
use client::{Client, Status, Subscription, User, UserId, UserStore};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
|
||||
use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
|
||||
use rpc::{proto, TypedEnvelope};
|
||||
use std::sync::Arc;
|
||||
use util::ResultExt;
|
||||
|
||||
pub type ChannelId = u64;
|
||||
pub type UserId = u64;
|
||||
|
||||
pub struct ChannelStore {
|
||||
channels_by_id: HashMap<ChannelId, Arc<Channel>>,
|
||||
@ -23,6 +18,7 @@ pub struct ChannelStore {
|
||||
channels_with_admin_privileges: HashSet<ChannelId>,
|
||||
outgoing_invites: HashSet<(ChannelId, UserId)>,
|
||||
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
|
||||
opened_buffers: HashMap<ChannelId, OpenedChannelBuffer>,
|
||||
client: Arc<Client>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
_rpc_subscription: Subscription,
|
||||
@ -57,6 +53,11 @@ pub enum ChannelMemberStatus {
|
||||
NotMember,
|
||||
}
|
||||
|
||||
enum OpenedChannelBuffer {
|
||||
Open(WeakModelHandle<ChannelBuffer>),
|
||||
Loading(Shared<Task<Result<ModelHandle<ChannelBuffer>, Arc<anyhow::Error>>>>),
|
||||
}
|
||||
|
||||
impl ChannelStore {
|
||||
pub fn new(
|
||||
client: Arc<Client>,
|
||||
@ -70,16 +71,14 @@ impl ChannelStore {
|
||||
let mut connection_status = client.status();
|
||||
let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
|
||||
while let Some(status) = connection_status.next().await {
|
||||
if matches!(status, Status::ConnectionLost | Status::SignedOut) {
|
||||
if !status.is_connected() {
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.channels_by_id.clear();
|
||||
this.channel_invitations.clear();
|
||||
this.channel_participants.clear();
|
||||
this.channels_with_admin_privileges.clear();
|
||||
this.channel_paths.clear();
|
||||
this.outgoing_invites.clear();
|
||||
cx.notify();
|
||||
if matches!(status, Status::ConnectionLost | Status::SignedOut) {
|
||||
this.handle_disconnect(cx);
|
||||
} else {
|
||||
this.disconnect_buffers(cx);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
break;
|
||||
@ -87,6 +86,7 @@ impl ChannelStore {
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Self {
|
||||
channels_by_id: HashMap::default(),
|
||||
channel_invitations: Vec::default(),
|
||||
@ -94,6 +94,7 @@ impl ChannelStore {
|
||||
channel_participants: Default::default(),
|
||||
channels_with_admin_privileges: Default::default(),
|
||||
outgoing_invites: Default::default(),
|
||||
opened_buffers: Default::default(),
|
||||
update_channels_tx,
|
||||
client,
|
||||
user_store,
|
||||
@ -114,6 +115,16 @@ impl ChannelStore {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_children(&self, channel_id: ChannelId) -> bool {
|
||||
self.channel_paths.iter().any(|path| {
|
||||
if let Some(ix) = path.iter().position(|id| *id == channel_id) {
|
||||
path.len() > ix + 1
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn channel_count(&self) -> usize {
|
||||
self.channel_paths.len()
|
||||
}
|
||||
@ -141,6 +152,74 @@ impl ChannelStore {
|
||||
self.channels_by_id.get(&channel_id)
|
||||
}
|
||||
|
||||
pub fn open_channel_buffer(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<ModelHandle<ChannelBuffer>>> {
|
||||
// Make sure that a given channel buffer is only opened once per
|
||||
// app instance, even if this method is called multiple times
|
||||
// with the same channel id while the first task is still running.
|
||||
let task = loop {
|
||||
match self.opened_buffers.entry(channel_id) {
|
||||
hash_map::Entry::Occupied(e) => match e.get() {
|
||||
OpenedChannelBuffer::Open(buffer) => {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
break Task::ready(Ok(buffer)).shared();
|
||||
} else {
|
||||
self.opened_buffers.remove(&channel_id);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
OpenedChannelBuffer::Loading(task) => break task.clone(),
|
||||
},
|
||||
hash_map::Entry::Vacant(e) => {
|
||||
let client = self.client.clone();
|
||||
let task = cx
|
||||
.spawn(|this, cx| async move {
|
||||
let channel = this.read_with(&cx, |this, _| {
|
||||
this.channel_for_id(channel_id).cloned().ok_or_else(|| {
|
||||
Arc::new(anyhow!("no channel for id: {}", channel_id))
|
||||
})
|
||||
})?;
|
||||
|
||||
ChannelBuffer::new(channel, client, cx)
|
||||
.await
|
||||
.map_err(Arc::new)
|
||||
})
|
||||
.shared();
|
||||
e.insert(OpenedChannelBuffer::Loading(task.clone()));
|
||||
cx.spawn({
|
||||
let task = task.clone();
|
||||
|this, mut cx| async move {
|
||||
let result = task.await;
|
||||
this.update(&mut cx, |this, cx| match result {
|
||||
Ok(buffer) => {
|
||||
cx.observe_release(&buffer, move |this, _, _| {
|
||||
this.opened_buffers.remove(&channel_id);
|
||||
})
|
||||
.detach();
|
||||
this.opened_buffers.insert(
|
||||
channel_id,
|
||||
OpenedChannelBuffer::Open(buffer.downgrade()),
|
||||
);
|
||||
}
|
||||
Err(error) => {
|
||||
log::error!("failed to open channel buffer {error:?}");
|
||||
this.opened_buffers.remove(&channel_id);
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
break task;
|
||||
}
|
||||
}
|
||||
};
|
||||
cx.foreground()
|
||||
.spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
|
||||
}
|
||||
|
||||
pub fn is_user_admin(&self, channel_id: ChannelId) -> bool {
|
||||
self.channel_paths.iter().any(|path| {
|
||||
if let Some(ix) = path.iter().position(|id| *id == channel_id) {
|
||||
@ -403,6 +482,27 @@ impl ChannelStore {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_disconnect(&mut self, cx: &mut ModelContext<'_, ChannelStore>) {
|
||||
self.disconnect_buffers(cx);
|
||||
self.channels_by_id.clear();
|
||||
self.channel_invitations.clear();
|
||||
self.channel_participants.clear();
|
||||
self.channels_with_admin_privileges.clear();
|
||||
self.channel_paths.clear();
|
||||
self.outgoing_invites.clear();
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn disconnect_buffers(&mut self, cx: &mut ModelContext<ChannelStore>) {
|
||||
for (_, buffer) in self.opened_buffers.drain() {
|
||||
if let OpenedChannelBuffer::Open(buffer) = buffer {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn update_channels(
|
||||
&mut self,
|
||||
payload: proto::UpdateChannels,
|
||||
@ -437,38 +537,44 @@ impl ChannelStore {
|
||||
.retain(|channel_id, _| !payload.remove_channels.contains(channel_id));
|
||||
self.channels_with_admin_privileges
|
||||
.retain(|channel_id| !payload.remove_channels.contains(channel_id));
|
||||
|
||||
for channel_id in &payload.remove_channels {
|
||||
let channel_id = *channel_id;
|
||||
if let Some(OpenedChannelBuffer::Open(buffer)) =
|
||||
self.opened_buffers.remove(&channel_id)
|
||||
{
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
buffer.update(cx, ChannelBuffer::disconnect);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for channel in payload.channels {
|
||||
if let Some(existing_channel) = self.channels_by_id.get_mut(&channel.id) {
|
||||
// FIXME: We may be missing a path for this existing channel in certain cases
|
||||
let existing_channel = Arc::make_mut(existing_channel);
|
||||
existing_channel.name = channel.name;
|
||||
continue;
|
||||
}
|
||||
for channel_proto in payload.channels {
|
||||
if let Some(existing_channel) = self.channels_by_id.get_mut(&channel_proto.id) {
|
||||
Arc::make_mut(existing_channel).name = channel_proto.name;
|
||||
} else {
|
||||
let channel = Arc::new(Channel {
|
||||
id: channel_proto.id,
|
||||
name: channel_proto.name,
|
||||
});
|
||||
self.channels_by_id.insert(channel.id, channel.clone());
|
||||
|
||||
self.channels_by_id.insert(
|
||||
channel.id,
|
||||
Arc::new(Channel {
|
||||
id: channel.id,
|
||||
name: channel.name,
|
||||
}),
|
||||
);
|
||||
|
||||
if let Some(parent_id) = channel.parent_id {
|
||||
let mut ix = 0;
|
||||
while ix < self.channel_paths.len() {
|
||||
let path = &self.channel_paths[ix];
|
||||
if path.ends_with(&[parent_id]) {
|
||||
let mut new_path = path.clone();
|
||||
new_path.push(channel.id);
|
||||
self.channel_paths.insert(ix + 1, new_path);
|
||||
if let Some(parent_id) = channel_proto.parent_id {
|
||||
let mut ix = 0;
|
||||
while ix < self.channel_paths.len() {
|
||||
let path = &self.channel_paths[ix];
|
||||
if path.ends_with(&[parent_id]) {
|
||||
let mut new_path = path.clone();
|
||||
new_path.push(channel.id);
|
||||
self.channel_paths.insert(ix + 1, new_path);
|
||||
ix += 1;
|
||||
}
|
||||
ix += 1;
|
||||
}
|
||||
ix += 1;
|
||||
} else {
|
||||
self.channel_paths.push(vec![channel.id]);
|
||||
}
|
||||
} else {
|
||||
self.channel_paths.push(vec![channel.id]);
|
||||
}
|
||||
}
|
||||
|
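Editor's sketch (not part of the commit): the channel_paths representation used above stores one root-to-leaf chain of channel ids per row, which is what has_children and the insertion logic in update_channels walk. A self-contained illustration of that check:

fn has_children(channel_paths: &[Vec<u64>], channel_id: u64) -> bool {
    channel_paths.iter().any(|path| {
        if let Some(ix) = path.iter().position(|id| *id == channel_id) {
            path.len() > ix + 1
        } else {
            false
        }
    })
}

fn main() {
    // Channel 1 is a root, 2 is its child, and 3 is a child of 2.
    let paths = vec![vec![1], vec![1, 2], vec![1, 2, 3]];
    assert!(has_children(&paths, 1));
    assert!(has_children(&paths, 2));
    assert!(!has_children(&paths, 3));
}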
@ -1,4 +1,7 @@
|
||||
use super::*;
|
||||
use client::{Client, UserStore};
|
||||
use gpui::{AppContext, ModelHandle};
|
||||
use rpc::proto;
|
||||
use util::http::FakeHttpClient;
|
||||
|
||||
#[gpui::test]
|
@ -17,6 +17,7 @@ db = { path = "../db" }
|
||||
gpui = { path = "../gpui" }
|
||||
util = { path = "../util" }
|
||||
rpc = { path = "../rpc" }
|
||||
text = { path = "../text" }
|
||||
settings = { path = "../settings" }
|
||||
staff_mode = { path = "../staff_mode" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
|
@ -1,10 +1,6 @@
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
|
||||
#[cfg(test)]
|
||||
mod channel_store_tests;
|
||||
|
||||
pub mod channel_store;
|
||||
pub mod telemetry;
|
||||
pub mod user;
|
||||
|
||||
@ -48,7 +44,6 @@ use util::channel::ReleaseChannel;
|
||||
use util::http::HttpClient;
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
|
||||
pub use channel_store::*;
|
||||
pub use rpc::*;
|
||||
pub use telemetry::ClickhouseEvent;
|
||||
pub use user::*;
|
||||
|
@ -74,6 +74,7 @@ pub enum ClickhouseEvent {
|
||||
Call {
|
||||
operation: &'static str,
|
||||
room_id: u64,
|
||||
channel_id: Option<u64>,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -10,9 +10,11 @@ use std::sync::{Arc, Weak};
|
||||
use util::http::HttpClient;
|
||||
use util::TryFutureExt as _;
|
||||
|
||||
pub type UserId = u64;
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct User {
|
||||
pub id: u64,
|
||||
pub id: UserId,
|
||||
pub github_login: String,
|
||||
pub avatar: Option<Arc<ImageData>>,
|
||||
}
|
||||
|
@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
|
||||
default-run = "collab"
|
||||
edition = "2021"
|
||||
name = "collab"
|
||||
version = "0.17.0"
|
||||
version = "0.18.0"
|
||||
publish = false
|
||||
|
||||
[[bin]]
|
||||
@ -14,8 +14,10 @@ name = "seed"
|
||||
required-features = ["seed-support"]
|
||||
|
||||
[dependencies]
|
||||
clock = { path = "../clock" }
|
||||
collections = { path = "../collections" }
|
||||
live_kit_server = { path = "../live_kit_server" }
|
||||
text = { path = "../text" }
|
||||
rpc = { path = "../rpc" }
|
||||
util = { path = "../util" }
|
||||
|
||||
@ -35,6 +37,7 @@ log.workspace = true
|
||||
nanoid = "0.4"
|
||||
parking_lot.workspace = true
|
||||
prometheus = "0.13"
|
||||
prost.workspace = true
|
||||
rand.workspace = true
|
||||
reqwest = { version = "0.11", features = ["json"], optional = true }
|
||||
scrypt = "0.7"
|
||||
@ -62,6 +65,7 @@ collections = { path = "../collections", features = ["test-support"] }
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
call = { path = "../call", features = ["test-support"] }
|
||||
client = { path = "../client", features = ["test-support"] }
|
||||
channel = { path = "../channel" }
|
||||
editor = { path = "../editor", features = ["test-support"] }
|
||||
language = { path = "../language", features = ["test-support"] }
|
||||
fs = { path = "../fs", features = ["test-support"] }
|
||||
@ -74,6 +78,7 @@ rpc = { path = "../rpc", features = ["test-support"] }
|
||||
settings = { path = "../settings", features = ["test-support"] }
|
||||
theme = { path = "../theme" }
|
||||
workspace = { path = "../workspace", features = ["test-support"] }
|
||||
collab_ui = { path = "../collab_ui", features = ["test-support"] }
|
||||
|
||||
ctor.workspace = true
|
||||
env_logger.workspace = true
|
||||
|
@ -208,3 +208,44 @@ CREATE TABLE "channel_members" (
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id");
|
||||
|
||||
CREATE TABLE "buffers" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
|
||||
|
||||
CREATE TABLE "buffer_operations" (
|
||||
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL,
|
||||
"replica_id" INTEGER NOT NULL,
|
||||
"lamport_timestamp" INTEGER NOT NULL,
|
||||
"value" BLOB NOT NULL,
|
||||
PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
|
||||
);
|
||||
|
||||
CREATE TABLE "buffer_snapshots" (
|
||||
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL,
|
||||
"text" TEXT NOT NULL,
|
||||
"operation_serialization_version" INTEGER NOT NULL,
|
||||
PRIMARY KEY(buffer_id, epoch)
|
||||
);
|
||||
|
||||
CREATE TABLE "channel_buffer_collaborators" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
|
||||
"connection_id" INTEGER NOT NULL,
|
||||
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
|
||||
"connection_lost" BOOLEAN NOT NULL DEFAULT false,
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
"replica_id" INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
|
||||
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
|
||||
CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
|
||||
CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
|
||||
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");
|
||||
|
@ -0,0 +1,40 @@
|
||||
CREATE TABLE "buffers" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
|
||||
|
||||
CREATE TABLE "buffer_operations" (
|
||||
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL,
|
||||
"replica_id" INTEGER NOT NULL,
|
||||
"lamport_timestamp" INTEGER NOT NULL,
|
||||
"value" BYTEA NOT NULL,
|
||||
PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
|
||||
);
|
||||
|
||||
CREATE TABLE "buffer_snapshots" (
|
||||
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
|
||||
"epoch" INTEGER NOT NULL,
|
||||
"text" TEXT NOT NULL,
|
||||
"operation_serialization_version" INTEGER NOT NULL,
|
||||
PRIMARY KEY(buffer_id, epoch)
|
||||
);
|
||||
|
||||
CREATE TABLE "channel_buffer_collaborators" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
|
||||
"connection_id" INTEGER NOT NULL,
|
||||
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
|
||||
"connection_lost" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
|
||||
"replica_id" INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
|
||||
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
|
||||
CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
|
||||
CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
|
||||
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");
|
File diff suppressed because it is too large
127  crates/collab/src/db/ids.rs  (Normal file)
@ -0,0 +1,127 @@
|
||||
use crate::Result;
|
||||
use sea_orm::DbErr;
|
||||
use sea_query::{Value, ValueTypeErr};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
macro_rules! id_type {
|
||||
($name:ident) => {
|
||||
#[derive(
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
Default,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Hash,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
)]
|
||||
#[serde(transparent)]
|
||||
pub struct $name(pub i32);
|
||||
|
||||
impl $name {
|
||||
#[allow(unused)]
|
||||
pub const MAX: Self = Self(i32::MAX);
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn from_proto(value: u64) -> Self {
|
||||
Self(value as i32)
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn to_proto(self) -> u64 {
|
||||
self.0 as u64
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for $name {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<$name> for sea_query::Value {
|
||||
fn from(value: $name) -> Self {
|
||||
sea_query::Value::Int(Some(value.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl sea_orm::TryGetable for $name {
|
||||
fn try_get(
|
||||
res: &sea_orm::QueryResult,
|
||||
pre: &str,
|
||||
col: &str,
|
||||
) -> Result<Self, sea_orm::TryGetError> {
|
||||
Ok(Self(i32::try_get(res, pre, col)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl sea_query::ValueType for $name {
|
||||
fn try_from(v: Value) -> Result<Self, sea_query::ValueTypeErr> {
|
||||
Ok(Self(value_to_integer(v)?))
|
||||
}
|
||||
|
||||
fn type_name() -> String {
|
||||
stringify!($name).into()
|
||||
}
|
||||
|
||||
fn array_type() -> sea_query::ArrayType {
|
||||
sea_query::ArrayType::Int
|
||||
}
|
||||
|
||||
fn column_type() -> sea_query::ColumnType {
|
||||
sea_query::ColumnType::Integer(None)
|
||||
}
|
||||
}
|
||||
|
||||
impl sea_orm::TryFromU64 for $name {
|
||||
fn try_from_u64(n: u64) -> Result<Self, DbErr> {
|
||||
Ok(Self(n.try_into().map_err(|_| {
|
||||
DbErr::ConvertFromU64(concat!(
|
||||
"error converting ",
|
||||
stringify!($name),
|
||||
" to u64"
|
||||
))
|
||||
})?))
|
||||
}
|
||||
}
|
||||
|
||||
impl sea_query::Nullable for $name {
|
||||
fn null() -> Value {
|
||||
Value::Int(None)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn value_to_integer(v: Value) -> Result<i32, ValueTypeErr> {
|
||||
match v {
|
||||
Value::TinyInt(Some(int)) => int.try_into().map_err(|_| ValueTypeErr),
|
||||
Value::SmallInt(Some(int)) => int.try_into().map_err(|_| ValueTypeErr),
|
||||
Value::Int(Some(int)) => int.try_into().map_err(|_| ValueTypeErr),
|
||||
Value::BigInt(Some(int)) => int.try_into().map_err(|_| ValueTypeErr),
|
||||
Value::TinyUnsigned(Some(int)) => int.try_into().map_err(|_| ValueTypeErr),
|
||||
Value::SmallUnsigned(Some(int)) => int.try_into().map_err(|_| ValueTypeErr),
|
||||
Value::Unsigned(Some(int)) => int.try_into().map_err(|_| ValueTypeErr),
|
||||
Value::BigUnsigned(Some(int)) => int.try_into().map_err(|_| ValueTypeErr),
|
||||
_ => Err(ValueTypeErr),
|
||||
}
|
||||
}
|
||||
|
||||
id_type!(BufferId);
|
||||
id_type!(AccessTokenId);
|
||||
id_type!(ChannelId);
|
||||
id_type!(ChannelMemberId);
|
||||
id_type!(ContactId);
|
||||
id_type!(FollowerId);
|
||||
id_type!(RoomId);
|
||||
id_type!(RoomParticipantId);
|
||||
id_type!(ProjectId);
|
||||
id_type!(ProjectCollaboratorId);
|
||||
id_type!(ReplicaId);
|
||||
id_type!(ServerId);
|
||||
id_type!(SignupId);
|
||||
id_type!(UserId);
|
||||
id_type!(ChannelBufferCollaboratorId);
|
11  crates/collab/src/db/queries.rs  (Normal file)
@ -0,0 +1,11 @@
|
||||
use super::*;
|
||||
|
||||
pub mod access_tokens;
|
||||
pub mod buffers;
|
||||
pub mod channels;
|
||||
pub mod contacts;
|
||||
pub mod projects;
|
||||
pub mod rooms;
|
||||
pub mod servers;
|
||||
pub mod signups;
|
||||
pub mod users;
|
53  crates/collab/src/db/queries/access_tokens.rs  (Normal file)
@ -0,0 +1,53 @@
use super::*;

impl Database {
    pub async fn create_access_token(
        &self,
        user_id: UserId,
        access_token_hash: &str,
        max_access_token_count: usize,
    ) -> Result<AccessTokenId> {
        self.transaction(|tx| async {
            let tx = tx;

            let token = access_token::ActiveModel {
                user_id: ActiveValue::set(user_id),
                hash: ActiveValue::set(access_token_hash.into()),
                ..Default::default()
            }
            .insert(&*tx)
            .await?;

            access_token::Entity::delete_many()
                .filter(
                    access_token::Column::Id.in_subquery(
                        Query::select()
                            .column(access_token::Column::Id)
                            .from(access_token::Entity)
                            .and_where(access_token::Column::UserId.eq(user_id))
                            .order_by(access_token::Column::Id, sea_orm::Order::Desc)
                            .limit(10000)
                            .offset(max_access_token_count as u64)
                            .to_owned(),
                    ),
                )
                .exec(&*tx)
                .await?;
            Ok(token.id)
        })
        .await
    }

    pub async fn get_access_token(
        &self,
        access_token_id: AccessTokenId,
    ) -> Result<access_token::Model> {
        self.transaction(|tx| async move {
            Ok(access_token::Entity::find_by_id(access_token_id)
                .one(&*tx)
                .await?
                .ok_or_else(|| anyhow!("no such access token"))?)
        })
        .await
    }
}
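create_access_token above caps the number of tokens per user: after inserting the new token it deletes every id past `max_access_token_count` in a newest-first ordering. The same pruning rule over a plain vector of ids, as a hedged sketch (hypothetical helper, assuming larger ids are newer):

// Sketch of the pruning rule: keep the newest `max_access_token_count` ids,
// return the rest as candidates for deletion. Plain Rust, no database.
fn ids_to_prune(mut token_ids: Vec<i32>, max_access_token_count: usize) -> Vec<i32> {
    token_ids.sort_unstable_by(|a, b| b.cmp(a)); // newest (largest id) first
    token_ids.split_off(max_access_token_count.min(token_ids.len()))
}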
588
crates/collab/src/db/queries/buffers.rs
Normal file
@ -0,0 +1,588 @@
|
||||
use super::*;
|
||||
use prost::Message;
|
||||
use text::{EditOperation, InsertionTimestamp, UndoOperation};
|
||||
|
||||
impl Database {
|
||||
pub async fn join_channel_buffer(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<proto::JoinChannelBufferResponse> {
|
||||
self.transaction(|tx| async move {
|
||||
let tx = tx;
|
||||
|
||||
self.check_user_is_channel_member(channel_id, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let buffer = channel::Model {
|
||||
id: channel_id,
|
||||
..Default::default()
|
||||
}
|
||||
.find_related(buffer::Entity)
|
||||
.one(&*tx)
|
||||
.await?;
|
||||
|
||||
let buffer = if let Some(buffer) = buffer {
|
||||
buffer
|
||||
} else {
|
||||
let buffer = buffer::ActiveModel {
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
buffer_snapshot::ActiveModel {
|
||||
buffer_id: ActiveValue::Set(buffer.id),
|
||||
epoch: ActiveValue::Set(0),
|
||||
text: ActiveValue::Set(String::new()),
|
||||
operation_serialization_version: ActiveValue::Set(
|
||||
storage::SERIALIZATION_VERSION,
|
||||
),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
buffer
|
||||
};
|
||||
|
||||
// Join the collaborators
|
||||
let mut collaborators = channel_buffer_collaborator::Entity::find()
|
||||
.filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let replica_ids = collaborators
|
||||
.iter()
|
||||
.map(|c| c.replica_id)
|
||||
.collect::<HashSet<_>>();
|
||||
let mut replica_id = ReplicaId(0);
|
||||
while replica_ids.contains(&replica_id) {
|
||||
replica_id.0 += 1;
|
||||
}
|
||||
let collaborator = channel_buffer_collaborator::ActiveModel {
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
connection_id: ActiveValue::Set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)),
|
||||
user_id: ActiveValue::Set(user_id),
|
||||
replica_id: ActiveValue::Set(replica_id),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
collaborators.push(collaborator);
|
||||
|
||||
// Assemble the buffer state
|
||||
let (base_text, operations) = self.get_buffer_state(&buffer, &tx).await?;
|
||||
|
||||
Ok(proto::JoinChannelBufferResponse {
|
||||
buffer_id: buffer.id.to_proto(),
|
||||
replica_id: replica_id.to_proto() as u32,
|
||||
base_text,
|
||||
operations,
|
||||
collaborators: collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| proto::Collaborator {
|
||||
peer_id: Some(collaborator.connection().into()),
|
||||
user_id: collaborator.user_id.to_proto(),
|
||||
replica_id: collaborator.replica_id.0 as u32,
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
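The replica id handed to a new collaborator above is simply the smallest integer not already claimed by an existing collaborator. A standalone sketch of that selection (hypothetical helper, with a plain HashSet in place of the database rows):

use std::collections::HashSet;

// Pick the lowest replica id that no existing collaborator holds,
// mirroring the `while replica_ids.contains(...)` loop above.
fn next_replica_id(taken: &HashSet<i32>) -> i32 {
    let mut candidate = 0;
    while taken.contains(&candidate) {
        candidate += 1;
    }
    candidate
}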
|
||||
|
||||
pub async fn leave_channel_buffer(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<Vec<ConnectionId>> {
|
||||
self.transaction(|tx| async move {
|
||||
self.leave_channel_buffer_internal(channel_id, connection, &*tx)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn leave_channel_buffer_internal(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
connection: ConnectionId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<ConnectionId>> {
|
||||
let result = channel_buffer_collaborator::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
|
||||
.add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
channel_buffer_collaborator::Column::ConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("not a collaborator on this project"))?;
|
||||
}
|
||||
|
||||
let mut connections = Vec::new();
|
||||
let mut rows = channel_buffer_collaborator::Entity::find()
|
||||
.filter(
|
||||
Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
connections.push(ConnectionId {
|
||||
id: row.connection_id as u32,
|
||||
owner_id: row.connection_server_id.0 as u32,
|
||||
});
|
||||
}
|
||||
|
||||
drop(rows);
|
||||
|
||||
if connections.is_empty() {
|
||||
self.snapshot_buffer(channel_id, &tx).await?;
|
||||
}
|
||||
|
||||
Ok(connections)
|
||||
}
|
||||
|
||||
pub async fn leave_channel_buffers(
|
||||
&self,
|
||||
connection: ConnectionId,
|
||||
) -> Result<Vec<(ChannelId, Vec<ConnectionId>)>> {
|
||||
self.transaction(|tx| async move {
|
||||
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
|
||||
enum QueryChannelIds {
|
||||
ChannelId,
|
||||
}
|
||||
|
||||
let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
|
||||
.select_only()
|
||||
.column(channel_buffer_collaborator::Column::ChannelId)
|
||||
.filter(Condition::all().add(
|
||||
channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
|
||||
))
|
||||
.into_values::<_, QueryChannelIds>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut result = Vec::new();
|
||||
for channel_id in channel_ids {
|
||||
let collaborators = self
|
||||
.leave_channel_buffer_internal(channel_id, connection, &*tx)
|
||||
.await?;
|
||||
result.push((channel_id, collaborators));
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
pub async fn get_channel_buffer_collaborators(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
) -> Result<Vec<UserId>> {
|
||||
self.transaction(|tx| async move {
|
||||
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
|
||||
enum QueryUserIds {
|
||||
UserId,
|
||||
}
|
||||
|
||||
let users: Vec<UserId> = channel_buffer_collaborator::Entity::find()
|
||||
.select_only()
|
||||
.column(channel_buffer_collaborator::Column::UserId)
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
|
||||
)
|
||||
.into_values::<_, QueryUserIds>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(users)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_channel_buffer(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user: UserId,
|
||||
operations: &[proto::Operation],
|
||||
) -> Result<Vec<ConnectionId>> {
|
||||
self.transaction(move |tx| async move {
|
||||
self.check_user_is_channel_member(channel_id, user, &*tx)
|
||||
.await?;
|
||||
|
||||
let buffer = buffer::Entity::find()
|
||||
.filter(buffer::Column::ChannelId.eq(channel_id))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such buffer"))?;
|
||||
|
||||
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
|
||||
enum QueryVersion {
|
||||
OperationSerializationVersion,
|
||||
}
|
||||
|
||||
let serialization_version: i32 = buffer
|
||||
.find_related(buffer_snapshot::Entity)
|
||||
.select_only()
|
||||
.column(buffer_snapshot::Column::OperationSerializationVersion)
|
||||
.filter(buffer_snapshot::Column::Epoch.eq(buffer.epoch))
|
||||
.into_values::<_, QueryVersion>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("missing buffer snapshot"))?;
|
||||
|
||||
let operations = operations
|
||||
.iter()
|
||||
.filter_map(|op| operation_to_storage(op, &buffer, serialization_version))
|
||||
.collect::<Vec<_>>();
|
||||
if !operations.is_empty() {
|
||||
buffer_operation::Entity::insert_many(operations)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let mut connections = Vec::new();
|
||||
let mut rows = channel_buffer_collaborator::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
connections.push(ConnectionId {
|
||||
id: row.connection_id as u32,
|
||||
owner_id: row.connection_server_id.0 as u32,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(connections)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn get_buffer_state(
|
||||
&self,
|
||||
buffer: &buffer::Model,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<(String, Vec<proto::Operation>)> {
|
||||
let id = buffer.id;
|
||||
let (base_text, version) = if buffer.epoch > 0 {
|
||||
let snapshot = buffer_snapshot::Entity::find()
|
||||
.filter(
|
||||
buffer_snapshot::Column::BufferId
|
||||
.eq(id)
|
||||
.and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such snapshot"))?;
|
||||
|
||||
let version = snapshot.operation_serialization_version;
|
||||
(snapshot.text, version)
|
||||
} else {
|
||||
(String::new(), storage::SERIALIZATION_VERSION)
|
||||
};
|
||||
|
||||
let mut rows = buffer_operation::Entity::find()
|
||||
.filter(
|
||||
buffer_operation::Column::BufferId
|
||||
.eq(id)
|
||||
.and(buffer_operation::Column::Epoch.eq(buffer.epoch)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
let mut operations = Vec::new();
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
|
||||
let operation = operation_from_storage(row, version)?;
|
||||
operations.push(proto::Operation {
|
||||
variant: Some(operation),
|
||||
})
|
||||
}
|
||||
|
||||
Ok((base_text, operations))
|
||||
}
|
||||
|
||||
async fn snapshot_buffer(&self, channel_id: ChannelId, tx: &DatabaseTransaction) -> Result<()> {
|
||||
let buffer = channel::Model {
|
||||
id: channel_id,
|
||||
..Default::default()
|
||||
}
|
||||
.find_related(buffer::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such buffer"))?;
|
||||
|
||||
let (base_text, operations) = self.get_buffer_state(&buffer, tx).await?;
|
||||
if operations.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut text_buffer = text::Buffer::new(0, 0, base_text);
|
||||
text_buffer
|
||||
.apply_ops(operations.into_iter().filter_map(operation_from_wire))
|
||||
.unwrap();
|
||||
|
||||
let base_text = text_buffer.text();
|
||||
let epoch = buffer.epoch + 1;
|
||||
|
||||
buffer_snapshot::Model {
|
||||
buffer_id: buffer.id,
|
||||
epoch,
|
||||
text: base_text,
|
||||
operation_serialization_version: storage::SERIALIZATION_VERSION,
|
||||
}
|
||||
.into_active_model()
|
||||
.insert(tx)
|
||||
.await?;
|
||||
|
||||
buffer::ActiveModel {
|
||||
id: ActiveValue::Unchanged(buffer.id),
|
||||
epoch: ActiveValue::Set(epoch),
|
||||
..Default::default()
|
||||
}
|
||||
.save(tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
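snapshot_buffer above compacts the operation log: it replays the stored operations onto the previous snapshot text and writes the result under a new epoch. A rough in-memory sketch of that idea, where `apply` stands in for `text::Buffer::apply_ops` and `Snapshot` is a hypothetical plain struct:

// Sketch: fold the pending operations into the base text and bump the epoch,
// as snapshot_buffer does before persisting a new buffer_snapshot row.
struct Snapshot {
    epoch: i32,
    text: String,
}

fn next_snapshot(
    previous: Snapshot,
    operations: Vec<String>,
    apply: impl Fn(&str, &str) -> String,
) -> Snapshot {
    let text = operations
        .iter()
        .fold(previous.text, |text, op| apply(&text, op));
    Snapshot {
        epoch: previous.epoch + 1,
        text,
    }
}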
|
||||
}
|
||||
|
||||
fn operation_to_storage(
|
||||
operation: &proto::Operation,
|
||||
buffer: &buffer::Model,
|
||||
_format: i32,
|
||||
) -> Option<buffer_operation::ActiveModel> {
|
||||
let (replica_id, lamport_timestamp, value) = match operation.variant.as_ref()? {
|
||||
proto::operation::Variant::Edit(operation) => (
|
||||
operation.replica_id,
|
||||
operation.lamport_timestamp,
|
||||
storage::Operation {
|
||||
local_timestamp: operation.local_timestamp,
|
||||
version: version_to_storage(&operation.version),
|
||||
is_undo: false,
|
||||
edit_ranges: operation
|
||||
.ranges
|
||||
.iter()
|
||||
.map(|range| storage::Range {
|
||||
start: range.start,
|
||||
end: range.end,
|
||||
})
|
||||
.collect(),
|
||||
edit_texts: operation.new_text.clone(),
|
||||
undo_counts: Vec::new(),
|
||||
},
|
||||
),
|
||||
proto::operation::Variant::Undo(operation) => (
|
||||
operation.replica_id,
|
||||
operation.lamport_timestamp,
|
||||
storage::Operation {
|
||||
local_timestamp: operation.local_timestamp,
|
||||
version: version_to_storage(&operation.version),
|
||||
is_undo: true,
|
||||
edit_ranges: Vec::new(),
|
||||
edit_texts: Vec::new(),
|
||||
undo_counts: operation
|
||||
.counts
|
||||
.iter()
|
||||
.map(|entry| storage::UndoCount {
|
||||
replica_id: entry.replica_id,
|
||||
local_timestamp: entry.local_timestamp,
|
||||
count: entry.count,
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
),
|
||||
_ => None?,
|
||||
};
|
||||
|
||||
Some(buffer_operation::ActiveModel {
|
||||
buffer_id: ActiveValue::Set(buffer.id),
|
||||
epoch: ActiveValue::Set(buffer.epoch),
|
||||
replica_id: ActiveValue::Set(replica_id as i32),
|
||||
lamport_timestamp: ActiveValue::Set(lamport_timestamp as i32),
|
||||
value: ActiveValue::Set(value.encode_to_vec()),
|
||||
})
|
||||
}
|
||||
|
||||
fn operation_from_storage(
|
||||
row: buffer_operation::Model,
|
||||
_format_version: i32,
|
||||
) -> Result<proto::operation::Variant, Error> {
|
||||
let operation =
|
||||
storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?;
|
||||
let version = version_from_storage(&operation.version);
|
||||
Ok(if operation.is_undo {
|
||||
proto::operation::Variant::Undo(proto::operation::Undo {
|
||||
replica_id: row.replica_id as u32,
|
||||
local_timestamp: operation.local_timestamp as u32,
|
||||
lamport_timestamp: row.lamport_timestamp as u32,
|
||||
version,
|
||||
counts: operation
|
||||
.undo_counts
|
||||
.iter()
|
||||
.map(|entry| proto::UndoCount {
|
||||
replica_id: entry.replica_id,
|
||||
local_timestamp: entry.local_timestamp,
|
||||
count: entry.count,
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
} else {
|
||||
proto::operation::Variant::Edit(proto::operation::Edit {
|
||||
replica_id: row.replica_id as u32,
|
||||
local_timestamp: operation.local_timestamp as u32,
|
||||
lamport_timestamp: row.lamport_timestamp as u32,
|
||||
version,
|
||||
ranges: operation
|
||||
.edit_ranges
|
||||
.into_iter()
|
||||
.map(|range| proto::Range {
|
||||
start: range.start,
|
||||
end: range.end,
|
||||
})
|
||||
.collect(),
|
||||
new_text: operation.edit_texts,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn version_to_storage(version: &Vec<proto::VectorClockEntry>) -> Vec<storage::VectorClockEntry> {
|
||||
version
|
||||
.iter()
|
||||
.map(|entry| storage::VectorClockEntry {
|
||||
replica_id: entry.replica_id,
|
||||
timestamp: entry.timestamp,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn version_from_storage(version: &Vec<storage::VectorClockEntry>) -> Vec<proto::VectorClockEntry> {
|
||||
version
|
||||
.iter()
|
||||
.map(|entry| proto::VectorClockEntry {
|
||||
replica_id: entry.replica_id,
|
||||
timestamp: entry.timestamp,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
// This is currently a manual copy of the deserialization code in the client's language crate
|
||||
pub fn operation_from_wire(operation: proto::Operation) -> Option<text::Operation> {
|
||||
match operation.variant? {
|
||||
proto::operation::Variant::Edit(edit) => Some(text::Operation::Edit(EditOperation {
|
||||
timestamp: InsertionTimestamp {
|
||||
replica_id: edit.replica_id as text::ReplicaId,
|
||||
local: edit.local_timestamp,
|
||||
lamport: edit.lamport_timestamp,
|
||||
},
|
||||
version: version_from_wire(&edit.version),
|
||||
ranges: edit
|
||||
.ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
text::FullOffset(range.start as usize)..text::FullOffset(range.end as usize)
|
||||
})
|
||||
.collect(),
|
||||
new_text: edit.new_text.into_iter().map(Arc::from).collect(),
|
||||
})),
|
||||
proto::operation::Variant::Undo(undo) => Some(text::Operation::Undo {
|
||||
lamport_timestamp: clock::Lamport {
|
||||
replica_id: undo.replica_id as text::ReplicaId,
|
||||
value: undo.lamport_timestamp,
|
||||
},
|
||||
undo: UndoOperation {
|
||||
id: clock::Local {
|
||||
replica_id: undo.replica_id as text::ReplicaId,
|
||||
value: undo.local_timestamp,
|
||||
},
|
||||
version: version_from_wire(&undo.version),
|
||||
counts: undo
|
||||
.counts
|
||||
.into_iter()
|
||||
.map(|c| {
|
||||
(
|
||||
clock::Local {
|
||||
replica_id: c.replica_id as text::ReplicaId,
|
||||
value: c.local_timestamp,
|
||||
},
|
||||
c.count,
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global {
|
||||
let mut version = clock::Global::new();
|
||||
for entry in message {
|
||||
version.observe(clock::Local {
|
||||
replica_id: entry.replica_id as text::ReplicaId,
|
||||
value: entry.timestamp,
|
||||
});
|
||||
}
|
||||
version
|
||||
}
|
||||
|
||||
mod storage {
|
||||
#![allow(non_snake_case)]
|
||||
use prost::Message;
|
||||
pub const SERIALIZATION_VERSION: i32 = 1;
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct Operation {
|
||||
#[prost(uint32, tag = "1")]
|
||||
pub local_timestamp: u32,
|
||||
#[prost(message, repeated, tag = "2")]
|
||||
pub version: Vec<VectorClockEntry>,
|
||||
#[prost(bool, tag = "3")]
|
||||
pub is_undo: bool,
|
||||
#[prost(message, repeated, tag = "4")]
|
||||
pub edit_ranges: Vec<Range>,
|
||||
#[prost(string, repeated, tag = "5")]
|
||||
pub edit_texts: Vec<String>,
|
||||
#[prost(message, repeated, tag = "6")]
|
||||
pub undo_counts: Vec<UndoCount>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct VectorClockEntry {
|
||||
#[prost(uint32, tag = "1")]
|
||||
pub replica_id: u32,
|
||||
#[prost(uint32, tag = "2")]
|
||||
pub timestamp: u32,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct Range {
|
||||
#[prost(uint64, tag = "1")]
|
||||
pub start: u64,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub end: u64,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct UndoCount {
|
||||
#[prost(uint32, tag = "1")]
|
||||
pub replica_id: u32,
|
||||
#[prost(uint32, tag = "2")]
|
||||
pub local_timestamp: u32,
|
||||
#[prost(uint32, tag = "3")]
|
||||
pub count: u32,
|
||||
}
|
||||
}
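Operations are persisted as the prost messages defined in the `storage` module above; `operation_to_storage` writes them with `encode_to_vec` and `operation_from_storage` reads them back with `decode`. A minimal round-trip sketch over `storage::Operation` (hypothetical test-style helper, assuming it sits next to this module):

// Sketch: encode a storage::Operation and decode it back, as the query code
// does when writing and reading the `value` column of buffer_operations.
fn storage_round_trip() -> anyhow::Result<()> {
    use prost::Message;

    let op = storage::Operation {
        local_timestamp: 1,
        version: vec![storage::VectorClockEntry { replica_id: 0, timestamp: 1 }],
        is_undo: false,
        edit_ranges: vec![storage::Range { start: 0, end: 0 }],
        edit_texts: vec!["hello".to_string()],
        undo_counts: Vec::new(),
    };
    let bytes = op.encode_to_vec();
    let decoded = storage::Operation::decode(bytes.as_slice())?;
    assert_eq!(decoded.edit_texts, vec!["hello".to_string()]);
    Ok(())
}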
|
697
crates/collab/src/db/queries/channels.rs
Normal file
@ -0,0 +1,697 @@
|
||||
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn create_root_channel(
|
||||
&self,
|
||||
name: &str,
|
||||
live_kit_room: &str,
|
||||
creator_id: UserId,
|
||||
) -> Result<ChannelId> {
|
||||
self.create_channel(name, None, live_kit_room, creator_id)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn create_channel(
|
||||
&self,
|
||||
name: &str,
|
||||
parent: Option<ChannelId>,
|
||||
live_kit_room: &str,
|
||||
creator_id: UserId,
|
||||
) -> Result<ChannelId> {
|
||||
let name = Self::sanitize_channel_name(name)?;
|
||||
self.transaction(move |tx| async move {
|
||||
if let Some(parent) = parent {
|
||||
self.check_user_is_channel_admin(parent, creator_id, &*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let channel = channel::ActiveModel {
|
||||
name: ActiveValue::Set(name.to_string()),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
let channel_paths_stmt;
|
||||
if let Some(parent) = parent {
|
||||
let sql = r#"
|
||||
INSERT INTO channel_paths
|
||||
(id_path, channel_id)
|
||||
SELECT
|
||||
id_path || $1 || '/', $2
|
||||
FROM
|
||||
channel_paths
|
||||
WHERE
|
||||
channel_id = $3
|
||||
"#;
|
||||
channel_paths_stmt = Statement::from_sql_and_values(
|
||||
self.pool.get_database_backend(),
|
||||
sql,
|
||||
[
|
||||
channel.id.to_proto().into(),
|
||||
channel.id.to_proto().into(),
|
||||
parent.to_proto().into(),
|
||||
],
|
||||
);
|
||||
tx.execute(channel_paths_stmt).await?;
|
||||
} else {
|
||||
channel_path::Entity::insert(channel_path::ActiveModel {
|
||||
channel_id: ActiveValue::Set(channel.id),
|
||||
id_path: ActiveValue::Set(format!("/{}/", channel.id)),
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
channel_member::ActiveModel {
|
||||
channel_id: ActiveValue::Set(channel.id),
|
||||
user_id: ActiveValue::Set(creator_id),
|
||||
accepted: ActiveValue::Set(true),
|
||||
admin: ActiveValue::Set(true),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
room::ActiveModel {
|
||||
channel_id: ActiveValue::Set(Some(channel.id)),
|
||||
live_kit_room: ActiveValue::Set(live_kit_room.to_string()),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(channel.id)
|
||||
})
|
||||
.await
|
||||
}
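Channel nesting is stored as a materialized path: each `channel_paths` row holds an `id_path` such as "/1/2/", and the raw INSERT above appends the new channel's id to every path of its parent. A tiny sketch of that path construction (hypothetical helper, not in the diff):

// Sketch: extend a parent's materialized path with a new child id,
// matching the `id_path || $1 || '/'` expression in the SQL above.
fn child_id_path(parent_id_path: &str, child_id: u64) -> String {
    format!("{parent_id_path}{child_id}/")
}

// e.g. child_id_path("/1/", 2) == "/1/2/"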
|
||||
|
||||
pub async fn remove_channel(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
) -> Result<(Vec<ChannelId>, Vec<UserId>)> {
|
||||
self.transaction(move |tx| async move {
|
||||
self.check_user_is_channel_admin(channel_id, user_id, &*tx)
|
||||
.await?;
|
||||
|
||||
// Don't remove descendant channels that have additional parents.
|
||||
let mut channels_to_remove = self.get_channel_descendants([channel_id], &*tx).await?;
|
||||
{
|
||||
let mut channels_to_keep = channel_path::Entity::find()
|
||||
.filter(
|
||||
channel_path::Column::ChannelId
|
||||
.is_in(
|
||||
channels_to_remove
|
||||
.keys()
|
||||
.copied()
|
||||
.filter(|&id| id != channel_id),
|
||||
)
|
||||
.and(
|
||||
channel_path::Column::IdPath
|
||||
.not_like(&format!("%/{}/%", channel_id)),
|
||||
),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(row) = channels_to_keep.next().await {
|
||||
let row = row?;
|
||||
channels_to_remove.remove(&row.channel_id);
|
||||
}
|
||||
}
|
||||
|
||||
let channel_ancestors = self.get_channel_ancestors(channel_id, &*tx).await?;
|
||||
let members_to_notify: Vec<UserId> = channel_member::Entity::find()
|
||||
.filter(channel_member::Column::ChannelId.is_in(channel_ancestors))
|
||||
.select_only()
|
||||
.column(channel_member::Column::UserId)
|
||||
.distinct()
|
||||
.into_values::<_, QueryUserIds>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
channel::Entity::delete_many()
|
||||
.filter(channel::Column::Id.is_in(channels_to_remove.keys().copied()))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok((channels_to_remove.into_keys().collect(), members_to_notify))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn invite_channel_member(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
invitee_id: UserId,
|
||||
inviter_id: UserId,
|
||||
is_admin: bool,
|
||||
) -> Result<()> {
|
||||
self.transaction(move |tx| async move {
|
||||
self.check_user_is_channel_admin(channel_id, inviter_id, &*tx)
|
||||
.await?;
|
||||
|
||||
channel_member::ActiveModel {
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
user_id: ActiveValue::Set(invitee_id),
|
||||
accepted: ActiveValue::Set(false),
|
||||
admin: ActiveValue::Set(is_admin),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
fn sanitize_channel_name(name: &str) -> Result<&str> {
|
||||
let new_name = name.trim().trim_start_matches('#');
|
||||
if new_name == "" {
|
||||
Err(anyhow!("channel name can't be blank"))?;
|
||||
}
|
||||
Ok(new_name)
|
||||
}
|
||||
|
||||
pub async fn rename_channel(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
new_name: &str,
|
||||
) -> Result<String> {
|
||||
self.transaction(move |tx| async move {
|
||||
let new_name = Self::sanitize_channel_name(new_name)?.to_string();
|
||||
|
||||
self.check_user_is_channel_admin(channel_id, user_id, &*tx)
|
||||
.await?;
|
||||
|
||||
channel::ActiveModel {
|
||||
id: ActiveValue::Unchanged(channel_id),
|
||||
name: ActiveValue::Set(new_name.clone()),
|
||||
..Default::default()
|
||||
}
|
||||
.update(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(new_name)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn respond_to_channel_invite(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
accept: bool,
|
||||
) -> Result<()> {
|
||||
self.transaction(move |tx| async move {
|
||||
let rows_affected = if accept {
|
||||
channel_member::Entity::update_many()
|
||||
.set(channel_member::ActiveModel {
|
||||
accepted: ActiveValue::Set(accept),
|
||||
..Default::default()
|
||||
})
|
||||
.filter(
|
||||
channel_member::Column::ChannelId
|
||||
.eq(channel_id)
|
||||
.and(channel_member::Column::UserId.eq(user_id))
|
||||
.and(channel_member::Column::Accepted.eq(false)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?
|
||||
.rows_affected
|
||||
} else {
|
||||
channel_member::ActiveModel {
|
||||
channel_id: ActiveValue::Unchanged(channel_id),
|
||||
user_id: ActiveValue::Unchanged(user_id),
|
||||
..Default::default()
|
||||
}
|
||||
.delete(&*tx)
|
||||
.await?
|
||||
.rows_affected
|
||||
};
|
||||
|
||||
if rows_affected == 0 {
|
||||
Err(anyhow!("no such invitation"))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn remove_channel_member(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
member_id: UserId,
|
||||
remover_id: UserId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
self.check_user_is_channel_admin(channel_id, remover_id, &*tx)
|
||||
.await?;
|
||||
|
||||
let result = channel_member::Entity::delete_many()
|
||||
.filter(
|
||||
channel_member::Column::ChannelId
|
||||
.eq(channel_id)
|
||||
.and(channel_member::Column::UserId.eq(member_id)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("no such member"))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_channel_invites_for_user(&self, user_id: UserId) -> Result<Vec<Channel>> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel_invites = channel_member::Entity::find()
|
||||
.filter(
|
||||
channel_member::Column::UserId
|
||||
.eq(user_id)
|
||||
.and(channel_member::Column::Accepted.eq(false)),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let channels = channel::Entity::find()
|
||||
.filter(
|
||||
channel::Column::Id.is_in(
|
||||
channel_invites
|
||||
.into_iter()
|
||||
.map(|channel_member| channel_member.channel_id),
|
||||
),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let channels = channels
|
||||
.into_iter()
|
||||
.map(|channel| Channel {
|
||||
id: channel.id,
|
||||
name: channel.name,
|
||||
parent_id: None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(channels)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_channels_for_user(&self, user_id: UserId) -> Result<ChannelsForUser> {
|
||||
self.transaction(|tx| async move {
|
||||
let tx = tx;
|
||||
|
||||
let channel_memberships = channel_member::Entity::find()
|
||||
.filter(
|
||||
channel_member::Column::UserId
|
||||
.eq(user_id)
|
||||
.and(channel_member::Column::Accepted.eq(true)),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let parents_by_child_id = self
|
||||
.get_channel_descendants(channel_memberships.iter().map(|m| m.channel_id), &*tx)
|
||||
.await?;
|
||||
|
||||
let channels_with_admin_privileges = channel_memberships
|
||||
.iter()
|
||||
.filter_map(|membership| membership.admin.then_some(membership.channel_id))
|
||||
.collect();
|
||||
|
||||
let mut channels = Vec::with_capacity(parents_by_child_id.len());
|
||||
{
|
||||
let mut rows = channel::Entity::find()
|
||||
.filter(channel::Column::Id.is_in(parents_by_child_id.keys().copied()))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
channels.push(Channel {
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
parent_id: parents_by_child_id.get(&row.id).copied().flatten(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryUserIdsAndChannelIds {
|
||||
ChannelId,
|
||||
UserId,
|
||||
}
|
||||
|
||||
let mut channel_participants: HashMap<ChannelId, Vec<UserId>> = HashMap::default();
|
||||
{
|
||||
let mut rows = room_participant::Entity::find()
|
||||
.inner_join(room::Entity)
|
||||
.filter(room::Column::ChannelId.is_in(channels.iter().map(|c| c.id)))
|
||||
.select_only()
|
||||
.column(room::Column::ChannelId)
|
||||
.column(room_participant::Column::UserId)
|
||||
.into_values::<_, QueryUserIdsAndChannelIds>()
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(row) = rows.next().await {
|
||||
let row: (ChannelId, UserId) = row?;
|
||||
channel_participants.entry(row.0).or_default().push(row.1)
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ChannelsForUser {
|
||||
channels,
|
||||
channel_participants,
|
||||
channels_with_admin_privileges,
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_channel_members(&self, id: ChannelId) -> Result<Vec<UserId>> {
|
||||
self.transaction(|tx| async move { self.get_channel_members_internal(id, &*tx).await })
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn set_channel_member_admin(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
from: UserId,
|
||||
for_user: UserId,
|
||||
admin: bool,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
self.check_user_is_channel_admin(channel_id, from, &*tx)
|
||||
.await?;
|
||||
|
||||
let result = channel_member::Entity::update_many()
|
||||
.filter(
|
||||
channel_member::Column::ChannelId
|
||||
.eq(channel_id)
|
||||
.and(channel_member::Column::UserId.eq(for_user)),
|
||||
)
|
||||
.set(channel_member::ActiveModel {
|
||||
admin: ActiveValue::set(admin),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("no such member"))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_channel_member_details(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
) -> Result<Vec<proto::ChannelMember>> {
|
||||
self.transaction(|tx| async move {
|
||||
self.check_user_is_channel_admin(channel_id, user_id, &*tx)
|
||||
.await?;
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryMemberDetails {
|
||||
UserId,
|
||||
Admin,
|
||||
IsDirectMember,
|
||||
Accepted,
|
||||
}
|
||||
|
||||
let tx = tx;
|
||||
let ancestor_ids = self.get_channel_ancestors(channel_id, &*tx).await?;
|
||||
let mut stream = channel_member::Entity::find()
|
||||
.distinct()
|
||||
.filter(channel_member::Column::ChannelId.is_in(ancestor_ids.iter().copied()))
|
||||
.select_only()
|
||||
.column(channel_member::Column::UserId)
|
||||
.column(channel_member::Column::Admin)
|
||||
.column_as(
|
||||
channel_member::Column::ChannelId.eq(channel_id),
|
||||
QueryMemberDetails::IsDirectMember,
|
||||
)
|
||||
.column(channel_member::Column::Accepted)
|
||||
.order_by_asc(channel_member::Column::UserId)
|
||||
.into_values::<_, QueryMemberDetails>()
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut rows = Vec::<proto::ChannelMember>::new();
|
||||
while let Some(row) = stream.next().await {
|
||||
let (user_id, is_admin, is_direct_member, is_invite_accepted): (
|
||||
UserId,
|
||||
bool,
|
||||
bool,
|
||||
bool,
|
||||
) = row?;
|
||||
let kind = match (is_direct_member, is_invite_accepted) {
|
||||
(true, true) => proto::channel_member::Kind::Member,
|
||||
(true, false) => proto::channel_member::Kind::Invitee,
|
||||
(false, true) => proto::channel_member::Kind::AncestorMember,
|
||||
(false, false) => continue,
|
||||
};
|
||||
let user_id = user_id.to_proto();
|
||||
let kind = kind.into();
|
||||
if let Some(last_row) = rows.last_mut() {
|
||||
if last_row.user_id == user_id {
|
||||
if is_direct_member {
|
||||
last_row.kind = kind;
|
||||
last_row.admin = is_admin;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
rows.push(proto::ChannelMember {
|
||||
user_id,
|
||||
kind,
|
||||
admin: is_admin,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(rows)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_channel_members_internal(
|
||||
&self,
|
||||
id: ChannelId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<UserId>> {
|
||||
let ancestor_ids = self.get_channel_ancestors(id, tx).await?;
|
||||
let user_ids = channel_member::Entity::find()
|
||||
.distinct()
|
||||
.filter(
|
||||
channel_member::Column::ChannelId
|
||||
.is_in(ancestor_ids.iter().copied())
|
||||
.and(channel_member::Column::Accepted.eq(true)),
|
||||
)
|
||||
.select_only()
|
||||
.column(channel_member::Column::UserId)
|
||||
.into_values::<_, QueryUserIds>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
Ok(user_ids)
|
||||
}
|
||||
|
||||
pub async fn check_user_is_channel_member(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
let channel_ids = self.get_channel_ancestors(channel_id, tx).await?;
|
||||
channel_member::Entity::find()
|
||||
.filter(
|
||||
channel_member::Column::ChannelId
|
||||
.is_in(channel_ids)
|
||||
.and(channel_member::Column::UserId.eq(user_id)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user is not a channel member or channel does not exist"))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn check_user_is_channel_admin(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
let channel_ids = self.get_channel_ancestors(channel_id, tx).await?;
|
||||
channel_member::Entity::find()
|
||||
.filter(
|
||||
channel_member::Column::ChannelId
|
||||
.is_in(channel_ids)
|
||||
.and(channel_member::Column::UserId.eq(user_id))
|
||||
.and(channel_member::Column::Admin.eq(true)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user is not a channel admin or channel does not exist"))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_channel_ancestors(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<ChannelId>> {
|
||||
let paths = channel_path::Entity::find()
|
||||
.filter(channel_path::Column::ChannelId.eq(channel_id))
|
||||
.all(tx)
|
||||
.await?;
|
||||
let mut channel_ids = Vec::new();
|
||||
for path in paths {
|
||||
for id in path.id_path.trim_matches('/').split('/') {
|
||||
if let Ok(id) = id.parse() {
|
||||
let id = ChannelId::from_proto(id);
|
||||
if let Err(ix) = channel_ids.binary_search(&id) {
|
||||
channel_ids.insert(ix, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(channel_ids)
|
||||
}
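Ancestor lookup above just splits each stored `id_path` on `/` and collects the ids. The parsing step in isolation (hypothetical helper):

// Sketch: extract the channel ids (ancestors plus the channel itself) from a
// materialized path such as "/1/5/9/", as the loop above does per row.
fn ids_in_path(id_path: &str) -> Vec<u64> {
    id_path
        .trim_matches('/')
        .split('/')
        .filter_map(|segment| segment.parse().ok())
        .collect()
}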
|
||||
|
||||
async fn get_channel_descendants(
|
||||
&self,
|
||||
channel_ids: impl IntoIterator<Item = ChannelId>,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<HashMap<ChannelId, Option<ChannelId>>> {
|
||||
let mut values = String::new();
|
||||
for id in channel_ids {
|
||||
if !values.is_empty() {
|
||||
values.push_str(", ");
|
||||
}
|
||||
write!(&mut values, "({})", id).unwrap();
|
||||
}
|
||||
|
||||
if values.is_empty() {
|
||||
return Ok(HashMap::default());
|
||||
}
|
||||
|
||||
let sql = format!(
|
||||
r#"
|
||||
SELECT
|
||||
descendant_paths.*
|
||||
FROM
|
||||
channel_paths parent_paths, channel_paths descendant_paths
|
||||
WHERE
|
||||
parent_paths.channel_id IN ({values}) AND
|
||||
descendant_paths.id_path LIKE (parent_paths.id_path || '%')
|
||||
"#
|
||||
);
|
||||
|
||||
let stmt = Statement::from_string(self.pool.get_database_backend(), sql);
|
||||
|
||||
let mut parents_by_child_id = HashMap::default();
|
||||
let mut paths = channel_path::Entity::find()
|
||||
.from_raw_sql(stmt)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
|
||||
while let Some(path) = paths.next().await {
|
||||
let path = path?;
|
||||
let ids = path.id_path.trim_matches('/').split('/');
|
||||
let mut parent_id = None;
|
||||
for id in ids {
|
||||
if let Ok(id) = id.parse() {
|
||||
let id = ChannelId::from_proto(id);
|
||||
if id == path.channel_id {
|
||||
break;
|
||||
}
|
||||
parent_id = Some(id);
|
||||
}
|
||||
}
|
||||
parents_by_child_id.insert(path.channel_id, parent_id);
|
||||
}
|
||||
|
||||
Ok(parents_by_child_id)
|
||||
}
|
||||
|
||||
/// Returns the channel with the given ID and:
|
||||
/// - true if the user is a member
|
||||
/// - false if the user hasn't accepted the invitation yet
|
||||
pub async fn get_channel(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
) -> Result<Option<(Channel, bool)>> {
|
||||
self.transaction(|tx| async move {
|
||||
let tx = tx;
|
||||
|
||||
let channel = channel::Entity::find_by_id(channel_id).one(&*tx).await?;
|
||||
|
||||
if let Some(channel) = channel {
|
||||
if self
|
||||
.check_user_is_channel_member(channel_id, user_id, &*tx)
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let channel_membership = channel_member::Entity::find()
|
||||
.filter(
|
||||
channel_member::Column::ChannelId
|
||||
.eq(channel_id)
|
||||
.and(channel_member::Column::UserId.eq(user_id)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?;
|
||||
|
||||
let is_accepted = channel_membership
|
||||
.map(|membership| membership.accepted)
|
||||
.unwrap_or(false);
|
||||
|
||||
Ok(Some((
|
||||
Channel {
|
||||
id: channel.id,
|
||||
name: channel.name,
|
||||
parent_id: None,
|
||||
},
|
||||
is_accepted,
|
||||
)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn room_id_for_channel(&self, channel_id: ChannelId) -> Result<RoomId> {
|
||||
self.transaction(|tx| async move {
|
||||
let tx = tx;
|
||||
let room = channel::Model {
|
||||
id: channel_id,
|
||||
..Default::default()
|
||||
}
|
||||
.find_related(room::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("invalid channel"))?;
|
||||
Ok(room.id)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryUserIds {
|
||||
UserId,
|
||||
}
|
298
crates/collab/src/db/queries/contacts.rs
Normal file
@ -0,0 +1,298 @@
|
||||
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn get_contacts(&self, user_id: UserId) -> Result<Vec<Contact>> {
|
||||
#[derive(Debug, FromQueryResult)]
|
||||
struct ContactWithUserBusyStatuses {
|
||||
user_id_a: UserId,
|
||||
user_id_b: UserId,
|
||||
a_to_b: bool,
|
||||
accepted: bool,
|
||||
should_notify: bool,
|
||||
user_a_busy: bool,
|
||||
user_b_busy: bool,
|
||||
}
|
||||
|
||||
self.transaction(|tx| async move {
|
||||
let user_a_participant = Alias::new("user_a_participant");
|
||||
let user_b_participant = Alias::new("user_b_participant");
|
||||
let mut db_contacts = contact::Entity::find()
|
||||
.column_as(
|
||||
Expr::tbl(user_a_participant.clone(), room_participant::Column::Id)
|
||||
.is_not_null(),
|
||||
"user_a_busy",
|
||||
)
|
||||
.column_as(
|
||||
Expr::tbl(user_b_participant.clone(), room_participant::Column::Id)
|
||||
.is_not_null(),
|
||||
"user_b_busy",
|
||||
)
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(user_id)
|
||||
.or(contact::Column::UserIdB.eq(user_id)),
|
||||
)
|
||||
.join_as(
|
||||
JoinType::LeftJoin,
|
||||
contact::Relation::UserARoomParticipant.def(),
|
||||
user_a_participant,
|
||||
)
|
||||
.join_as(
|
||||
JoinType::LeftJoin,
|
||||
contact::Relation::UserBRoomParticipant.def(),
|
||||
user_b_participant,
|
||||
)
|
||||
.into_model::<ContactWithUserBusyStatuses>()
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut contacts = Vec::new();
|
||||
while let Some(db_contact) = db_contacts.next().await {
|
||||
let db_contact = db_contact?;
|
||||
if db_contact.user_id_a == user_id {
|
||||
if db_contact.accepted {
|
||||
contacts.push(Contact::Accepted {
|
||||
user_id: db_contact.user_id_b,
|
||||
should_notify: db_contact.should_notify && db_contact.a_to_b,
|
||||
busy: db_contact.user_b_busy,
|
||||
});
|
||||
} else if db_contact.a_to_b {
|
||||
contacts.push(Contact::Outgoing {
|
||||
user_id: db_contact.user_id_b,
|
||||
})
|
||||
} else {
|
||||
contacts.push(Contact::Incoming {
|
||||
user_id: db_contact.user_id_b,
|
||||
should_notify: db_contact.should_notify,
|
||||
});
|
||||
}
|
||||
} else if db_contact.accepted {
|
||||
contacts.push(Contact::Accepted {
|
||||
user_id: db_contact.user_id_a,
|
||||
should_notify: db_contact.should_notify && !db_contact.a_to_b,
|
||||
busy: db_contact.user_a_busy,
|
||||
});
|
||||
} else if db_contact.a_to_b {
|
||||
contacts.push(Contact::Incoming {
|
||||
user_id: db_contact.user_id_a,
|
||||
should_notify: db_contact.should_notify,
|
||||
});
|
||||
} else {
|
||||
contacts.push(Contact::Outgoing {
|
||||
user_id: db_contact.user_id_a,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
contacts.sort_unstable_by_key(|contact| contact.user_id());
|
||||
|
||||
Ok(contacts)
|
||||
})
|
||||
.await
|
||||
}
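Each `contacts` row stores the user pair in canonical order plus an `a_to_b` flag, so `get_contacts` has to reinterpret every row from the requesting user's point of view. A condensed sketch of the branch structure for the `user_id == user_id_a` case, with a hypothetical `RowView` standing in for `ContactWithUserBusyStatuses`:

// Sketch of the classification above, seen from the side stored as user_id_a.
struct RowView {
    a_to_b: bool,
    accepted: bool,
}

enum Direction {
    Accepted,
    Outgoing,
    Incoming,
}

fn classify_as_user_a(row: &RowView) -> Direction {
    if row.accepted {
        Direction::Accepted
    } else if row.a_to_b {
        Direction::Outgoing // this user sent the request
    } else {
        Direction::Incoming // the other user sent it
    }
}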
|
||||
|
||||
pub async fn is_user_busy(&self, user_id: UserId) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
.filter(room_participant::Column::UserId.eq(user_id))
|
||||
.one(&*tx)
|
||||
.await?;
|
||||
Ok(participant.is_some())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn has_contact(&self, user_id_1: UserId, user_id_2: UserId) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b) = if user_id_1 < user_id_2 {
|
||||
(user_id_1, user_id_2)
|
||||
} else {
|
||||
(user_id_2, user_id_1)
|
||||
};
|
||||
|
||||
Ok(contact::Entity::find()
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b))
|
||||
.and(contact::Column::Accepted.eq(true)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.is_some())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_contact_request(&self, sender_id: UserId, receiver_id: UserId) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b, a_to_b) = if sender_id < receiver_id {
|
||||
(sender_id, receiver_id, true)
|
||||
} else {
|
||||
(receiver_id, sender_id, false)
|
||||
};
|
||||
|
||||
let rows_affected = contact::Entity::insert(contact::ActiveModel {
|
||||
user_id_a: ActiveValue::set(id_a),
|
||||
user_id_b: ActiveValue::set(id_b),
|
||||
a_to_b: ActiveValue::set(a_to_b),
|
||||
accepted: ActiveValue::set(false),
|
||||
should_notify: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([contact::Column::UserIdA, contact::Column::UserIdB])
|
||||
.values([
|
||||
(contact::Column::Accepted, true.into()),
|
||||
(contact::Column::ShouldNotify, false.into()),
|
||||
])
|
||||
.action_and_where(
|
||||
contact::Column::Accepted.eq(false).and(
|
||||
contact::Column::AToB
|
||||
.eq(a_to_b)
|
||||
.and(contact::Column::UserIdA.eq(id_b))
|
||||
.or(contact::Column::AToB
|
||||
.ne(a_to_b)
|
||||
.and(contact::Column::UserIdA.eq(id_a))),
|
||||
),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec_without_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
if rows_affected == 1 {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!("contact already requested"))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
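send_contact_request normalizes the pair so the smaller user id always lands in `user_id_a`, with `a_to_b` recording who initiated. The same normalization as a standalone sketch (hypothetical helper):

// Sketch: normalize (sender, receiver) into the stored (id_a, id_b, a_to_b)
// triple, matching the ordering used in send_contact_request above.
fn canonical_contact(sender_id: u64, receiver_id: u64) -> (u64, u64, bool) {
    if sender_id < receiver_id {
        (sender_id, receiver_id, true)
    } else {
        (receiver_id, sender_id, false)
    }
}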
|
||||
|
||||
/// Returns a bool indicating whether the removed contact had originally accepted or not
|
||||
///
|
||||
/// Deletes the contact identified by the requester and responder ids, and then returns
|
||||
/// whether the deleted contact had originally accepted or was a pending contact request.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `requester_id` - The user that initiates this request
|
||||
/// * `responder_id` - The user that will be removed
|
||||
pub async fn remove_contact(&self, requester_id: UserId, responder_id: UserId) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b) = if responder_id < requester_id {
|
||||
(responder_id, requester_id)
|
||||
} else {
|
||||
(requester_id, responder_id)
|
||||
};
|
||||
|
||||
let contact = contact::Entity::find()
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such contact"))?;
|
||||
|
||||
contact::Entity::delete_by_id(contact.id).exec(&*tx).await?;
|
||||
Ok(contact.accepted)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn dismiss_contact_notification(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
contact_user_id: UserId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b, a_to_b) = if user_id < contact_user_id {
|
||||
(user_id, contact_user_id, true)
|
||||
} else {
|
||||
(contact_user_id, user_id, false)
|
||||
};
|
||||
|
||||
let result = contact::Entity::update_many()
|
||||
.set(contact::ActiveModel {
|
||||
should_notify: ActiveValue::set(false),
|
||||
..Default::default()
|
||||
})
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b))
|
||||
.and(
|
||||
contact::Column::AToB
|
||||
.eq(a_to_b)
|
||||
.and(contact::Column::Accepted.eq(true))
|
||||
.or(contact::Column::AToB
|
||||
.ne(a_to_b)
|
||||
.and(contact::Column::Accepted.eq(false))),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("no such contact request"))?
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn respond_to_contact_request(
|
||||
&self,
|
||||
responder_id: UserId,
|
||||
requester_id: UserId,
|
||||
accept: bool,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b, a_to_b) = if responder_id < requester_id {
|
||||
(responder_id, requester_id, false)
|
||||
} else {
|
||||
(requester_id, responder_id, true)
|
||||
};
|
||||
let rows_affected = if accept {
|
||||
let result = contact::Entity::update_many()
|
||||
.set(contact::ActiveModel {
|
||||
accepted: ActiveValue::set(true),
|
||||
should_notify: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
})
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b))
|
||||
.and(contact::Column::AToB.eq(a_to_b)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
result.rows_affected
|
||||
} else {
|
||||
let result = contact::Entity::delete_many()
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b))
|
||||
.and(contact::Column::AToB.eq(a_to_b))
|
||||
.and(contact::Column::Accepted.eq(false)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
result.rows_affected
|
||||
};
|
||||
|
||||
if rows_affected == 1 {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!("no such contact request"))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
926
crates/collab/src/db/queries/projects.rs
Normal file
@ -0,0 +1,926 @@
|
||||
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn project_count_excluding_admins(&self) -> Result<usize> {
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryAs {
|
||||
Count,
|
||||
}
|
||||
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find()
|
||||
.select_only()
|
||||
.column_as(project::Column::Id.count(), QueryAs::Count)
|
||||
.inner_join(user::Entity)
|
||||
.filter(user::Column::Admin.eq(false))
|
||||
.into_values::<_, QueryAs>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.unwrap_or(0i64) as usize)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn share_project(
|
||||
&self,
|
||||
room_id: RoomId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
) -> Result<RoomGuard<(ProjectId, proto::Room)>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionId
|
||||
.eq(connection.id as i32),
|
||||
)
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find participant"))?;
|
||||
if participant.room_id != room_id {
|
||||
return Err(anyhow!("shared project on unexpected room"))?;
|
||||
}
|
||||
|
||||
let project = project::ActiveModel {
|
||||
room_id: ActiveValue::set(participant.room_id),
|
||||
host_user_id: ActiveValue::set(participant.user_id),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
if !worktrees.is_empty() {
|
||||
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
|
||||
worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree.id as i64),
|
||||
project_id: ActiveValue::set(project.id),
|
||||
abs_path: ActiveValue::set(worktree.abs_path.clone()),
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
completed_scan_id: ActiveValue::set(0),
|
||||
}
|
||||
}))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
project_collaborator::ActiveModel {
|
||||
project_id: ActiveValue::set(project.id),
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
user_id: ActiveValue::set(participant.user_id),
|
||||
replica_id: ActiveValue::set(ReplicaId(0)),
|
||||
is_host: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
Ok((project.id, room))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn unshare_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project not found"))?;
|
||||
if project.host_connection()? == connection {
|
||||
project::Entity::delete(project.into_active_model())
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
Ok((room, guest_connection_ids))
|
||||
} else {
|
||||
Err(anyhow!("cannot unshare a project hosted by another user"))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::HostConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project::Column::HostConnectionServerId.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
|
||||
self.update_project_worktrees(project.id, worktrees, &tx)
|
||||
.await?;
|
||||
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project.id, &tx).await?;
|
||||
let room = self.get_room(project.room_id, &tx).await?;
|
||||
Ok((room, guest_connection_ids))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub(in crate::db) async fn update_project_worktrees(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
if !worktrees.is_empty() {
|
||||
worktree::Entity::insert_many(worktrees.iter().map(|worktree| worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree.id as i64),
|
||||
project_id: ActiveValue::set(project_id),
|
||||
abs_path: ActiveValue::set(worktree.abs_path.clone()),
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
completed_scan_id: ActiveValue::set(0),
|
||||
}))
|
||||
.on_conflict(
|
||||
OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id])
|
||||
.update_column(worktree::Column::RootName)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
worktree::Entity::delete_many()
|
||||
.filter(worktree::Column::ProjectId.eq(project_id).and(
|
||||
worktree::Column::Id.is_not_in(worktrees.iter().map(|worktree| worktree.id as i64)),
|
||||
))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_worktree(
|
||||
&self,
|
||||
update: &proto::UpdateWorktree,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let worktree_id = update.worktree_id as i64;
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
// Ensure the update comes from the host.
|
||||
let _project = project::Entity::find_by_id(project_id)
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::HostConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project::Column::HostConnectionServerId.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
|
||||
// Update metadata.
|
||||
worktree::Entity::update(worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree_id),
|
||||
project_id: ActiveValue::set(project_id),
|
||||
root_name: ActiveValue::set(update.root_name.clone()),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
completed_scan_id: if update.is_last_update {
|
||||
ActiveValue::set(update.scan_id as i64)
|
||||
} else {
|
||||
ActiveValue::default()
|
||||
},
|
||||
abs_path: ActiveValue::set(update.abs_path.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
if !update.updated_entries.is_empty() {
|
||||
worktree_entry::Entity::insert_many(update.updated_entries.iter().map(|entry| {
|
||||
let mtime = entry.mtime.clone().unwrap_or_default();
|
||||
worktree_entry::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
worktree_id: ActiveValue::set(worktree_id),
|
||||
id: ActiveValue::set(entry.id as i64),
|
||||
is_dir: ActiveValue::set(entry.is_dir),
|
||||
path: ActiveValue::set(entry.path.clone()),
|
||||
inode: ActiveValue::set(entry.inode as i64),
|
||||
mtime_seconds: ActiveValue::set(mtime.seconds as i64),
|
||||
mtime_nanos: ActiveValue::set(mtime.nanos as i32),
|
||||
is_symlink: ActiveValue::set(entry.is_symlink),
|
||||
is_ignored: ActiveValue::set(entry.is_ignored),
|
||||
is_external: ActiveValue::set(entry.is_external),
|
||||
git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)),
|
||||
is_deleted: ActiveValue::set(false),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
}
|
||||
}))
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
worktree_entry::Column::ProjectId,
|
||||
worktree_entry::Column::WorktreeId,
|
||||
worktree_entry::Column::Id,
|
||||
])
|
||||
.update_columns([
|
||||
worktree_entry::Column::IsDir,
|
||||
worktree_entry::Column::Path,
|
||||
worktree_entry::Column::Inode,
|
||||
worktree_entry::Column::MtimeSeconds,
|
||||
worktree_entry::Column::MtimeNanos,
|
||||
worktree_entry::Column::IsSymlink,
|
||||
worktree_entry::Column::IsIgnored,
|
||||
worktree_entry::Column::GitStatus,
|
||||
worktree_entry::Column::ScanId,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !update.removed_entries.is_empty() {
|
||||
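// Removed entries are soft-deleted (is_deleted = true) and stamped with the scan that removed them, rather than dropped from the table.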
worktree_entry::Entity::update_many()
|
||||
.filter(
|
||||
worktree_entry::Column::ProjectId
|
||||
.eq(project_id)
|
||||
.and(worktree_entry::Column::WorktreeId.eq(worktree_id))
|
||||
.and(
|
||||
worktree_entry::Column::Id
|
||||
.is_in(update.removed_entries.iter().map(|id| *id as i64)),
|
||||
),
|
||||
)
|
||||
.set(worktree_entry::ActiveModel {
|
||||
is_deleted: ActiveValue::Set(true),
|
||||
scan_id: ActiveValue::Set(update.scan_id as i64),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !update.updated_repositories.is_empty() {
|
||||
worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
|
||||
|repository| worktree_repository::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
worktree_id: ActiveValue::set(worktree_id),
|
||||
work_directory_id: ActiveValue::set(repository.work_directory_id as i64),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
branch: ActiveValue::set(repository.branch.clone()),
|
||||
is_deleted: ActiveValue::set(false),
|
||||
},
|
||||
))
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
worktree_repository::Column::ProjectId,
|
||||
worktree_repository::Column::WorktreeId,
|
||||
worktree_repository::Column::WorkDirectoryId,
|
||||
])
|
||||
.update_columns([
|
||||
worktree_repository::Column::ScanId,
|
||||
worktree_repository::Column::Branch,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !update.removed_repositories.is_empty() {
|
||||
worktree_repository::Entity::update_many()
|
||||
.filter(
|
||||
worktree_repository::Column::ProjectId
|
||||
.eq(project_id)
|
||||
.and(worktree_repository::Column::WorktreeId.eq(worktree_id))
|
||||
.and(
|
||||
worktree_repository::Column::WorkDirectoryId
|
||||
.is_in(update.removed_repositories.iter().map(|id| *id as i64)),
|
||||
),
|
||||
)
|
||||
.set(worktree_repository::ActiveModel {
|
||||
is_deleted: ActiveValue::Set(true),
|
||||
scan_id: ActiveValue::Set(update.scan_id as i64),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
Ok(connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_diagnostic_summary(
|
||||
&self,
|
||||
update: &proto::UpdateDiagnosticSummary,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let worktree_id = update.worktree_id as i64;
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let summary = update
|
||||
.summary
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("invalid summary"))?;
|
||||
|
||||
// Ensure the update comes from the host.
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
||||
// Update summary.
|
||||
worktree_diagnostic_summary::Entity::insert(worktree_diagnostic_summary::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
worktree_id: ActiveValue::set(worktree_id),
|
||||
path: ActiveValue::set(summary.path.clone()),
|
||||
language_server_id: ActiveValue::set(summary.language_server_id as i64),
|
||||
error_count: ActiveValue::set(summary.error_count as i32),
|
||||
warning_count: ActiveValue::set(summary.warning_count as i32),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
worktree_diagnostic_summary::Column::ProjectId,
|
||||
worktree_diagnostic_summary::Column::WorktreeId,
|
||||
worktree_diagnostic_summary::Column::Path,
|
||||
])
|
||||
.update_columns([
|
||||
worktree_diagnostic_summary::Column::LanguageServerId,
|
||||
worktree_diagnostic_summary::Column::ErrorCount,
|
||||
worktree_diagnostic_summary::Column::WarningCount,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
Ok(connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn start_language_server(
|
||||
&self,
|
||||
update: &proto::StartLanguageServer,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let server = update
|
||||
.server
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("invalid language server"))?;
|
||||
|
||||
// Ensure the update comes from the host.
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
||||
// Add the newly-started language server.
|
||||
language_server::Entity::insert(language_server::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
id: ActiveValue::set(server.id as i64),
|
||||
name: ActiveValue::set(server.name.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
language_server::Column::ProjectId,
|
||||
language_server::Column::Id,
|
||||
])
|
||||
.update_column(language_server::Column::Name)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
Ok(connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_worktree_settings(
|
||||
&self,
|
||||
update: &proto::UpdateWorktreeSettings,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
// Ensure the update comes from the host.
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
||||
if let Some(content) = &update.content {
|
||||
worktree_settings_file::Entity::insert(worktree_settings_file::ActiveModel {
|
||||
project_id: ActiveValue::Set(project_id),
|
||||
worktree_id: ActiveValue::Set(update.worktree_id as i64),
|
||||
path: ActiveValue::Set(update.path.clone()),
|
||||
content: ActiveValue::Set(content.clone()),
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
worktree_settings_file::Column::ProjectId,
|
||||
worktree_settings_file::Column::WorktreeId,
|
||||
worktree_settings_file::Column::Path,
|
||||
])
|
||||
.update_column(worktree_settings_file::Column::Content)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
} else {
|
||||
worktree_settings_file::Entity::delete(worktree_settings_file::ActiveModel {
|
||||
project_id: ActiveValue::Set(project_id),
|
||||
worktree_id: ActiveValue::Set(update.worktree_id as i64),
|
||||
path: ActiveValue::Set(update.path.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
Ok(connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn join_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<(Project, ReplicaId)>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionId
|
||||
.eq(connection.id as i32),
|
||||
)
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("must join a room first"))?;
|
||||
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.room_id != participant.room_id {
|
||||
return Err(anyhow!("no such project"))?;
|
||||
}
|
||||
|
||||
let mut collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let replica_ids = collaborators
|
||||
.iter()
|
||||
.map(|c| c.replica_id)
|
||||
.collect::<HashSet<_>>();
|
||||
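// Pick the lowest replica id, starting at 1, that no existing collaborator is using.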
let mut replica_id = ReplicaId(1);
|
||||
while replica_ids.contains(&replica_id) {
|
||||
replica_id.0 += 1;
|
||||
}
|
||||
let new_collaborator = project_collaborator::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
user_id: ActiveValue::set(participant.user_id),
|
||||
replica_id: ActiveValue::set(replica_id),
|
||||
is_host: ActiveValue::set(false),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
collaborators.push(new_collaborator);
|
||||
|
||||
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
|
||||
let mut worktrees = db_worktrees
|
||||
.into_iter()
|
||||
.map(|db_worktree| {
|
||||
(
|
||||
db_worktree.id as u64,
|
||||
Worktree {
|
||||
id: db_worktree.id as u64,
|
||||
abs_path: db_worktree.abs_path,
|
||||
root_name: db_worktree.root_name,
|
||||
visible: db_worktree.visible,
|
||||
entries: Default::default(),
|
||||
repository_entries: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
settings_files: Default::default(),
|
||||
scan_id: db_worktree.scan_id as u64,
|
||||
completed_scan_id: db_worktree.completed_scan_id as u64,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
|
||||
// Populate worktree entries.
|
||||
{
|
||||
let mut db_entries = worktree_entry::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_entry::Column::ProjectId.eq(project_id))
|
||||
.add(worktree_entry::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_entry) = db_entries.next().await {
|
||||
let db_entry = db_entry?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_entry.worktree_id as u64)) {
|
||||
worktree.entries.push(proto::Entry {
|
||||
id: db_entry.id as u64,
|
||||
is_dir: db_entry.is_dir,
|
||||
path: db_entry.path,
|
||||
inode: db_entry.inode as u64,
|
||||
mtime: Some(proto::Timestamp {
|
||||
seconds: db_entry.mtime_seconds as u64,
|
||||
nanos: db_entry.mtime_nanos as u32,
|
||||
}),
|
||||
is_symlink: db_entry.is_symlink,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
git_status: db_entry.git_status.map(|status| status as i32),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate repository entries.
|
||||
{
|
||||
let mut db_repository_entries = worktree_repository::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_repository::Column::ProjectId.eq(project_id))
|
||||
.add(worktree_repository::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_repository_entry) = db_repository_entries.next().await {
|
||||
let db_repository_entry = db_repository_entry?;
|
||||
if let Some(worktree) =
|
||||
worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
|
||||
{
|
||||
worktree.repository_entries.insert(
|
||||
db_repository_entry.work_directory_id as u64,
|
||||
proto::RepositoryEntry {
|
||||
work_directory_id: db_repository_entry.work_directory_id as u64,
|
||||
branch: db_repository_entry.branch,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate worktree diagnostic summaries.
|
||||
{
|
||||
let mut db_summaries = worktree_diagnostic_summary::Entity::find()
|
||||
.filter(worktree_diagnostic_summary::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_summary) = db_summaries.next().await {
|
||||
let db_summary = db_summary?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_summary.worktree_id as u64)) {
|
||||
worktree
|
||||
.diagnostic_summaries
|
||||
.push(proto::DiagnosticSummary {
|
||||
path: db_summary.path,
|
||||
language_server_id: db_summary.language_server_id as u64,
|
||||
error_count: db_summary.error_count as u32,
|
||||
warning_count: db_summary.warning_count as u32,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate worktree settings files
|
||||
{
|
||||
let mut db_settings_files = worktree_settings_file::Entity::find()
|
||||
.filter(worktree_settings_file::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_settings_file) = db_settings_files.next().await {
|
||||
let db_settings_file = db_settings_file?;
|
||||
if let Some(worktree) =
|
||||
worktrees.get_mut(&(db_settings_file.worktree_id as u64))
|
||||
{
|
||||
worktree.settings_files.push(WorktreeSettingsFile {
|
||||
path: db_settings_file.path,
|
||||
content: db_settings_file.content,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate language servers.
|
||||
let language_servers = project
|
||||
.find_related(language_server::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let project = Project {
|
||||
collaborators: collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees,
|
||||
language_servers: language_servers
|
||||
.into_iter()
|
||||
.map(|language_server| proto::LanguageServer {
|
||||
id: language_server.id as u64,
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
Ok((project, replica_id as ReplicaId))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn leave_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<(proto::Room, LeftProject)>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let result = project_collaborator::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.add(project_collaborator::Column::ConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project_collaborator::Column::ConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("not a collaborator on this project"))?;
|
||||
}
|
||||
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let connection_ids = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| collaborator.connection())
|
||||
.collect();
|
||||
|
||||
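// Drop any follow state in this project where the leaving connection was either the leader or the follower.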
follower::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::any()
|
||||
.add(
|
||||
Condition::all()
|
||||
.add(follower::Column::ProjectId.eq(project_id))
|
||||
.add(
|
||||
follower::Column::LeaderConnectionServerId
|
||||
.eq(connection.owner_id),
|
||||
)
|
||||
.add(follower::Column::LeaderConnectionId.eq(connection.id)),
|
||||
)
|
||||
.add(
|
||||
Condition::all()
|
||||
.add(follower::Column::ProjectId.eq(project_id))
|
||||
.add(
|
||||
follower::Column::FollowerConnectionServerId
|
||||
.eq(connection.owner_id),
|
||||
)
|
||||
.add(follower::Column::FollowerConnectionId.eq(connection.id)),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(project.room_id, &tx).await?;
|
||||
let left_project = LeftProject {
|
||||
id: project_id,
|
||||
host_user_id: project.host_user_id,
|
||||
host_connection_id: project.host_connection()?,
|
||||
connection_ids,
|
||||
};
|
||||
Ok((room, left_project))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn project_collaborators(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ProjectCollaborator>>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.all(&*tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if collaborators
|
||||
.iter()
|
||||
.any(|collaborator| collaborator.connection_id == connection_id)
|
||||
{
|
||||
Ok(collaborators)
|
||||
} else {
|
||||
Err(anyhow!("no such project"))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn project_connection_ids(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
) -> Result<RoomGuard<HashSet<ConnectionId>>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut connection_ids = HashSet::default();
|
||||
while let Some(collaborator) = collaborators.next().await {
|
||||
let collaborator = collaborator?;
|
||||
connection_ids.insert(collaborator.connection());
|
||||
}
|
||||
|
||||
if connection_ids.contains(&connection_id) {
|
||||
Ok(connection_ids)
|
||||
} else {
|
||||
Err(anyhow!("no such project"))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn project_guest_connection_ids(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<ConnectionId>> {
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(
|
||||
project_collaborator::Column::ProjectId
|
||||
.eq(project_id)
|
||||
.and(project_collaborator::Column::IsHost.eq(false)),
|
||||
)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
|
||||
let mut guest_connection_ids = Vec::new();
|
||||
while let Some(collaborator) = collaborators.next().await {
|
||||
let collaborator = collaborator?;
|
||||
guest_connection_ids.push(collaborator.connection());
|
||||
}
|
||||
Ok(guest_connection_ids)
|
||||
}
|
||||
|
||||
pub async fn room_id_for_project(&self, project_id: ProjectId) -> Result<RoomId> {
|
||||
self.transaction(|tx| async move {
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project {} not found", project_id))?;
|
||||
Ok(project.room_id)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn follow(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
leader_connection: ConnectionId,
|
||||
follower_connection: ConnectionId,
|
||||
) -> Result<RoomGuard<proto::Room>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
follower::ActiveModel {
|
||||
room_id: ActiveValue::set(room_id),
|
||||
project_id: ActiveValue::set(project_id),
|
||||
leader_connection_server_id: ActiveValue::set(ServerId(
|
||||
leader_connection.owner_id as i32,
|
||||
)),
|
||||
leader_connection_id: ActiveValue::set(leader_connection.id as i32),
|
||||
follower_connection_server_id: ActiveValue::set(ServerId(
|
||||
follower_connection.owner_id as i32,
|
||||
)),
|
||||
follower_connection_id: ActiveValue::set(follower_connection.id as i32),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &*tx).await?;
|
||||
Ok(room)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn unfollow(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
leader_connection: ConnectionId,
|
||||
follower_connection: ConnectionId,
|
||||
) -> Result<RoomGuard<proto::Room>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
follower::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(follower::Column::ProjectId.eq(project_id))
|
||||
.add(
|
||||
follower::Column::LeaderConnectionServerId
|
||||
.eq(leader_connection.owner_id),
|
||||
)
|
||||
.add(follower::Column::LeaderConnectionId.eq(leader_connection.id))
|
||||
.add(
|
||||
follower::Column::FollowerConnectionServerId
|
||||
.eq(follower_connection.owner_id),
|
||||
)
|
||||
.add(follower::Column::FollowerConnectionId.eq(follower_connection.id)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &*tx).await?;
|
||||
Ok(room)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
1093 crates/collab/src/db/queries/rooms.rs Normal file
File diff suppressed because it is too large
81 crates/collab/src/db/queries/servers.rs Normal file
@ -0,0 +1,81 @@
use super::*;

impl Database {
    pub async fn create_server(&self, environment: &str) -> Result<ServerId> {
        self.transaction(|tx| async move {
            let server = server::ActiveModel {
                environment: ActiveValue::set(environment.into()),
                ..Default::default()
            }
            .insert(&*tx)
            .await?;
            Ok(server.id)
        })
        .await
    }

    pub async fn stale_room_ids(
        &self,
        environment: &str,
        new_server_id: ServerId,
    ) -> Result<Vec<RoomId>> {
        self.transaction(|tx| async move {
            #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
            enum QueryAs {
                RoomId,
            }

            let stale_server_epochs = self
                .stale_server_ids(environment, new_server_id, &tx)
                .await?;
            Ok(room_participant::Entity::find()
                .select_only()
                .column(room_participant::Column::RoomId)
                .distinct()
                .filter(
                    room_participant::Column::AnsweringConnectionServerId
                        .is_in(stale_server_epochs),
                )
                .into_values::<_, QueryAs>()
                .all(&*tx)
                .await?)
        })
        .await
    }

    pub async fn delete_stale_servers(
        &self,
        environment: &str,
        new_server_id: ServerId,
    ) -> Result<()> {
        self.transaction(|tx| async move {
            server::Entity::delete_many()
                .filter(
                    Condition::all()
                        .add(server::Column::Environment.eq(environment))
                        .add(server::Column::Id.ne(new_server_id)),
                )
                .exec(&*tx)
                .await?;
            Ok(())
        })
        .await
    }

    async fn stale_server_ids(
        &self,
        environment: &str,
        new_server_id: ServerId,
        tx: &DatabaseTransaction,
    ) -> Result<Vec<ServerId>> {
        let stale_servers = server::Entity::find()
            .filter(
                Condition::all()
                    .add(server::Column::Environment.eq(environment))
                    .add(server::Column::Id.ne(new_server_id)),
            )
            .all(&*tx)
            .await?;
        Ok(stale_servers.into_iter().map(|server| server.id).collect())
    }
}
349 crates/collab/src/db/queries/signups.rs Normal file
@ -0,0 +1,349 @@
|
||||
use super::*;
|
||||
use hyper::StatusCode;
|
||||
|
||||
impl Database {
|
||||
pub async fn create_invite_from_code(
|
||||
&self,
|
||||
code: &str,
|
||||
email_address: &str,
|
||||
device_id: Option<&str>,
|
||||
added_to_mailing_list: bool,
|
||||
) -> Result<Invite> {
|
||||
self.transaction(|tx| async move {
|
||||
let existing_user = user::Entity::find()
|
||||
.filter(user::Column::EmailAddress.eq(email_address))
|
||||
.one(&*tx)
|
||||
.await?;
|
||||
|
||||
if existing_user.is_some() {
|
||||
Err(anyhow!("email address is already in use"))?;
|
||||
}
|
||||
|
||||
let inviting_user_with_invites = match user::Entity::find()
|
||||
.filter(
|
||||
user::Column::InviteCode
|
||||
.eq(code)
|
||||
.and(user::Column::InviteCount.gt(0)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
{
|
||||
Some(inviting_user) => inviting_user,
|
||||
None => {
|
||||
return Err(Error::Http(
|
||||
StatusCode::UNAUTHORIZED,
|
||||
"unable to find an invite code with invites remaining".to_string(),
|
||||
))?
|
||||
}
|
||||
};
|
||||
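// Decrement the inviter's remaining invite count; the invite_count > 0 filter keeps it from going negative.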
user::Entity::update_many()
|
||||
.filter(
|
||||
user::Column::Id
|
||||
.eq(inviting_user_with_invites.id)
|
||||
.and(user::Column::InviteCount.gt(0)),
|
||||
)
|
||||
.col_expr(
|
||||
user::Column::InviteCount,
|
||||
Expr::col(user::Column::InviteCount).sub(1),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let signup = signup::Entity::insert(signup::ActiveModel {
|
||||
email_address: ActiveValue::set(email_address.into()),
|
||||
email_confirmation_code: ActiveValue::set(random_email_confirmation_code()),
|
||||
email_confirmation_sent: ActiveValue::set(false),
|
||||
inviting_user_id: ActiveValue::set(Some(inviting_user_with_invites.id)),
|
||||
platform_linux: ActiveValue::set(false),
|
||||
platform_mac: ActiveValue::set(false),
|
||||
platform_windows: ActiveValue::set(false),
|
||||
platform_unknown: ActiveValue::set(true),
|
||||
device_id: ActiveValue::set(device_id.map(|device_id| device_id.into())),
|
||||
added_to_mailing_list: ActiveValue::set(added_to_mailing_list),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(signup::Column::EmailAddress)
|
||||
.update_column(signup::Column::InvitingUserId)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(Invite {
|
||||
email_address: signup.email_address,
|
||||
email_confirmation_code: signup.email_confirmation_code,
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn create_user_from_invite(
|
||||
&self,
|
||||
invite: &Invite,
|
||||
user: NewUserParams,
|
||||
) -> Result<Option<NewUserResult>> {
|
||||
self.transaction(|tx| async {
|
||||
let tx = tx;
|
||||
let signup = signup::Entity::find()
|
||||
.filter(
|
||||
signup::Column::EmailAddress
|
||||
.eq(invite.email_address.as_str())
|
||||
.and(
|
||||
signup::Column::EmailConfirmationCode
|
||||
.eq(invite.email_confirmation_code.as_str()),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "no such invite".to_string()))?;
|
||||
|
||||
if signup.user_id.is_some() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let user = user::Entity::insert(user::ActiveModel {
|
||||
email_address: ActiveValue::set(Some(invite.email_address.clone())),
|
||||
github_login: ActiveValue::set(user.github_login.clone()),
|
||||
github_user_id: ActiveValue::set(Some(user.github_user_id)),
|
||||
admin: ActiveValue::set(false),
|
||||
invite_count: ActiveValue::set(user.invite_count),
|
||||
invite_code: ActiveValue::set(Some(random_invite_code())),
|
||||
metrics_id: ActiveValue::set(Uuid::new_v4()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(user::Column::GithubLogin)
|
||||
.update_columns([
|
||||
user::Column::EmailAddress,
|
||||
user::Column::GithubUserId,
|
||||
user::Column::Admin,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut signup = signup.into_active_model();
|
||||
signup.user_id = ActiveValue::set(Some(user.id));
|
||||
let signup = signup.update(&*tx).await?;
|
||||
|
||||
if let Some(inviting_user_id) = signup.inviting_user_id {
|
||||
let (user_id_a, user_id_b, a_to_b) = if inviting_user_id < user.id {
|
||||
(inviting_user_id, user.id, true)
|
||||
} else {
|
||||
(user.id, inviting_user_id, false)
|
||||
};
|
||||
|
||||
contact::Entity::insert(contact::ActiveModel {
|
||||
user_id_a: ActiveValue::set(user_id_a),
|
||||
user_id_b: ActiveValue::set(user_id_b),
|
||||
a_to_b: ActiveValue::set(a_to_b),
|
||||
should_notify: ActiveValue::set(true),
|
||||
accepted: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(OnConflict::new().do_nothing().to_owned())
|
||||
.exec_without_returning(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(Some(NewUserResult {
|
||||
user_id: user.id,
|
||||
metrics_id: user.metrics_id.to_string(),
|
||||
inviting_user_id: signup.inviting_user_id,
|
||||
signup_device_id: signup.device_id,
|
||||
}))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn set_invite_count_for_user(&self, id: UserId, count: i32) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
if count > 0 {
|
||||
user::Entity::update_many()
|
||||
.filter(
|
||||
user::Column::Id
|
||||
.eq(id)
|
||||
.and(user::Column::InviteCode.is_null()),
|
||||
)
|
||||
.set(user::ActiveModel {
|
||||
invite_code: ActiveValue::set(Some(random_invite_code())),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
user::Entity::update_many()
|
||||
.filter(user::Column::Id.eq(id))
|
||||
.set(user::ActiveModel {
|
||||
invite_count: ActiveValue::set(count),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_invite_code_for_user(&self, id: UserId) -> Result<Option<(String, i32)>> {
|
||||
self.transaction(|tx| async move {
|
||||
match user::Entity::find_by_id(id).one(&*tx).await? {
|
||||
Some(user) if user.invite_code.is_some() => {
|
||||
Ok(Some((user.invite_code.unwrap(), user.invite_count)))
|
||||
}
|
||||
_ => Ok(None),
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_for_invite_code(&self, code: &str) -> Result<User> {
|
||||
self.transaction(|tx| async move {
|
||||
user::Entity::find()
|
||||
.filter(user::Column::InviteCode.eq(code))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
Error::Http(
|
||||
StatusCode::NOT_FOUND,
|
||||
"that invite code does not exist".to_string(),
|
||||
)
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn create_signup(&self, signup: &NewSignup) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
signup::Entity::insert(signup::ActiveModel {
|
||||
email_address: ActiveValue::set(signup.email_address.clone()),
|
||||
email_confirmation_code: ActiveValue::set(random_email_confirmation_code()),
|
||||
email_confirmation_sent: ActiveValue::set(false),
|
||||
platform_mac: ActiveValue::set(signup.platform_mac),
|
||||
platform_windows: ActiveValue::set(signup.platform_windows),
|
||||
platform_linux: ActiveValue::set(signup.platform_linux),
|
||||
platform_unknown: ActiveValue::set(false),
|
||||
editor_features: ActiveValue::set(Some(signup.editor_features.clone())),
|
||||
programming_languages: ActiveValue::set(Some(signup.programming_languages.clone())),
|
||||
device_id: ActiveValue::set(signup.device_id.clone()),
|
||||
added_to_mailing_list: ActiveValue::set(signup.added_to_mailing_list),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(signup::Column::EmailAddress)
|
||||
.update_columns([
|
||||
signup::Column::PlatformMac,
|
||||
signup::Column::PlatformWindows,
|
||||
signup::Column::PlatformLinux,
|
||||
signup::Column::EditorFeatures,
|
||||
signup::Column::ProgrammingLanguages,
|
||||
signup::Column::DeviceId,
|
||||
signup::Column::AddedToMailingList,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_signup(&self, email_address: &str) -> Result<signup::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
let signup = signup::Entity::find()
|
||||
.filter(signup::Column::EmailAddress.eq(email_address))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
anyhow!("signup with email address {} doesn't exist", email_address)
|
||||
})?;
|
||||
|
||||
Ok(signup)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_waitlist_summary(&self) -> Result<WaitlistSummary> {
|
||||
self.transaction(|tx| async move {
|
||||
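// Aggregate per-platform counts over signups that have not yet been sent a confirmation email.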
let query = "
|
||||
SELECT
|
||||
COUNT(*) as count,
|
||||
COALESCE(SUM(CASE WHEN platform_linux THEN 1 ELSE 0 END), 0) as linux_count,
|
||||
COALESCE(SUM(CASE WHEN platform_mac THEN 1 ELSE 0 END), 0) as mac_count,
|
||||
COALESCE(SUM(CASE WHEN platform_windows THEN 1 ELSE 0 END), 0) as windows_count,
|
||||
COALESCE(SUM(CASE WHEN platform_unknown THEN 1 ELSE 0 END), 0) as unknown_count
|
||||
FROM (
|
||||
SELECT *
|
||||
FROM signups
|
||||
WHERE
|
||||
NOT email_confirmation_sent
|
||||
) AS unsent
|
||||
";
|
||||
Ok(
|
||||
WaitlistSummary::find_by_statement(Statement::from_sql_and_values(
|
||||
self.pool.get_database_backend(),
|
||||
query.into(),
|
||||
vec![],
|
||||
))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("invalid result"))?,
|
||||
)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()> {
|
||||
let emails = invites
|
||||
.iter()
|
||||
.map(|s| s.email_address.as_str())
|
||||
.collect::<Vec<_>>();
|
||||
self.transaction(|tx| async {
|
||||
let tx = tx;
|
||||
signup::Entity::update_many()
|
||||
.filter(signup::Column::EmailAddress.is_in(emails.iter().copied()))
|
||||
.set(signup::ActiveModel {
|
||||
email_confirmation_sent: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_unsent_invites(&self, count: usize) -> Result<Vec<Invite>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(signup::Entity::find()
|
||||
.select_only()
|
||||
.column(signup::Column::EmailAddress)
|
||||
.column(signup::Column::EmailConfirmationCode)
|
||||
.filter(
|
||||
signup::Column::EmailConfirmationSent.eq(false).and(
|
||||
signup::Column::PlatformMac
|
||||
.eq(true)
|
||||
.or(signup::Column::PlatformUnknown.eq(true)),
|
||||
),
|
||||
)
|
||||
.order_by_asc(signup::Column::CreatedAt)
|
||||
.limit(count as u64)
|
||||
.into_model()
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
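// Invite codes are 16-character nanoids; email confirmation codes below are 64 characters.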
fn random_invite_code() -> String {
|
||||
nanoid::nanoid!(16)
|
||||
}
|
||||
|
||||
fn random_email_confirmation_code() -> String {
|
||||
nanoid::nanoid!(64)
|
||||
}
|
243 crates/collab/src/db/queries/users.rs Normal file
@ -0,0 +1,243 @@
|
||||
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn create_user(
|
||||
&self,
|
||||
email_address: &str,
|
||||
admin: bool,
|
||||
params: NewUserParams,
|
||||
) -> Result<NewUserResult> {
|
||||
self.transaction(|tx| async {
|
||||
let tx = tx;
|
||||
let user = user::Entity::insert(user::ActiveModel {
|
||||
email_address: ActiveValue::set(Some(email_address.into())),
|
||||
github_login: ActiveValue::set(params.github_login.clone()),
|
||||
github_user_id: ActiveValue::set(Some(params.github_user_id)),
|
||||
admin: ActiveValue::set(admin),
|
||||
metrics_id: ActiveValue::set(Uuid::new_v4()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(user::Column::GithubLogin)
|
||||
.update_column(user::Column::GithubLogin)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(NewUserResult {
|
||||
user_id: user.id,
|
||||
metrics_id: user.metrics_id.to_string(),
|
||||
signup_device_id: None,
|
||||
inviting_user_id: None,
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_by_id(&self, id: UserId) -> Result<Option<user::Model>> {
|
||||
self.transaction(|tx| async move { Ok(user::Entity::find_by_id(id).one(&*tx).await?) })
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<user::Model>> {
|
||||
self.transaction(|tx| async {
|
||||
let tx = tx;
|
||||
Ok(user::Entity::find()
|
||||
.filter(user::Column::Id.is_in(ids.iter().copied()))
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_or_create_user_by_github_account(
|
||||
&self,
|
||||
github_login: &str,
|
||||
github_user_id: Option<i32>,
|
||||
github_email: Option<&str>,
|
||||
) -> Result<Option<User>> {
|
||||
self.transaction(|tx| async move {
|
||||
let tx = &*tx;
|
||||
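// Prefer matching by GitHub user id, falling back to the login, and update whichever field is stale on the matched record.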
if let Some(github_user_id) = github_user_id {
|
||||
if let Some(user_by_github_user_id) = user::Entity::find()
|
||||
.filter(user::Column::GithubUserId.eq(github_user_id))
|
||||
.one(tx)
|
||||
.await?
|
||||
{
|
||||
let mut user_by_github_user_id = user_by_github_user_id.into_active_model();
|
||||
user_by_github_user_id.github_login = ActiveValue::set(github_login.into());
|
||||
Ok(Some(user_by_github_user_id.update(tx).await?))
|
||||
} else if let Some(user_by_github_login) = user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(tx)
|
||||
.await?
|
||||
{
|
||||
let mut user_by_github_login = user_by_github_login.into_active_model();
|
||||
user_by_github_login.github_user_id = ActiveValue::set(Some(github_user_id));
|
||||
Ok(Some(user_by_github_login.update(tx).await?))
|
||||
} else {
|
||||
let user = user::Entity::insert(user::ActiveModel {
|
||||
email_address: ActiveValue::set(github_email.map(|email| email.into())),
|
||||
github_login: ActiveValue::set(github_login.into()),
|
||||
github_user_id: ActiveValue::set(Some(github_user_id)),
|
||||
admin: ActiveValue::set(false),
|
||||
invite_count: ActiveValue::set(0),
|
||||
invite_code: ActiveValue::set(None),
|
||||
metrics_id: ActiveValue::set(Uuid::new_v4()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
Ok(Some(user))
|
||||
}
|
||||
} else {
|
||||
Ok(user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(tx)
|
||||
.await?)
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_all_users(&self, page: u32, limit: u32) -> Result<Vec<User>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(user::Entity::find()
|
||||
.order_by_asc(user::Column::GithubLogin)
|
||||
.limit(limit as u64)
|
||||
.offset(page as u64 * limit as u64)
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_users_with_no_invites(
|
||||
&self,
|
||||
invited_by_another_user: bool,
|
||||
) -> Result<Vec<User>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(user::Entity::find()
|
||||
.filter(
|
||||
user::Column::InviteCount
|
||||
.eq(0)
|
||||
.and(if invited_by_another_user {
|
||||
user::Column::InviterId.is_not_null()
|
||||
} else {
|
||||
user::Column::InviterId.is_null()
|
||||
}),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_metrics_id(&self, id: UserId) -> Result<String> {
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryAs {
|
||||
MetricsId,
|
||||
}
|
||||
|
||||
self.transaction(|tx| async move {
|
||||
let metrics_id: Uuid = user::Entity::find_by_id(id)
|
||||
.select_only()
|
||||
.column(user::Column::MetricsId)
|
||||
.into_values::<_, QueryAs>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find user"))?;
|
||||
Ok(metrics_id.to_string())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn set_user_is_admin(&self, id: UserId, is_admin: bool) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
user::Entity::update_many()
|
||||
.filter(user::Column::Id.eq(id))
|
||||
.set(user::ActiveModel {
|
||||
admin: ActiveValue::set(is_admin),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
user::Entity::update_many()
|
||||
.filter(user::Column::Id.eq(id))
|
||||
.set(user::ActiveModel {
|
||||
connected_once: ActiveValue::set(connected_once),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn destroy_user(&self, id: UserId) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
access_token::Entity::delete_many()
|
||||
.filter(access_token::Column::UserId.eq(id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
user::Entity::delete_by_id(id).exec(&*tx).await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
|
||||
self.transaction(|tx| async {
|
||||
let tx = tx;
|
||||
let like_string = Self::fuzzy_like_string(name_query);
|
||||
let query = "
|
||||
SELECT users.*
|
||||
FROM users
|
||||
WHERE github_login ILIKE $1
|
||||
ORDER BY github_login <-> $2
|
||||
LIMIT $3
|
||||
";
|
||||
|
||||
Ok(user::Entity::find()
|
||||
.from_raw_sql(Statement::from_sql_and_values(
|
||||
self.pool.get_database_backend(),
|
||||
query.into(),
|
||||
vec![like_string.into(), name_query.into(), limit.into()],
|
||||
))
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
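// Builds an ILIKE pattern that matches the query's alphanumeric characters in order, e.g. "foo" becomes "%f%o%o%".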
pub fn fuzzy_like_string(string: &str) -> String {
|
||||
let mut result = String::with_capacity(string.len() * 2 + 1);
|
||||
for c in string.chars() {
|
||||
if c.is_alphanumeric() {
|
||||
result.push('%');
|
||||
result.push(c);
|
||||
}
|
||||
}
|
||||
result.push('%');
|
||||
result
|
||||
}
|
||||
}
|
@ -1,57 +0,0 @@
|
||||
use super::{SignupId, UserId};
|
||||
use sea_orm::{entity::prelude::*, FromQueryResult};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "signups")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: SignupId,
|
||||
pub email_address: String,
|
||||
pub email_confirmation_code: String,
|
||||
pub email_confirmation_sent: bool,
|
||||
pub created_at: DateTime,
|
||||
pub device_id: Option<String>,
|
||||
pub user_id: Option<UserId>,
|
||||
pub inviting_user_id: Option<UserId>,
|
||||
pub platform_mac: bool,
|
||||
pub platform_linux: bool,
|
||||
pub platform_windows: bool,
|
||||
pub platform_unknown: bool,
|
||||
pub editor_features: Option<Vec<String>>,
|
||||
pub programming_languages: Option<Vec<String>>,
|
||||
pub added_to_mailing_list: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
|
||||
pub struct Invite {
|
||||
pub email_address: String,
|
||||
pub email_confirmation_code: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct NewSignup {
|
||||
pub email_address: String,
|
||||
pub platform_mac: bool,
|
||||
pub platform_windows: bool,
|
||||
pub platform_linux: bool,
|
||||
pub editor_features: Vec<String>,
|
||||
pub programming_languages: Vec<String>,
|
||||
pub device_id: Option<String>,
|
||||
pub added_to_mailing_list: bool,
|
||||
pub created_at: Option<DateTime>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromQueryResult)]
|
||||
pub struct WaitlistSummary {
|
||||
pub count: i64,
|
||||
pub linux_count: i64,
|
||||
pub mac_count: i64,
|
||||
pub windows_count: i64,
|
||||
pub unknown_count: i64,
|
||||
}
|
24 crates/collab/src/db/tables.rs Normal file
@ -0,0 +1,24 @@
pub mod access_token;
pub mod buffer;
pub mod buffer_operation;
pub mod buffer_snapshot;
pub mod channel;
pub mod channel_buffer_collaborator;
pub mod channel_member;
pub mod channel_path;
pub mod contact;
pub mod follower;
pub mod language_server;
pub mod project;
pub mod project_collaborator;
pub mod room;
pub mod room_participant;
pub mod server;
pub mod signup;
pub mod user;
pub mod worktree;
pub mod worktree_diagnostic_summary;
pub mod worktree_entry;
pub mod worktree_repository;
pub mod worktree_repository_statuses;
pub mod worktree_settings_file;
@ -1,4 +1,4 @@
use super::{AccessTokenId, UserId};
use crate::db::{AccessTokenId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
45 crates/collab/src/db/tables/buffer.rs Normal file
@ -0,0 +1,45 @@
|
||||
use crate::db::{BufferId, ChannelId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "buffers")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: BufferId,
|
||||
pub epoch: i32,
|
||||
pub channel_id: ChannelId,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::buffer_operation::Entity")]
|
||||
Operations,
|
||||
#[sea_orm(has_many = "super::buffer_snapshot::Entity")]
|
||||
Snapshots,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel::Entity",
|
||||
from = "Column::ChannelId",
|
||||
to = "super::channel::Column::Id"
|
||||
)]
|
||||
Channel,
|
||||
}
|
||||
|
||||
impl Related<super::buffer_operation::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Operations.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::buffer_snapshot::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Snapshots.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Channel.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
34 crates/collab/src/db/tables/buffer_operation.rs Normal file
@ -0,0 +1,34 @@
|
||||
use crate::db::BufferId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "buffer_operations")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub buffer_id: BufferId,
|
||||
#[sea_orm(primary_key)]
|
||||
pub epoch: i32,
|
||||
#[sea_orm(primary_key)]
|
||||
pub lamport_timestamp: i32,
|
||||
#[sea_orm(primary_key)]
|
||||
pub replica_id: i32,
|
||||
pub value: Vec<u8>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::buffer::Entity",
|
||||
from = "Column::BufferId",
|
||||
to = "super::buffer::Column::Id"
|
||||
)]
|
||||
Buffer,
|
||||
}
|
||||
|
||||
impl Related<super::buffer::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Buffer.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
31 crates/collab/src/db/tables/buffer_snapshot.rs Normal file
@ -0,0 +1,31 @@
|
||||
use crate::db::BufferId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "buffer_snapshots")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub buffer_id: BufferId,
|
||||
#[sea_orm(primary_key)]
|
||||
pub epoch: i32,
|
||||
pub text: String,
|
||||
pub operation_serialization_version: i32,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::buffer::Entity",
|
||||
from = "Column::BufferId",
|
||||
to = "super::buffer::Column::Id"
|
||||
)]
|
||||
Buffer,
|
||||
}
|
||||
|
||||
impl Related<super::buffer::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Buffer.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
@ -1,4 +1,4 @@
use super::ChannelId;
use crate::db::ChannelId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
@ -15,8 +15,12 @@ impl ActiveModelBehavior for ActiveModel {}
pub enum Relation {
    #[sea_orm(has_one = "super::room::Entity")]
    Room,
    #[sea_orm(has_one = "super::buffer::Entity")]
    Buffer,
    #[sea_orm(has_many = "super::channel_member::Entity")]
    Member,
    #[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")]
    BufferCollaborators,
}

impl Related<super::channel_member::Entity> for Entity {
@ -31,8 +35,14 @@ impl Related<super::room::Entity> for Entity {
    }
}

// impl Related<super::follower::Entity> for Entity {
//     fn to() -> RelationDef {
//         Relation::Follower.def()
//     }
// }
impl Related<super::buffer::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Buffer.def()
    }
}

impl Related<super::channel_buffer_collaborator::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::BufferCollaborators.def()
    }
}
43 crates/collab/src/db/tables/channel_buffer_collaborator.rs Normal file
@ -0,0 +1,43 @@
|
||||
use crate::db::{ChannelBufferCollaboratorId, ChannelId, ReplicaId, ServerId, UserId};
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "channel_buffer_collaborators")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: ChannelBufferCollaboratorId,
|
||||
pub channel_id: ChannelId,
|
||||
pub connection_id: i32,
|
||||
pub connection_server_id: ServerId,
|
||||
pub connection_lost: bool,
|
||||
pub user_id: UserId,
|
||||
pub replica_id: ReplicaId,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn connection(&self) -> ConnectionId {
|
||||
ConnectionId {
|
||||
owner_id: self.connection_server_id.0 as u32,
|
||||
id: self.connection_id as u32,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel::Entity",
|
||||
from = "Column::ChannelId",
|
||||
to = "super::channel::Column::Id"
|
||||
)]
|
||||
Channel,
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Channel.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
@ -1,6 +1,4 @@
use crate::db::channel_member;

use super::{ChannelId, ChannelMemberId, UserId};
use crate::db::{channel_member, ChannelId, ChannelMemberId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
@ -1,4 +1,4 @@
use super::ChannelId;
use crate::db::ChannelId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
@ -1,4 +1,4 @@
use super::{ContactId, UserId};
use crate::db::{ContactId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
@ -30,29 +30,3 @@ pub enum Relation {
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Contact {
    Accepted {
        user_id: UserId,
        should_notify: bool,
        busy: bool,
    },
    Outgoing {
        user_id: UserId,
    },
    Incoming {
        user_id: UserId,
        should_notify: bool,
    },
}

impl Contact {
    pub fn user_id(&self) -> UserId {
        match self {
            Contact::Accepted { user_id, .. } => *user_id,
            Contact::Outgoing { user_id } => *user_id,
            Contact::Incoming { user_id, .. } => *user_id,
        }
    }
}
@ -1,9 +1,8 @@
use super::{FollowerId, ProjectId, RoomId, ServerId};
use crate::db::{FollowerId, ProjectId, RoomId, ServerId};
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
use serde::Serialize;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)]
#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "followers")]
pub struct Model {
    #[sea_orm(primary_key)]
@ -1,4 +1,4 @@
use super::ProjectId;
use crate::db::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@ -1,4 +1,4 @@
use super::{ProjectId, Result, RoomId, ServerId, UserId};
use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
use anyhow::anyhow;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@ -1,4 +1,4 @@
use super::{ProjectCollaboratorId, ProjectId, ReplicaId, ServerId, UserId};
use crate::db::{ProjectCollaboratorId, ProjectId, ReplicaId, ServerId, UserId};
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@ -1,4 +1,4 @@
use super::{ChannelId, RoomId};
use crate::db::{ChannelId, RoomId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Default, Debug, PartialEq, Eq, DeriveEntityModel)]
@ -1,4 +1,4 @@
use super::{ProjectId, RoomId, RoomParticipantId, ServerId, UserId};
use crate::db::{ProjectId, RoomId, RoomParticipantId, ServerId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@ -1,4 +1,4 @@
use super::ServerId;
use crate::db::ServerId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
28 crates/collab/src/db/tables/signup.rs Normal file
@ -0,0 +1,28 @@
use crate::db::{SignupId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "signups")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: SignupId,
    pub email_address: String,
    pub email_confirmation_code: String,
    pub email_confirmation_sent: bool,
    pub created_at: DateTime,
    pub device_id: Option<String>,
    pub user_id: Option<UserId>,
    pub inviting_user_id: Option<UserId>,
    pub platform_mac: bool,
    pub platform_linux: bool,
    pub platform_windows: bool,
    pub platform_unknown: bool,
    pub editor_features: Option<Vec<String>>,
    pub programming_languages: Option<Vec<String>>,
    pub added_to_mailing_list: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
@@ -1,4 +1,4 @@
-use super::UserId;
+use crate::db::UserId;
 use sea_orm::entity::prelude::*;
 use serde::Serialize;
 
@@ -1,4 +1,4 @@
-use super::ProjectId;
+use crate::db::ProjectId;
 use sea_orm::entity::prelude::*;
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@@ -1,4 +1,4 @@
-use super::ProjectId;
+use crate::db::ProjectId;
 use sea_orm::entity::prelude::*;
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@@ -1,4 +1,4 @@
-use super::ProjectId;
+use crate::db::ProjectId;
 use sea_orm::entity::prelude::*;
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@@ -1,4 +1,4 @@
-use super::ProjectId;
+use crate::db::ProjectId;
 use sea_orm::entity::prelude::*;
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@@ -1,4 +1,4 @@
-use super::ProjectId;
+use crate::db::ProjectId;
 use sea_orm::entity::prelude::*;
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@@ -1,4 +1,4 @@
-use super::ProjectId;
+use crate::db::ProjectId;
 use sea_orm::entity::prelude::*;
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
File diff suppressed because it is too large
165
crates/collab/src/db/tests/buffer_tests.rs
Normal file
@@ -0,0 +1,165 @@
+use super::*;
+use crate::test_both_dbs;
+use language::proto;
+use text::Buffer;
+
+test_both_dbs!(
+    test_channel_buffers,
+    test_channel_buffers_postgres,
+    test_channel_buffers_sqlite
+);
+
+async fn test_channel_buffers(db: &Arc<Database>) {
+    let a_id = db
+        .create_user(
+            "user_a@example.com",
+            false,
+            NewUserParams {
+                github_login: "user_a".into(),
+                github_user_id: 101,
+                invite_count: 0,
+            },
+        )
+        .await
+        .unwrap()
+        .user_id;
+    let b_id = db
+        .create_user(
+            "user_b@example.com",
+            false,
+            NewUserParams {
+                github_login: "user_b".into(),
+                github_user_id: 102,
+                invite_count: 0,
+            },
+        )
+        .await
+        .unwrap()
+        .user_id;
+
+    // This user will not be a part of the channel
+    let c_id = db
+        .create_user(
+            "user_c@example.com",
+            false,
+            NewUserParams {
+                github_login: "user_c".into(),
+                github_user_id: 102,
+                invite_count: 0,
+            },
+        )
+        .await
+        .unwrap()
+        .user_id;
+
+    let owner_id = db.create_server("production").await.unwrap().0 as u32;
+
+    let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap();
+
+    db.invite_channel_member(zed_id, b_id, a_id, false)
+        .await
+        .unwrap();
+
+    db.respond_to_channel_invite(zed_id, b_id, true)
+        .await
+        .unwrap();
+
+    let connection_id_a = ConnectionId { owner_id, id: 1 };
+    let _ = db
+        .join_channel_buffer(zed_id, a_id, connection_id_a)
+        .await
+        .unwrap();
+
+    let mut buffer_a = Buffer::new(0, 0, "".to_string());
+    let mut operations = Vec::new();
+    operations.push(buffer_a.edit([(0..0, "hello world")]));
+    operations.push(buffer_a.edit([(5..5, ", cruel")]));
+    operations.push(buffer_a.edit([(0..5, "goodbye")]));
+    operations.push(buffer_a.undo().unwrap().1);
+    assert_eq!(buffer_a.text(), "hello, cruel world");
+
+    let operations = operations
+        .into_iter()
+        .map(|op| proto::serialize_operation(&language::Operation::Buffer(op)))
+        .collect::<Vec<_>>();
+
+    db.update_channel_buffer(zed_id, a_id, &operations)
+        .await
+        .unwrap();
+
+    let connection_id_b = ConnectionId { owner_id, id: 2 };
+    let buffer_response_b = db
+        .join_channel_buffer(zed_id, b_id, connection_id_b)
+        .await
+        .unwrap();
+
+    let mut buffer_b = Buffer::new(0, 0, buffer_response_b.base_text);
+    buffer_b
+        .apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
+            let operation = proto::deserialize_operation(operation).unwrap();
+            if let language::Operation::Buffer(operation) = operation {
+                operation
+            } else {
+                unreachable!()
+            }
+        }))
+        .unwrap();
+
+    assert_eq!(buffer_b.text(), "hello, cruel world");
+
+    // Ensure that C fails to open the buffer
+    assert!(db
+        .join_channel_buffer(zed_id, c_id, ConnectionId { owner_id, id: 3 })
+        .await
+        .is_err());
+
+    // Ensure that both collaborators have shown up
+    assert_eq!(
+        buffer_response_b.collaborators,
+        &[
+            rpc::proto::Collaborator {
+                user_id: a_id.to_proto(),
+                peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
+                replica_id: 0,
+            },
+            rpc::proto::Collaborator {
+                user_id: b_id.to_proto(),
+                peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
+                replica_id: 1,
+            }
+        ]
+    );
+
+    // Ensure that get_channel_buffer_collaborators works
+    let zed_collaborats = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
+    assert_eq!(zed_collaborats, &[a_id, b_id]);
+
+    let collaborators = db
+        .leave_channel_buffer(zed_id, connection_id_b)
+        .await
+        .unwrap();
+
+    assert_eq!(collaborators, &[connection_id_a],);
+
+    let cargo_id = db.create_root_channel("cargo", "2", a_id).await.unwrap();
+    let _ = db
+        .join_channel_buffer(cargo_id, a_id, connection_id_a)
+        .await
+        .unwrap();
+
+    db.leave_channel_buffers(connection_id_a).await.unwrap();
+
+    let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
+    let cargo_collaborators = db.get_channel_buffer_collaborators(cargo_id).await.unwrap();
+    assert_eq!(zed_collaborators, &[]);
+    assert_eq!(cargo_collaborators, &[]);
+
+    // When everyone has left the channel, the operations are collapsed into
+    // a new base text.
+    let buffer_response_b = db
+        .join_channel_buffer(zed_id, b_id, connection_id_b)
+        .await
+        .unwrap();
+    assert_eq!(buffer_response_b.base_text, "hello, cruel world");
+    assert_eq!(buffer_response_b.operations, &[]);
+}
1573
crates/collab/src/db/tests/db_tests.rs
Normal file
File diff suppressed because it is too large
@@ -35,8 +35,8 @@ use lazy_static::lazy_static;
 use prometheus::{register_int_gauge, IntGauge};
 use rpc::{
     proto::{
-        self, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo,
-        RequestMessage,
+        self, Ack, AddChannelBufferCollaborator, AnyTypedEnvelope, EntityMessage, EnvelopedMessage,
+        LiveKitConnectionInfo, RequestMessage,
     },
     Connection, ConnectionId, Peer, Receipt, TypedEnvelope,
 };
@@ -248,6 +248,9 @@ impl Server {
             .add_request_handler(remove_channel_member)
             .add_request_handler(set_channel_member_admin)
             .add_request_handler(rename_channel)
+            .add_request_handler(join_channel_buffer)
+            .add_request_handler(leave_channel_buffer)
+            .add_message_handler(update_channel_buffer)
             .add_request_handler(get_channel_members)
             .add_request_handler(respond_to_channel_invite)
             .add_request_handler(join_channel)
@@ -851,6 +854,10 @@ async fn connection_lost(
         .await
         .trace_err();
 
+    leave_channel_buffers_for_session(&session)
+        .await
+        .trace_err();
+
     futures::select_biased! {
         _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
             leave_room_for_session(&session).await.trace_err();
@ -866,6 +873,8 @@ async fn connection_lost(
|
||||
}
|
||||
}
|
||||
update_user_contacts(session.user_id, &session).await?;
|
||||
|
||||
|
||||
}
|
||||
_ = teardown.changed().fuse() => {}
|
||||
}
|
||||
@ -2478,6 +2487,104 @@ async fn join_channel(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn join_channel_buffer(
|
||||
request: proto::JoinChannelBuffer,
|
||||
response: Response<proto::JoinChannelBuffer>,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
let db = session.db().await;
|
||||
let channel_id = ChannelId::from_proto(request.channel_id);
|
||||
|
||||
let open_response = db
|
||||
.join_channel_buffer(channel_id, session.user_id, session.connection_id)
|
||||
.await?;
|
||||
|
||||
let replica_id = open_response.replica_id;
|
||||
let collaborators = open_response.collaborators.clone();
|
||||
|
||||
response.send(open_response)?;
|
||||
|
||||
let update = AddChannelBufferCollaborator {
|
||||
channel_id: channel_id.to_proto(),
|
||||
collaborator: Some(proto::Collaborator {
|
||||
user_id: session.user_id.to_proto(),
|
||||
peer_id: Some(session.connection_id.into()),
|
||||
replica_id,
|
||||
}),
|
||||
};
|
||||
channel_buffer_updated(
|
||||
session.connection_id,
|
||||
collaborators
|
||||
.iter()
|
||||
.filter_map(|collaborator| Some(collaborator.peer_id?.into())),
|
||||
&update,
|
||||
&session.peer,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn update_channel_buffer(
|
||||
request: proto::UpdateChannelBuffer,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
let db = session.db().await;
|
||||
let channel_id = ChannelId::from_proto(request.channel_id);
|
||||
|
||||
let collaborators = db
|
||||
.update_channel_buffer(channel_id, session.user_id, &request.operations)
|
||||
.await?;
|
||||
|
||||
channel_buffer_updated(
|
||||
session.connection_id,
|
||||
collaborators,
|
||||
&proto::UpdateChannelBuffer {
|
||||
channel_id: channel_id.to_proto(),
|
||||
operations: request.operations,
|
||||
},
|
||||
&session.peer,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn leave_channel_buffer(
|
||||
request: proto::LeaveChannelBuffer,
|
||||
response: Response<proto::LeaveChannelBuffer>,
|
||||
session: Session,
|
||||
) -> Result<()> {
|
||||
let db = session.db().await;
|
||||
let channel_id = ChannelId::from_proto(request.channel_id);
|
||||
|
||||
let collaborators_to_notify = db
|
||||
.leave_channel_buffer(channel_id, session.connection_id)
|
||||
.await?;
|
||||
|
||||
response.send(Ack {})?;
|
||||
|
||||
channel_buffer_updated(
|
||||
session.connection_id,
|
||||
collaborators_to_notify,
|
||||
&proto::RemoveChannelBufferCollaborator {
|
||||
channel_id: channel_id.to_proto(),
|
||||
peer_id: Some(session.connection_id.into()),
|
||||
},
|
||||
&session.peer,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn channel_buffer_updated<T: EnvelopedMessage>(
|
||||
sender_id: ConnectionId,
|
||||
collaborators: impl IntoIterator<Item = ConnectionId>,
|
||||
message: &T,
|
||||
peer: &Peer,
|
||||
) {
|
||||
broadcast(Some(sender_id), collaborators.into_iter(), |peer_id| {
|
||||
peer.send(peer_id.into(), message.clone())
|
||||
});
|
||||
}
|
||||
|
||||
async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> {
|
||||
let project_id = ProjectId::from_proto(request.project_id);
|
||||
let project_connection_ids = session
|
||||
@@ -2803,6 +2910,28 @@ async fn leave_room_for_session(session: &Session) -> Result<()> {
     Ok(())
 }
 
+async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> {
+    let left_channel_buffers = session
+        .db()
+        .await
+        .leave_channel_buffers(session.connection_id)
+        .await?;
+
+    for (channel_id, connections) in left_channel_buffers {
+        channel_buffer_updated(
+            session.connection_id,
+            connections,
+            &proto::RemoveChannelBufferCollaborator {
+                channel_id: channel_id.to_proto(),
+                peer_id: Some(session.connection_id.into()),
+            },
+            &session.peer,
+        );
+    }
+
+    Ok(())
+}
+
 fn project_left(project: &db::LeftProject, session: &Session) {
     for connection_id in &project.connection_ids {
         if project.host_user_id == session.user_id {
@@ -1,14 +1,14 @@
 use crate::{
-    db::{NewUserParams, TestDb, UserId},
+    db::{tests::TestDb, NewUserParams, UserId},
     executor::Executor,
     rpc::{Server, CLEANUP_TIMEOUT},
     AppState,
 };
 use anyhow::anyhow;
 use call::{ActiveCall, Room};
+use channel::ChannelStore;
 use client::{
-    self, proto::PeerId, ChannelStore, Client, Connection, Credentials, EstablishConnectionError,
-    UserStore,
+    self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
 };
 use collections::{HashMap, HashSet};
 use fs::FakeFs;
@@ -31,6 +31,7 @@ use std::{
 use util::http::FakeHttpClient;
 use workspace::Workspace;
 
+mod channel_buffer_tests;
 mod channel_tests;
 mod integration_tests;
 mod randomized_integration_tests;
@@ -210,6 +211,7 @@ impl TestServer {
             workspace::init(app_state.clone(), cx);
             audio::init((), cx);
             call::init(client.clone(), user_store.clone(), cx);
+            channel::init(&client);
         });
 
         client
426
crates/collab/src/tests/channel_buffer_tests.rs
Normal file
@ -0,0 +1,426 @@
|
||||
use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
|
||||
use call::ActiveCall;
|
||||
use channel::Channel;
|
||||
use client::UserId;
|
||||
use collab_ui::channel_view::ChannelView;
|
||||
use collections::HashMap;
|
||||
use futures::future;
|
||||
use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
|
||||
use rpc::{proto, RECEIVE_TIMEOUT};
|
||||
use serde_json::json;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_core_channel_buffers(
|
||||
deterministic: Arc<Deterministic>,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
deterministic.forbid_parking();
|
||||
let mut server = TestServer::start(&deterministic).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
|
||||
let zed_id = server
|
||||
.make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
|
||||
.await;
|
||||
|
||||
// Client A joins the channel buffer
|
||||
let channel_buffer_a = client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Client A edits the buffer
|
||||
let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
|
||||
|
||||
buffer_a.update(cx_a, |buffer, cx| {
|
||||
buffer.edit([(0..0, "hello world")], None, cx)
|
||||
});
|
||||
buffer_a.update(cx_a, |buffer, cx| {
|
||||
buffer.edit([(5..5, ", cruel")], None, cx)
|
||||
});
|
||||
buffer_a.update(cx_a, |buffer, cx| {
|
||||
buffer.edit([(0..5, "goodbye")], None, cx)
|
||||
});
|
||||
buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx));
|
||||
deterministic.run_until_parked();
|
||||
|
||||
assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world");
|
||||
|
||||
// Client B joins the channel buffer
|
||||
let channel_buffer_b = client_b
|
||||
.channel_store()
|
||||
.update(cx_b, |channel, cx| channel.open_channel_buffer(zed_id, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
channel_buffer_b.read_with(cx_b, |buffer, _| {
|
||||
assert_collaborators(
|
||||
buffer.collaborators(),
|
||||
&[client_a.user_id(), client_b.user_id()],
|
||||
);
|
||||
});
|
||||
|
||||
// Client B sees the correct text, and then edits it
|
||||
let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer());
|
||||
assert_eq!(
|
||||
buffer_b.read_with(cx_b, |buffer, _| buffer.remote_id()),
|
||||
buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id())
|
||||
);
|
||||
assert_eq!(buffer_text(&buffer_b, cx_b), "hello, cruel world");
|
||||
buffer_b.update(cx_b, |buffer, cx| {
|
||||
buffer.edit([(7..12, "beautiful")], None, cx)
|
||||
});
|
||||
|
||||
// Both A and B see the new edit
|
||||
deterministic.run_until_parked();
|
||||
assert_eq!(buffer_text(&buffer_a, cx_a), "hello, beautiful world");
|
||||
assert_eq!(buffer_text(&buffer_b, cx_b), "hello, beautiful world");
|
||||
|
||||
// Client A closes the channel buffer.
|
||||
cx_a.update(|_| drop(channel_buffer_a));
|
||||
deterministic.run_until_parked();
|
||||
|
||||
// Client B sees that client A is gone from the channel buffer.
|
||||
channel_buffer_b.read_with(cx_b, |buffer, _| {
|
||||
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
|
||||
});
|
||||
|
||||
// Client A rejoins the channel buffer
|
||||
let _channel_buffer_a = client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channels, cx| {
|
||||
channels.open_channel_buffer(zed_id, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
deterministic.run_until_parked();
|
||||
|
||||
// Sanity test, make sure we saw A rejoining
|
||||
channel_buffer_b.read_with(cx_b, |buffer, _| {
|
||||
assert_collaborators(
|
||||
&buffer.collaborators(),
|
||||
&[client_b.user_id(), client_a.user_id()],
|
||||
);
|
||||
});
|
||||
|
||||
// Client A loses connection.
|
||||
server.forbid_connections();
|
||||
server.disconnect_client(client_a.peer_id().unwrap());
|
||||
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
|
||||
|
||||
// Client B observes A disconnect
|
||||
channel_buffer_b.read_with(cx_b, |buffer, _| {
|
||||
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
|
||||
});
|
||||
|
||||
// TODO:
|
||||
// - Test synchronizing offline updates, what happens to A's channel buffer when A disconnects
|
||||
// - Test interaction with channel deletion while buffer is open
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_channel_buffer_replica_ids(
|
||||
deterministic: Arc<Deterministic>,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
cx_c: &mut TestAppContext,
|
||||
) {
|
||||
deterministic.forbid_parking();
|
||||
let mut server = TestServer::start(&deterministic).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
let client_c = server.create_client(cx_c, "user_c").await;
|
||||
|
||||
let channel_id = server
|
||||
.make_channel(
|
||||
"zed",
|
||||
(&client_a, cx_a),
|
||||
&mut [(&client_b, cx_b), (&client_c, cx_c)],
|
||||
)
|
||||
.await;
|
||||
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let active_call_b = cx_b.read(ActiveCall::global);
|
||||
let active_call_c = cx_c.read(ActiveCall::global);
|
||||
|
||||
// Clients A and B join a channel.
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.join_channel(channel_id, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
active_call_b
|
||||
.update(cx_b, |call, cx| call.join_channel(channel_id, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Clients A, B, and C join a channel buffer
|
||||
// C first so that the replica IDs in the project and the channel buffer are different
|
||||
let channel_buffer_c = client_c
|
||||
.channel_store()
|
||||
.update(cx_c, |channel, cx| {
|
||||
channel.open_channel_buffer(channel_id, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let channel_buffer_b = client_b
|
||||
.channel_store()
|
||||
.update(cx_b, |channel, cx| {
|
||||
channel.open_channel_buffer(channel_id, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let channel_buffer_a = client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channel, cx| {
|
||||
channel.open_channel_buffer(channel_id, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Client B shares a project
|
||||
client_b
|
||||
.fs()
|
||||
.insert_tree("/dir", json!({ "file.txt": "contents" }))
|
||||
.await;
|
||||
let (project_b, _) = client_b.build_local_project("/dir", cx_b).await;
|
||||
let shared_project_id = active_call_b
|
||||
.update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Client A joins the project
|
||||
let project_a = client_a.build_remote_project(shared_project_id, cx_a).await;
|
||||
deterministic.run_until_parked();
|
||||
|
||||
// Client C is in a separate project.
|
||||
client_c.fs().insert_tree("/dir", json!({})).await;
|
||||
let (separate_project_c, _) = client_c.build_local_project("/dir", cx_c).await;
|
||||
|
||||
// Note that each user has a different replica id in the projects vs the
|
||||
// channel buffer.
|
||||
channel_buffer_a.read_with(cx_a, |channel_buffer, cx| {
|
||||
assert_eq!(project_a.read(cx).replica_id(), 1);
|
||||
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 2);
|
||||
});
|
||||
channel_buffer_b.read_with(cx_b, |channel_buffer, cx| {
|
||||
assert_eq!(project_b.read(cx).replica_id(), 0);
|
||||
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 1);
|
||||
});
|
||||
channel_buffer_c.read_with(cx_c, |channel_buffer, cx| {
|
||||
// C is not in the project
|
||||
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 0);
|
||||
});
|
||||
|
||||
let channel_window_a =
|
||||
cx_a.add_window(|cx| ChannelView::new(project_a.clone(), channel_buffer_a.clone(), cx));
|
||||
let channel_window_b =
|
||||
cx_b.add_window(|cx| ChannelView::new(project_b.clone(), channel_buffer_b.clone(), cx));
|
||||
let channel_window_c = cx_c.add_window(|cx| {
|
||||
ChannelView::new(separate_project_c.clone(), channel_buffer_c.clone(), cx)
|
||||
});
|
||||
|
||||
let channel_view_a = channel_window_a.root(cx_a);
|
||||
let channel_view_b = channel_window_b.root(cx_b);
|
||||
let channel_view_c = channel_window_c.root(cx_c);
|
||||
|
||||
// For clients A and B, the replica ids in the channel buffer are mapped
|
||||
// so that they match the same users' replica ids in their shared project.
|
||||
channel_view_a.read_with(cx_a, |view, cx| {
|
||||
assert_eq!(
|
||||
view.editor.read(cx).replica_id_map().unwrap(),
|
||||
&[(1, 0), (2, 1)].into_iter().collect::<HashMap<_, _>>()
|
||||
);
|
||||
});
|
||||
channel_view_b.read_with(cx_b, |view, cx| {
|
||||
assert_eq!(
|
||||
view.editor.read(cx).replica_id_map().unwrap(),
|
||||
&[(1, 0), (2, 1)].into_iter().collect::<HashMap<u16, u16>>(),
|
||||
)
|
||||
});
|
||||
|
||||
// Client C only sees themself, as they're not part of any shared project
|
||||
channel_view_c.read_with(cx_c, |view, cx| {
|
||||
assert_eq!(
|
||||
view.editor.read(cx).replica_id_map().unwrap(),
|
||||
&[(0, 0)].into_iter().collect::<HashMap<u16, u16>>(),
|
||||
);
|
||||
});
|
||||
|
||||
// Client C joins the project that clients A and B are in.
|
||||
active_call_c
|
||||
.update(cx_c, |call, cx| call.join_channel(channel_id, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_c = client_c.build_remote_project(shared_project_id, cx_c).await;
|
||||
deterministic.run_until_parked();
|
||||
project_c.read_with(cx_c, |project, _| {
|
||||
assert_eq!(project.replica_id(), 2);
|
||||
});
|
||||
|
||||
// For clients A and B, client C's replica id in the channel buffer is
|
||||
// now mapped to their replica id in the shared project.
|
||||
channel_view_a.read_with(cx_a, |view, cx| {
|
||||
assert_eq!(
|
||||
view.editor.read(cx).replica_id_map().unwrap(),
|
||||
&[(1, 0), (2, 1), (0, 2)]
|
||||
.into_iter()
|
||||
.collect::<HashMap<_, _>>()
|
||||
);
|
||||
});
|
||||
channel_view_b.read_with(cx_b, |view, cx| {
|
||||
assert_eq!(
|
||||
view.editor.read(cx).replica_id_map().unwrap(),
|
||||
&[(1, 0), (2, 1), (0, 2)]
|
||||
.into_iter()
|
||||
.collect::<HashMap<_, _>>(),
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mut TestAppContext) {
|
||||
deterministic.forbid_parking();
|
||||
let mut server = TestServer::start(&deterministic).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
|
||||
let zed_id = server.make_channel("zed", (&client_a, cx_a), &mut []).await;
|
||||
|
||||
let channel_buffer_1 = client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
|
||||
let channel_buffer_2 = client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
|
||||
let channel_buffer_3 = client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
|
||||
|
||||
// All concurrent tasks for opening a channel buffer return the same model handle.
|
||||
let (channel_buffer_1, channel_buffer_2, channel_buffer_3) =
|
||||
future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
|
||||
.await
|
||||
.unwrap();
|
||||
let model_id = channel_buffer_1.id();
|
||||
assert_eq!(channel_buffer_1, channel_buffer_2);
|
||||
assert_eq!(channel_buffer_1, channel_buffer_3);
|
||||
|
||||
channel_buffer_1.update(cx_a, |buffer, cx| {
|
||||
buffer.buffer().update(cx, |buffer, cx| {
|
||||
buffer.edit([(0..0, "hello")], None, cx);
|
||||
})
|
||||
});
|
||||
deterministic.run_until_parked();
|
||||
|
||||
cx_a.update(|_| {
|
||||
drop(channel_buffer_1);
|
||||
drop(channel_buffer_2);
|
||||
drop(channel_buffer_3);
|
||||
});
|
||||
deterministic.run_until_parked();
|
||||
|
||||
// The channel buffer can be reopened after dropping it.
|
||||
let channel_buffer = client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_ne!(channel_buffer.id(), model_id);
|
||||
channel_buffer.update(cx_a, |buffer, cx| {
|
||||
buffer.buffer().update(cx, |buffer, _| {
|
||||
assert_eq!(buffer.text(), "hello");
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_channel_buffer_disconnect(
|
||||
deterministic: Arc<Deterministic>,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
deterministic.forbid_parking();
|
||||
let mut server = TestServer::start(&deterministic).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
|
||||
let channel_id = server
|
||||
.make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
|
||||
.await;
|
||||
|
||||
let channel_buffer_a = client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channel, cx| {
|
||||
channel.open_channel_buffer(channel_id, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let channel_buffer_b = client_b
|
||||
.channel_store()
|
||||
.update(cx_b, |channel, cx| {
|
||||
channel.open_channel_buffer(channel_id, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
server.forbid_connections();
|
||||
server.disconnect_client(client_a.peer_id().unwrap());
|
||||
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
|
||||
|
||||
channel_buffer_a.update(cx_a, |buffer, _| {
|
||||
assert_eq!(
|
||||
buffer.channel().as_ref(),
|
||||
&Channel {
|
||||
id: channel_id,
|
||||
name: "zed".to_string()
|
||||
}
|
||||
);
|
||||
assert!(!buffer.is_connected());
|
||||
});
|
||||
|
||||
deterministic.run_until_parked();
|
||||
|
||||
server.allow_connections();
|
||||
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
|
||||
|
||||
deterministic.run_until_parked();
|
||||
|
||||
client_a
|
||||
.channel_store()
|
||||
.update(cx_a, |channel_store, _| {
|
||||
channel_store.remove_channel(channel_id)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
deterministic.run_until_parked();
|
||||
|
||||
// Channel buffer observed the deletion
|
||||
channel_buffer_b.update(cx_b, |buffer, _| {
|
||||
assert_eq!(
|
||||
buffer.channel().as_ref(),
|
||||
&Channel {
|
||||
id: channel_id,
|
||||
name: "zed".to_string()
|
||||
}
|
||||
);
|
||||
assert!(!buffer.is_connected());
|
||||
});
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option<UserId>]) {
|
||||
assert_eq!(
|
||||
collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| collaborator.user_id)
|
||||
.collect::<Vec<_>>(),
|
||||
ids.into_iter().map(|id| id.unwrap()).collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
|
||||
fn buffer_text(channel_buffer: &ModelHandle<language::Buffer>, cx: &mut TestAppContext) -> String {
|
||||
channel_buffer.read_with(cx, |buffer, _| buffer.text())
|
||||
}
|
@@ -3,7 +3,8 @@ use crate::{
     tests::{room_participants, RoomParticipants, TestServer},
 };
 use call::ActiveCall;
-use client::{ChannelId, ChannelMembership, ChannelStore, User};
+use channel::{ChannelId, ChannelMembership, ChannelStore};
+use client::User;
 use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
 use rpc::{proto, RECEIVE_TIMEOUT};
 use std::sync::Arc;
@@ -798,7 +799,7 @@ async fn test_lost_channel_creation(
 
     deterministic.run_until_parked();
 
-    // Sanity check
+    // Sanity check, B has the invitation
     assert_channel_invitations(
         client_b.channel_store(),
         cx_b,
@@ -810,6 +811,7 @@ async fn test_lost_channel_creation(
         }],
     );
 
+    // A creates a subchannel while the invite is still pending.
     let subchannel_id = client_a
         .channel_store()
         .update(cx_a, |channel_store, cx| {
@@ -840,7 +842,7 @@ async fn test_lost_channel_creation(
         ],
     );
 
-    // Accept the invite
+    // Client B accepts the invite
     client_b
         .channel_store()
         .update(cx_b, |channel_store, _| {
@@ -851,7 +853,7 @@ async fn test_lost_channel_creation(
 
     deterministic.run_until_parked();
 
-    // B should now see the channel
+    // Client B should now see the channel
     assert_channels(
         client_b.channel_store(),
         cx_b,
@@ -4163,6 +4163,7 @@ async fn test_collaborating_with_completion(
             capabilities: lsp::ServerCapabilities {
                 completion_provider: Some(lsp::CompletionOptions {
                     trigger_characters: Some(vec![".".to_string()]),
+                    resolve_provider: Some(true),
                     ..Default::default()
                 }),
                 ..Default::default()
@@ -26,6 +26,7 @@ auto_update = { path = "../auto_update" }
 db = { path = "../db" }
 call = { path = "../call" }
 client = { path = "../client" }
+channel = { path = "../channel" }
 clock = { path = "../clock" }
 collections = { path = "../collections" }
 context_menu = { path = "../context_menu" }
@@ -33,6 +34,7 @@ editor = { path = "../editor" }
 feedback = { path = "../feedback" }
 fuzzy = { path = "../fuzzy" }
 gpui = { path = "../gpui" }
+language = { path = "../language" }
 menu = { path = "../menu" }
 picker = { path = "../picker" }
 project = { path = "../project" }
351
crates/collab_ui/src/channel_view.rs
Normal file
@ -0,0 +1,351 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use channel::{
|
||||
channel_buffer::{self, ChannelBuffer},
|
||||
ChannelId,
|
||||
};
|
||||
use client::proto;
|
||||
use clock::ReplicaId;
|
||||
use collections::HashMap;
|
||||
use editor::Editor;
|
||||
use gpui::{
|
||||
actions,
|
||||
elements::{ChildView, Label},
|
||||
geometry::vector::Vector2F,
|
||||
AnyElement, AnyViewHandle, AppContext, Element, Entity, ModelHandle, Subscription, Task, View,
|
||||
ViewContext, ViewHandle,
|
||||
};
|
||||
use project::Project;
|
||||
use std::any::Any;
|
||||
use workspace::{
|
||||
item::{FollowableItem, Item, ItemHandle},
|
||||
register_followable_item,
|
||||
searchable::SearchableItemHandle,
|
||||
ItemNavHistory, Pane, ViewId, Workspace, WorkspaceId,
|
||||
};
|
||||
|
||||
actions!(channel_view, [Deploy]);
|
||||
|
||||
pub(crate) fn init(cx: &mut AppContext) {
|
||||
register_followable_item::<ChannelView>(cx)
|
||||
}
|
||||
|
||||
pub struct ChannelView {
|
||||
pub editor: ViewHandle<Editor>,
|
||||
project: ModelHandle<Project>,
|
||||
channel_buffer: ModelHandle<ChannelBuffer>,
|
||||
remote_id: Option<ViewId>,
|
||||
_editor_event_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl ChannelView {
|
||||
pub fn open(
|
||||
channel_id: ChannelId,
|
||||
pane: ViewHandle<Pane>,
|
||||
workspace: ViewHandle<Workspace>,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<ViewHandle<Self>>> {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().to_owned();
|
||||
let channel_store = workspace.app_state().channel_store.clone();
|
||||
let markdown = workspace
|
||||
.app_state()
|
||||
.languages
|
||||
.language_for_name("Markdown");
|
||||
let channel_buffer =
|
||||
channel_store.update(cx, |store, cx| store.open_channel_buffer(channel_id, cx));
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let channel_buffer = channel_buffer.await?;
|
||||
let markdown = markdown.await?;
|
||||
channel_buffer.update(&mut cx, |buffer, cx| {
|
||||
buffer.buffer().update(cx, |buffer, cx| {
|
||||
buffer.set_language(Some(markdown), cx);
|
||||
})
|
||||
});
|
||||
|
||||
pane.update(&mut cx, |pane, cx| {
|
||||
pane.items_of_type::<Self>()
|
||||
.find(|channel_view| channel_view.read(cx).channel_buffer == channel_buffer)
|
||||
.unwrap_or_else(|| cx.add_view(|cx| Self::new(project, channel_buffer, cx)))
|
||||
})
|
||||
.ok_or_else(|| anyhow!("pane was dropped"))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
project: ModelHandle<Project>,
|
||||
channel_buffer: ModelHandle<ChannelBuffer>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let buffer = channel_buffer.read(cx).buffer();
|
||||
// buffer.update(cx, |buffer, cx| buffer.set_language(language, cx));
|
||||
let editor = cx.add_view(|cx| Editor::for_buffer(buffer, None, cx));
|
||||
let _editor_event_subscription = cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone()));
|
||||
|
||||
cx.subscribe(&project, Self::handle_project_event).detach();
|
||||
cx.subscribe(&channel_buffer, Self::handle_channel_buffer_event)
|
||||
.detach();
|
||||
|
||||
let this = Self {
|
||||
editor,
|
||||
project,
|
||||
channel_buffer,
|
||||
remote_id: None,
|
||||
_editor_event_subscription,
|
||||
};
|
||||
this.refresh_replica_id_map(cx);
|
||||
this
|
||||
}
|
||||
|
||||
fn handle_project_event(
|
||||
&mut self,
|
||||
_: ModelHandle<Project>,
|
||||
event: &project::Event,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
match event {
|
||||
project::Event::RemoteIdChanged(_) => {}
|
||||
project::Event::DisconnectedFromHost => {}
|
||||
project::Event::Closed => {}
|
||||
project::Event::CollaboratorUpdated { .. } => {}
|
||||
project::Event::CollaboratorLeft(_) => {}
|
||||
project::Event::CollaboratorJoined(_) => {}
|
||||
_ => return,
|
||||
}
|
||||
self.refresh_replica_id_map(cx);
|
||||
}
|
||||
|
||||
fn handle_channel_buffer_event(
|
||||
&mut self,
|
||||
_: ModelHandle<ChannelBuffer>,
|
||||
event: &channel_buffer::Event,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
match event {
|
||||
channel_buffer::Event::CollaboratorsChanged => {
|
||||
self.refresh_replica_id_map(cx);
|
||||
}
|
||||
channel_buffer::Event::Disconnected => self.editor.update(cx, |editor, cx| {
|
||||
editor.set_read_only(true);
|
||||
cx.notify();
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Build a mapping of channel buffer replica ids to the corresponding
|
||||
/// replica ids in the current project.
|
||||
///
|
||||
/// Using this mapping, a given user can be displayed with the same color
|
||||
/// in the channel buffer as in other files in the project. Users who are
|
||||
/// in the channel buffer but not the project will not have a color.
|
||||
fn refresh_replica_id_map(&self, cx: &mut ViewContext<Self>) {
|
||||
let mut project_replica_ids_by_channel_buffer_replica_id = HashMap::default();
|
||||
let project = self.project.read(cx);
|
||||
let channel_buffer = self.channel_buffer.read(cx);
|
||||
project_replica_ids_by_channel_buffer_replica_id
|
||||
.insert(channel_buffer.replica_id(cx), project.replica_id());
|
||||
project_replica_ids_by_channel_buffer_replica_id.extend(
|
||||
channel_buffer
|
||||
.collaborators()
|
||||
.iter()
|
||||
.filter_map(|channel_buffer_collaborator| {
|
||||
project
|
||||
.collaborators()
|
||||
.values()
|
||||
.find_map(|project_collaborator| {
|
||||
(project_collaborator.user_id == channel_buffer_collaborator.user_id)
|
||||
.then_some((
|
||||
channel_buffer_collaborator.replica_id as ReplicaId,
|
||||
project_collaborator.replica_id,
|
||||
))
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.set_replica_id_map(Some(project_replica_ids_by_channel_buffer_replica_id), cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for ChannelView {
|
||||
type Event = editor::Event;
|
||||
}
|
||||
|
||||
impl View for ChannelView {
|
||||
fn ui_name() -> &'static str {
|
||||
"ChannelView"
|
||||
}
|
||||
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
|
||||
ChildView::new(self.editor.as_any(), cx).into_any()
|
||||
}
|
||||
|
||||
fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
if cx.is_self_focused() {
|
||||
cx.focus(self.editor.as_any())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Item for ChannelView {
|
||||
fn tab_content<V: 'static>(
|
||||
&self,
|
||||
_: Option<usize>,
|
||||
style: &theme::Tab,
|
||||
cx: &gpui::AppContext,
|
||||
) -> AnyElement<V> {
|
||||
let channel_name = &self.channel_buffer.read(cx).channel().name;
|
||||
let label = if self.channel_buffer.read(cx).is_connected() {
|
||||
format!("#{}", channel_name)
|
||||
} else {
|
||||
format!("#{} (disconnected)", channel_name)
|
||||
};
|
||||
Label::new(label, style.label.to_owned()).into_any()
|
||||
}
|
||||
|
||||
fn clone_on_split(&self, _: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self> {
|
||||
Some(Self::new(
|
||||
self.project.clone(),
|
||||
self.channel_buffer.clone(),
|
||||
cx,
|
||||
))
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _cx: &AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn navigate(&mut self, data: Box<dyn Any>, cx: &mut ViewContext<Self>) -> bool {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.navigate(data, cx))
|
||||
}
|
||||
|
||||
fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| Item::deactivated(editor, cx))
|
||||
}
|
||||
|
||||
fn set_nav_history(&mut self, history: ItemNavHistory, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| Item::set_nav_history(editor, history, cx))
|
||||
}
|
||||
|
||||
fn as_searchable(&self, _: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
|
||||
Some(Box::new(self.editor.clone()))
|
||||
}
|
||||
|
||||
fn show_toolbar(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
|
||||
self.editor.read(cx).pixel_position_of_cursor(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl FollowableItem for ChannelView {
|
||||
fn remote_id(&self) -> Option<workspace::ViewId> {
|
||||
self.remote_id
|
||||
}
|
||||
|
||||
fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
|
||||
let channel = self.channel_buffer.read(cx).channel();
|
||||
Some(proto::view::Variant::ChannelView(
|
||||
proto::view::ChannelView {
|
||||
channel_id: channel.id,
|
||||
editor: if let Some(proto::view::Variant::Editor(proto)) =
|
||||
self.editor.read(cx).to_state_proto(cx)
|
||||
{
|
||||
Some(proto)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
fn from_state_proto(
|
||||
pane: ViewHandle<workspace::Pane>,
|
||||
workspace: ViewHandle<workspace::Workspace>,
|
||||
remote_id: workspace::ViewId,
|
||||
state: &mut Option<proto::view::Variant>,
|
||||
cx: &mut AppContext,
|
||||
) -> Option<gpui::Task<anyhow::Result<ViewHandle<Self>>>> {
|
||||
let Some(proto::view::Variant::ChannelView(_)) = state else { return None };
|
||||
let Some(proto::view::Variant::ChannelView(state)) = state.take() else { unreachable!() };
|
||||
|
||||
let open = ChannelView::open(state.channel_id, pane, workspace, cx);
|
||||
|
||||
Some(cx.spawn(|mut cx| async move {
|
||||
let this = open.await?;
|
||||
|
||||
let task = this
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.remote_id = Some(remote_id);
|
||||
|
||||
if let Some(state) = state.editor {
|
||||
Some(this.editor.update(cx, |editor, cx| {
|
||||
editor.apply_update_proto(
|
||||
&this.project,
|
||||
proto::update_view::Variant::Editor(proto::update_view::Editor {
|
||||
selections: state.selections,
|
||||
pending_selection: state.pending_selection,
|
||||
scroll_top_anchor: state.scroll_top_anchor,
|
||||
scroll_x: state.scroll_x,
|
||||
scroll_y: state.scroll_y,
|
||||
..Default::default()
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
}))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.ok_or_else(|| anyhow!("window was closed"))?;
|
||||
|
||||
if let Some(task) = task {
|
||||
task.await?;
|
||||
}
|
||||
|
||||
Ok(this)
|
||||
}))
|
||||
}
|
||||
|
||||
fn add_event_to_update_proto(
|
||||
&self,
|
||||
event: &Self::Event,
|
||||
update: &mut Option<proto::update_view::Variant>,
|
||||
cx: &AppContext,
|
||||
) -> bool {
|
||||
self.editor
|
||||
.read(cx)
|
||||
.add_event_to_update_proto(event, update, cx)
|
||||
}
|
||||
|
||||
fn apply_update_proto(
|
||||
&mut self,
|
||||
project: &ModelHandle<Project>,
|
||||
message: proto::update_view::Variant,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> gpui::Task<anyhow::Result<()>> {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.apply_update_proto(project, message, cx)
|
||||
})
|
||||
}
|
||||
|
||||
fn set_leader_replica_id(
|
||||
&mut self,
|
||||
leader_replica_id: Option<u16>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.set_leader_replica_id(leader_replica_id, cx)
|
||||
})
|
||||
}
|
||||
|
||||
fn should_unfollow_on_event(event: &Self::Event, cx: &AppContext) -> bool {
|
||||
Editor::should_unfollow_on_event(event, cx)
|
||||
}
|
||||
}
|
@@ -4,10 +4,8 @@ mod panel_settings;
 
 use anyhow::Result;
 use call::ActiveCall;
-use client::{
-    proto::PeerId, Channel, ChannelEvent, ChannelId, ChannelStore, Client, Contact, User, UserStore,
-};
-
+use channel::{Channel, ChannelEvent, ChannelId, ChannelStore};
+use client::{proto::PeerId, Client, Contact, User, UserStore};
 use context_menu::{ContextMenu, ContextMenuItem};
 use db::kvp::KEY_VALUE_STORE;
 use editor::{Cancel, Editor};
@@ -16,16 +14,18 @@ use fuzzy::{match_strings, StringMatchCandidate};
 use gpui::{
     actions,
     elements::{
-        Canvas, ChildView, Empty, Flex, Image, Label, List, ListOffset, ListState,
-        MouseEventHandler, Orientation, OverlayPositionMode, Padding, ParentElement, Stack, Svg,
+        Canvas, ChildView, Component, Empty, Flex, Image, Label, List, ListOffset, ListState,
+        MouseEventHandler, Orientation, OverlayPositionMode, Padding, ParentElement, SafeStylable,
+        Stack, Svg,
     },
+    fonts::TextStyle,
     geometry::{
         rect::RectF,
         vector::{vec2f, Vector2F},
     },
     impl_actions,
     platform::{CursorStyle, MouseButton, PromptLevel},
-    serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, ModelHandle,
+    serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, FontCache, ModelHandle,
     Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
 };
 use menu::{Confirm, SelectNext, SelectPrev};
@@ -35,7 +35,7 @@ use serde_derive::{Deserialize, Serialize};
 use settings::SettingsStore;
 use staff_mode::StaffMode;
 use std::{borrow::Cow, mem, sync::Arc};
-use theme::IconButton;
+use theme::{components::ComponentExt, IconButton};
 use util::{iife, ResultExt, TryFutureExt};
 use workspace::{
     dock::{DockPosition, Panel},
@@ -43,7 +43,10 @@ use workspace::{
     Workspace,
 };
 
-use crate::face_pile::FacePile;
+use crate::{
+    channel_view::{self, ChannelView},
+    face_pile::FacePile,
+};
 use channel_modal::ChannelModal;
 
 use self::contact_finder::ContactFinder;
@@ -53,6 +56,11 @@ struct RemoveChannel {
     channel_id: u64,
 }
 
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
+struct ToggleCollapse {
+    channel_id: u64,
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
 struct NewChannel {
     channel_id: u64,
@@ -73,7 +81,21 @@ struct RenameChannel {
     channel_id: u64,
 }
 
-actions!(collab_panel, [ToggleFocus, Remove, Secondary]);
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
+struct OpenChannelBuffer {
+    channel_id: u64,
+}
+
+actions!(
+    collab_panel,
+    [
+        ToggleFocus,
+        Remove,
+        Secondary,
+        CollapseSelectedChannel,
+        ExpandSelectedChannel
+    ]
+);
 
 impl_actions!(
     collab_panel,
@@ -82,16 +104,19 @@ impl_actions!(
         NewChannel,
         InviteMembers,
         ManageMembers,
-        RenameChannel
+        RenameChannel,
+        ToggleCollapse,
+        OpenChannelBuffer
     ]
 );
 
-const CHANNELS_PANEL_KEY: &'static str = "ChannelsPanel";
+const COLLABORATION_PANEL_KEY: &'static str = "CollaborationPanel";
 
 pub fn init(_client: Arc<Client>, cx: &mut AppContext) {
     settings::register::<panel_settings::CollaborationPanelSettings>(cx);
     contact_finder::init(cx);
     channel_modal::init(cx);
+    channel_view::init(cx);
 
     cx.add_action(CollabPanel::cancel);
     cx.add_action(CollabPanel::select_next);
@ -105,6 +130,10 @@ pub fn init(_client: Arc<Client>, cx: &mut AppContext) {
|
||||
cx.add_action(CollabPanel::manage_members);
|
||||
cx.add_action(CollabPanel::rename_selected_channel);
|
||||
cx.add_action(CollabPanel::rename_channel);
|
||||
cx.add_action(CollabPanel::toggle_channel_collapsed);
|
||||
cx.add_action(CollabPanel::collapse_selected_channel);
|
||||
cx.add_action(CollabPanel::expand_selected_channel);
|
||||
cx.add_action(CollabPanel::open_channel_buffer);
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -147,6 +176,7 @@ pub struct CollabPanel {
|
||||
list_state: ListState<Self>,
|
||||
subscriptions: Vec<Subscription>,
|
||||
collapsed_sections: Vec<Section>,
|
||||
collapsed_channels: Vec<ChannelId>,
|
||||
workspace: WeakViewHandle<Workspace>,
|
||||
context_menu_on_selected: bool,
|
||||
}
|
||||
@ -154,6 +184,7 @@ pub struct CollabPanel {
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SerializedChannelsPanel {
|
||||
width: Option<f32>,
|
||||
collapsed_channels: Vec<ChannelId>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -198,6 +229,9 @@ enum ListEntry {
|
||||
channel: Arc<Channel>,
|
||||
depth: usize,
|
||||
},
|
||||
ChannelNotes {
|
||||
channel_id: ChannelId,
|
||||
},
|
||||
ChannelEditor {
|
||||
depth: usize,
|
||||
},
|
||||
@ -341,6 +375,12 @@ impl CollabPanel {
|
||||
return channel_row;
|
||||
}
|
||||
}
|
||||
ListEntry::ChannelNotes { channel_id } => this.render_channel_notes(
|
||||
*channel_id,
|
||||
&theme.collab_panel,
|
||||
is_selected,
|
||||
cx,
|
||||
),
|
||||
ListEntry::ChannelInvite(channel) => Self::render_channel_invite(
|
||||
channel.clone(),
|
||||
this.channel_store.clone(),
|
||||
@ -398,6 +438,7 @@ impl CollabPanel {
|
||||
subscriptions: Vec::default(),
|
||||
match_candidates: Vec::default(),
|
||||
collapsed_sections: vec![Section::Offline],
|
||||
collapsed_channels: Vec::default(),
|
||||
workspace: workspace.weak_handle(),
|
||||
client: workspace.app_state().client.clone(),
|
||||
context_menu_on_selected: true,
|
||||
@ -464,7 +505,7 @@ impl CollabPanel {
|
||||
cx.spawn(|mut cx| async move {
|
||||
let serialized_panel = if let Some(panel) = cx
|
||||
.background()
|
||||
.spawn(async move { KEY_VALUE_STORE.read_kvp(CHANNELS_PANEL_KEY) })
|
||||
.spawn(async move { KEY_VALUE_STORE.read_kvp(COLLABORATION_PANEL_KEY) })
|
||||
.await
|
||||
.log_err()
|
||||
.flatten()
|
||||
@ -479,6 +520,7 @@ impl CollabPanel {
|
||||
if let Some(serialized_panel) = serialized_panel {
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.width = serialized_panel.width;
|
||||
panel.collapsed_channels = serialized_panel.collapsed_channels;
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
@ -489,12 +531,16 @@ impl CollabPanel {
|
||||
|
||||
fn serialize(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let width = self.width;
|
||||
let collapsed_channels = self.collapsed_channels.clone();
|
||||
self.pending_serialization = cx.background().spawn(
|
||||
async move {
|
||||
KEY_VALUE_STORE
|
||||
.write_kvp(
|
||||
CHANNELS_PANEL_KEY.into(),
|
||||
serde_json::to_string(&SerializedChannelsPanel { width })?,
|
||||
COLLABORATION_PANEL_KEY.into(),
|
||||
serde_json::to_string(&SerializedChannelsPanel {
|
||||
width,
|
||||
collapsed_channels,
|
||||
})?,
|
||||
)
|
||||
.await?;
|
||||
anyhow::Ok(())
|
||||
@ -518,6 +564,10 @@ impl CollabPanel {
|
||||
if !self.collapsed_sections.contains(&Section::ActiveCall) {
|
||||
let room = room.read(cx);
|
||||
|
||||
if let Some(channel_id) = room.channel_id() {
|
||||
self.entries.push(ListEntry::ChannelNotes { channel_id })
|
||||
}
|
||||
|
||||
// Populate the active user.
|
||||
if let Some(user) = user_store.current_user() {
|
||||
self.match_candidates.clear();
|
||||
@ -657,10 +707,24 @@ impl CollabPanel {
|
||||
self.entries.push(ListEntry::ChannelEditor { depth: 0 });
|
||||
}
|
||||
}
|
||||
let mut collapse_depth = None;
|
||||
for mat in matches {
|
||||
let (depth, channel) =
|
||||
channel_store.channel_at_index(mat.candidate_id).unwrap();
|
||||
|
||||
if collapse_depth.is_none() && self.is_channel_collapsed(channel.id) {
|
||||
collapse_depth = Some(depth);
|
||||
} else if let Some(collapsed_depth) = collapse_depth {
|
||||
if depth > collapsed_depth {
|
||||
continue;
|
||||
}
|
||||
if self.is_channel_collapsed(channel.id) {
|
||||
collapse_depth = Some(depth);
|
||||
} else {
|
||||
collapse_depth = None;
|
||||
}
|
||||
}
|
||||
|
||||
match &self.channel_editing_state {
|
||||
Some(ChannelEditingState::Create { parent_id, .. })
|
||||
if *parent_id == Some(channel.id) =>
|
||||
@ -963,25 +1027,19 @@ impl CollabPanel {
|
||||
) -> AnyElement<Self> {
|
||||
enum JoinProject {}
|
||||
|
||||
let font_cache = cx.font_cache();
|
||||
let host_avatar_height = theme
|
||||
let host_avatar_width = theme
|
||||
.contact_avatar
|
||||
.width
|
||||
.or(theme.contact_avatar.height)
|
||||
.unwrap_or(0.);
|
||||
let row = &theme.project_row.inactive_state().default;
|
||||
let tree_branch = theme.tree_branch;
|
||||
let line_height = row.name.text.line_height(font_cache);
|
||||
let cap_height = row.name.text.cap_height(font_cache);
|
||||
let baseline_offset =
|
||||
row.name.text.baseline_offset(font_cache) + (theme.row_height - line_height) / 2.;
|
||||
let project_name = if worktree_root_names.is_empty() {
|
||||
"untitled".to_string()
|
||||
} else {
|
||||
worktree_root_names.join(", ")
|
||||
};
|
||||
|
||||
MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, _| {
|
||||
MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, cx| {
|
||||
let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state);
|
||||
let row = theme
|
||||
.project_row
|
||||
@ -989,39 +1047,20 @@ impl CollabPanel {
|
||||
.style_for(mouse_state);
|
||||
|
||||
Flex::row()
|
||||
.with_child(render_tree_branch(
|
||||
tree_branch,
|
||||
&row.name.text,
|
||||
is_last,
|
||||
vec2f(host_avatar_width, theme.row_height),
|
||||
cx.font_cache(),
|
||||
))
|
||||
.with_child(
|
||||
Stack::new()
|
||||
.with_child(Canvas::new(move |scene, bounds, _, _, _| {
|
||||
let start_x =
|
||||
bounds.min_x() + (bounds.width() / 2.) - (tree_branch.width / 2.);
|
||||
let end_x = bounds.max_x();
|
||||
let start_y = bounds.min_y();
|
||||
let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);
|
||||
|
||||
scene.push_quad(gpui::Quad {
|
||||
bounds: RectF::from_points(
|
||||
vec2f(start_x, start_y),
|
||||
vec2f(
|
||||
start_x + tree_branch.width,
|
||||
if is_last { end_y } else { bounds.max_y() },
|
||||
),
|
||||
),
|
||||
background: Some(tree_branch.color),
|
||||
border: gpui::Border::default(),
|
||||
corner_radii: (0.).into(),
|
||||
});
|
||||
scene.push_quad(gpui::Quad {
|
||||
bounds: RectF::from_points(
|
||||
vec2f(start_x, end_y),
|
||||
vec2f(end_x, end_y + tree_branch.width),
|
||||
),
|
||||
background: Some(tree_branch.color),
|
||||
border: gpui::Border::default(),
|
||||
corner_radii: (0.).into(),
|
||||
});
|
||||
}))
|
||||
Svg::new("icons/file_icons/folder.svg")
|
||||
.with_color(theme.channel_hash.color)
|
||||
.constrained()
|
||||
.with_width(host_avatar_height),
|
||||
.with_width(theme.channel_hash.width)
|
||||
.aligned()
|
||||
.left(),
|
||||
)
|
||||
.with_child(
|
||||
Label::new(project_name, row.name.text.clone())
|
||||
@ -1196,7 +1235,7 @@ impl CollabPanel {
|
||||
});
|
||||
|
||||
if let Some(name) = channel_name {
|
||||
Cow::Owned(format!("Current Call - #{}", name))
|
||||
Cow::Owned(format!("#{}", name))
|
||||
} else {
|
||||
Cow::Borrowed("Current Call")
|
||||
}
|
||||
@ -1332,7 +1371,7 @@ impl CollabPanel {
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.on_click(MouseButton::Left, move |_, this, cx| {
|
||||
if can_collapse {
|
||||
this.toggle_expanded(section, cx);
|
||||
this.toggle_section_expanded(section, cx);
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -1479,6 +1518,11 @@ impl CollabPanel {
|
||||
cx: &AppContext,
|
||||
) -> AnyElement<Self> {
|
||||
Flex::row()
|
||||
.with_child(
|
||||
Empty::new()
|
||||
.constrained()
|
||||
.with_width(theme.collab_panel.disclosure.button_space()),
|
||||
)
|
||||
.with_child(
|
||||
Svg::new("icons/hash.svg")
|
||||
.with_color(theme.collab_panel.channel_hash.color)
|
||||
@ -1537,6 +1581,10 @@ impl CollabPanel {
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> AnyElement<Self> {
|
||||
let channel_id = channel.id;
|
||||
let has_children = self.channel_store.read(cx).has_children(channel_id);
|
||||
let disclosed =
|
||||
has_children.then(|| !self.collapsed_channels.binary_search(&channel_id).is_ok());
|
||||
|
||||
let is_active = iife!({
|
||||
let call_channel = ActiveCall::global(cx)
|
||||
.read(cx)
|
||||
@ -1550,7 +1598,7 @@ impl CollabPanel {
|
||||
const FACEPILE_LIMIT: usize = 3;
|
||||
|
||||
MouseEventHandler::new::<Channel, _>(channel.id as usize, cx, |state, cx| {
|
||||
Flex::row()
|
||||
Flex::<Self>::row()
|
||||
.with_child(
|
||||
Svg::new("icons/hash.svg")
|
||||
.with_color(theme.channel_hash.color)
|
||||
@ -1599,6 +1647,11 @@ impl CollabPanel {
|
||||
}
|
||||
})
|
||||
.align_children_center()
|
||||
.styleable_component()
|
||||
.disclosable(disclosed, Box::new(ToggleCollapse { channel_id }))
|
||||
.with_id(channel_id as usize)
|
||||
.with_style(theme.disclosure.clone())
|
||||
.element()
|
||||
.constrained()
|
||||
.with_height(theme.row_height)
|
||||
.contained()
|
||||
@ -1618,6 +1671,61 @@ impl CollabPanel {
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_channel_notes(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
theme: &theme::CollabPanel,
|
||||
is_selected: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> AnyElement<Self> {
|
||||
enum ChannelNotes {}
|
||||
let host_avatar_width = theme
|
||||
.contact_avatar
|
||||
.width
|
||||
.or(theme.contact_avatar.height)
|
||||
.unwrap_or(0.);
|
||||
|
||||
MouseEventHandler::new::<ChannelNotes, _>(channel_id as usize, cx, |state, cx| {
|
||||
let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state);
|
||||
let row = theme.project_row.in_state(is_selected).style_for(state);
|
||||
|
||||
Flex::<Self>::row()
|
||||
.with_child(render_tree_branch(
|
||||
tree_branch,
|
||||
&row.name.text,
|
||||
true,
|
||||
vec2f(host_avatar_width, theme.row_height),
|
||||
cx.font_cache(),
|
||||
))
|
||||
.with_child(
|
||||
Svg::new("icons/radix/file.svg")
|
||||
.with_color(theme.channel_hash.color)
|
||||
.constrained()
|
||||
.with_width(theme.channel_hash.width)
|
||||
.aligned()
|
||||
.left(),
|
||||
)
|
||||
.with_child(
|
||||
Label::new("notes", theme.channel_name.text.clone())
|
||||
.contained()
|
||||
.with_style(theme.channel_name.container)
|
||||
.aligned()
|
||||
.left()
|
||||
.flex(1., true),
|
||||
)
|
||||
.constrained()
|
||||
.with_height(theme.row_height)
|
||||
.contained()
|
||||
.with_style(*theme.channel_row.style_for(is_selected, state))
|
||||
.with_padding_left(theme.channel_row.default_style().padding.left)
|
||||
})
|
||||
.on_click(MouseButton::Left, move |_, this, cx| {
|
||||
this.open_channel_buffer(&OpenChannelBuffer { channel_id }, cx);
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_channel_invite(
|
||||
channel: Arc<Channel>,
|
||||
channel_store: ModelHandle<ChannelStore>,
|
||||
@ -1815,39 +1923,52 @@ impl CollabPanel {
|
||||
channel_id: u64,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if self.channel_store.read(cx).is_user_admin(channel_id) {
|
||||
self.context_menu_on_selected = position.is_none();
|
||||
self.context_menu_on_selected = position.is_none();
|
||||
|
||||
self.context_menu.update(cx, |context_menu, cx| {
|
||||
context_menu.set_position_mode(if self.context_menu_on_selected {
|
||||
OverlayPositionMode::Local
|
||||
} else {
|
||||
OverlayPositionMode::Window
|
||||
});
|
||||
|
||||
context_menu.show(
|
||||
position.unwrap_or_default(),
|
||||
if self.context_menu_on_selected {
|
||||
gpui::elements::AnchorCorner::TopRight
|
||||
} else {
|
||||
gpui::elements::AnchorCorner::BottomLeft
|
||||
},
|
||||
vec![
|
||||
ContextMenuItem::action("New Subchannel", NewChannel { channel_id }),
|
||||
ContextMenuItem::Separator,
|
||||
ContextMenuItem::action("Invite to Channel", InviteMembers { channel_id }),
|
||||
ContextMenuItem::Separator,
|
||||
ContextMenuItem::action("Rename", RenameChannel { channel_id }),
|
||||
ContextMenuItem::action("Manage", ManageMembers { channel_id }),
|
||||
ContextMenuItem::Separator,
|
||||
ContextMenuItem::action("Delete", RemoveChannel { channel_id }),
|
||||
],
|
||||
cx,
|
||||
);
|
||||
self.context_menu.update(cx, |context_menu, cx| {
|
||||
context_menu.set_position_mode(if self.context_menu_on_selected {
|
||||
OverlayPositionMode::Local
|
||||
} else {
|
||||
OverlayPositionMode::Window
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
let expand_action_name = if self.is_channel_collapsed(channel_id) {
|
||||
"Expand Subchannels"
|
||||
} else {
|
||||
"Collapse Subchannels"
|
||||
};
|
||||
|
||||
let mut items = vec![
|
||||
ContextMenuItem::action(expand_action_name, ToggleCollapse { channel_id }),
|
||||
ContextMenuItem::action("Open Notes", OpenChannelBuffer { channel_id }),
|
||||
];
|
||||
|
||||
if self.channel_store.read(cx).is_user_admin(channel_id) {
|
||||
items.extend([
|
||||
ContextMenuItem::Separator,
|
||||
ContextMenuItem::action("New Subchannel", NewChannel { channel_id }),
|
||||
ContextMenuItem::action("Rename", RenameChannel { channel_id }),
|
||||
ContextMenuItem::Separator,
|
||||
ContextMenuItem::action("Invite Members", InviteMembers { channel_id }),
|
||||
ContextMenuItem::action("Manage Members", ManageMembers { channel_id }),
|
||||
ContextMenuItem::Separator,
|
||||
ContextMenuItem::action("Delete", RemoveChannel { channel_id }),
|
||||
]);
|
||||
}
|
||||
|
||||
context_menu.show(
|
||||
position.unwrap_or_default(),
|
||||
if self.context_menu_on_selected {
|
||||
gpui::elements::AnchorCorner::TopRight
|
||||
} else {
|
||||
gpui::elements::AnchorCorner::BottomLeft
|
||||
},
|
||||
items,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
|
||||
@ -1912,7 +2033,7 @@ impl CollabPanel {
|
||||
| Section::Online
|
||||
| Section::Offline
|
||||
| Section::ChannelInvites => {
|
||||
self.toggle_expanded(*section, cx);
|
||||
self.toggle_section_expanded(*section, cx);
|
||||
}
|
||||
},
|
||||
ListEntry::Contact { contact, calling } => {
|
||||
@ -2000,7 +2121,7 @@ impl CollabPanel {
|
||||
}
|
||||
}
|
||||
|
||||
fn toggle_expanded(&mut self, section: Section, cx: &mut ViewContext<Self>) {
|
||||
fn toggle_section_expanded(&mut self, section: Section, cx: &mut ViewContext<Self>) {
|
||||
if let Some(ix) = self.collapsed_sections.iter().position(|s| *s == section) {
|
||||
self.collapsed_sections.remove(ix);
|
||||
} else {
|
||||
@ -2009,6 +2130,55 @@ impl CollabPanel {
|
||||
self.update_entries(false, cx);
|
||||
}
|
||||
|
||||
fn collapse_selected_channel(
    &mut self,
    _: &CollapseSelectedChannel,
    cx: &mut ViewContext<Self>,
) {
    let Some(channel_id) = self.selected_channel().map(|channel| channel.id) else {
        return;
    };

    if self.is_channel_collapsed(channel_id) {
        return;
    }

    self.toggle_channel_collapsed(&ToggleCollapse { channel_id }, cx)
}

fn expand_selected_channel(&mut self, _: &ExpandSelectedChannel, cx: &mut ViewContext<Self>) {
    let Some(channel_id) = self.selected_channel().map(|channel| channel.id) else {
        return;
    };

    if !self.is_channel_collapsed(channel_id) {
        return;
    }

    self.toggle_channel_collapsed(&ToggleCollapse { channel_id }, cx)
}

fn toggle_channel_collapsed(&mut self, action: &ToggleCollapse, cx: &mut ViewContext<Self>) {
    let channel_id = action.channel_id;

    match self.collapsed_channels.binary_search(&channel_id) {
        Ok(ix) => {
            self.collapsed_channels.remove(ix);
        }
        Err(ix) => {
            self.collapsed_channels.insert(ix, channel_id);
        }
    };
    self.serialize(cx);
    self.update_entries(true, cx);
    cx.notify();
    cx.focus_self();
}

fn is_channel_collapsed(&self, channel: ChannelId) -> bool {
    self.collapsed_channels.binary_search(&channel).is_ok()
}
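
The three methods above keep `collapsed_channels` as a sorted list so membership checks stay cheap. A minimal standalone sketch of that bookkeeping, assuming only a plain sorted Vec<u64> (identifiers here are illustrative, not part of the diff):

// Keep collapsed channel ids sorted so both the membership check and the
// toggle stay O(log n) lookups.
fn toggle_collapsed(collapsed: &mut Vec<u64>, channel_id: u64) {
    match collapsed.binary_search(&channel_id) {
        // Already collapsed: remove the id to expand the channel.
        Ok(ix) => {
            collapsed.remove(ix);
        }
        // Not collapsed: insert at the sorted position so later
        // binary_search calls keep working.
        Err(ix) => collapsed.insert(ix, channel_id),
    }
}

fn is_collapsed(collapsed: &[u64], channel_id: u64) -> bool {
    collapsed.binary_search(&channel_id).is_ok()
}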
|
||||
|
||||
fn leave_call(cx: &mut ViewContext<Self>) {
|
||||
ActiveCall::global(cx)
|
||||
.update(cx, |call, cx| call.hang_up(cx))
|
||||
@ -2048,6 +2218,8 @@ impl CollabPanel {
|
||||
}
|
||||
|
||||
fn new_subchannel(&mut self, action: &NewChannel, cx: &mut ViewContext<Self>) {
|
||||
self.collapsed_channels
|
||||
.retain(|&channel| channel != action.channel_id);
|
||||
self.channel_editing_state = Some(ChannelEditingState::Create {
|
||||
parent_id: Some(action.channel_id),
|
||||
pending_name: None,
|
||||
@ -2103,6 +2275,21 @@ impl CollabPanel {
|
||||
}
|
||||
}
|
||||
|
||||
fn open_channel_buffer(&mut self, action: &OpenChannelBuffer, cx: &mut ViewContext<Self>) {
|
||||
if let Some(workspace) = self.workspace.upgrade(cx) {
|
||||
let pane = workspace.read(cx).active_pane().clone();
|
||||
let channel_view = ChannelView::open(action.channel_id, pane.clone(), workspace, cx);
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
let channel_view = channel_view.await?;
|
||||
pane.update(&mut cx, |pane, cx| {
|
||||
pane.add_item(Box::new(channel_view), true, true, None, cx)
|
||||
});
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
fn show_inline_context_menu(&mut self, _: &menu::ShowContextMenu, cx: &mut ViewContext<Self>) {
|
||||
let Some(channel) = self.selected_channel() else {
|
||||
return;
|
||||
@ -2261,6 +2448,51 @@ impl CollabPanel {
|
||||
}
|
||||
}
|
||||
|
||||
fn render_tree_branch(
    branch_style: theme::TreeBranch,
    row_style: &TextStyle,
    is_last: bool,
    size: Vector2F,
    font_cache: &FontCache,
) -> gpui::elements::ConstrainedBox<CollabPanel> {
    let line_height = row_style.line_height(font_cache);
    let cap_height = row_style.cap_height(font_cache);
    let baseline_offset = row_style.baseline_offset(font_cache) + (size.y() - line_height) / 2.;

    Canvas::new(move |scene, bounds, _, _, _| {
        scene.paint_layer(None, |scene| {
            let start_x = bounds.min_x() + (bounds.width() / 2.) - (branch_style.width / 2.);
            let end_x = bounds.max_x();
            let start_y = bounds.min_y();
            let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);

            scene.push_quad(gpui::Quad {
                bounds: RectF::from_points(
                    vec2f(start_x, start_y),
                    vec2f(
                        start_x + branch_style.width,
                        if is_last { end_y } else { bounds.max_y() },
                    ),
                ),
                background: Some(branch_style.color),
                border: gpui::Border::default(),
                corner_radii: (0.).into(),
            });
            scene.push_quad(gpui::Quad {
                bounds: RectF::from_points(
                    vec2f(start_x, end_y),
                    vec2f(end_x, end_y + branch_style.width),
                ),
                background: Some(branch_style.color),
                border: gpui::Border::default(),
                corner_radii: (0.).into(),
            });
        })
    })
    .constrained()
    .with_width(size.x())
}
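
The helper above draws the tree connector as two thin quads, a vertical stem and a horizontal arm. A self-contained sketch of the same geometry using plain f32 tuples instead of gpui's RectF (purely illustrative, assumed names):

fn branch_quads(
    (min_x, min_y): (f32, f32),
    (max_x, max_y): (f32, f32),
    width: f32,
    end_y: f32,
    is_last: bool,
) -> [((f32, f32), (f32, f32)); 2] {
    // Center the vertical stem horizontally within the allotted column.
    let start_x = min_x + (max_x - min_x) / 2. - width / 2.;
    // The stem stops at the text baseline for the last child; otherwise it
    // runs to the bottom of the row so sibling rows connect visually.
    let stem_bottom = if is_last { end_y } else { max_y };
    [
        ((start_x, min_y), (start_x + width, stem_bottom)), // vertical stem
        ((start_x, end_y), (max_x, end_y + width)),         // horizontal arm
    ]
}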
|
||||
|
||||
impl View for CollabPanel {
|
||||
fn ui_name() -> &'static str {
|
||||
"CollabPanel"
|
||||
@ -2354,7 +2586,7 @@ impl View for CollabPanel {
|
||||
.into_any()
|
||||
})
|
||||
.on_click(MouseButton::Left, |_, _, cx| cx.focus_self())
|
||||
.into_any_named("channels panel")
|
||||
.into_any_named("collab panel")
|
||||
}
|
||||
}
|
||||
|
||||
@ -2404,7 +2636,10 @@ impl Panel for CollabPanel {
|
||||
}
|
||||
|
||||
fn icon_tooltip(&self) -> (String, Option<Box<dyn gpui::Action>>) {
|
||||
("Channels Panel".to_string(), Some(Box::new(ToggleFocus)))
|
||||
(
|
||||
"Collaboration Panel".to_string(),
|
||||
Some(Box::new(ToggleFocus)),
|
||||
)
|
||||
}
|
||||
|
||||
fn should_change_position_on_event(event: &Self::Event) -> bool {
|
||||
@ -2467,6 +2702,14 @@ impl PartialEq for ListEntry {
|
||||
return channel_1.id == channel_2.id && depth_1 == depth_2;
|
||||
}
|
||||
}
|
||||
ListEntry::ChannelNotes { channel_id } => {
|
||||
if let ListEntry::ChannelNotes {
|
||||
channel_id: other_id,
|
||||
} = other
|
||||
{
|
||||
return channel_id == other_id;
|
||||
}
|
||||
}
|
||||
ListEntry::ChannelInvite(channel_1) => {
|
||||
if let ListEntry::ChannelInvite(channel_2) = other {
|
||||
return channel_1.id == channel_2.id;
|
||||
|
@ -1,4 +1,5 @@
|
||||
use client::{proto, ChannelId, ChannelMembership, ChannelStore, User, UserId, UserStore};
|
||||
use channel::{ChannelId, ChannelMembership, ChannelStore};
|
||||
use client::{proto, User, UserId, UserStore};
|
||||
use context_menu::{ContextMenu, ContextMenuItem};
|
||||
use fuzzy::{match_strings, StringMatchCandidate};
|
||||
use gpui::{
|
||||
|
@ -1096,7 +1096,7 @@ impl CollabTitlebarItem {
|
||||
style
|
||||
}
|
||||
|
||||
fn render_face<V: View>(
|
||||
fn render_face<V: 'static>(
|
||||
avatar: Arc<ImageData>,
|
||||
avatar_style: AvatarStyle,
|
||||
background_color: Color,
|
||||
|
@ -1,3 +1,4 @@
|
||||
pub mod channel_view;
|
||||
pub mod collab_panel;
|
||||
mod collab_titlebar_item;
|
||||
mod contact_notification;
|
||||
@ -49,6 +50,7 @@ pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
|
||||
ActiveCall::report_call_event_for_room(
|
||||
"disable screen share",
|
||||
room.id(),
|
||||
room.channel_id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
@ -57,6 +59,7 @@ pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
|
||||
ActiveCall::report_call_event_for_room(
|
||||
"enable screen share",
|
||||
room.id(),
|
||||
room.channel_id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
@ -73,11 +76,18 @@ pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
|
||||
let client = call.client();
|
||||
room.update(cx, |room, cx| {
|
||||
if room.is_muted(cx) {
|
||||
ActiveCall::report_call_event_for_room("enable microphone", room.id(), &client, cx);
|
||||
ActiveCall::report_call_event_for_room(
|
||||
"enable microphone",
|
||||
room.id(),
|
||||
room.channel_id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
} else {
|
||||
ActiveCall::report_call_event_for_room(
|
||||
"disable microphone",
|
||||
room.id(),
|
||||
room.channel_id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
|
@ -2,14 +2,14 @@ use client::User;
|
||||
use gpui::{
|
||||
elements::*,
|
||||
platform::{CursorStyle, MouseButton},
|
||||
AnyElement, Element, View, ViewContext,
|
||||
AnyElement, Element, ViewContext,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
|
||||
enum Dismiss {}
|
||||
enum Button {}
|
||||
|
||||
pub fn render_user_notification<F, V>(
|
||||
pub fn render_user_notification<F, V: 'static>(
|
||||
user: Arc<User>,
|
||||
title: &'static str,
|
||||
body: Option<&'static str>,
|
||||
@ -19,7 +19,6 @@ pub fn render_user_notification<F, V>(
|
||||
) -> AnyElement<V>
|
||||
where
|
||||
F: 'static + Fn(&mut V, &mut ViewContext<V>),
|
||||
V: View,
|
||||
{
|
||||
let theme = theme::current(cx).clone();
|
||||
let theme = &theme.contact_notification;
|
||||
|
18
crates/component_test/Cargo.toml
Normal file
@ -0,0 +1,18 @@
|
||||
[package]
|
||||
name = "component_test"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/component_test.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
gpui = { path = "../gpui" }
|
||||
settings = { path = "../settings" }
|
||||
util = { path = "../util" }
|
||||
theme = { path = "../theme" }
|
||||
workspace = { path = "../workspace" }
|
||||
project = { path = "../project" }
|
121
crates/component_test/src/component_test.rs
Normal file
@ -0,0 +1,121 @@
|
||||
use gpui::{
|
||||
actions,
|
||||
elements::{Component, Flex, ParentElement, SafeStylable},
|
||||
AppContext, Element, Entity, ModelHandle, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use project::Project;
|
||||
use theme::components::{action_button::Button, label::Label, ComponentExt};
|
||||
use workspace::{
|
||||
item::Item, register_deserializable_item, ItemId, Pane, PaneBackdrop, Workspace, WorkspaceId,
|
||||
};
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
cx.add_action(ComponentTest::toggle_disclosure);
|
||||
cx.add_action(ComponentTest::toggle_toggle);
|
||||
cx.add_action(ComponentTest::deploy);
|
||||
register_deserializable_item::<ComponentTest>(cx);
|
||||
}
|
||||
|
||||
actions!(
|
||||
test,
|
||||
[NoAction, ToggleDisclosure, ToggleToggle, NewComponentTest]
|
||||
);
|
||||
|
||||
struct ComponentTest {
|
||||
disclosed: bool,
|
||||
toggled: bool,
|
||||
}
|
||||
|
||||
impl ComponentTest {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
disclosed: false,
|
||||
toggled: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn deploy(workspace: &mut Workspace, _: &NewComponentTest, cx: &mut ViewContext<Workspace>) {
|
||||
workspace.add_item(Box::new(cx.add_view(|_| ComponentTest::new())), cx);
|
||||
}
|
||||
|
||||
fn toggle_disclosure(&mut self, _: &ToggleDisclosure, cx: &mut ViewContext<Self>) {
|
||||
self.disclosed = !self.disclosed;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn toggle_toggle(&mut self, _: &ToggleToggle, cx: &mut ViewContext<Self>) {
|
||||
self.toggled = !self.toggled;
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for ComponentTest {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl View for ComponentTest {
|
||||
fn ui_name() -> &'static str {
|
||||
"Component Test"
|
||||
}
|
||||
|
||||
fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> gpui::AnyElement<Self> {
|
||||
let theme = theme::current(cx);
|
||||
|
||||
PaneBackdrop::new(
|
||||
cx.view_id(),
|
||||
Flex::column()
|
||||
.with_spacing(10.)
|
||||
.with_child(
|
||||
Button::action(NoAction)
|
||||
.with_tooltip("Here's what a tooltip looks like", theme.tooltip.clone())
|
||||
.with_contents(Label::new("Click me!"))
|
||||
.with_style(theme.component_test.button.clone())
|
||||
.element(),
|
||||
)
|
||||
.with_child(
|
||||
Button::action(ToggleToggle)
|
||||
.with_tooltip("Here's what a tooltip looks like", theme.tooltip.clone())
|
||||
.with_contents(Label::new("Toggle me!"))
|
||||
.toggleable(self.toggled)
|
||||
.with_style(theme.component_test.toggle.clone())
|
||||
.element(),
|
||||
)
|
||||
.with_child(
|
||||
Label::new("A disclosure")
|
||||
.disclosable(Some(self.disclosed), Box::new(ToggleDisclosure))
|
||||
.with_style(theme.component_test.disclosure.clone())
|
||||
.element(),
|
||||
)
|
||||
.constrained()
|
||||
.with_width(200.)
|
||||
.aligned()
|
||||
.into_any(),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
|
||||
impl Item for ComponentTest {
|
||||
fn tab_content<V: 'static>(
|
||||
&self,
|
||||
_: Option<usize>,
|
||||
style: &theme::Tab,
|
||||
_: &AppContext,
|
||||
) -> gpui::AnyElement<V> {
|
||||
gpui::elements::Label::new("Component test", style.label.clone()).into_any()
|
||||
}
|
||||
|
||||
fn serialized_item_kind() -> Option<&'static str> {
|
||||
Some("ComponentTest")
|
||||
}
|
||||
|
||||
fn deserialize(
|
||||
_project: ModelHandle<Project>,
|
||||
_workspace: WeakViewHandle<Workspace>,
|
||||
_workspace_id: WorkspaceId,
|
||||
_item_id: ItemId,
|
||||
cx: &mut ViewContext<Pane>,
|
||||
) -> Task<anyhow::Result<ViewHandle<Self>>> {
|
||||
Task::ready(Ok(cx.add_view(|_| Self::new())))
|
||||
}
|
||||
}
|
@ -538,7 +538,7 @@ impl ProjectDiagnosticsEditor {
|
||||
}
|
||||
|
||||
impl Item for ProjectDiagnosticsEditor {
|
||||
fn tab_content<T: View>(
|
||||
fn tab_content<T: 'static>(
|
||||
&self,
|
||||
_detail: Option<usize>,
|
||||
style: &theme::Tab,
|
||||
@ -735,7 +735,7 @@ fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn render_summary<T: View>(
|
||||
pub(crate) fn render_summary<T: 'static>(
|
||||
summary: &DiagnosticSummary,
|
||||
text_style: &TextStyle,
|
||||
theme: &theme::ProjectDiagnostics,
|
||||
|
@ -11,7 +11,7 @@ use gpui::{
|
||||
|
||||
const DEAD_ZONE: f32 = 4.;
|
||||
|
||||
enum State<V: View> {
|
||||
enum State<V> {
|
||||
Down {
|
||||
region_offset: Vector2F,
|
||||
region: RectF,
|
||||
@ -31,7 +31,7 @@ enum State<V: View> {
|
||||
Canceled,
|
||||
}
|
||||
|
||||
impl<V: View> Clone for State<V> {
|
||||
impl<V> Clone for State<V> {
|
||||
fn clone(&self) -> Self {
|
||||
match self {
|
||||
&State::Down {
|
||||
@ -68,12 +68,12 @@ impl<V: View> Clone for State<V> {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DragAndDrop<V: View> {
|
||||
pub struct DragAndDrop<V> {
|
||||
containers: HashSet<WeakViewHandle<V>>,
|
||||
currently_dragged: Option<State<V>>,
|
||||
}
|
||||
|
||||
impl<V: View> Default for DragAndDrop<V> {
|
||||
impl<V> Default for DragAndDrop<V> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
containers: Default::default(),
|
||||
@ -82,7 +82,7 @@ impl<V: View> Default for DragAndDrop<V> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<V: View> DragAndDrop<V> {
|
||||
impl<V: 'static> DragAndDrop<V> {
|
||||
pub fn register_container(&mut self, handle: WeakViewHandle<V>) {
|
||||
self.containers.insert(handle);
|
||||
}
|
||||
@ -291,7 +291,7 @@ impl<V: View> DragAndDrop<V> {
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Draggable<V: View> {
|
||||
pub trait Draggable<V> {
|
||||
fn as_draggable<D: View, P: Any>(
|
||||
self,
|
||||
payload: P,
|
||||
@ -301,7 +301,7 @@ pub trait Draggable<V: View> {
|
||||
Self: Sized;
|
||||
}
|
||||
|
||||
impl<V: View> Draggable<V> for MouseEventHandler<V> {
|
||||
impl<V: 'static> Draggable<V> for MouseEventHandler<V> {
|
||||
fn as_draggable<D: View, P: Any>(
|
||||
self,
|
||||
payload: P,
|
||||
|
@ -559,6 +559,7 @@ pub struct Editor {
|
||||
blink_manager: ModelHandle<BlinkManager>,
|
||||
show_local_selections: bool,
|
||||
mode: EditorMode,
|
||||
replica_id_mapping: Option<HashMap<ReplicaId, ReplicaId>>,
|
||||
show_gutter: bool,
|
||||
show_wrap_guides: Option<bool>,
|
||||
placeholder_text: Option<Arc<str>>,
|
||||
@ -577,6 +578,7 @@ pub struct Editor {
|
||||
searchable: bool,
|
||||
cursor_shape: CursorShape,
|
||||
collapse_matches: bool,
|
||||
autoindent_mode: Option<AutoindentMode>,
|
||||
workspace: Option<(WeakViewHandle<Workspace>, i64)>,
|
||||
keymap_context_layers: BTreeMap<TypeId, KeymapContext>,
|
||||
input_enabled: bool,
|
||||
@ -1393,6 +1395,7 @@ impl Editor {
|
||||
blink_manager: blink_manager.clone(),
|
||||
show_local_selections: true,
|
||||
mode,
|
||||
replica_id_mapping: None,
|
||||
show_gutter: mode == EditorMode::Full,
|
||||
show_wrap_guides: None,
|
||||
placeholder_text: None,
|
||||
@ -1412,6 +1415,7 @@ impl Editor {
|
||||
searchable: true,
|
||||
override_text_style: None,
|
||||
cursor_shape: Default::default(),
|
||||
autoindent_mode: Some(AutoindentMode::EachLine),
|
||||
collapse_matches: false,
|
||||
workspace: None,
|
||||
keymap_context_layers: Default::default(),
|
||||
@ -1590,10 +1594,31 @@ impl Editor {
|
||||
self.input_enabled = input_enabled;
|
||||
}
|
||||
|
||||
pub fn set_autoindent(&mut self, autoindent: bool) {
|
||||
if autoindent {
|
||||
self.autoindent_mode = Some(AutoindentMode::EachLine);
|
||||
} else {
|
||||
self.autoindent_mode = None;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_read_only(&mut self, read_only: bool) {
|
||||
self.read_only = read_only;
|
||||
}
|
||||
|
||||
pub fn replica_id_map(&self) -> Option<&HashMap<ReplicaId, ReplicaId>> {
|
||||
self.replica_id_mapping.as_ref()
|
||||
}
|
||||
|
||||
pub fn set_replica_id_map(
|
||||
&mut self,
|
||||
mapping: Option<HashMap<ReplicaId, ReplicaId>>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.replica_id_mapping = mapping;
|
||||
cx.notify();
|
||||
}
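
A hedged sketch of how the new `replica_id_mapping` can be consulted when deciding which replica a selection should be attributed to, mirroring the `mapping.get(&replica_id).copied()` lookups later in this diff; the helper name and the u16 alias are assumptions, not the crate's API:

use std::collections::HashMap;

type ReplicaId = u16; // simplified stand-in for the editor's ReplicaId

fn display_replica_id(
    mapping: Option<&HashMap<ReplicaId, ReplicaId>>,
    replica_id: ReplicaId,
) -> ReplicaId {
    mapping
        .and_then(|m| m.get(&replica_id).copied())
        // Fall back to the raw replica id when no mapping entry exists.
        .unwrap_or(replica_id)
}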
|
||||
|
||||
fn selections_did_change(
|
||||
&mut self,
|
||||
local: bool,
|
||||
@ -1722,7 +1747,32 @@ impl Editor {
|
||||
}
|
||||
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(edits, Some(AutoindentMode::EachLine), cx)
|
||||
buffer.edit(edits, self.autoindent_mode.clone(), cx)
|
||||
});
|
||||
}
|
||||
|
||||
pub fn edit_with_block_indent<I, S, T>(
|
||||
&mut self,
|
||||
edits: I,
|
||||
original_indent_columns: Vec<u32>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) where
|
||||
I: IntoIterator<Item = (Range<S>, T)>,
|
||||
S: ToOffset,
|
||||
T: Into<Arc<str>>,
|
||||
{
|
||||
if self.read_only {
|
||||
return;
|
||||
}
|
||||
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
edits,
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
@ -2093,12 +2143,12 @@ impl Editor {
|
||||
for (selection, autoclose_region) in
|
||||
self.selections_with_autoclose_regions(selections, &snapshot)
|
||||
{
|
||||
if let Some(language) = snapshot.language_scope_at(selection.head()) {
|
||||
if let Some(scope) = snapshot.language_scope_at(selection.head()) {
|
||||
// Determine if the inserted text matches the opening or closing
|
||||
// bracket of any of this language's bracket pairs.
|
||||
let mut bracket_pair = None;
|
||||
let mut is_bracket_pair_start = false;
|
||||
for (pair, enabled) in language.brackets() {
|
||||
for (pair, enabled) in scope.brackets() {
|
||||
if enabled && pair.close && pair.start.ends_with(text.as_ref()) {
|
||||
bracket_pair = Some(pair.clone());
|
||||
is_bracket_pair_start = true;
|
||||
@ -2120,7 +2170,7 @@ impl Editor {
|
||||
let following_text_allows_autoclose = snapshot
|
||||
.chars_at(selection.start)
|
||||
.next()
|
||||
.map_or(true, |c| language.should_autoclose_before(c));
|
||||
.map_or(true, |c| scope.should_autoclose_before(c));
|
||||
let preceding_text_matches_prefix = prefix_len == 0
|
||||
|| (selection.start.column >= (prefix_len as u32)
|
||||
&& snapshot.contains_str_at(
|
||||
@ -2197,7 +2247,7 @@ impl Editor {
|
||||
drop(snapshot);
|
||||
self.transact(cx, |this, cx| {
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(edits, Some(AutoindentMode::EachLine), cx);
|
||||
buffer.edit(edits, this.autoindent_mode.clone(), cx);
|
||||
});
|
||||
|
||||
let new_anchor_selections = new_selections.iter().map(|e| &e.0);
|
||||
@ -2657,7 +2707,6 @@ impl Editor {
|
||||
false
|
||||
});
|
||||
}
|
||||
|
||||
fn completion_query(buffer: &MultiBufferSnapshot, position: impl ToOffset) -> Option<String> {
|
||||
let offset = position.to_offset(buffer);
|
||||
let (word_range, kind) = buffer.surrounding_word(offset);
|
||||
@ -3037,7 +3086,7 @@ impl Editor {
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
ranges.iter().map(|range| (range.clone(), text)),
|
||||
Some(AutoindentMode::EachLine),
|
||||
this.autoindent_mode.clone(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@ -4732,6 +4781,7 @@ impl Editor {
|
||||
let mut clipboard_selections = Vec::with_capacity(selections.len());
|
||||
{
|
||||
let max_point = buffer.max_point();
|
||||
let mut is_first = true;
|
||||
for selection in &mut selections {
|
||||
let is_entire_line = selection.is_empty() || self.selections.line_mode;
|
||||
if is_entire_line {
|
||||
@ -4739,6 +4789,11 @@ impl Editor {
|
||||
selection.end = cmp::min(max_point, Point::new(selection.end.row + 1, 0));
|
||||
selection.goal = SelectionGoal::None;
|
||||
}
|
||||
if is_first {
|
||||
is_first = false;
|
||||
} else {
|
||||
text += "\n";
|
||||
}
|
||||
let mut len = 0;
|
||||
for chunk in buffer.text_for_range(selection.start..selection.end) {
|
||||
text.push_str(chunk);
|
||||
@ -4769,6 +4824,7 @@ impl Editor {
|
||||
let mut clipboard_selections = Vec::with_capacity(selections.len());
|
||||
{
|
||||
let max_point = buffer.max_point();
|
||||
let mut is_first = true;
|
||||
for selection in selections.iter() {
|
||||
let mut start = selection.start;
|
||||
let mut end = selection.end;
|
||||
@ -4777,6 +4833,11 @@ impl Editor {
|
||||
start = Point::new(start.row, 0);
|
||||
end = cmp::min(max_point, Point::new(end.row + 1, 0));
|
||||
}
|
||||
if is_first {
|
||||
is_first = false;
|
||||
} else {
|
||||
text += "\n";
|
||||
}
|
||||
let mut len = 0;
|
||||
for chunk in buffer.text_for_range(start..end) {
|
||||
text.push_str(chunk);
|
||||
@ -4796,7 +4857,7 @@ impl Editor {
|
||||
pub fn paste(&mut self, _: &Paste, cx: &mut ViewContext<Self>) {
|
||||
self.transact(cx, |this, cx| {
|
||||
if let Some(item) = cx.read_from_clipboard() {
|
||||
let mut clipboard_text = Cow::Borrowed(item.text());
|
||||
let clipboard_text = Cow::Borrowed(item.text());
|
||||
if let Some(mut clipboard_selections) = item.metadata::<Vec<ClipboardSelection>>() {
|
||||
let old_selections = this.selections.all::<usize>(cx);
|
||||
let all_selections_were_entire_line =
|
||||
@ -4804,18 +4865,7 @@ impl Editor {
|
||||
let first_selection_indent_column =
|
||||
clipboard_selections.first().map(|s| s.first_line_indent);
|
||||
if clipboard_selections.len() != old_selections.len() {
|
||||
let mut newline_separated_text = String::new();
|
||||
let mut clipboard_selections = clipboard_selections.drain(..).peekable();
|
||||
let mut ix = 0;
|
||||
while let Some(clipboard_selection) = clipboard_selections.next() {
|
||||
newline_separated_text
|
||||
.push_str(&clipboard_text[ix..ix + clipboard_selection.len]);
|
||||
ix += clipboard_selection.len;
|
||||
if clipboard_selections.peek().is_some() {
|
||||
newline_separated_text.push('\n');
|
||||
}
|
||||
}
|
||||
clipboard_text = Cow::Owned(newline_separated_text);
|
||||
clipboard_selections.drain(..);
|
||||
}
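
The hunk above swaps the old re-splitting fallback for a simple `drain(..)`. A small sketch of that rule, reduced to the per-selection lengths only (hypothetical helper, not from the diff):

fn resolve_clipboard_metadata(clipboard_lens: &mut Vec<usize>, cursor_count: usize) {
    // When the copied selections don't line up one-to-one with the current
    // cursors, drop the per-selection lengths; the paste loop below then
    // inserts the entire clipboard text at every cursor instead of
    // splitting it.
    if clipboard_lens.len() != cursor_count {
        clipboard_lens.clear();
    }
}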
|
||||
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
@ -4831,8 +4881,9 @@ impl Editor {
|
||||
if let Some(clipboard_selection) = clipboard_selections.get(ix) {
|
||||
let end_offset = start_offset + clipboard_selection.len;
|
||||
to_insert = &clipboard_text[start_offset..end_offset];
|
||||
dbg!(start_offset, end_offset, &clipboard_text, &to_insert);
|
||||
entire_line = clipboard_selection.is_entire_line;
|
||||
start_offset = end_offset;
|
||||
start_offset = end_offset + 1;
|
||||
original_indent_column =
|
||||
Some(clipboard_selection.first_line_indent);
|
||||
} else {
|
||||
@ -8527,6 +8578,7 @@ fn build_style(
|
||||
font_size,
|
||||
font_properties,
|
||||
underline: Default::default(),
|
||||
soft_wrap: false,
|
||||
},
|
||||
placeholder_text: None,
|
||||
line_height_scalar,
|
||||
|
@ -5237,6 +5237,7 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
|
||||
resolve_provider: Some(true),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
@ -6383,7 +6384,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
|
||||
.update(|cx| {
|
||||
Editor::from_state_proto(
|
||||
pane.clone(),
|
||||
project.clone(),
|
||||
workspace.clone(),
|
||||
ViewId {
|
||||
creator: Default::default(),
|
||||
id: 0,
|
||||
@ -6478,7 +6479,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
|
||||
.update(|cx| {
|
||||
Editor::from_state_proto(
|
||||
pane.clone(),
|
||||
project.clone(),
|
||||
workspace.clone(),
|
||||
ViewId {
|
||||
creator: Default::default(),
|
||||
id: 0,
|
||||
@ -7528,6 +7529,7 @@ async fn test_completions_with_additional_edits(cx: &mut gpui::TestAppContext) {
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(vec![".".to_string()]),
|
||||
resolve_provider: Some(true),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
|
@ -62,6 +62,7 @@ struct SelectionLayout {
|
||||
head: DisplayPoint,
|
||||
cursor_shape: CursorShape,
|
||||
is_newest: bool,
|
||||
is_local: bool,
|
||||
range: Range<DisplayPoint>,
|
||||
active_rows: Range<u32>,
|
||||
}
|
||||
@ -73,6 +74,7 @@ impl SelectionLayout {
|
||||
cursor_shape: CursorShape,
|
||||
map: &DisplaySnapshot,
|
||||
is_newest: bool,
|
||||
is_local: bool,
|
||||
) -> Self {
|
||||
let point_selection = selection.map(|p| p.to_point(&map.buffer_snapshot));
|
||||
let display_selection = point_selection.map(|p| p.to_display_point(map));
|
||||
@ -109,6 +111,7 @@ impl SelectionLayout {
|
||||
head,
|
||||
cursor_shape,
|
||||
is_newest,
|
||||
is_local,
|
||||
range,
|
||||
active_rows,
|
||||
}
|
||||
@ -605,7 +608,7 @@ impl EditorElement {
|
||||
visible_bounds: RectF,
|
||||
layout: &mut LayoutState,
|
||||
editor: &mut Editor,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
cx: &mut PaintContext<Editor>,
|
||||
) {
|
||||
let line_height = layout.position_map.line_height;
|
||||
|
||||
@ -760,10 +763,9 @@ impl EditorElement {
|
||||
visible_bounds: RectF,
|
||||
layout: &mut LayoutState,
|
||||
editor: &mut Editor,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
cx: &mut PaintContext<Editor>,
|
||||
) {
|
||||
let style = &self.style;
|
||||
let local_replica_id = editor.replica_id(cx);
|
||||
let scroll_position = layout.position_map.snapshot.scroll_position();
|
||||
let start_row = layout.visible_display_row_range.start;
|
||||
let scroll_top = scroll_position.y() * layout.position_map.line_height;
|
||||
@ -852,15 +854,13 @@ impl EditorElement {
|
||||
|
||||
for (replica_id, selections) in &layout.selections {
|
||||
let replica_id = *replica_id;
|
||||
let selection_style = style.replica_selection_style(replica_id);
|
||||
let selection_style = if let Some(replica_id) = replica_id {
|
||||
style.replica_selection_style(replica_id)
|
||||
} else {
|
||||
&style.absent_selection
|
||||
};
|
||||
|
||||
for selection in selections {
|
||||
if !selection.range.is_empty()
|
||||
&& (replica_id == local_replica_id
|
||||
|| Some(replica_id) == editor.leader_replica_id)
|
||||
{
|
||||
invisible_display_ranges.push(selection.range.clone());
|
||||
}
|
||||
self.paint_highlighted_range(
|
||||
scene,
|
||||
selection.range.clone(),
|
||||
@ -874,7 +874,10 @@ impl EditorElement {
|
||||
bounds,
|
||||
);
|
||||
|
||||
if editor.show_local_cursors(cx) || replica_id != local_replica_id {
|
||||
if selection.is_local && !selection.range.is_empty() {
|
||||
invisible_display_ranges.push(selection.range.clone());
|
||||
}
|
||||
if !selection.is_local || editor.show_local_cursors(cx) {
|
||||
let cursor_position = selection.head;
|
||||
if layout
|
||||
.visible_display_row_range
|
||||
@ -1337,7 +1340,7 @@ impl EditorElement {
|
||||
visible_bounds: RectF,
|
||||
layout: &mut LayoutState,
|
||||
editor: &mut Editor,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
cx: &mut PaintContext<Editor>,
|
||||
) {
|
||||
let scroll_position = layout.position_map.snapshot.scroll_position();
|
||||
let scroll_left = scroll_position.x() * layout.position_map.em_width;
|
||||
@ -2124,7 +2127,7 @@ impl Element<Editor> for EditorElement {
|
||||
.anchor_before(DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right))
|
||||
};
|
||||
|
||||
let mut selections: Vec<(ReplicaId, Vec<SelectionLayout>)> = Vec::new();
|
||||
let mut selections: Vec<(Option<ReplicaId>, Vec<SelectionLayout>)> = Vec::new();
|
||||
let mut active_rows = BTreeMap::new();
|
||||
let mut fold_ranges = Vec::new();
|
||||
let is_singleton = editor.is_singleton(cx);
|
||||
@ -2155,8 +2158,14 @@ impl Element<Editor> for EditorElement {
|
||||
.buffer_snapshot
|
||||
.remote_selections_in_range(&(start_anchor..end_anchor))
|
||||
{
|
||||
let replica_id = if let Some(mapping) = &editor.replica_id_mapping {
|
||||
mapping.get(&replica_id).copied()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// The local selections match the leader's selections.
|
||||
if Some(replica_id) == editor.leader_replica_id {
|
||||
if replica_id.is_some() && replica_id == editor.leader_replica_id {
|
||||
continue;
|
||||
}
|
||||
remote_selections
|
||||
@ -2168,6 +2177,7 @@ impl Element<Editor> for EditorElement {
|
||||
cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
false,
|
||||
false,
|
||||
));
|
||||
}
|
||||
selections.extend(remote_selections);
|
||||
@ -2191,6 +2201,7 @@ impl Element<Editor> for EditorElement {
|
||||
editor.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
is_newest,
|
||||
true,
|
||||
);
|
||||
if is_newest {
|
||||
newest_selection_head = Some(layout.head);
|
||||
@ -2206,11 +2217,18 @@ impl Element<Editor> for EditorElement {
|
||||
}
|
||||
|
||||
// Render the local selections in the leader's color when following.
|
||||
let local_replica_id = editor
|
||||
.leader_replica_id
|
||||
.unwrap_or_else(|| editor.replica_id(cx));
|
||||
let local_replica_id = if let Some(leader_replica_id) = editor.leader_replica_id {
|
||||
leader_replica_id
|
||||
} else {
|
||||
let replica_id = editor.replica_id(cx);
|
||||
if let Some(mapping) = &editor.replica_id_mapping {
|
||||
mapping.get(&replica_id).copied().unwrap_or(replica_id)
|
||||
} else {
|
||||
replica_id
|
||||
}
|
||||
};
|
||||
|
||||
selections.push((local_replica_id, layouts));
|
||||
selections.push((Some(local_replica_id), layouts));
|
||||
}
|
||||
|
||||
let scrollbar_settings = &settings::get::<EditorSettings>(cx).scrollbar;
|
||||
@ -2591,7 +2609,7 @@ pub struct LayoutState {
|
||||
blocks: Vec<BlockLayout>,
|
||||
highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>,
|
||||
fold_ranges: Vec<(BufferRow, Range<DisplayPoint>, Color)>,
|
||||
selections: Vec<(ReplicaId, Vec<SelectionLayout>)>,
|
||||
selections: Vec<(Option<ReplicaId>, Vec<SelectionLayout>)>,
|
||||
scrollbar_row_range: Range<f32>,
|
||||
show_scrollbars: bool,
|
||||
is_singleton: bool,
|
||||
|
@ -49,11 +49,12 @@ impl FollowableItem for Editor {
|
||||
|
||||
fn from_state_proto(
|
||||
pane: ViewHandle<workspace::Pane>,
|
||||
project: ModelHandle<Project>,
|
||||
workspace: ViewHandle<Workspace>,
|
||||
remote_id: ViewId,
|
||||
state: &mut Option<proto::view::Variant>,
|
||||
cx: &mut AppContext,
|
||||
) -> Option<Task<Result<ViewHandle<Self>>>> {
|
||||
let project = workspace.read(cx).project().to_owned();
|
||||
let Some(proto::view::Variant::Editor(_)) = state else { return None };
|
||||
let Some(proto::view::Variant::Editor(state)) = state.take() else { unreachable!() };
|
||||
|
||||
@ -561,7 +562,7 @@ impl Item for Editor {
|
||||
}
|
||||
}
|
||||
|
||||
fn tab_content<T: View>(
|
||||
fn tab_content<T: 'static>(
|
||||
&self,
|
||||
detail: Option<usize>,
|
||||
style: &theme::Tab,
|
||||
@ -753,7 +754,7 @@ impl Item for Editor {
|
||||
Some(Box::new(handle.clone()))
|
||||
}
|
||||
|
||||
fn pixel_position_of_cursor(&self) -> Option<Vector2F> {
|
||||
fn pixel_position_of_cursor(&self, _: &AppContext) -> Option<Vector2F> {
|
||||
self.pixel_position_of_newest_cursor
|
||||
}
|
||||
|
||||
@ -1028,7 +1029,7 @@ impl SearchableItem for Editor {
|
||||
if let Some((_, _, excerpt_buffer)) = buffer.as_singleton() {
|
||||
ranges.extend(
|
||||
query
|
||||
.search(excerpt_buffer.as_rope())
|
||||
.search(excerpt_buffer, None)
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
@ -1038,17 +1039,22 @@ impl SearchableItem for Editor {
|
||||
} else {
|
||||
for excerpt in buffer.excerpt_boundaries_in_range(0..buffer.len()) {
|
||||
let excerpt_range = excerpt.range.context.to_offset(&excerpt.buffer);
|
||||
let rope = excerpt.buffer.as_rope().slice(excerpt_range.clone());
|
||||
ranges.extend(query.search(&rope).await.into_iter().map(|range| {
|
||||
let start = excerpt
|
||||
.buffer
|
||||
.anchor_after(excerpt_range.start + range.start);
|
||||
let end = excerpt
|
||||
.buffer
|
||||
.anchor_before(excerpt_range.start + range.end);
|
||||
buffer.anchor_in_excerpt(excerpt.id.clone(), start)
|
||||
..buffer.anchor_in_excerpt(excerpt.id.clone(), end)
|
||||
}));
|
||||
ranges.extend(
|
||||
query
|
||||
.search(&excerpt.buffer, Some(excerpt_range.clone()))
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
let start = excerpt
|
||||
.buffer
|
||||
.anchor_after(excerpt_range.start + range.start);
|
||||
let end = excerpt
|
||||
.buffer
|
||||
.anchor_before(excerpt_range.start + range.end);
|
||||
buffer.anchor_in_excerpt(excerpt.id.clone(), start)
|
||||
..buffer.anchor_in_excerpt(excerpt.id.clone(), end)
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
ranges
|
||||
|
@ -61,10 +61,10 @@ pub fn up_by_rows(
|
||||
goal: SelectionGoal,
|
||||
preserve_column_at_start: bool,
|
||||
) -> (DisplayPoint, SelectionGoal) {
|
||||
let mut goal_column = if let SelectionGoal::Column(column) = goal {
|
||||
column
|
||||
} else {
|
||||
map.column_to_chars(start.row(), start.column())
|
||||
let mut goal_column = match goal {
|
||||
SelectionGoal::Column(column) => column,
|
||||
SelectionGoal::ColumnRange { end, .. } => end,
|
||||
_ => map.column_to_chars(start.row(), start.column()),
|
||||
};
|
||||
|
||||
let prev_row = start.row().saturating_sub(row_count);
|
||||
@ -95,10 +95,10 @@ pub fn down_by_rows(
|
||||
goal: SelectionGoal,
|
||||
preserve_column_at_end: bool,
|
||||
) -> (DisplayPoint, SelectionGoal) {
|
||||
let mut goal_column = if let SelectionGoal::Column(column) = goal {
|
||||
column
|
||||
} else {
|
||||
map.column_to_chars(start.row(), start.column())
|
||||
let mut goal_column = match goal {
|
||||
SelectionGoal::Column(column) => column,
|
||||
SelectionGoal::ColumnRange { end, .. } => end,
|
||||
_ => map.column_to_chars(start.row(), start.column()),
|
||||
};
|
||||
|
||||
let new_row = start.row() + row_count;
|
||||
@ -176,14 +176,21 @@ pub fn line_end(
|
||||
}
|
||||
|
||||
pub fn previous_word_start(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
|
||||
let raw_point = point.to_point(map);
|
||||
let language = map.buffer_snapshot.language_at(raw_point);
|
||||
|
||||
find_preceding_boundary(map, point, |left, right| {
|
||||
(char_kind(left) != char_kind(right) && !right.is_whitespace()) || left == '\n'
|
||||
(char_kind(language, left) != char_kind(language, right) && !right.is_whitespace())
|
||||
|| left == '\n'
|
||||
})
|
||||
}
|
||||
|
||||
pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
|
||||
let raw_point = point.to_point(map);
|
||||
let language = map.buffer_snapshot.language_at(raw_point);
|
||||
find_preceding_boundary(map, point, |left, right| {
|
||||
let is_word_start = char_kind(left) != char_kind(right) && !right.is_whitespace();
|
||||
let is_word_start =
|
||||
char_kind(language, left) != char_kind(language, right) && !right.is_whitespace();
|
||||
let is_subword_start =
|
||||
left == '_' && right != '_' || left.is_lowercase() && right.is_uppercase();
|
||||
is_word_start || is_subword_start || left == '\n'
|
||||
@ -191,14 +198,20 @@ pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> Dis
|
||||
}
|
||||
|
||||
pub fn next_word_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
|
||||
let raw_point = point.to_point(map);
|
||||
let language = map.buffer_snapshot.language_at(raw_point);
|
||||
find_boundary(map, point, |left, right| {
|
||||
(char_kind(left) != char_kind(right) && !left.is_whitespace()) || right == '\n'
|
||||
(char_kind(language, left) != char_kind(language, right) && !left.is_whitespace())
|
||||
|| right == '\n'
|
||||
})
|
||||
}
|
||||
|
||||
pub fn next_subword_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
|
||||
let raw_point = point.to_point(map);
|
||||
let language = map.buffer_snapshot.language_at(raw_point);
|
||||
find_boundary(map, point, |left, right| {
|
||||
let is_word_end = (char_kind(left) != char_kind(right)) && !left.is_whitespace();
|
||||
let is_word_end =
|
||||
(char_kind(language, left) != char_kind(language, right)) && !left.is_whitespace();
|
||||
let is_subword_end =
|
||||
left != '_' && right == '_' || left.is_lowercase() && right.is_uppercase();
|
||||
is_word_end || is_subword_end || right == '\n'
|
||||
@ -385,10 +398,15 @@ pub fn find_boundary_in_line(
|
||||
}
|
||||
|
||||
pub fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool {
|
||||
let raw_point = point.to_point(map);
|
||||
let language = map.buffer_snapshot.language_at(raw_point);
|
||||
let ix = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
|
||||
let text = &map.buffer_snapshot;
|
||||
let next_char_kind = text.chars_at(ix).next().map(char_kind);
|
||||
let prev_char_kind = text.reversed_chars_at(ix).next().map(char_kind);
|
||||
let next_char_kind = text.chars_at(ix).next().map(|c| char_kind(language, c));
|
||||
let prev_char_kind = text
|
||||
.reversed_chars_at(ix)
|
||||
.next()
|
||||
.map(|c| char_kind(language, c));
|
||||
prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word))
|
||||
}
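
These movement changes pass the buffer's language into `char_kind` so word boundaries can respect per-language word characters (the test context later in this diff adds '$' and '#'). A sketch of one plausible classifier; the signature taking the character set directly is an assumption, not the actual API:

use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq)]
enum CharKind {
    Newline,
    Whitespace,
    Punctuation,
    Word,
}

fn char_kind(word_characters: &HashSet<char>, c: char) -> CharKind {
    if c == '\n' {
        CharKind::Newline
    } else if c.is_whitespace() {
        CharKind::Whitespace
    } else if c.is_alphanumeric() || c == '_' || word_characters.contains(&c) {
        // Extra language-specific characters count as part of a word.
        CharKind::Word
    } else {
        CharKind::Punctuation
    }
}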
|
||||
|
||||
|
@ -1394,10 +1394,7 @@ impl MultiBuffer {
|
||||
.map(|state| state.buffer.clone())
|
||||
}
|
||||
|
||||
pub fn is_completion_trigger<T>(&self, position: T, text: &str, cx: &AppContext) -> bool
|
||||
where
|
||||
T: ToOffset,
|
||||
{
|
||||
pub fn is_completion_trigger(&self, position: Anchor, text: &str, cx: &AppContext) -> bool {
|
||||
let mut chars = text.chars();
|
||||
let char = if let Some(char) = chars.next() {
|
||||
char
|
||||
@ -1408,7 +1405,9 @@ impl MultiBuffer {
|
||||
return false;
|
||||
}
|
||||
|
||||
if char.is_alphanumeric() || char == '_' {
|
||||
let language = self.language_at(position.clone(), cx);
|
||||
|
||||
if char_kind(language.as_ref(), char) == CharKind::Word {
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -1913,13 +1912,16 @@ impl MultiBufferSnapshot {
|
||||
let mut end = start;
|
||||
let mut next_chars = self.chars_at(start).peekable();
|
||||
let mut prev_chars = self.reversed_chars_at(start).peekable();
|
||||
|
||||
let language = self.language_at(start);
|
||||
let kind = |c| char_kind(language, c);
|
||||
let word_kind = cmp::max(
|
||||
prev_chars.peek().copied().map(char_kind),
|
||||
next_chars.peek().copied().map(char_kind),
|
||||
prev_chars.peek().copied().map(kind),
|
||||
next_chars.peek().copied().map(kind),
|
||||
);
|
||||
|
||||
for ch in prev_chars {
|
||||
if Some(char_kind(ch)) == word_kind && ch != '\n' {
|
||||
if Some(kind(ch)) == word_kind && ch != '\n' {
|
||||
start -= ch.len_utf8();
|
||||
} else {
|
||||
break;
|
||||
@ -1927,7 +1929,7 @@ impl MultiBufferSnapshot {
|
||||
}
|
||||
|
||||
for ch in next_chars {
|
||||
if Some(char_kind(ch)) == word_kind && ch != '\n' {
|
||||
if Some(kind(ch)) == word_kind && ch != '\n' {
|
||||
end += ch.len_utf8();
|
||||
} else {
|
||||
break;
|
||||
|
@ -29,6 +29,7 @@ use self::{
|
||||
};
|
||||
|
||||
pub const SCROLL_EVENT_SEPARATION: Duration = Duration::from_millis(28);
|
||||
pub const VERTICAL_SCROLL_MARGIN: f32 = 3.;
|
||||
const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);
|
||||
|
||||
#[derive(Default)]
|
||||
@ -136,7 +137,7 @@ pub struct ScrollManager {
|
||||
impl ScrollManager {
|
||||
pub fn new() -> Self {
|
||||
ScrollManager {
|
||||
vertical_scroll_margin: 3.0,
|
||||
vertical_scroll_margin: VERTICAL_SCROLL_MARGIN,
|
||||
anchor: ScrollAnchor::new(),
|
||||
ongoing: OngoingScroll::new(),
|
||||
autoscroll_request: None,
|
||||
|
@ -1,7 +1,7 @@
|
||||
use std::{
|
||||
cell::Ref,
|
||||
cmp, iter, mem,
|
||||
ops::{Deref, Range, Sub},
|
||||
ops::{Deref, DerefMut, Range, Sub},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
@ -53,7 +53,7 @@ impl SelectionsCollection {
|
||||
}
|
||||
}
|
||||
|
||||
fn display_map(&self, cx: &mut AppContext) -> DisplaySnapshot {
|
||||
pub fn display_map(&self, cx: &mut AppContext) -> DisplaySnapshot {
|
||||
self.display_map.update(cx, |map, cx| map.snapshot(cx))
|
||||
}
|
||||
|
||||
@ -250,6 +250,10 @@ impl SelectionsCollection {
|
||||
resolve(self.oldest_anchor(), &self.buffer(cx))
|
||||
}
|
||||
|
||||
pub fn first_anchor(&self) -> Selection<Anchor> {
|
||||
self.disjoint[0].clone()
|
||||
}
|
||||
|
||||
pub fn first<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
&self,
|
||||
cx: &AppContext,
|
||||
@ -352,7 +356,7 @@ pub struct MutableSelectionsCollection<'a> {
|
||||
}
|
||||
|
||||
impl<'a> MutableSelectionsCollection<'a> {
|
||||
fn display_map(&mut self) -> DisplaySnapshot {
|
||||
pub fn display_map(&mut self) -> DisplaySnapshot {
|
||||
self.collection.display_map(self.cx)
|
||||
}
|
||||
|
||||
@ -607,6 +611,10 @@ impl<'a> MutableSelectionsCollection<'a> {
|
||||
self.select_anchors(selections)
|
||||
}
|
||||
|
||||
pub fn new_selection_id(&mut self) -> usize {
|
||||
post_inc(&mut self.next_selection_id)
|
||||
}
|
||||
|
||||
pub fn select_display_ranges<T>(&mut self, ranges: T)
|
||||
where
|
||||
T: IntoIterator<Item = Range<DisplayPoint>>,
|
||||
@ -831,6 +839,12 @@ impl<'a> Deref for MutableSelectionsCollection<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DerefMut for MutableSelectionsCollection<'a> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
self.collection
|
||||
}
|
||||
}
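
The new DerefMut impl above lets a MutableSelectionsCollection expose the wrapped collection's mutable state directly. A standalone sketch of that pattern with simplified types:

use std::ops::{Deref, DerefMut};

struct Inner {
    next_selection_id: usize,
}

struct Mutable<'a> {
    inner: &'a mut Inner,
}

impl<'a> Deref for Mutable<'a> {
    type Target = Inner;
    fn deref(&self) -> &Inner {
        self.inner
    }
}

impl<'a> DerefMut for Mutable<'a> {
    fn deref_mut(&mut self) -> &mut Inner {
        self.inner
    }
}
// With DerefMut in place, `mutable.next_selection_id += 1;` writes through
// to the wrapped collection.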
|
||||
|
||||
// Panics if passed selections are not in order
|
||||
pub fn resolve_multiple<'a, D, I>(
|
||||
selections: I,
|
||||
|
@ -6,6 +6,7 @@ use std::{
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use collections::HashSet;
|
||||
use futures::Future;
|
||||
use gpui::{json, ViewContext, ViewHandle};
|
||||
use indoc::indoc;
|
||||
@ -154,10 +155,23 @@ impl<'a> EditorLspTestContext<'a> {
|
||||
capabilities: lsp::ServerCapabilities,
|
||||
cx: &'a mut gpui::TestAppContext,
|
||||
) -> EditorLspTestContext<'a> {
|
||||
let mut word_characters: HashSet<char> = Default::default();
|
||||
word_characters.insert('$');
|
||||
word_characters.insert('#');
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "Typescript".into(),
|
||||
path_suffixes: vec!["ts".to_string()],
|
||||
brackets: language::BracketPairConfig {
|
||||
pairs: vec![language::BracketPair {
|
||||
start: "{".to_string(),
|
||||
end: "}".to_string(),
|
||||
close: true,
|
||||
newline: true,
|
||||
}],
|
||||
disabled_scopes_by_bracket_ix: Default::default(),
|
||||
},
|
||||
word_characters,
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::language_typescript()),
|
||||
@ -169,6 +183,23 @@ impl<'a> EditorLspTestContext<'a> {
|
||||
("{" @open "}" @close)
|
||||
("<" @open ">" @close)
|
||||
("\"" @open "\"" @close)"#})),
|
||||
indents: Some(Cow::from(indoc! {r#"
|
||||
[
|
||||
(call_expression)
|
||||
(assignment_expression)
|
||||
(member_expression)
|
||||
(lexical_declaration)
|
||||
(variable_declaration)
|
||||
(assignment_expression)
|
||||
(if_statement)
|
||||
(for_statement)
|
||||
] @indent
|
||||
|
||||
(_ "[" "]" @end) @indent
|
||||
(_ "<" ">" @end) @indent
|
||||
(_ "{" "}" @end) @indent
|
||||
(_ "(" ")" @end) @indent
|
||||
"#})),
|
||||
..Default::default()
|
||||
})
|
||||
.expect("Could not parse queries");
|
||||
|
@ -268,7 +268,7 @@ impl Item for FeedbackEditor {
|
||||
Some("Send Feedback".into())
|
||||
}
|
||||
|
||||
fn tab_content<T: View>(
|
||||
fn tab_content<T: 'static>(
|
||||
&self,
|
||||
_: Option<usize>,
|
||||
style: &theme::Tab,
|
||||
|
@ -39,6 +39,7 @@ pathfinder_color = "0.5"
|
||||
pathfinder_geometry = "0.5"
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
refineable.workspace = true
|
||||
resvg = "0.14"
|
||||
schemars = "0.8"
|
||||
seahash = "4.1"
|
||||
@ -47,6 +48,7 @@ serde_derive.workspace = true
|
||||
serde_json.workspace = true
|
||||
smallvec.workspace = true
|
||||
smol.workspace = true
|
||||
taffy = { git = "https://github.com/DioxusLabs/taffy", rev = "dab541d6104d58e2e10ce90c4a1dad0b703160cd", features = ["flexbox"] }
|
||||
time.workspace = true
|
||||
tiny-skia = "0.5"
|
||||
usvg = { version = "0.14", features = [] }
|
||||
|
@ -2,7 +2,7 @@ use button_component::Button;
|
||||
|
||||
use gpui::{
|
||||
color::Color,
|
||||
elements::{Component, ContainerStyle, Flex, Label, ParentElement},
|
||||
elements::{ContainerStyle, Flex, Label, ParentElement, StatefulComponent},
|
||||
fonts::{self, TextStyle},
|
||||
platform::WindowOptions,
|
||||
AnyElement, App, Element, Entity, View, ViewContext,
|
||||
@ -72,7 +72,7 @@ impl View for TestView {
|
||||
TextStyle::for_color(Color::blue()),
|
||||
)
|
||||
.with_style(ButtonStyle::fill(Color::yellow()))
|
||||
.into_element(),
|
||||
.element(),
|
||||
)
|
||||
.with_child(
|
||||
ToggleableButton::new(self.is_doubling, move |_, v: &mut Self, cx| {
|
||||
@ -84,7 +84,7 @@ impl View for TestView {
|
||||
inactive: ButtonStyle::fill(Color::red()),
|
||||
active: ButtonStyle::fill(Color::green()),
|
||||
})
|
||||
.into_element(),
|
||||
.element(),
|
||||
)
|
||||
.expanded()
|
||||
.contained()
|
||||
@ -114,7 +114,7 @@ mod theme {
|
||||
// Component creation:
|
||||
mod toggleable_button {
|
||||
use gpui::{
|
||||
elements::{Component, ContainerStyle, LabelStyle},
|
||||
elements::{ContainerStyle, LabelStyle, StatefulComponent},
|
||||
scene::MouseClick,
|
||||
EventContext, View,
|
||||
};
|
||||
@ -156,7 +156,7 @@ mod toggleable_button {
|
||||
}
|
||||
}
|
||||
|
||||
impl<V: View> Component<V> for ToggleableButton<V> {
|
||||
impl<V: View> StatefulComponent<V> for ToggleableButton<V> {
|
||||
fn render(self, v: &mut V, cx: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
|
||||
let button = if let Some(style) = self.style {
|
||||
self.button.with_style(*style.style_for(self.active))
|
||||
@ -171,7 +171,7 @@ mod toggleable_button {
|
||||
mod button_component {
|
||||
|
||||
use gpui::{
|
||||
elements::{Component, ContainerStyle, Label, LabelStyle, MouseEventHandler},
|
||||
elements::{ContainerStyle, Label, LabelStyle, MouseEventHandler, StatefulComponent},
|
||||
platform::MouseButton,
|
||||
scene::MouseClick,
|
||||
AnyElement, Element, EventContext, TypeTag, View, ViewContext,
|
||||
@ -212,7 +212,7 @@ mod button_component {
|
||||
}
|
||||
}
|
||||
|
||||
impl<V: View> Component<V> for Button<V> {
|
||||
impl<V: View> StatefulComponent<V> for Button<V> {
|
||||
fn render(self, _: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
|
||||
let click_handler = self.click_handler;
|
||||
|
||||
|
@ -58,6 +58,7 @@ impl gpui::View for TextView {
|
||||
font_family_id: family,
|
||||
underline: Default::default(),
|
||||
font_properties: Default::default(),
|
||||
soft_wrap: false,
|
||||
},
|
||||
)
|
||||
.with_highlights(vec![(17..26, underline), (34..40, underline)])
|
||||
|
2919
crates/gpui/playground/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.