open zed urls (#9081)

Release Notes:

- Added support for opening files via the zed:// protocol, e.g. `open zed:///Users/example/Desktop/a.txt` ([#8482](https://github.com/zed-industries/zed/issues/8482)).
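
As a rough illustration of the URL shapes this enables (a minimal standalone sketch, not code from this commit): a `zed://file` or `file://` URL, optionally carrying a `:line:column` suffix, is percent-decoded and split into a path plus caret position, in the spirit of `OpenRequest::parse_file_path` below. The `parse_file_url` helper here is hypothetical and assumes Unix-style paths and the `urlencoding` crate:

```rust
use std::path::PathBuf;

// Hypothetical helper: decode a zed://file or file:// URL into a path plus an
// optional 1-based row/column, similar in spirit to parse_file_path in this PR.
fn parse_file_url(url: &str) -> Option<(PathBuf, Option<u32>, Option<u32>)> {
    let raw = url
        .strip_prefix("zed://file")
        .or_else(|| url.strip_prefix("file://"))?;
    // Percent-decode (e.g. "%20" -> " "); assumes the `urlencoding` crate.
    let decoded = urlencoding::decode(raw).ok()?;
    // Split an optional trailing ":row(:column)" suffix, e.g. "a.txt:10:4".
    // Assumes Unix-style paths, which contain no ':' of their own.
    let mut parts = decoded.splitn(3, ':');
    let path = PathBuf::from(parts.next()?);
    let row = parts.next().and_then(|s| s.parse().ok());
    let column = parts.next().and_then(|s| s.parse().ok());
    Some((path, row, column))
}

fn main() {
    assert_eq!(
        parse_file_url("zed://file/Users/example/Desktop/a.txt:10:4"),
        Some((PathBuf::from("/Users/example/Desktop/a.txt"), Some(10), Some(4)))
    );
}
```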
Conrad Irwin 2024-03-08 13:44:01 -07:00 committed by GitHub
parent 1756c1fc1e
commit 977af37cfe
5 changed files with 253 additions and 222 deletions

Cargo.lock generated
View File

@@ -12938,7 +12938,6 @@ dependencies = [
  "gpui",
  "install_cli",
  "isahc",
- "itertools 0.11.0",
  "journal",
  "language",
  "language_selector",

View File

@@ -54,7 +54,6 @@ go_to_line.workspace = true
 gpui.workspace = true
 install_cli.workspace = true
 isahc.workspace = true
-itertools.workspace = true
 journal.workspace = true
 language.workspace = true
 language_selector.workspace = true

View File

@@ -13,7 +13,7 @@ use env_logger::Builder;
 use fs::RealFs;
 #[cfg(target_os = "macos")]
 use fsevent::StreamFlags;
-use futures::StreamExt;
+use futures::{future, StreamExt};
 use gpui::{App, AppContext, AsyncAppContext, Context, SemanticVersion, Task};
 use isahc::{prelude::Configurable, Request};
 use language::LanguageRegistry;
@@ -36,7 +36,7 @@ use std::{
     fs::OpenOptions,
     io::{IsTerminal, Write},
     panic,
-    path::{Path, PathBuf},
+    path::Path,
     sync::{
         atomic::{AtomicU32, Ordering},
         Arc,
@@ -48,14 +48,15 @@ use util::{
     async_maybe,
     http::{HttpClient, HttpClientWithUrl},
     paths::{self, CRASHES_DIR, CRASHES_RETIRED_DIR},
-    ResultExt,
+    ResultExt, TryFutureExt,
 };
 use uuid::Uuid;
 use welcome::{show_welcome_view, BaseKeymap, FIRST_OPEN};
 use workspace::{AppState, WorkspaceStore};
 use zed::{
     app_menus, build_window_options, ensure_only_instance, handle_cli_connection,
-    handle_keymap_file_changes, initialize_workspace, IsOnlyInstance, OpenListener, OpenRequest,
+    handle_keymap_file_changes, initialize_workspace, open_paths_with_positions, IsOnlyInstance,
+    OpenListener, OpenRequest,
 };

 #[global_allocator]
@@ -325,68 +326,82 @@ fn main() {
     });
 }

-fn open_paths_and_log_errs(paths: &[PathBuf], app_state: Arc<AppState>, cx: &mut AppContext) {
-    let task = workspace::open_paths(&paths, app_state, None, cx);
-    cx.spawn(|_| async move {
-        if let Some((_window, results)) = task.await.log_err() {
-            for result in results.into_iter().flatten() {
-                if let Err(err) = result {
-                    log::error!("Error opening path: {err}",);
-                }
-            }
-        }
-    })
-    .detach();
-}
-
 fn handle_open_request(
     request: OpenRequest,
     app_state: Arc<AppState>,
     cx: &mut AppContext,
 ) -> bool {
-    let mut triggered_authentication = false;
-    match request {
-        OpenRequest::Paths { paths } => open_paths_and_log_errs(&paths, app_state, cx),
-        OpenRequest::CliConnection { connection } => {
-            let app_state = app_state.clone();
-            cx.spawn(move |cx| handle_cli_connection(connection, app_state, cx))
-                .detach();
-        }
-        OpenRequest::JoinChannel { channel_id } => {
-            triggered_authentication = true;
-            cx.spawn(|cx| async move {
-                // ignore errors here, we'll show a generic "not signed in"
-                let _ = authenticate(app_state.client.clone(), &cx).await;
-                cx.update(|cx| {
-                    workspace::join_channel(client::ChannelId(channel_id), app_state, None, cx)
-                })?
-                .await?;
-                anyhow::Ok(())
-            })
-            .detach_and_log_err(cx);
-        }
-        OpenRequest::OpenChannelNotes {
-            channel_id,
-            heading,
-        } => {
-            triggered_authentication = true;
-            let client = app_state.client.clone();
-            cx.spawn(|mut cx| async move {
-                // ignore errors here, we'll show a generic "not signed in"
-                let _ = authenticate(client, &cx).await;
-                let workspace_window =
-                    workspace::get_any_active_workspace(app_state, cx.clone()).await?;
-                let workspace = workspace_window.root_view(&cx)?;
-                cx.update_window(workspace_window.into(), |_, cx| {
-                    ChannelView::open(client::ChannelId(channel_id), heading, workspace, cx)
-                })?
-                .await?;
-                anyhow::Ok(())
-            })
-            .detach_and_log_err(cx);
-        }
+    if let Some(connection) = request.cli_connection {
+        let app_state = app_state.clone();
+        cx.spawn(move |cx| handle_cli_connection(connection, app_state, cx))
+            .detach();
+        return false;
+    }
+
+    let mut task = None;
+    if !request.open_paths.is_empty() {
+        let app_state = app_state.clone();
+        task = Some(cx.spawn(|mut cx| async move {
+            let (_window, results) =
+                open_paths_with_positions(&request.open_paths, app_state, &mut cx).await?;
+            for result in results.into_iter().flatten() {
+                if let Err(err) = result {
+                    log::error!("Error opening path: {err}",);
+                }
+            }
+            anyhow::Ok(())
+        }));
+    }
+
+    if !request.open_channel_notes.is_empty() || request.join_channel.is_some() {
+        cx.spawn(|mut cx| async move {
+            if let Some(task) = task {
+                task.await?;
+            }
+            let client = app_state.client.clone();
+            // we continue even if authentication fails as join_channel/ open channel notes will
+            // show a visible error message.
+            authenticate(client, &cx).await.log_err();
+
+            if let Some(channel_id) = request.join_channel {
+                cx.update(|cx| {
+                    workspace::join_channel(
+                        client::ChannelId(channel_id),
+                        app_state.clone(),
+                        None,
+                        cx,
+                    )
+                })?
+                .await?;
+            }
+
+            let workspace_window =
+                workspace::get_any_active_workspace(app_state, cx.clone()).await?;
+            let workspace = workspace_window.root_view(&cx)?;
+
+            let mut promises = Vec::new();
+            for (channel_id, heading) in request.open_channel_notes {
+                promises.push(cx.update_window(workspace_window.into(), |_, cx| {
+                    ChannelView::open(
+                        client::ChannelId(channel_id),
+                        heading,
+                        workspace.clone(),
+                        cx,
+                    )
+                    .log_err()
+                })?)
+            }
+            future::join_all(promises).await;
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
+        true
+    } else {
+        if let Some(task) = task {
+            task.detach_and_log_err(cx)
+        }
+        false
     }
-    triggered_authentication
 }

 async fn authenticate(client: Arc<Client>, cx: &AsyncAppContext) -> Result<()> {
@@ -888,7 +903,9 @@ fn collect_url_args(cx: &AppContext) -> Vec<String> {
         .filter_map(|arg| match std::fs::canonicalize(Path::new(&arg)) {
             Ok(path) => Some(format!("file://{}", path.to_string_lossy())),
             Err(error) => {
-                if let Some(_) = parse_zed_link(&arg, cx) {
+                if arg.starts_with("file://") {
+                    Some(arg)
+                } else if let Some(_) = parse_zed_link(&arg, cx) {
                     Some(arg)
                 } else {
                     log::error!("error parsing path argument: {}", error);

View File

@@ -8,8 +8,7 @@ use editor::Editor;
 use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender};
 use futures::channel::{mpsc, oneshot};
 use futures::{FutureExt, SinkExt, StreamExt};
-use gpui::{AppContext, AsyncAppContext, Global};
-use itertools::Itertools;
+use gpui::{AppContext, AsyncAppContext, Global, WindowHandle};
 use language::{Bias, Point};
 use std::path::Path;
 use std::sync::atomic::Ordering;
@@ -17,62 +16,68 @@ use std::sync::Arc;
 use std::thread;
 use std::time::Duration;
 use std::{path::PathBuf, sync::atomic::AtomicBool};
-use util::paths::{PathExt, PathLikeWithPosition};
+use util::paths::PathLikeWithPosition;
 use util::ResultExt;
-use workspace::AppState;
+use workspace::item::ItemHandle;
+use workspace::{AppState, Workspace};

-pub enum OpenRequest {
-    Paths {
-        paths: Vec<PathBuf>,
-    },
-    CliConnection {
-        connection: (mpsc::Receiver<CliRequest>, IpcSender<CliResponse>),
-    },
-    JoinChannel {
-        channel_id: u64,
-    },
-    OpenChannelNotes {
-        channel_id: u64,
-        heading: Option<String>,
-    },
+#[derive(Default, Debug)]
+pub struct OpenRequest {
+    pub cli_connection: Option<(mpsc::Receiver<CliRequest>, IpcSender<CliResponse>)>,
+    pub open_paths: Vec<PathLikeWithPosition<PathBuf>>,
+    pub open_channel_notes: Vec<(u64, Option<String>)>,
+    pub join_channel: Option<u64>,
 }

 impl OpenRequest {
     pub fn parse(urls: Vec<String>, cx: &AppContext) -> Result<Self> {
-        if let Some(server_name) = urls.first().and_then(|url| url.strip_prefix("zed-cli://")) {
-            Self::parse_cli_connection(server_name)
-        } else if let Some(request_path) = urls.first().and_then(|url| parse_zed_link(url, cx)) {
-            Self::parse_zed_url(request_path)
-        } else {
-            Ok(Self::parse_file_urls(urls))
+        let mut this = Self::default();
+        for url in urls {
+            if let Some(server_name) = url.strip_prefix("zed-cli://") {
+                this.cli_connection = Some(connect_to_cli(server_name)?);
+            } else if let Some(file) = url.strip_prefix("file://") {
+                this.parse_file_path(file)
+            } else if let Some(file) = url.strip_prefix("zed://file") {
+                this.parse_file_path(file)
+            } else if let Some(request_path) = parse_zed_link(&url, cx) {
+                this.parse_request_path(request_path).log_err();
+            } else {
+                log::error!("unhandled url: {}", url);
+            }
+        }
+        Ok(this)
+    }
+
+    fn parse_file_path(&mut self, file: &str) {
+        if let Some(decoded) = urlencoding::decode(file).log_err() {
+            if let Some(path_buf) =
+                PathLikeWithPosition::parse_str(&decoded, |s| PathBuf::try_from(s)).log_err()
+            {
+                self.open_paths.push(path_buf)
+            }
         }
     }

-    fn parse_cli_connection(server_name: &str) -> Result<OpenRequest> {
-        let connection = connect_to_cli(server_name)?;
-        Ok(OpenRequest::CliConnection { connection })
-    }
-
-    fn parse_zed_url(request_path: &str) -> Result<OpenRequest> {
+    fn parse_request_path(&mut self, request_path: &str) -> Result<()> {
         let mut parts = request_path.split('/');
         if parts.next() == Some("channel") {
             if let Some(slug) = parts.next() {
                 if let Some(id_str) = slug.split('-').last() {
                     if let Ok(channel_id) = id_str.parse::<u64>() {
                         let Some(next) = parts.next() else {
-                            return Ok(OpenRequest::JoinChannel { channel_id });
+                            self.join_channel = Some(channel_id);
+                            return Ok(());
                         };

                         if let Some(heading) = next.strip_prefix("notes#") {
-                            return Ok(OpenRequest::OpenChannelNotes {
-                                channel_id,
-                                heading: Some([heading].into_iter().chain(parts).join("/")),
-                            });
-                        } else if next == "notes" {
-                            return Ok(OpenRequest::OpenChannelNotes {
-                                channel_id,
-                                heading: None,
-                            });
+                            self.open_channel_notes
+                                .push((channel_id, Some(heading.to_string())));
+                            return Ok(());
+                        }
+                        if next == "notes" {
+                            self.open_channel_notes.push((channel_id, None));
+                            return Ok(());
                         }
                     }
                 }
@@ -80,19 +85,6 @@ impl OpenRequest {
         }
         Err(anyhow!("invalid zed url: {}", request_path))
     }
-
-    fn parse_file_urls(urls: Vec<String>) -> OpenRequest {
-        let paths: Vec<_> = urls
-            .iter()
-            .flat_map(|url| url.strip_prefix("file://"))
-            .flat_map(|url| {
-                let decoded = urlencoding::decode_binary(url.as_bytes());
-                PathBuf::try_from_bytes(decoded.as_ref()).log_err()
-            })
-            .collect();
-
-        OpenRequest::Paths { paths }
-    }
 }

 pub struct OpenListener {
@@ -162,6 +154,60 @@
     Ok((async_request_rx, response_tx))
 }

+pub async fn open_paths_with_positions(
+    path_likes: &Vec<PathLikeWithPosition<PathBuf>>,
+    app_state: Arc<AppState>,
+    cx: &mut AsyncAppContext,
+) -> Result<(
+    WindowHandle<Workspace>,
+    Vec<Option<Result<Box<dyn ItemHandle>>>>,
+)> {
+    let mut caret_positions = HashMap::default();
+
+    let paths = path_likes
+        .iter()
+        .map(|path_with_position| {
+            let path = path_with_position.path_like.clone();
+            if let Some(row) = path_with_position.row {
+                if path.is_file() {
+                    let row = row.saturating_sub(1);
+                    let col = path_with_position.column.unwrap_or(0).saturating_sub(1);
+                    caret_positions.insert(path.clone(), Point::new(row, col));
+                }
+            }
+            path
+        })
+        .collect::<Vec<_>>();
+
+    let (workspace, items) = cx
+        .update(|cx| workspace::open_paths(&paths, app_state, None, cx))?
+        .await?;
+
+    for (item, path) in items.iter().zip(&paths) {
+        let Some(Ok(item)) = item else {
+            continue;
+        };
+        let Some(point) = caret_positions.remove(path) else {
+            continue;
+        };
+        if let Some(active_editor) = item.downcast::<Editor>() {
+            workspace
+                .update(cx, |_, cx| {
+                    active_editor.update(cx, |editor, cx| {
+                        let snapshot = editor.snapshot(cx).display_snapshot;
+                        let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
+                        editor.change_selections(Some(Autoscroll::center()), cx, |s| {
+                            s.select_ranges([point..point])
+                        });
+                    });
+                })
+                .log_err();
+        }
+    }
+
+    Ok((workspace, items))
+}
+
 pub async fn handle_cli_connection(
     (mut requests, responses): (mpsc::Receiver<CliRequest>, IpcSender<CliResponse>),
     app_state: Arc<AppState>,
@@ -170,18 +216,26 @@ pub async fn handle_cli_connection(
     if let Some(request) = requests.next().await {
         match request {
             CliRequest::Open { paths, wait } => {
-                let mut caret_positions = HashMap::default();
-
                 let paths = if paths.is_empty() {
                     workspace::last_opened_workspace_paths()
                         .await
-                        .map(|location| location.paths().to_vec())
+                        .map(|location| {
+                            location
+                                .paths()
+                                .iter()
+                                .map(|path| PathLikeWithPosition {
+                                    path_like: path.clone(),
+                                    row: None,
+                                    column: None,
+                                })
+                                .collect::<Vec<_>>()
+                        })
                         .unwrap_or_default()
                 } else {
                     paths
                         .into_iter()
                         .map(|path_with_position_string| {
-                            let path_with_position = PathLikeWithPosition::parse_str(
+                            PathLikeWithPosition::parse_str(
                                 &path_with_position_string,
                                 |path_str| {
                                     Ok::<_, std::convert::Infallible>(
@@ -189,125 +243,87 @@ pub async fn handle_cli_connection(
                                     )
                                 },
                             )
-                            .expect("Infallible");
-                            let path = path_with_position.path_like;
-                            if let Some(row) = path_with_position.row {
-                                if path.is_file() {
-                                    let row = row.saturating_sub(1);
-                                    let col =
-                                        path_with_position.column.unwrap_or(0).saturating_sub(1);
-                                    caret_positions.insert(path.clone(), Point::new(row, col));
-                                }
-                            }
-                            path
+                            .expect("Infallible")
                         })
                         .collect()
                 };

                 let mut errored = false;

-                match cx.update(|cx| workspace::open_paths(&paths, app_state, None, cx)) {
-                    Ok(task) => match task.await {
-                        Ok((workspace, items)) => {
-                            let mut item_release_futures = Vec::new();
-
-                            for (item, path) in items.into_iter().zip(&paths) {
-                                match item {
-                                    Some(Ok(item)) => {
-                                        if let Some(point) = caret_positions.remove(path) {
-                                            if let Some(active_editor) = item.downcast::<Editor>() {
-                                                workspace
-                                                    .update(&mut cx, |_, cx| {
-                                                        active_editor.update(cx, |editor, cx| {
-                                                            let snapshot = editor
-                                                                .snapshot(cx)
-                                                                .display_snapshot;
-                                                            let point = snapshot
-                                                                .buffer_snapshot
-                                                                .clip_point(point, Bias::Left);
-                                                            editor.change_selections(
-                                                                Some(Autoscroll::center()),
-                                                                cx,
-                                                                |s| s.select_ranges([point..point]),
-                                                            );
-                                                        });
-                                                    })
-                                                    .log_err();
-                                            }
-                                        }
-
-                                        cx.update(|cx| {
-                                            let released = oneshot::channel();
-                                            item.on_release(
-                                                cx,
-                                                Box::new(move |_| {
-                                                    let _ = released.0.send(());
-                                                }),
-                                            )
-                                            .detach();
-                                            item_release_futures.push(released.1);
-                                        })
-                                        .log_err();
-                                    }
-                                    Some(Err(err)) => {
-                                        responses
-                                            .send(CliResponse::Stderr {
-                                                message: format!(
-                                                    "error opening {:?}: {}",
-                                                    path, err
-                                                ),
-                                            })
-                                            .log_err();
-                                        errored = true;
-                                    }
-                                    None => {}
-                                }
-                            }
-
-                            if wait {
-                                let background = cx.background_executor().clone();
-                                let wait = async move {
-                                    if paths.is_empty() {
-                                        let (done_tx, done_rx) = oneshot::channel();
-                                        let _subscription = workspace.update(&mut cx, |_, cx| {
-                                            cx.on_release(move |_, _, _| {
-                                                let _ = done_tx.send(());
-                                            })
-                                        });
-                                        let _ = done_rx.await;
-                                    } else {
-                                        let _ = futures::future::try_join_all(item_release_futures)
-                                            .await;
-                                    };
-                                }
-                                .fuse();
-                                futures::pin_mut!(wait);
-
-                                loop {
-                                    // Repeatedly check if CLI is still open to avoid wasting resources
-                                    // waiting for files or workspaces to close.
-                                    let mut timer = background.timer(Duration::from_secs(1)).fuse();
-                                    futures::select_biased! {
-                                        _ = wait => break,
-                                        _ = timer => {
-                                            if responses.send(CliResponse::Ping).is_err() {
-                                                break;
-                                            }
-                                        }
-                                    }
-                                }
-                            }
-                        }
-                        Err(error) => {
-                            errored = true;
-                            responses
-                                .send(CliResponse::Stderr {
-                                    message: format!("error opening {:?}: {}", paths, error),
-                                })
-                                .log_err();
-                        }
-                    },
-                    Err(_) => errored = true,
+                match open_paths_with_positions(&paths, app_state, &mut cx).await {
+                    Ok((workspace, items)) => {
+                        let mut item_release_futures = Vec::new();
+
+                        for (item, path) in items.into_iter().zip(&paths) {
+                            match item {
+                                Some(Ok(item)) => {
+                                    cx.update(|cx| {
+                                        let released = oneshot::channel();
+                                        item.on_release(
+                                            cx,
+                                            Box::new(move |_| {
+                                                let _ = released.0.send(());
+                                            }),
+                                        )
+                                        .detach();
+                                        item_release_futures.push(released.1);
+                                    })
+                                    .log_err();
+                                }
+                                Some(Err(err)) => {
+                                    responses
+                                        .send(CliResponse::Stderr {
+                                            message: format!("error opening {:?}: {}", path, err),
+                                        })
+                                        .log_err();
+                                    errored = true;
+                                }
+                                None => {}
+                            }
+                        }
+
+                        if wait {
+                            let background = cx.background_executor().clone();
+                            let wait = async move {
+                                if paths.is_empty() {
+                                    let (done_tx, done_rx) = oneshot::channel();
+                                    let _subscription = workspace.update(&mut cx, |_, cx| {
+                                        cx.on_release(move |_, _, _| {
+                                            let _ = done_tx.send(());
+                                        })
+                                    });
+                                    let _ = done_rx.await;
+                                } else {
+                                    let _ =
+                                        futures::future::try_join_all(item_release_futures).await;
+                                };
+                            }
+                            .fuse();
+                            futures::pin_mut!(wait);
+
+                            loop {
+                                // Repeatedly check if CLI is still open to avoid wasting resources
+                                // waiting for files or workspaces to close.
+                                let mut timer = background.timer(Duration::from_secs(1)).fuse();
+                                futures::select_biased! {
+                                    _ = wait => break,
+                                    _ = timer => {
+                                        if responses.send(CliResponse::Ping).is_err() {
+                                            break;
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                    Err(error) => {
+                        errored = true;
+                        responses
+                            .send(CliResponse::Stderr {
+                                message: format!("error opening {:?}: {}", paths, error),
+                            })
+                            .log_err();
+                    }
                 }

                 responses

View File

@@ -179,7 +179,7 @@ fi
 # Note: The app identifier for our development builds is the same as the app identifier for nightly.
 cp crates/${zed_crate}/contents/$channel/embedded.provisionprofile "${app_path}/Contents/"

-if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then
+if [[ -n "${MACOS_CERTIFICATE:-}" && -n "${MACOS_CERTIFICATE_PASSWORD:-}" && -n "${APPLE_NOTARIZATION_USERNAME:-}" && -n "${APPLE_NOTARIZATION_PASSWORD:-}" ]]; then
     echo "Signing bundle with Apple-issued certificate"
     security create-keychain -p "$MACOS_CERTIFICATE_PASSWORD" zed.keychain || echo ""
     security default-keychain -s zed.keychain