Upload crashes to collab directly (#8649)

This lets us run rustc_demangle on the backtrace, which significantly improves
the Slack view.
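For context, here is a minimal sketch (not part of this change) of what rustc-demangle does to a mangled frame name; the symbol below is invented, and the `{:#}` alternate form drops the trailing hash, matching how the new `backtrace_summary` formats frames:

```rust
// Sketch only: a made-up mangled symbol, not taken from a real report.
fn main() {
    let mangled = "_ZN3std9panicking11begin_panic17h1a2b3c4d5e6f7a8bE";
    // `{:#}` prints the demangled name without the trailing hash suffix.
    println!("{:#}", rustc_demangle::demangle(mangled));
    // => std::panicking::begin_panic
}
```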

We're also now uploading files to DigitalOcean's S3 equivalent (with a
1-month expiry) instead of to Slack.
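The blob store client setup isn't part of this diff; as a rough, hedged sketch, pointing the aws-sdk-s3 client at a Spaces-style endpoint typically looks like the following (the endpoint, region, and key names are placeholders, and the 1-month expiry is presumably a lifecycle rule configured on the bucket rather than anything in code):

```rust
use aws_sdk_s3::config::{BehaviorVersion, Credentials, Region};

// Rough sketch with placeholder values; not the configuration used by collab.
let config = aws_sdk_s3::Config::builder()
    .behavior_version(BehaviorVersion::latest()) // required on recent SDK versions
    .region(Region::new("nyc3"))
    .endpoint_url("https://nyc3.digitaloceanspaces.com")
    .credentials_provider(Credentials::new(
        "SPACES_ACCESS_KEY", // placeholder
        "SPACES_SECRET_KEY", // placeholder
        None,
        None,
        "digitalocean-spaces",
    ))
    .build();
let blob_store_client = aws_sdk_s3::Client::from_conf(config);
```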

This PR paves the way for (but does not yet implement) sending this data
to ClickHouse too.

Release Notes:

- N/A
Conrad Irwin 2024-03-01 13:23:44 -07:00 committed by GitHub
parent cdf702aeff
commit 64460e492a
13 changed files with 657 additions and 27 deletions

Cargo.lock (generated)

@@ -2065,6 +2065,7 @@ dependencies = [
"release_channel",
"reqwest",
"rpc",
"rustc-demangle",
"scrypt",
"sea-orm",
"semver",


@@ -269,6 +269,7 @@ tree-sitter-gomod = { git = "https://github.com/camdencheek/tree-sitter-go-mod"
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work" }
tree-sitter-haskell = { git = "https://github.com/tree-sitter/tree-sitter-haskell", rev = "8a99848fc734f9c4ea523b3f2a07df133cbbcec2" }
tree-sitter-hcl = { git = "https://github.com/MichaHoffmann/tree-sitter-hcl", rev = "v1.1.0" }
rustc-demangle = "0.1.23"
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-html = "0.19.0"
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }


@@ -19,5 +19,7 @@ ZED_CLIENT_CHECKSUM_SEED = "development-checksum-seed"
# CLICKHOUSE_PASSWORD = ""
# CLICKHOUSE_DATABASE = "default"
# SLACK_PANICS_WEBHOOK = ""
# RUST_LOG=info
# LOG_JSON=true


@@ -7,15 +7,11 @@ version = "0.44.0"
publish = false
license = "AGPL-3.0-or-later"
[features]
seed-support = ["reqwest"]
[[bin]]
name = "collab"
[[bin]]
name = "seed"
required-features = ["seed-support"]
[dependencies]
anyhow.workspace = true
@@ -40,7 +36,7 @@ parking_lot.workspace = true
prometheus = "0.13"
prost.workspace = true
rand.workspace = true
reqwest = { version = "0.11", features = ["json"], optional = true }
reqwest = { version = "0.11", features = ["json"] }
rpc.workspace = true
scrypt = "0.7"
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
@@ -50,6 +46,7 @@ serde_derive.workspace = true
serde_json.workspace = true
sha2.workspace = true
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
rustc-demangle.workspace = true
telemetry_events.workspace = true
text.workspace = true
time.workspace = true


@@ -156,6 +156,11 @@ spec:
secretKeyRef:
name: clickhouse
key: database
- name: SLACK_PANICS_WEBHOOK
valueFrom:
secretKeyRef:
name: slack
key: panics_webhook
- name: INVITE_LINK_PREFIX
value: ${INVITE_LINK_PREFIX}
- name: RUST_BACKTRACE


@@ -1,5 +1,7 @@
pub mod events;
pub mod extensions;
pub mod ips_file;
pub mod slack;
use crate::{
auth,
@@ -21,7 +23,6 @@ use chrono::SecondsFormat;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tower::ServiceBuilder;
use tracing::instrument;
pub use extensions::fetch_extensions_from_blob_store_periodically;
@@ -29,7 +30,6 @@ pub fn routes(rpc_server: Option<Arc<rpc::Server>>, state: Arc<AppState>) -> Rou
Router::new()
.route("/user", get(get_authenticated_user))
.route("/users/:id/access_tokens", post(create_access_token))
.route("/panic", post(trace_panic))
.route("/rpc_server_snapshot", get(get_rpc_server_snapshot))
.route("/contributors", get(get_contributors).post(add_contributor))
.route("/contributor", get(check_is_contributor))
@@ -120,20 +120,6 @@ struct CreateUserResponse {
metrics_id: String,
}
#[derive(Debug, Deserialize)]
struct Panic {
version: String,
release_channel: String,
backtrace_hash: String,
text: String,
}
#[instrument(skip(panic))]
async fn trace_panic(panic: Json<Panic>) -> Result<()> {
tracing::error!(version = %panic.version, release_channel = %panic.release_channel, backtrace_hash = %panic.backtrace_hash, text = %panic.text, "panic report");
Ok(())
}
async fn get_rpc_server_snapshot(
Extension(rpc_server): Extension<Option<Arc<rpc::Server>>>,
) -> Result<ErasedJson> {


@@ -1,21 +1,27 @@
use std::sync::{Arc, OnceLock};
use anyhow::{anyhow, Context};
use aws_sdk_s3::primitives::ByteStream;
use axum::{
body::Bytes, headers::Header, http::HeaderName, routing::post, Extension, Router, TypedHeader,
};
use hyper::StatusCode;
use hyper::{HeaderMap, StatusCode};
use serde::{Serialize, Serializer};
use sha2::{Digest, Sha256};
use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, CallEvent, CopilotEvent, CpuEvent, EditEvent,
EditorEvent, Event, EventRequestBody, EventWrapper, MemoryEvent, SettingEvent,
};
use util::SemanticVersion;
use crate::{AppState, Error, Result};
use crate::{api::slack, AppState, Error, Result};
use super::ips_file::IpsFile;
pub fn router() -> Router {
Router::new().route("/telemetry/events", post(post_events))
Router::new()
.route("/telemetry/events", post(post_events))
.route("/telemetry/crashes", post(post_crash))
}
pub struct ZedChecksumHeader(Vec<u8>);
@@ -73,6 +79,140 @@ impl Header for CloudflareIpCountryHeader {
}
}
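// Accepts a macOS .ips crash report from the client, stores the raw report in the
// crash-reports bucket, and posts a demangled summary to Slack.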
pub async fn post_crash(
Extension(app): Extension<Arc<AppState>>,
body: Bytes,
headers: HeaderMap,
) -> Result<()> {
static CRASH_REPORTS_BUCKET: &str = "zed-crash-reports";
let report = IpsFile::parse(&body)?;
let version_threshold = SemanticVersion::new(0, 123, 0);
let bundle_id = &report.header.bundle_id;
let app_version = &report.app_version();
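// Ignore crashes from dev builds and from clients older than the version threshold above.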
if bundle_id == "dev.zed.Zed-Dev" {
log::error!("Crash uploads from {} are ignored.", bundle_id);
return Ok(());
}
if app_version.is_none() || app_version.unwrap() < version_threshold {
log::error!(
"Crash uploads from {} are ignored.",
report.header.app_version
);
return Ok(());
}
let app_version = app_version.unwrap();
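// Persist the raw .ips file to the blob store (an S3-compatible bucket), skipping the
// upload if this incident id has already been seen.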
if let Some(blob_store_client) = app.blob_store_client.as_ref() {
let response = blob_store_client
.head_object()
.bucket(CRASH_REPORTS_BUCKET)
.key(report.header.incident_id.clone() + ".ips")
.send()
.await;
if response.is_ok() {
log::info!("We've already uploaded this crash");
return Ok(());
}
blob_store_client
.put_object()
.bucket(CRASH_REPORTS_BUCKET)
.key(report.header.incident_id.clone() + ".ips")
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
.body(ByteStream::from(body.to_vec()))
.send()
.await
.map_err(|e| log::error!("Failed to upload crash: {}", e))
.ok();
}
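// If the client panicked within ~30 seconds of the crash timestamp, pull the panic
// message from the headers so it can be included in the description.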
let recent_panic_on: Option<i64> = headers
.get("x-zed-panicked-on")
.and_then(|h| h.to_str().ok())
.and_then(|s| s.parse().ok());
let mut recent_panic = None;
if let Some(recent_panic_on) = recent_panic_on {
let crashed_at = match report.timestamp() {
Ok(t) => Some(t),
Err(e) => {
log::error!("Can't parse {}: {}", report.header.timestamp, e);
None
}
};
if crashed_at.is_some_and(|t| (t.timestamp_millis() - recent_panic_on).abs() <= 30000) {
recent_panic = headers.get("x-zed-panic").and_then(|h| h.to_str().ok());
}
}
let description = report.description(recent_panic);
let summary = report.backtrace_summary();
tracing::error!(
service = "client",
version = %report.header.app_version,
os_version = %report.header.os_version,
bundle_id = %report.header.bundle_id,
incident_id = %report.header.incident_id,
description = %description,
backtrace = %summary,
"crash report");
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
let payload = slack::WebhookBody::new(|w| {
w.add_section(|s| s.text(slack::Text::markdown(description)))
.add_section(|s| {
s.add_field(slack::Text::markdown(format!(
"*Version:*\n{} ({})",
bundle_id, app_version
)))
.add_field({
let hostname = app.config.blob_store_url.clone().unwrap_or_default();
let hostname = hostname.strip_prefix("https://").unwrap_or_else(|| {
hostname.strip_prefix("http://").unwrap_or_default()
});
slack::Text::markdown(format!(
"*Incident:*\n<https://{}.{}/{}.ips|{}…>",
CRASH_REPORTS_BUCKET,
hostname,
report.header.incident_id,
report
.header
.incident_id
.chars()
.take(8)
.collect::<String>(),
))
})
})
.add_rich_text(|r| r.add_preformatted(|p| p.add_text(summary)))
});
let payload_json = serde_json::to_string(&payload).map_err(|err| {
log::error!("Failed to serialize payload to JSON: {err}");
Error::Internal(anyhow!(err))
})?;
reqwest::Client::new()
.post(slack_panics_webhook)
.header("Content-Type", "application/json")
.body(payload_json)
.send()
.await
.map_err(|err| {
log::error!("Failed to send payload to Slack: {err}");
Error::Internal(anyhow!(err))
})?;
}
Ok(())
}
pub async fn post_events(
Extension(app): Extension<Arc<AppState>>,
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
@@ -98,7 +238,7 @@ pub async fn post_events(
summer.update(&body);
summer.update(checksum_seed);
if &checksum[..] != &summer.finalize()[..] {
if &checksum != &summer.finalize()[..] {
return Err(Error::Http(
StatusCode::BAD_REQUEST,
"invalid checksum".into(),


@@ -0,0 +1,352 @@
use collections::HashMap;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use serde_json::Value;
use util::SemanticVersion;
#[derive(Debug)]
pub struct IpsFile {
pub header: Header,
pub body: Body,
}
impl IpsFile {
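// An .ips crash report is a single line of JSON metadata (the header) followed by
// a JSON document (the body).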
pub fn parse(bytes: &[u8]) -> anyhow::Result<IpsFile> {
let mut split = bytes.splitn(2, |&b| b == b'\n');
let header_bytes = split
.next()
.ok_or_else(|| anyhow::anyhow!("No header found"))?;
let header: Header = serde_json::from_slice(header_bytes)
.map_err(|e| anyhow::anyhow!("Failed to parse header: {}", e))?;
let body_bytes = split
.next()
.ok_or_else(|| anyhow::anyhow!("No body found"))?;
let body: Body = serde_json::from_slice(body_bytes)
.map_err(|e| anyhow::anyhow!("Failed to parse body: {}", e))?;
Ok(IpsFile { header, body })
}
pub fn faulting_thread(&self) -> Option<&Thread> {
self.body.threads.get(self.body.faulting_thread? as usize)
}
pub fn app_version(&self) -> Option<SemanticVersion> {
self.header.app_version.parse().ok()
}
pub fn timestamp(&self) -> anyhow::Result<chrono::DateTime<chrono::FixedOffset>> {
chrono::DateTime::parse_from_str(&self.header.timestamp, "%Y-%m-%d %H:%M:%S%.f %#z")
.map_err(|e| anyhow::anyhow!(e))
}
pub fn description(&self, panic: Option<&str>) -> String {
let mut desc = if self.body.termination.indicator == "Abort trap: 6" {
match panic {
Some(panic_message) => format!("Panic `{}`", panic_message).into(),
None => "Crash `Abort trap: 6` (possible panic)".into(),
}
} else if let Some(msg) = &self.body.exception.message {
format!("Exception `{}`", msg)
} else {
format!("Crash `{}`", self.body.termination.indicator)
};
if let Some(thread) = self.faulting_thread() {
if let Some(queue) = thread.queue.as_ref() {
desc += &format!(
" on thread {} ({})",
self.body.faulting_thread.unwrap_or_default(),
queue
);
} else {
desc += &format!(
" on thread {} ({})",
self.body.faulting_thread.unwrap_or_default(),
thread.name.clone().unwrap_or_default()
);
}
}
desc
}
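// Demangles the faulting thread's frames, drops panic/abort machinery, and caps the
// list at 20 frames plus a count of the rest.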
pub fn backtrace_summary(&self) -> String {
if let Some(thread) = self.faulting_thread() {
let mut frames = thread
.frames
.iter()
.filter_map(|frame| {
if let Some(name) = &frame.symbol {
if self.is_ignorable_frame(name) {
return None;
}
Some(format!("{:#}", rustc_demangle::demangle(name)))
} else if let Some(image) = self.body.used_images.get(frame.image_index) {
Some(image.name.clone().unwrap_or("<unknown-image>".into()))
} else {
Some("<unknown>".into())
}
})
.collect::<Vec<_>>();
let total = frames.len();
if total > 21 {
frames = frames.into_iter().take(20).collect();
frames.push(format!(" and {} more...", total - 20))
}
frames.join("\n")
} else {
"<no backtrace available>".into()
}
}
fn is_ignorable_frame(&self, symbol: &String) -> bool {
[
"pthread_kill",
"panic",
"backtrace",
"rust_begin_unwind",
"abort",
]
.iter()
.any(|s| symbol.contains(s))
}
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default)]
pub struct Header {
pub app_name: String,
pub timestamp: String,
pub app_version: String,
pub slice_uuid: String,
pub build_version: String,
pub platform: i64,
#[serde(rename = "bundleID", default)]
pub bundle_id: String,
pub share_with_app_devs: i64,
pub is_first_party: i64,
pub bug_type: String,
pub os_version: String,
pub roots_installed: i64,
pub name: String,
pub incident_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct Body {
pub uptime: i64,
pub proc_role: String,
pub version: i64,
#[serde(rename = "userID")]
pub user_id: i64,
pub deploy_version: i64,
pub model_code: String,
#[serde(rename = "coalitionID")]
pub coalition_id: i64,
pub os_version: OsVersion,
pub capture_time: String,
pub code_signing_monitor: i64,
pub incident: String,
pub pid: i64,
pub translated: bool,
pub cpu_type: String,
#[serde(rename = "roots_installed")]
pub roots_installed: i64,
#[serde(rename = "bug_type")]
pub bug_type: String,
pub proc_launch: String,
pub proc_start_abs_time: i64,
pub proc_exit_abs_time: i64,
pub proc_name: String,
pub proc_path: String,
pub bundle_info: BundleInfo,
pub store_info: StoreInfo,
pub parent_proc: String,
pub parent_pid: i64,
pub coalition_name: String,
pub crash_reporter_key: String,
#[serde(rename = "codeSigningID")]
pub code_signing_id: String,
#[serde(rename = "codeSigningTeamID")]
pub code_signing_team_id: String,
pub code_signing_flags: i64,
pub code_signing_validation_category: i64,
pub code_signing_trust_level: i64,
pub instruction_byte_stream: InstructionByteStream,
pub sip: String,
pub exception: Exception,
pub termination: Termination,
pub asi: Asi,
pub ext_mods: ExtMods,
pub faulting_thread: Option<i64>,
pub threads: Vec<Thread>,
pub used_images: Vec<UsedImage>,
pub shared_cache: SharedCache,
pub vm_summary: String,
pub legacy_info: LegacyInfo,
pub log_writing_signature: String,
pub trial_info: TrialInfo,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct OsVersion {
pub train: String,
pub build: String,
pub release_type: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct BundleInfo {
#[serde(rename = "CFBundleShortVersionString")]
pub cfbundle_short_version_string: String,
#[serde(rename = "CFBundleVersion")]
pub cfbundle_version: String,
#[serde(rename = "CFBundleIdentifier")]
pub cfbundle_identifier: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct StoreInfo {
pub device_identifier_for_vendor: String,
pub third_party: bool,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct InstructionByteStream {
#[serde(rename = "beforePC")]
pub before_pc: String,
#[serde(rename = "atPC")]
pub at_pc: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct Exception {
pub codes: String,
pub raw_codes: Vec<i64>,
#[serde(rename = "type")]
pub type_field: String,
pub subtype: Option<String>,
pub signal: String,
pub port: Option<i64>,
pub guard_id: Option<i64>,
pub message: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct Termination {
pub flags: i64,
pub code: i64,
pub namespace: String,
pub indicator: String,
pub by_proc: String,
pub by_pid: i64,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct Asi {
#[serde(rename = "libsystem_c.dylib")]
pub libsystem_c_dylib: Vec<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct ExtMods {
pub caller: ExtMod,
pub system: ExtMod,
pub targeted: ExtMod,
pub warnings: i64,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct ExtMod {
#[serde(rename = "thread_create")]
pub thread_create: i64,
#[serde(rename = "thread_set_state")]
pub thread_set_state: i64,
#[serde(rename = "task_for_pid")]
pub task_for_pid: i64,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct Thread {
pub thread_state: HashMap<String, Value>,
pub id: i64,
pub triggered: Option<bool>,
pub name: Option<String>,
pub queue: Option<String>,
pub frames: Vec<Frame>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct Frame {
pub image_offset: i64,
pub symbol: Option<String>,
pub symbol_location: Option<i64>,
pub image_index: usize,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct UsedImage {
pub source: String,
pub arch: Option<String>,
pub base: i64,
#[serde(rename = "CFBundleShortVersionString")]
pub cfbundle_short_version_string: Option<String>,
#[serde(rename = "CFBundleIdentifier")]
pub cfbundle_identifier: Option<String>,
pub size: i64,
pub uuid: String,
pub path: Option<String>,
pub name: Option<String>,
#[serde(rename = "CFBundleVersion")]
pub cfbundle_version: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct SharedCache {
pub base: i64,
pub size: i64,
pub uuid: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct LegacyInfo {
pub thread_triggered: ThreadTriggered,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct ThreadTriggered {
pub name: String,
pub queue: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct TrialInfo {
pub rollouts: Vec<Rollout>,
pub experiments: Vec<Value>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", default)]
pub struct Rollout {
pub rollout_id: String,
pub factor_pack_ids: HashMap<String, Value>,
pub deployment_id: i64,
}
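For reference, a hedged sketch of exercising the parser above with a minimal, made-up payload (every value is a placeholder; real .ips reports carry many more fields, which is why the structs above default everything):

```rust
// Hedged sketch: one JSON header line, a newline, then the JSON body.
let raw: &[u8] = br#"{"app_name":"Zed","app_version":"0.124.0","bundleID":"dev.zed.Zed","incident_id":"00000000-0000-0000-0000-000000000000","timestamp":"2024-03-01 13:23:44.00 -0700","bug_type":"309"}
{"faultingThread":0,"termination":{"indicator":"Abort trap: 6"},"threads":[{"frames":[]}]}"#;

let report = IpsFile::parse(raw).expect("well-formed report");
assert_eq!(report.header.bundle_id, "dev.zed.Zed");
assert!(report.app_version().is_some());
// With no panic message attached: "Crash `Abort trap: 6` (possible panic) on thread 0 ()"
println!("{}", report.description(None));
```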


@@ -0,0 +1,144 @@
use serde::{Deserialize, Serialize};
/// https://api.slack.com/reference/messaging/payload
#[derive(Default, Clone, Serialize, Deserialize)]
pub struct WebhookBody {
text: String,
#[serde(skip_serializing_if = "Vec::is_empty")]
blocks: Vec<Block>,
#[serde(skip_serializing_if = "Option::is_none")]
thread_ts: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
mrkdwn: Option<bool>,
}
impl WebhookBody {
pub fn new(f: impl FnOnce(Self) -> Self) -> Self {
f(Self::default())
}
pub fn add_section(mut self, build: impl FnOnce(Section) -> Section) -> Self {
self.blocks.push(Block::Section(build(Section::default())));
self
}
pub fn add_rich_text(mut self, build: impl FnOnce(RichText) -> RichText) -> Self {
self.blocks
.push(Block::RichText(build(RichText::default())));
self
}
}
#[derive(Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
/// https://api.slack.com/reference/block-kit/blocks
pub enum Block {
#[serde(rename = "section")]
Section(Section),
#[serde(rename = "rich_text")]
RichText(RichText),
// .... etc.
}
/// https://api.slack.com/reference/block-kit/blocks#section
#[derive(Default, Clone, Serialize, Deserialize)]
pub struct Section {
#[serde(skip_serializing_if = "Option::is_none")]
text: Option<Text>,
#[serde(skip_serializing_if = "Vec::is_empty")]
fields: Vec<Text>,
// fields, accessories...
}
impl Section {
pub fn text(mut self, text: Text) -> Self {
self.text = Some(text);
self
}
pub fn add_field(mut self, field: Text) -> Self {
self.fields.push(field);
self
}
}
/// https://api.slack.com/reference/block-kit/composition-objects#text
#[derive(Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Text {
#[serde(rename = "plain_text")]
PlainText { text: String, emoji: bool },
#[serde(rename = "mrkdwn")]
Markdown { text: String, verbatim: bool },
}
impl Text {
pub fn plain(s: String) -> Self {
Self::PlainText {
text: s,
emoji: true,
}
}
pub fn markdown(s: String) -> Self {
Self::Markdown {
text: s,
verbatim: false,
}
}
}
#[derive(Default, Clone, Serialize, Deserialize)]
pub struct RichText {
elements: Vec<RichTextObject>,
}
impl RichText {
pub fn new(f: impl FnOnce(Self) -> Self) -> Self {
f(Self::default())
}
pub fn add_preformatted(
mut self,
build: impl FnOnce(RichTextPreformatted) -> RichTextPreformatted,
) -> Self {
self.elements.push(RichTextObject::Preformatted(build(
RichTextPreformatted::default(),
)));
self
}
}
/// https://api.slack.com/reference/block-kit/blocks#rich_text
#[derive(Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum RichTextObject {
#[serde(rename = "rich_text_preformatted")]
Preformatted(RichTextPreformatted),
// etc.
}
/// https://api.slack.com/reference/block-kit/blocks#rich_text_preformatted
#[derive(Clone, Default, Serialize, Deserialize)]
pub struct RichTextPreformatted {
#[serde(skip_serializing_if = "Vec::is_empty")]
elements: Vec<RichTextElement>,
#[serde(skip_serializing_if = "Option::is_none")]
border: Option<u8>,
}
impl RichTextPreformatted {
pub fn add_text(mut self, text: String) -> Self {
self.elements.push(RichTextElement::Text { text });
self
}
}
/// https://api.slack.com/reference/block-kit/blocks#element-types
#[derive(Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum RichTextElement {
#[serde(rename = "text")]
Text { text: String },
// etc.
}


@@ -127,6 +127,7 @@ pub struct Config {
pub blob_store_bucket: Option<String>,
pub zed_environment: Arc<str>,
pub zed_client_checksum_seed: Option<String>,
pub slack_panics_webhook: Option<String>,
}
impl Config {


@@ -507,6 +507,7 @@ impl TestServer {
clickhouse_password: None,
clickhouse_database: None,
zed_client_checksum_seed: None,
slack_panics_webhook: None,
},
})
}


@@ -807,7 +807,7 @@ async fn upload_previous_crashes(
.unwrap_or("zed-2024-01-17-221900.ips".to_string()); // don't upload old crash reports from before we had this.
let mut uploaded = last_uploaded.clone();
let crash_report_url = http.build_url("/api/crash");
let crash_report_url = http.build_zed_api_url("/telemetry/crashes");
for dir in [&*CRASHES_DIR, &*CRASHES_RETIRED_DIR] {
let mut children = smol::fs::read_dir(&dir).await?;


@@ -1,4 +1,4 @@
#!/bin/bash
set -e
cargo run --quiet --package=collab --features seed-support --bin seed -- $@
cargo run --quiet --package=collab --bin seed -- $@