Mirror of https://github.com/zed-industries/zed.git, synced 2024-11-07 20:39:04 +03:00
Move project event logic to telemetry.rs (#13166)
I previously put this logic directly into `project.rs`, but it doesn't feel good to pollute that code with telemetry logic, so I've moved it over to `telemetry.rs`.

Release Notes:

- N/A
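For context, the logic being moved is small: telemetry keeps a map from project marker files (`yarn.lock`, `package.json`) to a project type name plus the set of worktrees already reported, and each batch of filesystem updates is checked against that map so an "open {} project" event fires at most once per worktree and project type. Below is a minimal, self-contained sketch of that pattern using only `std` types; the names (`DiscoveredProjects`, `discover`) are illustrative and are not Zed's actual API.

use std::collections::{HashMap, HashSet};
use std::path::Path;

/// Illustrative stand-in for Zed's WorktreeId.
type WorktreeId = usize;

/// Maps a project marker file name to a project type name and the set of
/// worktrees for which that type has already been reported.
struct DiscoveredProjects {
    map: HashMap<&'static str, (&'static str, HashSet<WorktreeId>)>,
}

impl DiscoveredProjects {
    fn new() -> Self {
        Self {
            map: HashMap::from([
                ("yarn.lock", ("yarn", HashSet::new())),
                ("package.json", ("node", HashSet::new())),
            ]),
        }
    }

    /// Returns the project type names to report for this batch of updated
    /// paths, recording the worktree so each (worktree, project type) pair
    /// is reported at most once.
    fn discover(&mut self, worktree_id: WorktreeId, updated_paths: &[&Path]) -> Vec<&'static str> {
        self.map
            .iter_mut()
            .filter_map(|(marker_file, (project_name, reported))| {
                if reported.contains(&worktree_id) {
                    return None;
                }
                let found = updated_paths.iter().any(|path| {
                    path.file_name().and_then(|name| name.to_str()) == Some(*marker_file)
                });
                if !found {
                    return None;
                }
                reported.insert(worktree_id);
                Some(*project_name)
            })
            .collect()
    }
}

fn main() {
    let mut discovered = DiscoveredProjects::new();
    let paths = [Path::new("web/package.json"), Path::new("web/src/main.ts")];
    for name in discovered.discover(1, &paths) {
        println!("open {} project", name); // mirrors the "open {} project" app event
    }
    // Scanning the same worktree again reports nothing new.
    assert!(discovered.discover(1, &paths).is_empty());
}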
This commit is contained in:
parent 71cc95d315
commit ca035dbdd8
Cargo.lock (generated)
@@ -2300,6 +2300,7 @@ dependencies = [
 "url",
 "util",
 "windows 0.57.0",
 "worktree",
]

[[package]]
crates/client/Cargo.toml:

@@ -51,6 +51,7 @@ time.workspace = true
tiny_http = "0.8"
url.workspace = true
util.workspace = true
worktree.workspace = true

[dev-dependencies]
clock = { workspace = true, features = ["test-support"] }
crates/client/src/telemetry.rs:

@@ -3,6 +3,7 @@ mod event_coalescer;
use crate::{ChannelId, TelemetrySettings};
use chrono::{DateTime, Utc};
use clock::SystemClock;
use collections::{HashMap, HashSet};
use futures::Future;
use gpui::{AppContext, BackgroundExecutor, Task};
use http::{self, HttpClient, HttpClientWithUrl, Method};

@@ -23,6 +24,7 @@ use tempfile::NamedTempFile;
#[cfg(not(debug_assertions))]
use util::ResultExt;
use util::TryFutureExt;
use worktree::{UpdatedEntriesSet, WorktreeId};

use self::event_coalescer::EventCoalescer;
@@ -47,12 +49,31 @@ struct TelemetryState
    first_event_date_time: Option<DateTime<Utc>>,
    event_coalescer: EventCoalescer,
    max_queue_size: usize,
    worktree_id_map: WorktreeIdMap,

    os_name: String,
    app_version: String,
    os_version: Option<String>,
}

#[derive(Debug)]
struct WorktreeIdMap(HashMap<String, ProjectCache>);

#[derive(Debug)]
struct ProjectCache {
    name: String,
    worktree_ids_reported: HashSet<WorktreeId>,
}

impl ProjectCache {
    fn new(name: String) -> Self {
        Self {
            name,
            worktree_ids_reported: HashSet::default(),
        }
    }
}

#[cfg(debug_assertions)]
const MAX_QUEUE_LEN: usize = 5;
@@ -180,6 +201,16 @@ impl Telemetry {
            first_event_date_time: None,
            event_coalescer: EventCoalescer::new(clock.clone()),
            max_queue_size: MAX_QUEUE_LEN,
            worktree_id_map: WorktreeIdMap(HashMap::from_iter([
                (
                    "yarn.lock".to_string(),
                    ProjectCache::new("yarn".to_string()),
                ),
                (
                    "package.json".to_string(),
                    ProjectCache::new("node".to_string()),
                ),
            ])),

            os_version: None,
            os_name: os_name(),
@@ -450,6 +481,52 @@ impl Telemetry {
        self.report_event(event)
    }

    pub fn report_discovered_project_events(
        self: &Arc<Self>,
        worktree_id: WorktreeId,
        updated_entries_set: &UpdatedEntriesSet,
    ) {
        let project_names: Vec<String> = {
            let mut state = self.state.lock();
            state
                .worktree_id_map
                .0
                .iter_mut()
                .filter_map(|(project_file_name, project_type_telemetry)| {
                    if project_type_telemetry
                        .worktree_ids_reported
                        .contains(&worktree_id)
                    {
                        return None;
                    }

                    let project_file_found = updated_entries_set.iter().any(|(path, _, _)| {
                        path.as_ref()
                            .file_name()
                            .and_then(|name| name.to_str())
                            .map(|name_str| name_str == project_file_name)
                            .unwrap_or(false)
                    });

                    if !project_file_found {
                        return None;
                    }

                    project_type_telemetry
                        .worktree_ids_reported
                        .insert(worktree_id);

                    Some(project_type_telemetry.name.clone())
                })
                .collect()
        };

        // Done on purpose to avoid calling `self.state.lock()` multiple times
        for project_name in project_names {
            self.report_app_event(format!("open {} project", project_name));
        }
    }

    fn report_event(self: &Arc<Self>, event: Event) {
        let mut state = self.state.lock();
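The comment above the final loop ("Done on purpose to avoid calling `self.state.lock()` multiple times") captures the design choice here: the project names are collected inside an inner block so the state lock is taken once and released before `report_app_event` runs, since that method acquires the same lock internally. Below is a minimal sketch of this guard-scoping pattern using `std::sync::Mutex`; the `State`, `reported`, and `events` fields are illustrative stand-ins, not Zed's actual `TelemetryState`.

use std::collections::HashSet;
use std::sync::Mutex;

#[derive(Default)]
struct State {
    reported: HashSet<String>, // project types already reported (stand-in)
    events: Vec<String>,       // queued telemetry events (stand-in)
}

struct Telemetry {
    state: Mutex<State>,
}

impl Telemetry {
    /// Locks `state` itself, so callers must not already hold the lock.
    fn report_app_event(&self, event: String) {
        self.state.lock().unwrap().events.push(event);
    }

    fn report_discovered_project_events(&self, candidates: &[&str]) {
        // Hold the lock exactly once while deciding what to report; the
        // guard is dropped at the end of this block.
        let project_names: Vec<String> = {
            let mut state = self.state.lock().unwrap();
            candidates
                .iter()
                .copied()
                .filter(|name| state.reported.insert(name.to_string()))
                .map(|name| name.to_string())
                .collect()
        };

        // The lock is released, so report_app_event can safely re-acquire it
        // for each event without self-deadlocking.
        for name in project_names {
            self.report_app_event(format!("open {} project", name));
        }
    }
}

fn main() {
    let telemetry = Telemetry {
        state: Mutex::new(State::default()),
    };
    telemetry.report_discovered_project_events(&["node", "yarn"]);
    telemetry.report_discovered_project_events(&["node"]); // already reported: no new event
    assert_eq!(telemetry.state.lock().unwrap().events.len(), 2);
}

With a non-reentrant lock (which the diff's bare `self.state.lock()` calls without `unwrap()` suggest), holding the guard across the reporting loop would deadlock, so the explicit block scope is load-bearing rather than stylistic.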
crates/project/src/project.rs:

@@ -231,40 +231,6 @@ pub struct Project {
    hosted_project_id: Option<ProjectId>,
    dev_server_project_id: Option<client::DevServerProjectId>,
    search_history: SearchHistory,
    telemetry_worktree_id_map: TelemetryWorktreeIdMap,
}

#[derive(Debug)]
struct TelemetryWorktreeIdMap(HashMap<String, ProjectTypeTelemetry>);

impl Default for TelemetryWorktreeIdMap {
    fn default() -> Self {
        Self(HashMap::from_iter([
            (
                "yarn.lock".to_string(),
                ProjectTypeTelemetry::new("yarn".to_string()),
            ),
            (
                "package.json".to_string(),
                ProjectTypeTelemetry::new("node".to_string()),
            ),
        ]))
    }
}

#[derive(Debug)]
struct ProjectTypeTelemetry {
    name: String,
    worktree_ids_reported: HashSet<WorktreeId>,
}

impl ProjectTypeTelemetry {
    fn new(name: String) -> Self {
        Self {
            name,
            worktree_ids_reported: HashSet::default(),
        }
    }
}

pub enum LanguageServerToQuery {
@@ -812,7 +778,6 @@ impl Project {
            hosted_project_id: None,
            dev_server_project_id: None,
            search_history: Self::new_search_history(),
            telemetry_worktree_id_map: TelemetryWorktreeIdMap::default(),
        }
    })
}
@@ -977,7 +942,6 @@ impl Project {
                .dev_server_project_id
                .map(|dev_server_project_id| DevServerProjectId(dev_server_project_id)),
            search_history: Self::new_search_history(),
            telemetry_worktree_id_map: TelemetryWorktreeIdMap::default(),
        };
        this.set_role(role, cx);
        for worktree in worktrees {
@@ -7798,7 +7762,10 @@ impl Project {
                    changes.clone(),
                ));

                this.report_project_events(&worktree, changes, cx);
                let worktree_id = worktree.update(cx, |worktree, _| worktree.id());
                this.client()
                    .telemetry()
                    .report_discovered_project_events(worktree_id, changes);
            }
            worktree::Event::UpdatedGitRepositories(updated_repos) => {
                if is_local {
@@ -7851,48 +7818,6 @@ impl Project {
        self.metadata_changed(cx);
    }

    fn report_project_events(
        &mut self,
        worktree: &Model<Worktree>,
        updated_entries_set: &UpdatedEntriesSet,
        cx: &mut ModelContext<Self>,
    ) {
        let worktree_id = worktree.update(cx, |worktree, _| worktree.id());

        let client = self.client();

        for (project_file_name, project_type_telemetry) in
            self.telemetry_worktree_id_map.0.iter_mut()
        {
            if project_type_telemetry
                .worktree_ids_reported
                .contains(&worktree_id)
            {
                continue;
            }

            let project_file_found = updated_entries_set.iter().any(|(path, _, _)| {
                path.as_ref()
                    .file_name()
                    .and_then(|name| name.to_str())
                    .map(|name_str| name_str == project_file_name)
                    .unwrap_or(false)
            });

            if !project_file_found {
                continue;
            }

            client
                .telemetry()
                .report_app_event(format!("open {} project", project_type_telemetry.name));

            project_type_telemetry
                .worktree_ids_reported
                .insert(worktree_id);
        }
    }

    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: &Model<Worktree>,