CHANGE LOCK TO NOT BE DROPPED INSTANTLY. DANG U RUST

Co-authored-by: Kay <kay@zed.dev>
This commit is contained in:
Mikayla Maki 2022-12-02 12:43:02 -08:00
parent 5e240f98f0
commit 5262e8c77e
6 changed files with 39 additions and 29 deletions

View File

@ -27,5 +27,5 @@ smol = "1.2"
[dev-dependencies] [dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] }
tempdir = { version = "0.3.7" }
env_logger = "0.9.1" env_logger = "0.9.1"
tempdir = { version = "0.3.7" }

View File

@ -58,7 +58,6 @@ pub async fn open_db<M: Migrator + 'static>(wipe_db: bool, db_dir: &Path, releas
let mut db_wiped = DB_WIPED.write(); let mut db_wiped = DB_WIPED.write();
if !*db_wiped { if !*db_wiped {
remove_dir_all(&main_db_dir).ok(); remove_dir_all(&main_db_dir).ok();
*db_wiped = true; *db_wiped = true;
} }
} }
@ -71,7 +70,7 @@ pub async fn open_db<M: Migrator + 'static>(wipe_db: bool, db_dir: &Path, releas
// cause errors in the log and so should be observed by developers while writing // cause errors in the log and so should be observed by developers while writing
// soon-to-be good migrations. If user databases are corrupted, we toss them out // soon-to-be good migrations. If user databases are corrupted, we toss them out
// and try again from a blank. As long as running all migrations from start to end // and try again from a blank. As long as running all migrations from start to end
// is ok, this race condition will never be triggered. // on a blank database is ok, this race condition will never be triggered.
// //
// Basically: Don't ever push invalid migrations to stable or everyone will have // Basically: Don't ever push invalid migrations to stable or everyone will have
// a bad time. // a bad time.
@ -137,7 +136,7 @@ pub async fn open_db<M: Migrator + 'static>(wipe_db: bool, db_dir: &Path, releas
} }
async fn open_main_db<M: Migrator>(db_path: &PathBuf) -> Option<ThreadSafeConnection<M>> { async fn open_main_db<M: Migrator>(db_path: &PathBuf) -> Option<ThreadSafeConnection<M>> {
println!("Opening main db"); log::info!("Opening main db");
ThreadSafeConnection::<M>::builder(db_path.to_string_lossy().as_ref(), true) ThreadSafeConnection::<M>::builder(db_path.to_string_lossy().as_ref(), true)
.with_db_initialization_query(DB_INITIALIZE_QUERY) .with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY) .with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
@ -147,7 +146,7 @@ async fn open_main_db<M: Migrator>(db_path: &PathBuf) -> Option<ThreadSafeConnec
} }
async fn open_fallback_db<M: Migrator>() -> ThreadSafeConnection<M> { async fn open_fallback_db<M: Migrator>() -> ThreadSafeConnection<M> {
println!("Opening fallback db"); log::info!("Opening fallback db");
ThreadSafeConnection::<M>::builder(FALLBACK_DB_NAME, false) ThreadSafeConnection::<M>::builder(FALLBACK_DB_NAME, false)
.with_db_initialization_query(DB_INITIALIZE_QUERY) .with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY) .with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)

View File

@ -266,12 +266,10 @@ pub fn background_thread_queue() -> WriteQueueConstructor {
/// Write-queue constructor that serializes queued writes behind a shared
/// mutex: each queued write acquires the lock, runs, then releases it.
pub fn locking_queue() -> WriteQueueConstructor {
    Box::new(|| {
        let write_mutex = Mutex::new(());
        Box::new(move |queued_write| {
            // Bind the guard to `_lock`, not `_`: `let _ = mutex.lock()`
            // drops the guard immediately, so the write would run without
            // holding the lock (the bug this change fixes).
            let _lock = write_mutex.lock();
            queued_write();
        })
    })
}

View File

@ -10,9 +10,37 @@ lazy_static::lazy_static! {
#[proc_macro] #[proc_macro]
pub fn sql(tokens: TokenStream) -> TokenStream { pub fn sql(tokens: TokenStream) -> TokenStream {
let (spans, sql) = make_sql(tokens);
let error = SQLITE.sql_has_syntax_error(sql.trim());
let formatted_sql = sqlformat::format(&sql, &sqlformat::QueryParams::None, Default::default());
if let Some((error, error_offset)) = error {
create_error(spans, error_offset, error, &formatted_sql)
} else {
format!("r#\"{}\"#", &formatted_sql).parse().unwrap()
}
}
/// Converts a SQLite syntax error into a compiler diagnostic anchored at the
/// macro token nearest to where the error occurred.
///
/// `spans` pairs each token's end offset (in the generated SQL string) with
/// the proc-macro `Span` that produced it; the diagnostic points at the
/// first token whose end offset lies past `error_offset`, falling back to
/// the macro call site when the error is past the last token.
fn create_error(
    spans: Vec<(usize, Span)>,
    error_offset: usize,
    error: String,
    // `&str` instead of `&String` (clippy::ptr_arg); `&String` arguments
    // deref-coerce, so existing callers are unaffected.
    formatted_sql: &str,
) -> TokenStream {
    let error_span = spans
        .into_iter()
        .find_map(|(offset, span)| (offset > error_offset).then(|| span))
        .unwrap_or_else(Span::call_site);
    let error_text = format!("Sql Error: {}\nFor Query: {}", error, formatted_sql);
    TokenStream::from(Error::new(error_span.into(), error_text).into_compile_error())
}
fn make_sql(tokens: TokenStream) -> (Vec<(usize, Span)>, String) {
let mut sql_tokens = vec![]; let mut sql_tokens = vec![];
flatten_stream(tokens.clone(), &mut sql_tokens); flatten_stream(tokens.clone(), &mut sql_tokens);
// Lookup of spans by offset at the end of the token // Lookup of spans by offset at the end of the token
let mut spans: Vec<(usize, Span)> = Vec::new(); let mut spans: Vec<(usize, Span)> = Vec::new();
let mut sql = String::new(); let mut sql = String::new();
@ -20,23 +48,7 @@ pub fn sql(tokens: TokenStream) -> TokenStream {
sql.push_str(&token_text); sql.push_str(&token_text);
spans.push((sql.len(), span)); spans.push((sql.len(), span));
} }
(spans, sql)
let error = SQLITE.sql_has_syntax_error(sql.trim());
let formatted_sql = sqlformat::format(&sql, &sqlformat::QueryParams::None, Default::default());
if let Some((error, error_offset)) = error {
let error_span = spans
.into_iter()
.skip_while(|(offset, _)| offset <= &error_offset)
.map(|(_, span)| span)
.next()
.unwrap_or(Span::call_site());
let error_text = format!("Sql Error: {}\nFor Query: {}", error, formatted_sql);
TokenStream::from(Error::new(error_span.into(), error_text).into_compile_error())
} else {
format!("r#\"{}\"#", &formatted_sql).parse().unwrap()
}
} }
/// This method exists to normalize the representation of groups /// This method exists to normalize the representation of groups

View File

@ -54,3 +54,4 @@ gpui = { path = "../gpui", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] } project = { path = "../project", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] } settings = { path = "../settings", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] } fs = { path = "../fs", features = ["test-support"] }
db = { path = "../db", features = ["test-support"] }