Migrate from scrypt to sha256. (#8969)

This reduces the server time to compute the hash from 40ms to 5µs,
which should remove this as a noticeable chunk of CPU time in production.

(An attacker who has access to our database will now need only 10^54
years of CPU time instead of 10^58 to brute force a token).
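
A rough back-of-envelope check of those figures (an illustrative sketch, not part of the change): both estimates assume the same number of brute-force guesses against a randomly generated token, and only the per-guess hash cost changes.

fn main() {
    const SECONDS_PER_YEAR: f64 = 3.15e7;
    // Number of guesses implied by "10^58 years at 40ms per scrypt hash":
    let guesses = 1e58 * SECONDS_PER_YEAR / 0.04; // ≈ 8e66, roughly 2^222
    // The same number of guesses at 5µs per SHA-256 hash:
    let years_sha256 = guesses * 5e-6 / SECONDS_PER_YEAR; // ≈ 1.3e54
    println!("guesses ≈ {guesses:e}, SHA-256 brute force ≈ {years_sha256:e} years");
}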

Release Notes:

- Improved sign-in latency by 40ms.
Conrad Irwin 2024-03-06 20:51:43 -07:00 committed by GitHub
parent 4d2156e2ad
commit 75a42c27db
6 changed files with 197 additions and 27 deletions

Cargo.lock (generated)

@@ -2223,6 +2223,7 @@ dependencies = [
  "aws-sdk-s3",
  "axum",
  "axum-extra",
+ "base64 0.13.1",
  "call",
  "channel",
  "chrono",
@@ -2272,6 +2273,7 @@ dependencies = [
  "settings",
  "sha2 0.10.7",
  "sqlx",
+ "subtle",
  "telemetry_events",
  "text",
  "theme",

Workspace Cargo.toml

@@ -105,6 +105,7 @@ assets = { path = "crates/assets" }
 assistant = { path = "crates/assistant" }
 audio = { path = "crates/audio" }
 auto_update = { path = "crates/auto_update" }
+base64 = "0.13"
 breadcrumbs = { path = "crates/breadcrumbs" }
 call = { path = "crates/call" }
 channel = { path = "crates/channel" }
@@ -252,6 +253,7 @@ shellexpand = "2.1.0"
 smallvec = { version = "1.6", features = ["union"] }
 smol = "1.2"
 strum = { version = "0.25.0", features = ["derive"] }
+subtle = "2.5.0"
 sysinfo = "0.29.10"
 tempfile = "3.9.0"
 thiserror = "1.0.29"

Server crate Cargo.toml

@@ -23,6 +23,7 @@ aws-config = { version = "1.1.5" }
 aws-sdk-s3 = { version = "1.15.0" }
 axum = { version = "0.6", features = ["json", "headers", "ws"] }
 axum-extra = { version = "0.4", features = ["erased-json"] }
+base64.workspace = true
 chrono.workspace = true
 clock.workspace = true
 clickhouse.workspace = true
@@ -48,6 +49,7 @@ serde_derive.workspace = true
 serde_json.workspace = true
 sha2.workspace = true
 sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
+subtle.workspace = true
 rustc-demangle.workspace = true
 telemetry_events.workspace = true
 text.workspace = true

Access-token auth module (Rust source)

@@ -9,14 +9,15 @@ use axum::{
     response::IntoResponse,
 };
 use prometheus::{exponential_buckets, register_histogram, Histogram};
-use rand::thread_rng;
 use scrypt::{
-    password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
+    password_hash::{PasswordHash, PasswordVerifier},
     Scrypt,
 };
 use serde::{Deserialize, Serialize};
+use sha2::Digest;
 use std::sync::OnceLock;
 use std::{sync::Arc, time::Instant};
+use subtle::ConstantTimeEq;
 
 #[derive(Clone, Debug, Default, PartialEq, Eq)]
 pub struct Impersonator(pub Option<db::User>);
@@ -115,8 +116,7 @@ pub async fn create_access_token(
 ) -> Result<String> {
     const VERSION: usize = 1;
     let access_token = rpc::auth::random_token();
-    let access_token_hash =
-        hash_access_token(&access_token).context("failed to hash access token")?;
+    let access_token_hash = hash_access_token(&access_token);
     let id = db
         .create_access_token(
             user_id,
@@ -132,23 +132,15 @@ pub async fn create_access_token(
     })?)
 }
 
-fn hash_access_token(token: &str) -> Result<String> {
-    // Avoid slow hashing in debug mode.
-    let params = if cfg!(debug_assertions) {
-        scrypt::Params::new(1, 1, 1).unwrap()
-    } else {
-        scrypt::Params::new(14, 8, 1).unwrap()
-    };
-
-    Ok(Scrypt
-        .hash_password(
-            token.as_bytes(),
-            None,
-            params,
-            &SaltString::generate(thread_rng()),
-        )
-        .map_err(anyhow::Error::new)?
-        .to_string())
+/// Hashing prevents anyone with access to the database being able to login.
+/// As the token is randomly generated, we don't need to worry about scrypt-style
+/// protection.
+fn hash_access_token(token: &str) -> String {
+    let digest = sha2::Sha256::digest(token);
+    format!(
+        "$sha256${}",
+        base64::encode_config(digest, base64::URL_SAFE)
+    )
 }
 
 /// Encrypts the given access token with the given public key to avoid leaking it on the way
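
For reference, a minimal standalone sketch of the value this function stores, using the same `sha2` and `base64 0.13` APIs the diff introduces (the token below is made up):

use sha2::Digest;

fn main() {
    let token = "not-a-real-token"; // hypothetical token for illustration
    let digest = sha2::Sha256::digest(token);
    // URL-safe base64 of the 32-byte digest, tagged so verify_access_token
    // can tell it apart from legacy "$scrypt$..." hashes.
    let stored = format!("$sha256${}", base64::encode_config(digest, base64::URL_SAFE));
    println!("{stored}");
}

Unlike the scrypt path there is no salt or work factor: the token is randomly generated, so the hash only needs to be one-way.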
@@ -190,15 +182,27 @@ pub async fn verify_access_token(
     if token_user_id != user_id {
         return Err(anyhow!("no such access token"))?;
     }
-    let db_hash = PasswordHash::new(&db_token.hash).map_err(anyhow::Error::new)?;
     let t0 = Instant::now();
-    let is_valid = Scrypt
-        .verify_password(token.token.as_bytes(), &db_hash)
-        .is_ok();
+    let is_valid = if db_token.hash.starts_with("$scrypt$") {
+        let db_hash = PasswordHash::new(&db_token.hash).map_err(anyhow::Error::new)?;
+        Scrypt
+            .verify_password(token.token.as_bytes(), &db_hash)
+            .is_ok()
+    } else {
+        let token_hash = hash_access_token(&token.token);
+        db_token.hash.as_bytes().ct_eq(token_hash.as_ref()).into()
+    };
     let duration = t0.elapsed();
     log::info!("hashed access token in {:?}", duration);
     metric_access_token_hashing_time.observe(duration.as_millis() as f64);
+
+    if is_valid && db_token.hash.starts_with("$scrypt$") {
+        let new_hash = hash_access_token(&token.token);
+        db.update_access_token_hash(db_token.id, &new_hash).await?;
+    }
+
     Ok(VerifyAccessTokenResult {
         is_valid,
         impersonator_id: if db_token.impersonated_user_id.is_some() {
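
Two details worth noting in the hunk above: the recomputed hash is compared with `subtle::ConstantTimeEq` rather than `==`, so verification time does not depend on how many leading bytes of a guess match, and a successful check against a legacy `$scrypt$` hash rewrites the row with the new `$sha256$` form, migrating old tokens lazily. A minimal standalone sketch of the constant-time comparison (values are made up):

use subtle::ConstantTimeEq;

fn main() {
    // Hypothetical stored hash and freshly recomputed hash.
    let stored = "$sha256$AAAA";
    let computed = "$sha256$AAAA";
    // ct_eq inspects every byte regardless of where the first mismatch is,
    // then the resulting Choice converts into a bool.
    let is_valid: bool = stored.as_bytes().ct_eq(computed.as_bytes()).into();
    assert!(is_valid);
}
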
@@ -208,3 +212,145 @@ pub async fn verify_access_token(
         },
     })
 }
+
+#[cfg(test)]
+mod test {
+    use rand::thread_rng;
+    use scrypt::password_hash::{PasswordHasher, SaltString};
+    use sea_orm::EntityTrait;
+
+    use super::*;
+    use crate::db::{access_token, NewUserParams};
+
+    #[gpui::test]
+    async fn test_verify_access_token(cx: &mut gpui::TestAppContext) {
+        let test_db = crate::db::TestDb::postgres(cx.executor().clone());
+        let db = test_db.db();
+
+        let user = db
+            .create_user(
+                "example@example.com",
+                false,
+                NewUserParams {
+                    github_login: "example".into(),
+                    github_user_id: 1,
+                },
+            )
+            .await
+            .unwrap();
+
+        let token = create_access_token(&db, user.user_id, None).await.unwrap();
+        assert!(matches!(
+            verify_access_token(&token, user.user_id, &db)
+                .await
+                .unwrap(),
+            VerifyAccessTokenResult {
+                is_valid: true,
+                impersonator_id: None,
+            }
+        ));
+
+        let old_token = create_previous_access_token(user.user_id, None, &db)
+            .await
+            .unwrap();
+
+        let old_token_id = serde_json::from_str::<AccessTokenJson>(&old_token)
+            .unwrap()
+            .id;
+
+        let hash = db
+            .transaction(|tx| async move {
+                Ok(access_token::Entity::find_by_id(old_token_id)
+                    .one(&*tx)
+                    .await?)
+            })
+            .await
+            .unwrap()
+            .unwrap()
+            .hash;
+        assert!(hash.starts_with("$scrypt$"));
+
+        assert!(matches!(
+            verify_access_token(&old_token, user.user_id, &db)
+                .await
+                .unwrap(),
+            VerifyAccessTokenResult {
+                is_valid: true,
+                impersonator_id: None,
+            }
+        ));
+
+        let hash = db
+            .transaction(|tx| async move {
+                Ok(access_token::Entity::find_by_id(old_token_id)
+                    .one(&*tx)
+                    .await?)
+            })
+            .await
+            .unwrap()
+            .unwrap()
+            .hash;
+        assert!(hash.starts_with("$sha256$"));
+
+        assert!(matches!(
+            verify_access_token(&old_token, user.user_id, &db)
+                .await
+                .unwrap(),
+            VerifyAccessTokenResult {
+                is_valid: true,
+                impersonator_id: None,
+            }
+        ));
+
+        assert!(matches!(
+            verify_access_token(&token, user.user_id, &db)
+                .await
+                .unwrap(),
+            VerifyAccessTokenResult {
+                is_valid: true,
+                impersonator_id: None,
+            }
+        ));
+    }
+
+    async fn create_previous_access_token(
+        user_id: UserId,
+        impersonated_user_id: Option<UserId>,
+        db: &Database,
+    ) -> Result<String> {
+        let access_token = rpc::auth::random_token();
+        let access_token_hash = previous_hash_access_token(&access_token)?;
+        let id = db
+            .create_access_token(
+                user_id,
+                impersonated_user_id,
+                &access_token_hash,
+                MAX_ACCESS_TOKENS_TO_STORE,
+            )
+            .await?;
+        Ok(serde_json::to_string(&AccessTokenJson {
+            version: 1,
+            id,
+            token: access_token,
+        })?)
+    }
+
+    fn previous_hash_access_token(token: &str) -> Result<String> {
+        // Avoid slow hashing in debug mode.
+        let params = if cfg!(debug_assertions) {
+            scrypt::Params::new(1, 1, 1).unwrap()
+        } else {
+            scrypt::Params::new(14, 8, 1).unwrap()
+        };
+
+        Ok(Scrypt
+            .hash_password(
+                token.as_bytes(),
+                None,
+                params,
+                &SaltString::generate(thread_rng()),
+            )
+            .map_err(anyhow::Error::new)?
+            .to_string())
+    }
+}

Access-token database queries (Rust source)

@@ -55,4 +55,22 @@ impl Database {
         })
         .await
     }
+
+    /// Updates the hash of the access token with the given ID.
+    pub async fn update_access_token_hash(
+        &self,
+        id: AccessTokenId,
+        new_hash: &str,
+    ) -> Result<access_token::Model> {
+        self.transaction(|tx| async move {
+            Ok(access_token::Entity::update(access_token::ActiveModel {
+                id: ActiveValue::unchanged(id),
+                hash: ActiveValue::set(new_hash.into()),
+                ..Default::default()
+            })
+            .exec(&*tx)
+            .await?)
+        })
+        .await
+    }
 }

Cargo.toml of the crate that previously pinned base64

@@ -19,7 +19,7 @@ test-support = ["collections/test-support", "gpui/test-support"]
 [dependencies]
 anyhow.workspace = true
 async-tungstenite = "0.16"
-base64 = "0.13"
+base64.workspace = true
 collections.workspace = true
 futures.workspace = true
 gpui = { workspace = true, optional = true }