repo_factory: teach it to open long_running_requests_queue

Summary: This makes implementing MegarepoApi more convenient.

Reviewed By: krallin

Differential Revision: D28355487

fbshipit-source-id: e7643e854ee46fe6cb9c4a882f6c677bf4e77262
This commit is contained in:
Kostia Balytskyi 2021-05-12 05:18:23 -07:00 committed by Facebook GitHub Bot
parent 8a158fae83
commit 501246bbd4
4 changed files with 20 additions and 0 deletions

View File

@ -9,6 +9,7 @@ license = "GPLv2+"
anyhow = "1.0"
async-trait = "0.1.45"
context = { version = "0.1.0", path = "../../server/context" }
facet = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }
sql = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
sql_construct = { version = "0.1.0", path = "../../common/sql_construct" }

View File

@ -26,6 +26,7 @@ pub use crate::types::{
/// and later checks if the result is ready.
/// Another service handles the processing and
/// state updates for individual requests.
#[facet::facet]
#[async_trait]
pub trait LongRunningRequestsQueue: Send + Sync {
/// Schedule an execution of a request, given

View File

@ -38,6 +38,7 @@ redactedblobstore = { version = "0.1.0", path = "../blobstore/redactedblobstore"
repo_blobstore = { version = "0.1.0", path = "../blobrepo/repo_blobstore" }
repo_derived_data = { version = "0.1.0", path = "../repo_attributes/repo_derived_data" }
repo_identity = { version = "0.1.0", path = "../repo_attributes/repo_identity" }
requests_table = { version = "0.1.0", path = "../megarepo_api/requests_table" }
scuba_ext = { version = "0.1.0", path = "../common/scuba_ext" }
segmented_changelog = { version = "0.1.0", path = "../segmented_changelog" }
segmented_changelog_types = { version = "0.1.0", path = "../segmented_changelog/types" }

View File

@ -59,6 +59,7 @@ use redactedblobstore::{RedactedMetadata, SqlRedactedContentStore};
use repo_blobstore::{ArcRepoBlobstore, RepoBlobstoreArgs};
use repo_derived_data::{ArcRepoDerivedData, RepoDerivedData};
use repo_identity::{ArcRepoIdentity, RepoIdentity};
use requests_table::{ArcLongRunningRequestsQueue, SqlLongRunningRequestsQueue};
use scuba_ext::MononokeScubaSampleBuilder;
use segmented_changelog::{new_server_segmented_changelog, SegmentedChangelogSqlConnections};
use segmented_changelog_types::ArcSegmentedChangelog;
@ -349,6 +350,9 @@ pub enum RepoFactoryError {
#[error("Missing cache pool: {0}")]
MissingCachePool(String),
#[error("Error opening long-running request queue")]
LongRunningRequestsQueue,
}
#[facet::factory(name: String, config: RepoConfig)]
@ -483,6 +487,19 @@ impl RepoFactory {
Ok(Arc::new(bonsai_git_mapping))
}
/// Open the long-running requests queue backed by this repo's metadata SQL storage.
///
/// Builds a SQL factory from the repo's metadata storage config and opens
/// `SqlLongRunningRequestsQueue` through it, wrapping any open failure in
/// `RepoFactoryError::LongRunningRequestsQueue`.
pub async fn long_running_requests_queue(
    &self,
    repo_config: &ArcRepoConfig,
) -> Result<ArcLongRunningRequestsQueue> {
    let metadata = &repo_config.storage_config.metadata;
    let factory = self.sql_factory(metadata).await?;
    let queue = factory
        .open::<SqlLongRunningRequestsQueue>()
        .context(RepoFactoryError::LongRunningRequestsQueue)?;
    Ok(Arc::new(queue))
}
pub async fn bonsai_globalrev_mapping(
&self,
repo_config: &ArcRepoConfig,