refactor actors to simple struct

Summary: While I was working on `actix-srserver`, I realized the current design of the API server is quite unnecessary. The "MononokeActor" and "MononokeRepoActor" only return futures without much CPU computation cost, so they don't need to be placed in a separate thread.

Reviewed By: jsgf

Differential Revision: D9472848

fbshipit-source-id: 618ec39c42d90717fa6985fee7d6308420962d3f
This commit is contained in:
Arun Kulshreshtha 2018-08-31 13:53:54 -07:00 committed by Facebook Github Bot
parent ed34b17e1a
commit 2dc93d6a5f
8 changed files with 96 additions and 142 deletions

View File

@ -11,10 +11,8 @@ mod response;
use std::collections::HashMap; use std::collections::HashMap;
use actix::{Actor, Addr, Context, Handler}; use futures::IntoFuture;
use actix::dev::Request; use futures_ext::{BoxFuture, FutureExt};
use failure::Error;
use futures::{Future, IntoFuture};
use slog::Logger; use slog::Logger;
use tokio::runtime::TaskExecutor; use tokio::runtime::TaskExecutor;
@ -23,64 +21,38 @@ use metaconfig::repoconfig::RepoConfigs;
use errors::ErrorKind; use errors::ErrorKind;
pub use self::query::{MononokeQuery, MononokeRepoQuery}; pub use self::query::{MononokeQuery, MononokeRepoQuery};
pub use self::repo::MononokeRepoActor; pub use self::repo::MononokeRepo;
pub use self::response::MononokeRepoResponse; pub use self::response::MononokeRepoResponse;
pub struct MononokeActor { pub struct Mononoke {
repos: HashMap<String, Addr<MononokeRepoActor>>, repos: HashMap<String, MononokeRepo>,
} }
impl MononokeActor { impl Mononoke {
pub fn new(logger: Logger, config: RepoConfigs, executor: TaskExecutor) -> Self { pub fn new(logger: Logger, config: RepoConfigs, executor: TaskExecutor) -> Self {
let logger = logger.clone(); let logger = logger.clone();
let repos = config let repos = config
.repos .repos
.into_iter() .into_iter()
.filter(move |&(_, ref config)| config.enabled) .filter(move |&(_, ref config)| config.enabled)
.map(move |(reponame, config)| { .map(move |(name, config)| {
cloned!(logger, executor); cloned!(logger, executor);
( let repo =
reponame, MononokeRepo::new(logger, config, executor).expect("Unable to initialize repo");
MononokeRepoActor::create(move |_| { (name, repo)
MononokeRepoActor::new(logger, config, executor)
.expect("Unable to initialize repo")
}),
)
}) })
.collect(); .collect();
Self { repos } Self { repos }
} }
}
impl Actor for MononokeActor { pub fn send_query(
type Context = Context<Self>; &self,
}
impl Handler<MononokeQuery> for MononokeActor {
type Result = Result<Request<MononokeRepoActor, MononokeRepoQuery>, Error>;
fn handle(
&mut self,
MononokeQuery { repo, kind, .. }: MononokeQuery, MononokeQuery { repo, kind, .. }: MononokeQuery,
_ctx: &mut Context<Self>, ) -> BoxFuture<MononokeRepoResponse, ErrorKind> {
) -> Self::Result {
match self.repos.get(&repo) { match self.repos.get(&repo) {
Some(repo) => Ok(repo.send(kind)), Some(repo) => repo.send_query(kind),
None => Err(ErrorKind::NotFound(repo, None).into()), None => Err(ErrorKind::NotFound(repo, None)).into_future().boxify(),
} }
} }
} }
pub fn unwrap_request(
request: Request<MononokeActor, MononokeQuery>,
) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> {
request
.into_future()
.from_err()
.and_then(|result| result) // use flatten here will blind the compiler.
.and_then(|result| result.map_err(From::from))
.flatten()
.flatten()
.from_err()
}

View File

@ -6,15 +6,10 @@
use std::convert::TryFrom; use std::convert::TryFrom;
use actix::Message;
use actix::dev::Request;
use failure::Error; use failure::Error;
use futures_ext::BoxFuture;
use apiserver_thrift::types::MononokeGetRawParams; use apiserver_thrift::types::MononokeGetRawParams;
use super::{MononokeRepoActor, MononokeRepoResponse};
#[derive(Debug)] #[derive(Debug)]
pub enum MononokeRepoQuery { pub enum MononokeRepoQuery {
GetRawFile { GetRawFile {
@ -40,19 +35,11 @@ pub enum MononokeRepoQuery {
}, },
} }
impl Message for MononokeRepoQuery {
type Result = Result<BoxFuture<MononokeRepoResponse, Error>, Error>;
}
pub struct MononokeQuery { pub struct MononokeQuery {
pub kind: MononokeRepoQuery, pub kind: MononokeRepoQuery,
pub repo: String, pub repo: String,
} }
impl Message for MononokeQuery {
type Result = Result<Request<MononokeRepoActor, MononokeRepoQuery>, Error>;
}
impl TryFrom<MononokeGetRawParams> for MononokeQuery { impl TryFrom<MononokeGetRawParams> for MononokeQuery {
type Error = Error; type Error = Error;

View File

@ -7,8 +7,7 @@
use std::convert::TryInto; use std::convert::TryInto;
use std::sync::Arc; use std::sync::Arc;
use actix::{Actor, Context, Handler}; use failure::{err_msg, Error};
use failure::{err_msg, Error, Result};
use futures::{Future, IntoFuture}; use futures::{Future, IntoFuture};
use futures::sync::oneshot; use futures::sync::oneshot;
use futures_ext::BoxFuture; use futures_ext::BoxFuture;
@ -31,14 +30,14 @@ use from_string as FS;
use super::{MononokeRepoQuery, MononokeRepoResponse}; use super::{MononokeRepoQuery, MononokeRepoResponse};
use super::model::Entry; use super::model::Entry;
pub struct MononokeRepoActor { pub struct MononokeRepo {
repo: Arc<BlobRepo>, repo: Arc<BlobRepo>,
logger: Logger, logger: Logger,
executor: TaskExecutor, executor: TaskExecutor,
} }
impl MononokeRepoActor { impl MononokeRepo {
pub fn new(logger: Logger, config: RepoConfig, executor: TaskExecutor) -> Result<Self> { pub fn new(logger: Logger, config: RepoConfig, executor: TaskExecutor) -> Result<Self, Error> {
let repoid = RepositoryId::new(config.repoid); let repoid = RepositoryId::new(config.repoid);
let repo = match config.repotype { let repo = match config.repotype {
BlobRocks(ref path) => BlobRepo::new_rocksdb(logger.clone(), &path, repoid), BlobRocks(ref path) => BlobRepo::new_rocksdb(logger.clone(), &path, repoid),
@ -57,17 +56,17 @@ impl MononokeRepoActor {
&self, &self,
changeset: String, changeset: String,
path: String, path: String,
) -> Result<BoxFuture<MononokeRepoResponse, Error>> { ) -> BoxFuture<MononokeRepoResponse, ErrorKind> {
debug!( debug!(
self.logger, self.logger,
"Retrieving file content of {} at changeset {}.", path, changeset "Retrieving file content of {} at changeset {}.", path, changeset
); );
let mpath = FS::get_mpath(path.clone())?; let mpath = try_boxfuture!(FS::get_mpath(path.clone()));
let changesetid = FS::get_changeset_id(changeset)?; let changesetid = try_boxfuture!(FS::get_changeset_id(changeset));
let repo = self.repo.clone(); let repo = self.repo.clone();
Ok(api::get_content_by_path(repo, changesetid, Some(mpath)) api::get_content_by_path(repo, changesetid, Some(mpath))
.and_then(move |content| match content { .and_then(move |content| match content {
Content::File(content) Content::File(content)
| Content::Executable(content) | Content::Executable(content)
@ -77,14 +76,14 @@ impl MononokeRepoActor {
_ => Err(ErrorKind::InvalidInput(path.to_string(), None).into()), _ => Err(ErrorKind::InvalidInput(path.to_string(), None).into()),
}) })
.from_err() .from_err()
.boxify()) .boxify()
} }
fn is_ancestor( fn is_ancestor(
&self, &self,
proposed_ancestor: String, proposed_ancestor: String,
proposed_descendent: String, proposed_descendent: String,
) -> Result<BoxFuture<MononokeRepoResponse, Error>> { ) -> BoxFuture<MononokeRepoResponse, ErrorKind> {
let genbfs = GenerationNumberBFS::new(); let genbfs = GenerationNumberBFS::new();
let src_hash_maybe = FS::get_nodehash(&proposed_descendent); let src_hash_maybe = FS::get_nodehash(&proposed_descendent);
let dst_hash_maybe = FS::get_nodehash(&proposed_ancestor); let dst_hash_maybe = FS::get_nodehash(&proposed_ancestor);
@ -103,28 +102,27 @@ impl MononokeRepoActor {
} }
}); });
let (tx, rx) = oneshot::channel::<Result<bool>>(); let (tx, rx) = oneshot::channel::<Result<bool, ErrorKind>>();
self.executor.spawn( self.executor.spawn(
src_hash_future src_hash_future
.and_then(|src| dst_hash_future.map(move |dst| (src, dst))) .and_then(|src| dst_hash_future.map(move |dst| (src, dst)))
.and_then({ .and_then({
cloned!(self.repo); cloned!(self.repo);
move |(src, dst)| genbfs.query_reachability(repo, src, dst) move |(src, dst)| genbfs.query_reachability(repo, src, dst).from_err()
}) })
.then(|r| tx.send(r).map_err(|_| ())), .then(|r| tx.send(r).map_err(|_| ())),
); );
Ok(rx.flatten() rx.flatten()
.map(|answer| MononokeRepoResponse::IsAncestor { answer }) .map(|answer| MononokeRepoResponse::IsAncestor { answer })
.from_err() .boxify()
.boxify())
} }
fn get_blob_content(&self, hash: String) -> Result<BoxFuture<MononokeRepoResponse, Error>> { fn get_blob_content(&self, hash: String) -> BoxFuture<MononokeRepoResponse, ErrorKind> {
let blobhash = FS::get_nodehash(&hash)?; let blobhash = try_boxfuture!(FS::get_nodehash(&hash));
Ok(self.repo self.repo
.get_file_content(&blobhash) .get_file_content(&blobhash)
.and_then(move |content| match content { .and_then(move |content| match content {
FileContents::Bytes(content) => { FileContents::Bytes(content) => {
@ -132,23 +130,23 @@ impl MononokeRepoActor {
} }
}) })
.from_err() .from_err()
.boxify()) .boxify()
} }
fn list_directory( fn list_directory(
&self, &self,
changeset: String, changeset: String,
path: String, path: String,
) -> Result<BoxFuture<MononokeRepoResponse, Error>> { ) -> BoxFuture<MononokeRepoResponse, ErrorKind> {
let mpath = if path.is_empty() { let mpath = if path.is_empty() {
None None
} else { } else {
Some(FS::get_mpath(path.clone())?) Some(try_boxfuture!(FS::get_mpath(path.clone())))
}; };
let changesetid = FS::get_changeset_id(changeset)?; let changesetid = try_boxfuture!(FS::get_changeset_id(changeset));
let repo = self.repo.clone(); let repo = self.repo.clone();
Ok(api::get_content_by_path(repo, changesetid, mpath) api::get_content_by_path(repo, changesetid, mpath)
.and_then(move |content| match content { .and_then(move |content| match content {
Content::Tree(tree) => Ok(tree), Content::Tree(tree) => Ok(tree),
_ => Err(ErrorKind::InvalidInput(path.to_string(), None).into()), _ => Err(ErrorKind::InvalidInput(path.to_string(), None).into()),
@ -161,13 +159,13 @@ impl MononokeRepoActor {
files: Box::new(files), files: Box::new(files),
}) })
.from_err() .from_err()
.boxify()) .boxify()
} }
fn get_tree(&self, hash: String) -> Result<BoxFuture<MononokeRepoResponse, Error>> { fn get_tree(&self, hash: String) -> BoxFuture<MononokeRepoResponse, ErrorKind> {
let treehash = FS::get_nodehash(&hash)?; let treehash = try_boxfuture!(FS::get_nodehash(&hash));
Ok(self.repo self.repo
.get_manifest_by_nodeid(&treehash) .get_manifest_by_nodeid(&treehash)
.map(|tree| { .map(|tree| {
tree.list() tree.list()
@ -177,29 +175,21 @@ impl MononokeRepoActor {
files: Box::new(files), files: Box::new(files),
}) })
.from_err() .from_err()
.boxify()) .boxify()
} }
fn get_changeset(&self, hash: String) -> Result<BoxFuture<MononokeRepoResponse, Error>> { fn get_changeset(&self, hash: String) -> BoxFuture<MononokeRepoResponse, ErrorKind> {
let changesetid = FS::get_changeset_id(hash)?; let changesetid = try_boxfuture!(FS::get_changeset_id(hash));
Ok(self.repo self.repo
.get_changeset_by_changesetid(&changesetid) .get_changeset_by_changesetid(&changesetid)
.and_then(|changeset| changeset.try_into().map_err(From::from)) .and_then(|changeset| changeset.try_into().map_err(From::from))
.map(|changeset| MononokeRepoResponse::GetChangeset { changeset }) .map(|changeset| MononokeRepoResponse::GetChangeset { changeset })
.from_err() .from_err()
.boxify()) .boxify()
} }
}
impl Actor for MononokeRepoActor { pub fn send_query(&self, msg: MononokeRepoQuery) -> BoxFuture<MononokeRepoResponse, ErrorKind> {
type Context = Context<Self>;
}
impl Handler<MononokeRepoQuery> for MononokeRepoActor {
type Result = Result<BoxFuture<MononokeRepoResponse, Error>>;
fn handle(&mut self, msg: MononokeRepoQuery, _ctx: &mut Context<Self>) -> Self::Result {
use MononokeRepoQuery::*; use MononokeRepoQuery::*;
match msg { match msg {

View File

@ -11,10 +11,10 @@ use actix_web::HttpResponse;
use actix_web::error::ResponseError; use actix_web::error::ResponseError;
use actix_web::http::StatusCode; use actix_web::http::StatusCode;
use failure::{Context, Error, Fail}; use failure::{Context, Error, Fail};
use futures::Canceled;
use apiserver_thrift::types::{MononokeAPIException, MononokeAPIExceptionKind};
use api::errors::ErrorKind as ApiError; use api::errors::ErrorKind as ApiError;
use apiserver_thrift::types::{MononokeAPIException, MononokeAPIExceptionKind};
use blobrepo::ErrorKind as BlobRepoError; use blobrepo::ErrorKind as BlobRepoError;
use reachabilityindex::errors::ErrorKind as ReachabilityIndexError; use reachabilityindex::errors::ErrorKind as ReachabilityIndexError;
@ -105,6 +105,13 @@ impl From<Error> for ErrorKind {
} }
} }
impl From<Canceled> for ErrorKind {
fn from(e: Canceled) -> ErrorKind {
let error = Error::from_boxed_compat(Box::new(e));
ErrorKind::InternalError(error)
}
}
impl From<MailboxError> for ErrorKind { impl From<MailboxError> for ErrorKind {
fn from(e: MailboxError) -> ErrorKind { fn from(e: MailboxError) -> ErrorKind {
ErrorKind::InternalError(e.into()) ErrorKind::InternalError(e.into())

View File

@ -10,7 +10,6 @@ use std::convert::TryFrom;
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use failure::{Error, Result};
use futures::{Future, IntoFuture}; use futures::{Future, IntoFuture};
use futures_ext::{BoxFuture, FutureExt}; use futures_ext::{BoxFuture, FutureExt};
@ -22,23 +21,21 @@ use mononoke_types::MPath;
use errors::ErrorKind; use errors::ErrorKind;
pub fn get_mpath(path: String) -> Result<MPath> { pub fn get_mpath(path: String) -> Result<MPath, ErrorKind> {
MPath::try_from(&*path).map_err(|e| ErrorKind::InvalidInput(path, Some(e)).into()) MPath::try_from(&*path).map_err(|e| ErrorKind::InvalidInput(path, Some(e)))
} }
pub fn get_changeset_id(changesetid: String) -> Result<HgChangesetId> { pub fn get_changeset_id(changesetid: String) -> Result<HgChangesetId, ErrorKind> {
HgChangesetId::from_str(&changesetid) HgChangesetId::from_str(&changesetid).map_err(|e| ErrorKind::InvalidInput(changesetid, Some(e)))
.map_err(|e| ErrorKind::InvalidInput(changesetid, Some(e)).into())
} }
pub fn get_bookmark(bookmark: String) -> Result<Bookmark> { pub fn get_bookmark(bookmark: String) -> Result<Bookmark, ErrorKind> {
Bookmark::new(bookmark.clone()) Bookmark::new(bookmark.clone())
.map_err(|e| ErrorKind::InvalidInput(bookmark.to_string(), Some(e)).into()) .map_err(|e| ErrorKind::InvalidInput(bookmark.to_string(), Some(e)))
} }
pub fn get_nodehash(hash: &str) -> Result<HgNodeHash> { pub fn get_nodehash(hash: &str) -> Result<HgNodeHash, ErrorKind> {
HgNodeHash::from_str(hash) HgNodeHash::from_str(hash).map_err(|e| ErrorKind::InvalidInput(hash.to_string(), Some(e)))
.map_err(|e| ErrorKind::InvalidInput(hash.to_string(), Some(e)).into())
} }
// interpret a string as a bookmark and find the corresponding changeset id. // interpret a string as a bookmark and find the corresponding changeset id.
@ -47,10 +44,10 @@ pub fn get_nodehash(hash: &str) -> Result<HgNodeHash> {
pub fn string_to_bookmark_changeset_id( pub fn string_to_bookmark_changeset_id(
node_string: String, node_string: String,
repo: Arc<BlobRepo>, repo: Arc<BlobRepo>,
) -> BoxFuture<HgChangesetId, Error> { ) -> BoxFuture<HgChangesetId, ErrorKind> {
get_bookmark(node_string.clone()) get_bookmark(node_string.clone())
.into_future() .into_future()
.and_then({ move |bookmark| api::get_changeset_by_bookmark(repo, bookmark).from_err() }) .and_then({ move |bookmark| api::get_changeset_by_bookmark(repo, bookmark).from_err() })
.map_err(move |e| ErrorKind::InvalidInput(node_string.to_string(), Some(e)).into()) .map_err(move |e| ErrorKind::InvalidInput(node_string.to_string(), Some(e.into())))
.boxify() .boxify()
} }

View File

@ -23,6 +23,7 @@ extern crate cmdlib;
extern crate failure_ext as failure; extern crate failure_ext as failure;
extern crate fb303; extern crate fb303;
extern crate futures; extern crate futures;
#[macro_use]
extern crate futures_ext; extern crate futures_ext;
extern crate mercurial_types; extern crate mercurial_types;
extern crate metaconfig; extern crate metaconfig;
@ -59,8 +60,8 @@ mod thrift;
use std::path::Path; use std::path::Path;
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc;
use actix::{Actor, Addr};
use actix_web::{http, server, App, HttpRequest, HttpResponse, State}; use actix_web::{http, server, App, HttpRequest, HttpResponse, State};
use blobrepo::BlobRepo; use blobrepo::BlobRepo;
use bookmarks::Bookmark; use bookmarks::Bookmark;
@ -79,7 +80,7 @@ use mercurial_types::nodehash::HgChangesetId;
use metaconfig::RepoConfigs; use metaconfig::RepoConfigs;
use scuba_ext::ScubaSampleBuilder; use scuba_ext::ScubaSampleBuilder;
use actor::{unwrap_request, MononokeActor, MononokeQuery, MononokeRepoQuery, MononokeRepoResponse}; use actor::{Mononoke, MononokeQuery, MononokeRepoQuery, MononokeRepoResponse};
use errors::ErrorKind; use errors::ErrorKind;
mod config { mod config {
@ -114,13 +115,13 @@ struct HashQueryInfo {
fn get_raw_file( fn get_raw_file(
(state, info): (State<HttpServerState>, actix_web::Path<QueryInfo>), (state, info): (State<HttpServerState>, actix_web::Path<QueryInfo>),
) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> { ) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> {
unwrap_request(state.mononoke.send(MononokeQuery { state.mononoke.send_query(MononokeQuery {
repo: info.repo.clone(), repo: info.repo.clone(),
kind: MononokeRepoQuery::GetRawFile { kind: MononokeRepoQuery::GetRawFile {
changeset: info.changeset.clone(), changeset: info.changeset.clone(),
path: info.path.clone(), path: info.path.clone(),
}, },
})) })
} }
fn is_ancestor( fn is_ancestor(
@ -132,58 +133,58 @@ fn is_ancestor(
let proposed_descendent_parsed = percent_decode(info.proposed_descendent.as_bytes()) let proposed_descendent_parsed = percent_decode(info.proposed_descendent.as_bytes())
.decode_utf8_lossy() .decode_utf8_lossy()
.to_string(); .to_string();
unwrap_request(state.mononoke.send(MononokeQuery { state.mononoke.send_query(MononokeQuery {
repo: info.repo.clone(), repo: info.repo.clone(),
kind: MononokeRepoQuery::IsAncestor { kind: MononokeRepoQuery::IsAncestor {
proposed_ancestor: proposed_ancestor_parsed, proposed_ancestor: proposed_ancestor_parsed,
proposed_descendent: proposed_descendent_parsed, proposed_descendent: proposed_descendent_parsed,
}, },
})) })
} }
fn list_directory( fn list_directory(
(state, info): (State<HttpServerState>, actix_web::Path<QueryInfo>), (state, info): (State<HttpServerState>, actix_web::Path<QueryInfo>),
) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> { ) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> {
unwrap_request(state.mononoke.send(MononokeQuery { state.mononoke.send_query(MononokeQuery {
repo: info.repo.clone(), repo: info.repo.clone(),
kind: MononokeRepoQuery::ListDirectory { kind: MononokeRepoQuery::ListDirectory {
changeset: info.changeset.clone(), changeset: info.changeset.clone(),
path: info.path.clone(), path: info.path.clone(),
}, },
})) })
} }
fn get_blob_content( fn get_blob_content(
(state, info): (State<HttpServerState>, actix_web::Path<HashQueryInfo>), (state, info): (State<HttpServerState>, actix_web::Path<HashQueryInfo>),
) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> { ) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> {
unwrap_request(state.mononoke.send(MononokeQuery { state.mononoke.send_query(MononokeQuery {
repo: info.repo.clone(), repo: info.repo.clone(),
kind: MononokeRepoQuery::GetBlobContent { kind: MononokeRepoQuery::GetBlobContent {
hash: info.hash.clone(), hash: info.hash.clone(),
}, },
})) })
} }
fn get_tree( fn get_tree(
(state, info): (State<HttpServerState>, actix_web::Path<HashQueryInfo>), (state, info): (State<HttpServerState>, actix_web::Path<HashQueryInfo>),
) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> { ) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> {
unwrap_request(state.mononoke.send(MononokeQuery { state.mononoke.send_query(MononokeQuery {
repo: info.repo.clone(), repo: info.repo.clone(),
kind: MononokeRepoQuery::GetTree { kind: MononokeRepoQuery::GetTree {
hash: info.hash.clone(), hash: info.hash.clone(),
}, },
})) })
} }
fn get_changeset( fn get_changeset(
(state, info): (State<HttpServerState>, actix_web::Path<HashQueryInfo>), (state, info): (State<HttpServerState>, actix_web::Path<HashQueryInfo>),
) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> { ) -> impl Future<Item = MononokeRepoResponse, Error = ErrorKind> {
unwrap_request(state.mononoke.send(MononokeQuery { state.mononoke.send_query(MononokeQuery {
repo: info.repo.clone(), repo: info.repo.clone(),
kind: MononokeRepoQuery::GetChangeset { kind: MononokeRepoQuery::GetChangeset {
hash: info.hash.clone(), hash: info.hash.clone(),
}, },
})) })
} }
fn setup_logger(debug: bool) -> Logger { fn setup_logger(debug: bool) -> Logger {
@ -238,7 +239,7 @@ fn create_config<P: AsRef<Path>>(
#[derive(Clone)] #[derive(Clone)]
struct HttpServerState { struct HttpServerState {
mononoke: Addr<MononokeActor>, mononoke: Arc<Mononoke>,
logger: Logger, logger: Logger,
} }
@ -382,17 +383,15 @@ fn main() -> Result<()> {
let use_ssl = ssl_acceptor.is_some(); let use_ssl = ssl_acceptor.is_some();
let sys = actix::System::new("mononoke-apiserver"); let sys = actix::System::new("mononoke-apiserver");
let executor = runtime.executor(); let executor = runtime.executor();
let mononoke = Mononoke::new(mononoke_logger.clone(), repo_configs, executor);
let addr = MononokeActor::create(move |_| { let mononoke = Arc::new(mononoke);
MononokeActor::new(mononoke_logger.clone(), repo_configs, executor)
});
if let Ok(port) = thrift_port { if let Ok(port) = thrift_port {
thrift::make_thrift(thrift_logger, host.to_string(), port, addr.clone()); thrift::make_thrift(thrift_logger, host.to_string(), port, mononoke.clone());
} }
let state = HttpServerState { let state = HttpServerState {
mononoke: addr, mononoke,
logger: actix_logger.clone(), logger: actix_logger.clone(),
}; };

View File

@ -8,7 +8,9 @@ mod dispatcher;
mod fb303; mod fb303;
mod mononoke; mod mononoke;
use actix::{Addr, Arbiter}; use std::sync::Arc;
use actix::Arbiter;
use slog::Logger; use slog::Logger;
use apiserver_thrift::server::make_MononokeAPIService_server; use apiserver_thrift::server::make_MononokeAPIService_server;
@ -18,9 +20,9 @@ use srserver::ThriftServerBuilder;
use self::dispatcher::ThriftDispatcher; use self::dispatcher::ThriftDispatcher;
use self::fb303::FacebookServiceImpl; use self::fb303::FacebookServiceImpl;
use self::mononoke::MononokeAPIServiceImpl; use self::mononoke::MononokeAPIServiceImpl;
use super::actor::MononokeActor; use super::actor::Mononoke;
pub fn make_thrift(logger: Logger, host: String, port: i32, addr: Addr<MononokeActor>) { pub fn make_thrift(logger: Logger, host: String, port: i32, addr: Arc<Mononoke>) {
let dispatcher = ThriftDispatcher(Arbiter::new("thrift-worker")); let dispatcher = ThriftDispatcher(Arbiter::new("thrift-worker"));
dispatcher.start({ dispatcher.start({

View File

@ -5,8 +5,8 @@
// GNU General Public License version 2 or any later version. // GNU General Public License version 2 or any later version.
use std::convert::TryInto; use std::convert::TryInto;
use std::sync::Arc;
use actix::Addr;
use futures::{Future, IntoFuture}; use futures::{Future, IntoFuture};
use futures_ext::{BoxFuture, FutureExt}; use futures_ext::{BoxFuture, FutureExt};
use slog::Logger; use slog::Logger;
@ -15,16 +15,16 @@ use apiserver_thrift::server::MononokeApiservice;
use apiserver_thrift::services::mononoke_apiservice::GetRawExn; use apiserver_thrift::services::mononoke_apiservice::GetRawExn;
use apiserver_thrift::types::MononokeGetRawParams; use apiserver_thrift::types::MononokeGetRawParams;
use super::super::actor::{unwrap_request, MononokeActor, MononokeRepoResponse}; use super::super::actor::{Mononoke, MononokeRepoResponse};
#[derive(Clone)] #[derive(Clone)]
pub struct MononokeAPIServiceImpl { pub struct MononokeAPIServiceImpl {
addr: Addr<MononokeActor>, addr: Arc<Mononoke>,
logger: Logger, logger: Logger,
} }
impl MononokeAPIServiceImpl { impl MononokeAPIServiceImpl {
pub fn new(addr: Addr<MononokeActor>, logger: Logger) -> Self { pub fn new(addr: Arc<Mononoke>, logger: Logger) -> Self {
Self { addr, logger } Self { addr, logger }
} }
} }
@ -37,7 +37,7 @@ impl MononokeApiservice for MononokeAPIServiceImpl {
.from_err() .from_err()
.and_then({ .and_then({
cloned!(self.addr); cloned!(self.addr);
move |param| unwrap_request(addr.send(param)) move |param| addr.send_query(param)
}) })
.and_then(|resp: MononokeRepoResponse| match resp { .and_then(|resp: MononokeRepoResponse| match resp {
MononokeRepoResponse::GetRawFile { content } => Ok(content.to_vec()), MononokeRepoResponse::GetRawFile { content } => Ok(content.to_vec()),