derived_data: make derivation options a parameter to derive_from_parents

Summary:
Currently, data derivation for types that have options (at the moment the unode
version and the blame filesize limit) takes the values of those options from the
repository configuration.

This happens as a side effect, which means it's not possible to have data
derivation types with different configs active in the same repository (e.g. to
serve unodes v1 while backfilling unodes v2).  To use data derivation with
different options, e.g. in tests, we must resort to `repo.dangerous_override`.

The first step to resolving this is to make the data derivation options a
parameter.  Each derived data type defines its own options type, and the options
are passed into `derive_from_parents` so that the right kind of derivation can
happen.
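
For orientation, here is a condensed sketch of the changed trait surfaces,
abridged from the diff below (other methods and exact bounds elided):

#[async_trait]
pub trait BonsaiDerivable: Sized + 'static + Send + Sync + Clone {
    const NAME: &'static str;

    /// Type for additional options to derivation.
    type Options: Send + Sync + 'static;

    /// Derive the data for `bonsai`, given the derived data of its parents.
    async fn derive_from_parents(
        ctx: CoreContext,
        repo: BlobRepo,
        bonsai: BonsaiChangeset,
        parents: Vec<Self>,
        options: &Self::Options,
    ) -> Result<Self, Error>;
}

#[async_trait]
pub trait BonsaiDerivedMapping: Send + Sync + Clone {
    type Value: BonsaiDerivable;

    /// Get the derivation options that apply for this mapping.
    fn options(&self) -> <Self::Value as BonsaiDerivable>::Options;
}

The generic derivation code then fetches the options from the mapping and
threads them through, roughly `let options = mapping.options();` followed by
`Derivable::derive_from_parents(ctx, repo, bcs, parents, &options)`; types with
no options simply use `type Options = ()`.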

The mapping is responsible for storing the options and providing them at the
time of derivation.  In this diff it simply reads them from the repository
config, just as before.  A future diff will change this so that there can be
multiple configurations.
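
Concretely, taking blame as the example (abridged from the blame changes
below): the mapping captures the filesize limit from the repository config
when it is constructed and hands it back via `options()`.

impl BlobstoreExistsMapping for BlameRootMapping {
    type Value = BlameRoot;

    fn new(repo: &BlobRepo) -> Result<Self> {
        // For now the value still comes from the repository config, as before.
        let options = BlameDeriveOptions {
            filesize_limit: repo
                .get_derived_data_config()
                .override_blame_filesize_limit
                .unwrap_or(BLAME_FILESIZE_LIMIT),
        };
        Ok(Self {
            blobstore: repo.get_blobstore().boxed(),
            options,
        })
    }

    fn options(&self) -> BlameDeriveOptions {
        self.options
    }

    // prefix() etc. unchanged.
}

Callers that previously let `fetch_file_full_content` read the config
internally now obtain the options up front, e.g.
`let options = BlameRoot::default_mapping(&ctx, &repo)?.options();`, and pass
them down explicitly.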

Reviewed By: krallin

Differential Revision: D25371967

fbshipit-source-id: 1cf4c06a4598fccbfa93367fc1f1c2fa00fd8235
Author: Mark Juggurnauth-Thomas, 2020-12-14 09:22:57 -08:00 (committed by Facebook GitHub Bot)
Parent: 64be2e8d87
Commit: 05225bcf90
18 changed files with 170 additions and 41 deletions

View File

@ -8,7 +8,7 @@
use crate::error::SubcommandError;
use anyhow::{format_err, Error};
use blame::{fetch_blame, fetch_file_full_content};
use blame::{fetch_blame, fetch_file_full_content, BlameRoot};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
@ -20,7 +20,7 @@ use cmdlib::{
helpers,
};
use context::CoreContext;
use derived_data::BonsaiDerived;
use derived_data::{BonsaiDerived, BonsaiDerivedMapping};
use fbinit::FacebookInit;
use futures::{
compat::Future01CompatExt,
@ -301,9 +301,10 @@ fn diff(
old: FileUnodeId,
) -> impl Future<Item = String, Error = Error> {
async move {
let f1 = fetch_file_full_content(&ctx, &repo, new)
let options = BlameRoot::default_mapping(&ctx, &repo)?.options();
let f1 = fetch_file_full_content(&ctx, &repo, new, options)
.and_then(|result| ready(result.map_err(Error::from)));
let f2 = fetch_file_full_content(&ctx, &repo, old)
let f2 = fetch_file_full_content(&ctx, &repo, old, options)
.and_then(|result| ready(result.map_err(Error::from)));
try_join(f1, f2).await
}
@ -341,10 +342,14 @@ fn subcommand_compute_blame(
line_number: bool,
) -> impl Future<Item = (), Error = Error> {
let blobstore = repo.get_blobstore().boxed();
find_leaf(ctx.clone(), repo.clone(), csid, path.clone())
BlameRoot::default_mapping(&ctx, &repo)
.into_future()
.from_err()
.join(find_leaf(ctx.clone(), repo.clone(), csid, path.clone()))
.and_then({
cloned!(ctx, repo);
move |file_unode_id| {
move |(blame_mapping, file_unode_id)| {
let blame_options = blame_mapping.options();
bounded_traversal_dag(
256,
(file_unode_id, path),
@ -414,12 +419,19 @@ fn subcommand_compute_blame(
{
cloned!(ctx, repo);
async move {
fetch_file_full_content(&ctx, &repo, file_unode_id).await
fetch_file_full_content(
&ctx,
&repo,
file_unode_id,
blame_options,
)
.await
}
}
.boxed()
.compat()
.and_then(move |content| match content {
.and_then(move |content| {
match content {
Err(rejected) => Ok(Err(rejected)),
Ok(content) => {
let parents = parents
@ -434,7 +446,8 @@ fn subcommand_compute_blame(
)
.map(move |blame| Ok((content, blame)))
}
})
}
})
}
},
)

View File

@ -11,7 +11,7 @@ use blobrepo::BlobRepo;
use blobstore::Loadable;
use context::CoreContext;
use deleted_files_manifest::RootDeletedManifestId;
use derived_data::{BonsaiDerivable, BonsaiDerived};
use derived_data::{BonsaiDerivable, BonsaiDerived, BonsaiDerivedMapping};
use fastlog::{fetch_parent_root_unodes, RootFastlog};
use fsnodes::{prefetch_content_metadata, RootFsnodeId};
use futures::{
@ -161,17 +161,18 @@ async fn prefetch_content(
let repo = &repo;
let blobstore = repo.blobstore();
let file_unode = file_unode_id.load(ctx, blobstore).await?;
let options = BlameRoot::default_mapping(ctx, repo)?.options();
let parents_content: Vec<_> = file_unode
.parents()
.iter()
.cloned()
.chain(rename)
.map(|file_unode_id| fetch_file_full_content(ctx, repo, file_unode_id))
.map(|file_unode_id| fetch_file_full_content(ctx, repo, file_unode_id, options))
.collect();
// the assignment is needed to avoid unused_must_use warnings
let _ = future::try_join(
fetch_file_full_content(ctx, repo, file_unode_id),
fetch_file_full_content(ctx, repo, file_unode_id, options),
future::try_join_all(parents_content),
)
.await?;

View File

@ -37,17 +37,33 @@ impl From<ChangesetId> for BlameRoot {
}
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct BlameDeriveOptions {
filesize_limit: u64,
}
impl Default for BlameDeriveOptions {
fn default() -> Self {
BlameDeriveOptions {
filesize_limit: BLAME_FILESIZE_LIMIT,
}
}
}
#[async_trait]
impl BonsaiDerivable for BlameRoot {
const NAME: &'static str = "blame";
type Options = BlameDeriveOptions;
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
_parents: Vec<Self>,
options: &Self::Options,
) -> Result<Self, Error> {
let blame_options = *options;
let csid = bonsai.get_changeset_id();
let root_manifest = RootUnodeManifestId::derive(&ctx, &repo, csid)
.map_ok(|root_id| root_id.manifest_unode_id().clone());
@ -78,7 +94,8 @@ impl BonsaiDerivable for BlameRoot {
let (path, file) = v?;
Result::<_>::Ok(
tokio::spawn(async move {
create_blame(&ctx, &repo, renames, csid, path, file).await
create_blame(&ctx, &repo, renames, csid, path, file, blame_options)
.await
})
.await??,
)
@ -95,6 +112,7 @@ impl BonsaiDerivable for BlameRoot {
#[derive(Clone)]
pub struct BlameRootMapping {
blobstore: Arc<dyn Blobstore>,
options: BlameDeriveOptions,
}
#[async_trait]
@ -102,8 +120,15 @@ impl BlobstoreExistsMapping for BlameRootMapping {
type Value = BlameRoot;
fn new(repo: &BlobRepo) -> Result<Self> {
let options = BlameDeriveOptions {
filesize_limit: repo
.get_derived_data_config()
.override_blame_filesize_limit
.unwrap_or(BLAME_FILESIZE_LIMIT),
};
Ok(Self {
blobstore: repo.get_blobstore().boxed(),
options,
})
}
@ -114,6 +139,10 @@ impl BlobstoreExistsMapping for BlameRootMapping {
fn prefix(&self) -> &'static str {
"derived_rootblame.v1."
}
fn options(&self) -> BlameDeriveOptions {
self.options
}
}
impl_bonsai_derived_mapping!(BlameRootMapping, BlobstoreExistsMapping, BlameRoot);
@ -125,6 +154,7 @@ async fn create_blame(
csid: ChangesetId,
path: MPath,
file_unode_id: FileUnodeId,
options: BlameDeriveOptions,
) -> Result<BlameId, Error> {
let blobstore = repo.blobstore();
@ -137,14 +167,14 @@ async fn create_blame(
.chain(renames.get(&path).cloned())
.map(|file_unode_id| {
future::try_join(
fetch_file_full_content(ctx, repo, file_unode_id),
fetch_file_full_content(ctx, repo, file_unode_id, options),
async move { BlameId::from(file_unode_id).load(ctx, blobstore).await }.err_into(),
)
})
.collect();
let (content, parents_content) = future::try_join(
fetch_file_full_content(ctx, repo, file_unode_id),
fetch_file_full_content(ctx, repo, file_unode_id, options),
future::try_join_all(parents_content_and_blame),
)
.await?;
@ -169,6 +199,7 @@ pub async fn fetch_file_full_content(
ctx: &CoreContext,
repo: &BlobRepo,
file_unode_id: FileUnodeId,
options: BlameDeriveOptions,
) -> Result<Result<Bytes, BlameRejected>, Error> {
let blobstore = repo.blobstore();
let file_unode = file_unode_id
@ -177,7 +208,7 @@ pub async fn fetch_file_full_content(
.await?;
let content_id = *file_unode.content_id();
let result = fetch_from_filestore(ctx, repo, content_id).await;
let result = fetch_from_filestore(ctx, repo, content_id, options).await;
match result {
Err(FetchError::Error(error)) => Err(error),
@ -206,6 +237,7 @@ async fn fetch_from_filestore(
ctx: &CoreContext,
repo: &BlobRepo,
content_id: ContentId,
options: BlameDeriveOptions,
) -> Result<Bytes, FetchError> {
let result = filestore::fetch_with_size(
repo.get_blobstore(),
@ -221,11 +253,7 @@ async fn fetch_from_filestore(
Err(error)
}
Some((stream, size)) => {
let config = repo.get_derived_data_config();
let filesize_limit = config
.override_blame_filesize_limit
.unwrap_or(BLAME_FILESIZE_LIMIT);
if size > filesize_limit {
if size > options.filesize_limit {
return Err(FetchError::Rejected(BlameRejected::TooBig));
}
let v = Vec::with_capacity(size as usize);

View File

@ -19,7 +19,7 @@ use blobrepo::BlobRepo;
use blobstore::{Loadable, LoadableError};
use bytes::Bytes;
use context::CoreContext;
use derived_data::{BonsaiDerived, DeriveError};
use derived_data::{BonsaiDerived, BonsaiDerivedMapping, DeriveError};
use manifest::ManifestOps;
use mononoke_types::{
blame::{Blame, BlameId, BlameMaybeRejected, BlameRejected},
@ -65,7 +65,10 @@ pub async fn fetch_blame(
}
}
};
let content = derived::fetch_file_full_content(ctx, repo, blame_id.into())
let mapping = BlameRoot::default_mapping(ctx, repo)?;
// TODO(mbthomas): remove file content fetching - the caller can fetch the
// content if they want it.
let content = derived::fetch_file_full_content(ctx, repo, blame_id.into(), mapping.options())
.await
.map_err(BlameError::Error)?
.map_err(BlameError::Rejected)?;

View File

@ -21,12 +21,14 @@ use crate::ChangesetInfo;
impl BonsaiDerivable for ChangesetInfo {
const NAME: &'static str = "changeset_info";
type Options = ();
async fn derive_from_parents(
_ctx: CoreContext,
_repo: BlobRepo,
bonsai: BonsaiChangeset,
_parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
let csid = bonsai.get_changeset_id();
Ok(ChangesetInfo::new(csid, bonsai))
@ -55,6 +57,8 @@ impl BlobstoreRootIdMapping for ChangesetInfoMapping {
fn prefix(&self) -> &'static str {
"changeset_info.blake2."
}
fn options(&self) {}
}
impl_bonsai_derived_mapping!(ChangesetInfoMapping, BlobstoreRootIdMapping, ChangesetInfo);

View File

@ -50,12 +50,14 @@ impl From<RootDeletedManifestId> for BlobstoreBytes {
impl BonsaiDerivable for RootDeletedManifestId {
const NAME: &'static str = "deleted_manifest";
type Options = ();
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
let bcs_id = bonsai.get_changeset_id();
let changes = get_changes(&ctx, &repo, bonsai).await?;
@ -96,6 +98,8 @@ impl BlobstoreRootIdMapping for RootDeletedManifestMapping {
fn prefix(&self) -> &'static str {
"derived_root_deleted_manifest."
}
fn options(&self) {}
}
impl_bonsai_derived_mapping!(

View File

@ -56,12 +56,14 @@ impl From<ChangesetId> for RootFastlog {
impl BonsaiDerivable for RootFastlog {
const NAME: &'static str = "fastlog";
type Options = ();
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
_parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
let bcs_id = bonsai.get_changeset_id();
let (root_unode_mf_id, parents) = future::try_join(
@ -155,6 +157,8 @@ impl BlobstoreExistsMapping for RootFastlogMapping {
fn prefix(&self) -> &'static str {
"derived_rootfastlog."
}
fn options(&self) {}
}
impl_bonsai_derived_mapping!(RootFastlogMapping, BlobstoreExistsMapping, RootFastlog);

View File

@ -106,12 +106,14 @@ pub enum FilenodesOnlyPublic {
impl BonsaiDerivable for FilenodesOnlyPublic {
const NAME: &'static str = "filenodes";
type Options = ();
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
_parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
let filenodes = generate_all_filenodes(&ctx, &repo, bonsai.get_changeset_id()).await?;
@ -390,6 +392,8 @@ impl BonsaiDerivedMapping for FilenodesOnlyPublicMapping {
None => Ok(()),
}
}
fn options(&self) {}
}
async fn fetch_root_filenode(

View File

@ -65,12 +65,14 @@ impl From<RootFsnodeId> for BlobstoreBytes {
impl BonsaiDerivable for RootFsnodeId {
const NAME: &'static str = "fsnodes";
type Options = ();
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
let fsnode_id = derive_fsnode(
&ctx,
@ -134,6 +136,8 @@ impl BlobstoreRootIdMapping for RootFsnodeMapping {
fn prefix(&self) -> &'static str {
"derived_root_fsnode."
}
fn options(&self) {}
}
impl_bonsai_derived_mapping!(RootFsnodeMapping, BlobstoreRootIdMapping, RootFsnodeId);

View File

@ -25,12 +25,14 @@ pub struct MappedHgChangesetId(pub HgChangesetId);
impl BonsaiDerivable for MappedHgChangesetId {
const NAME: &'static str = "hgchangesets";
type Options = ();
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
crate::derive_hg_changeset::derive_from_parents(ctx, repo, bonsai, parents).await
}
@ -85,6 +87,8 @@ impl BonsaiDerivedMapping for HgChangesetIdMapping {
.await?;
Ok(())
}
fn options(&self) {}
}
#[async_trait]

View File

@ -67,12 +67,14 @@ impl From<RootSkeletonManifestId> for BlobstoreBytes {
impl BonsaiDerivable for RootSkeletonManifestId {
const NAME: &'static str = "skeleton_manifests";
type Options = ();
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
let skeleton_manifest_id = derive_skeleton_manifest(
&ctx,
@ -136,6 +138,8 @@ impl BlobstoreRootIdMapping for RootSkeletonManifestMapping {
fn prefix(&self) -> &'static str {
"derived_root_skeletonmanifest."
}
fn options(&self) {}
}
impl_bonsai_derived_mapping!(

View File

@ -368,21 +368,27 @@ where
let ctx = ctx.clone_and_reset();
let deriver = async {
let derived =
Derivable::derive_from_parents(ctx.clone(), repo.clone(), bcs, parents)
.boxed()
.compat()
.traced_with_id(
&ctx.trace(),
"derive::derive_from_parents",
trace_args! {
"csid" => bcs_id.to_hex().to_string(),
"type" => Derivable::NAME
},
event_id,
)
.compat()
.await?;
let options = mapping.options();
let derived = Derivable::derive_from_parents(
ctx.clone(),
repo.clone(),
bcs,
parents,
&options,
)
.boxed()
.compat()
.traced_with_id(
&ctx.trace(),
"derive::derive_from_parents",
trace_args! {
"csid" => bcs_id.to_hex().to_string(),
"type" => Derivable::NAME
},
event_id,
)
.compat()
.await?;
mapping.put(ctx.clone(), bcs_id, derived).await?;
let res: Result<_, Error> = Ok(());
res
@ -607,12 +613,14 @@ mod test {
impl BonsaiDerivable for TestGenNum {
const NAME: &'static str = "test_gen_num";
type Options = ();
async fn derive_from_parents(
_ctx: CoreContext,
_repo: BlobRepo,
bonsai: BonsaiChangeset,
parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
let parent_commits = parents.iter().map(|x| x.1).collect();
@ -688,6 +696,8 @@ mod test {
}
Ok(())
}
fn options(&self) {}
}
async fn derive_for_master(ctx: CoreContext, repo: BlobRepo) {

View File

@ -46,6 +46,8 @@ pub trait BonsaiDerivable: Sized + 'static + Send + Sync + Clone {
/// name data (for example lease keys) assoicated with particular derived data type.
const NAME: &'static str;
/// Type for additional options to derivation
type Options: Send + Sync + 'static;
/// Defines how to derive new representation for bonsai having derivations
/// for parents and having a current bonsai object.
@ -60,6 +62,7 @@ pub trait BonsaiDerivable: Sized + 'static + Send + Sync + Clone {
repo: BlobRepo,
bonsai: BonsaiChangeset,
parents: Vec<Self>,
options: &Self::Options,
) -> Result<Self, Error>;
/// This method might be overridden by BonsaiDerivable implementors if there's a more efficient
@ -208,6 +211,9 @@ pub trait BonsaiDerivedMapping: Send + Sync + Clone {
/// Saves mapping between bonsai changeset and derived data id
async fn put(&self, ctx: CoreContext, csid: ChangesetId, id: Self::Value) -> Result<(), Error>;
/// Get the derivation options that apply for this mapping.
fn options(&self) -> <Self::Value as BonsaiDerivable>::Options;
}
/// This mapping can be used when we want to ignore values before it was put
@ -253,4 +259,8 @@ where
self.regenerate.with(|regenerate| regenerate.remove(&csid));
self.base.put(ctx, csid, id).await
}
fn options(&self) -> <M::Value as BonsaiDerivable>::Options {
self.base.options()
}
}

View File

@ -16,7 +16,7 @@ use context::CoreContext;
use futures::stream::{FuturesUnordered, TryStreamExt};
use mononoke_types::ChangesetId;
use crate::BonsaiDerived;
use crate::{BonsaiDerivable, BonsaiDerived};
/// Implementation of a derived data mapping where the root id is stored
/// in the blobstore.
@ -80,6 +80,9 @@ pub trait BlobstoreRootIdMapping {
.put(ctx, self.format_key(cs_id), value.into())
.await
}
/// Fetch the options for this mapping implementation.
fn options(&self) -> <Self::Value as BonsaiDerivable>::Options;
}
/// Implementation of a derived data mapping where the fact that derivation
@ -145,6 +148,9 @@ pub trait BlobstoreExistsMapping {
.put(ctx, self.format_key(cs_id), BlobstoreBytes::empty())
.await
}
/// Fetch the options for this mapping implementation.
fn options(&self) -> <Self::Value as BonsaiDerivable>::Options;
}
/// Macro to implement a bonsai derived mapping using a mapping implementation
@ -192,6 +198,10 @@ macro_rules! impl_bonsai_derived_mapping {
) -> ::anyhow::Result<()> {
self.store(&ctx, csid, id).await
}
fn options(&self) -> <Self::Value as $crate::BonsaiDerivable>::Options {
<$mapping as $mapping_impl>::options(self)
}
}
#[::async_trait::async_trait]

View File

@ -39,9 +39,8 @@ pub(crate) async fn derive_unode_manifest(
cs_id: ChangesetId,
parents: Vec<ManifestUnodeId>,
changes: Vec<(MPath, Option<(ContentId, FileType)>)>,
unode_version: UnodeVersion,
) -> Result<ManifestUnodeId, Error> {
let unode_version = repo.get_derived_data_config().unode_version;
let parents: Vec<_> = parents.into_iter().collect();
let blobstore = repo.get_blobstore();
@ -420,6 +419,7 @@ mod tests {
bcs_id,
vec![],
get_file_changes(&bcs),
UnodeVersion::V2,
)
.await?;
@ -452,6 +452,7 @@ mod tests {
bcs_id,
vec![parent_unode_id.clone()],
get_file_changes(&bcs),
UnodeVersion::V2,
)
.await?;
@ -503,6 +504,7 @@ mod tests {
bcs_id,
vec![],
get_file_changes(&bcs),
UnodeVersion::V2,
)
.await?;
let unode_mf = unode_id.load(&ctx, repo.blobstore()).await?;
@ -706,6 +708,7 @@ mod tests {
bcs_id,
vec![p1_root_unode_id, p2_root_unode_id],
get_file_changes(&bcs),
UnodeVersion::V2,
)
.await?;
@ -716,6 +719,7 @@ mod tests {
bcs_id,
vec![p1_root_unode_id, p2_root_unode_id],
get_file_changes(&bcs),
UnodeVersion::V2,
)
.await?;
assert_eq!(root_unode, same_root_unode);
@ -727,6 +731,7 @@ mod tests {
bcs_id,
vec![p2_root_unode_id, p1_root_unode_id],
get_file_changes(&bcs),
UnodeVersion::V2,
)
.await?;
@ -750,6 +755,7 @@ mod tests {
bcs_id,
vec![],
get_file_changes(&bcs),
UnodeVersion::V2,
)
.await
}
@ -773,6 +779,7 @@ mod tests {
first_bcs_id,
vec![],
get_file_changes(&bcs),
UnodeVersion::V2,
)
.await?;
@ -793,6 +800,7 @@ mod tests {
merge_p1_id,
vec![first_unode_id.clone()],
get_file_changes(&merge_p1),
UnodeVersion::V2,
)
.await?;
(merge_p1, merge_p1_unode_id)
@ -816,6 +824,7 @@ mod tests {
merge_p2_id,
vec![first_unode_id.clone()],
get_file_changes(&merge_p2),
UnodeVersion::V2,
)
.await?;
(merge_p2, merge_p2_unode_id)
@ -837,6 +846,7 @@ mod tests {
merge_id,
vec![merge_p1_unode_id, merge_p2_unode_id],
get_file_changes(&merge),
UnodeVersion::V2,
)
.await
}

View File

@ -56,13 +56,16 @@ impl From<RootUnodeManifestId> for BlobstoreBytes {
impl BonsaiDerivable for RootUnodeManifestId {
const NAME: &'static str = "unodes";
type Options = UnodeVersion;
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
parents: Vec<Self>,
options: &Self::Options,
) -> Result<Self, Error> {
let unode_version = *options;
let bcs_id = bonsai.get_changeset_id();
derive_unode_manifest(
ctx,
@ -73,6 +76,7 @@ impl BonsaiDerivable for RootUnodeManifestId {
.map(|root_mf_id| root_mf_id.manifest_unode_id().clone())
.collect(),
get_file_changes(&bonsai),
unode_version,
)
.map_ok(RootUnodeManifestId)
.await
@ -107,6 +111,10 @@ impl BlobstoreRootIdMapping for RootUnodeManifestMapping {
UnodeVersion::V2 => "derived_root_unode_v2.",
}
}
fn options(&self) -> UnodeVersion {
self.unode_version
}
}
impl_bonsai_derived_mapping!(

View File

@ -393,6 +393,10 @@ where
buffer.insert(csid, id);
Ok(())
}
fn options(&self) -> <M::Value as BonsaiDerivable>::Options {
self.mapping.options()
}
}
pub fn derived_data_utils(

View File

@ -82,18 +82,22 @@ impl BonsaiDerivedMapping for TreeMapping {
.put(&ctx, self.root_key(csid), root.into())
.await
}
fn options(&self) {}
}
#[async_trait]
impl BonsaiDerivable for TreeHandle {
const NAME: &'static str = "git_trees";
type Options = ();
async fn derive_from_parents(
ctx: CoreContext,
repo: BlobRepo,
bonsai: BonsaiChangeset,
parents: Vec<Self>,
_options: &Self::Options,
) -> Result<Self, Error> {
let blobstore = repo.get_blobstore();
let changes = get_file_changes(&blobstore, &ctx, bonsai).await?;