bookmarks: extract BundleReplayData from BookmarkUpdateReason

Summary:
Separate out the `BundleReplayData` from the `BookmarkUpdateReason` enum.  There's
no real need for this to be part of the reason, and removing it means we can
abstract away the remaining dependency on Mercurial changeset IDs from
the main bookmarks traits.

Reviewed By: mitrandir77, ikostia

Differential Revision: D22417659

fbshipit-source-id: c8e5af7ba57d10a90c86437b59c0d48e587e730e
This commit is contained in:
Mark Thomas 2020-07-10 04:44:58 -07:00 committed by Facebook GitHub Bot
parent fa4dce16f7
commit 3afceb0e2c
32 changed files with 655 additions and 956 deletions

View File

@ -233,6 +233,7 @@ members = [
"manifest",
"manifest/test_utils",
"megarepolib",
"mercurial/bundle_replay_data",
"mercurial/bundles",
"mercurial/mutation",
"mercurial/revlog",

View File

@ -30,7 +30,7 @@ use blobrepo_factory::ReadOnlyStorage;
use blobstore_factory::make_metadata_sql_factory;
use bookmarks::{
BookmarkTransactionError, BookmarkUpdateLog, BookmarkUpdateLogEntry, BookmarkUpdateReason,
Bookmarks, Freshness,
Bookmarks, BundleReplay, Freshness,
};
use cloned::cloned;
use context::CoreContext;
@ -230,6 +230,7 @@ where
debug!(ctx.logger(), "bookmark was renamed into {:?}", bookmark);
let from_cs_id = log_entry.from_changeset_id;
let to_cs_id = log_entry.to_changeset_id;
let bundle_replay_data = log_entry.bundle_replay_data;
let get_commit_sync_outcome = |maybe_cs_id: Option<ChangesetId>| {
cloned!(ctx);
@ -306,31 +307,47 @@ where
ctx.logger(),
"syncing bookmark {} to {:?}", bookmark, to_cs_id
);
let reason = BookmarkUpdateReason::Backsyncer {
bundle_replay_data: log_entry.reason.get_bundle_replay_data().cloned(),
};
let bundle_replay = bundle_replay_data
.as_ref()
.map(|data| data as &dyn BundleReplay);
match (from_cs_id, to_cs_id) {
(Some(from), Some(to)) => {
debug!(
ctx.logger(),
"updating bookmark {:?} from {:?} to {:?}", bookmark, from, to
);
bookmark_txn.update(&bookmark, to, from, reason)?;
bookmark_txn.update(
&bookmark,
to,
from,
BookmarkUpdateReason::Backsyncer,
bundle_replay,
)?;
}
(Some(from), None) => {
debug!(
ctx.logger(),
"deleting bookmark {:?} with original position {:?}", bookmark, from
);
bookmark_txn.delete(&bookmark, from, reason)?;
bookmark_txn.delete(
&bookmark,
from,
BookmarkUpdateReason::Backsyncer,
bundle_replay,
)?;
}
(None, Some(to)) => {
debug!(
ctx.logger(),
"creating bookmark {:?} to point to {:?}", bookmark, to
);
bookmark_txn.create(&bookmark, to, reason)?;
bookmark_txn.create(
&bookmark,
to,
BookmarkUpdateReason::Backsyncer,
bundle_replay,
)?;
}
(None, None) => {
bail!("unexpected bookmark move");

View File

@ -1469,19 +1469,12 @@ async fn move_bookmark(
bookmark,
bcs_id,
prev_bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
BookmarkUpdateReason::TestMove,
None,
)?;
}
None => {
txn.create(
bookmark,
bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)?;
txn.create(bookmark, bcs_id, BookmarkUpdateReason::TestMove, None)?;
}
}

View File

@ -137,7 +137,7 @@ pub fn upload_bookmarks(
let bookmark_name = bookmark_name_transformer(bookmark_name);
if mononoke_bookmarks.get(&bookmark_name) != Some(&value) {
count += 1;
try_boxfuture!(transaction.force_set(&bookmark_name, value, BookmarkUpdateReason::Blobimport))
try_boxfuture!(transaction.force_set(&bookmark_name, value, BookmarkUpdateReason::Blobimport, None))
}
}

View File

@ -9,7 +9,6 @@ include = ["src/**/*.rs"]
[dependencies]
bookmarks_types = { path = "bookmarks_types" }
context = { path = "../server/context" }
mercurial_types = { path = "../mercurial/types" }
mononoke_types = { path = "../mononoke_types" }
sql = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
stats = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }

View File

@ -23,10 +23,8 @@ sql = { git = "https://github.com/facebookexperimental/rust-shed.git", branch =
stats = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
anyhow = "1.0"
futures = { version = "0.3.5", features = ["async-await", "compat"] }
serde_json = "1.0"
[dev-dependencies]
mercurial_types-mocks = { path = "../../mercurial/types/mocks" }
mononoke_types-mocks = { path = "../../mononoke_types/mocks" }
ascii_ext = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
async_unit = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }

View File

@ -7,11 +7,11 @@
#![deny(warnings)]
use anyhow::{anyhow, bail, Error, Result};
use anyhow::{anyhow, Error, Result};
use bookmarks::{
Bookmark, BookmarkKind, BookmarkName, BookmarkPagination, BookmarkPrefix, BookmarkTransaction,
BookmarkTransactionError, BookmarkTransactionHook, BookmarkUpdateLog, BookmarkUpdateLogEntry,
BookmarkUpdateReason, Bookmarks, BundleReplayData, Freshness,
BookmarkUpdateReason, Bookmarks, BundleReplay, Freshness, RawBundleReplayData,
};
use context::{CoreContext, PerfCounterType};
use futures::compat::Future01CompatExt;
@ -545,21 +545,20 @@ impl BookmarkUpdateLog for SqlBookmarks {
reason,
timestamp,
bundle_handle,
commit_timestamps,
commit_timestamps_json,
) = entry;
get_bundle_replay_data(bundle_handle, commit_timestamps).and_then(
|replay_data| {
Ok(BookmarkUpdateLogEntry {
id,
repo_id,
bookmark_name: name,
to_changeset_id: to_cs_id,
from_changeset_id: from_cs_id,
reason: reason.update_bundle_replay_data(replay_data)?,
timestamp,
})
},
)
let bundle_replay_data =
RawBundleReplayData::maybe_new(bundle_handle, commit_timestamps_json)?;
Ok(BookmarkUpdateLogEntry {
id,
repo_id,
bookmark_name: name,
to_changeset_id: to_cs_id,
from_changeset_id: from_cs_id,
reason,
timestamp,
bundle_replay_data,
})
})
})
.try_flatten_stream()
@ -597,21 +596,20 @@ impl BookmarkUpdateLog for SqlBookmarks {
reason,
timestamp,
bundle_handle,
commit_timestamps,
commit_timestamps_json,
) = entry;
get_bundle_replay_data(bundle_handle, commit_timestamps).and_then(
|replay_data| {
Ok(BookmarkUpdateLogEntry {
id,
repo_id,
bookmark_name: name,
to_changeset_id: to_cs_id,
from_changeset_id: from_cs_id,
reason: reason.update_bundle_replay_data(replay_data)?,
timestamp,
})
},
)
let bundle_replay_data =
RawBundleReplayData::maybe_new(bundle_handle, commit_timestamps_json)?;
Ok(BookmarkUpdateLogEntry {
id,
repo_id,
bookmark_name: name,
to_changeset_id: to_cs_id,
from_changeset_id: from_cs_id,
reason,
timestamp,
bundle_replay_data,
})
})
})
.try_flatten_stream()
@ -629,6 +627,9 @@ struct NewUpdateLogEntry {
/// The reason for the update.
reason: BookmarkUpdateReason,
/// Bundle replay information if this update is replayable.
bundle_replay_data: Option<RawBundleReplayData>,
}
impl NewUpdateLogEntry {
@ -636,8 +637,14 @@ impl NewUpdateLogEntry {
old: Option<ChangesetId>,
new: Option<ChangesetId>,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<NewUpdateLogEntry> {
Ok(NewUpdateLogEntry { old, new, reason })
Ok(NewUpdateLogEntry {
old,
new,
reason,
bundle_replay_data: bundle_replay.map(BundleReplay::to_raw).transpose()?,
})
}
}
@ -679,58 +686,6 @@ impl SqlBookmarksTransactionPayload {
}
}
/// Record bundle replay data for a bookmark update log entry.
///
/// If `reason` carries `BundleReplayData`, serialize its commit timestamps to
/// JSON and attach them, together with the bundle handle, to log entry `id`
/// inside the given SQL transaction. Reason variants without replay data (or
/// with it absent) leave the transaction untouched. Returns the transaction so
/// the caller can continue queueing work or commit.
async fn log_bundle_replay_data(
id: u64,
reason: BookmarkUpdateReason,
sql_transaction: SqlTransaction,
) -> Result<SqlTransaction> {
use BookmarkUpdateReason::*;
let sql_transaction = match reason {
// Every reason variant that can carry replay data, with the data present.
Pushrebase {
bundle_replay_data: Some(bundle_replay_data),
}
| Push {
bundle_replay_data: Some(bundle_replay_data),
}
| TestMove {
bundle_replay_data: Some(bundle_replay_data),
}
| Backsyncer {
bundle_replay_data: Some(bundle_replay_data),
} => {
let BundleReplayData {
bundle_handle,
commit_timestamps,
} = bundle_replay_data;
// Timestamps are stored in the log as a JSON map keyed by changeset id.
let commit_timestamps = serde_json::to_string(&commit_timestamps)?;
AddBundleReplayData::query_with_transaction(
sql_transaction,
&[(&id, &bundle_handle, &commit_timestamps)],
)
.compat()
.await?
.0
}
// No replay data to record: pass the transaction through unchanged.
Pushrebase {
bundle_replay_data: None,
}
| Push {
bundle_replay_data: None,
}
| TestMove {
bundle_replay_data: None,
}
| Backsyncer {
bundle_replay_data: None,
}
| ManualMove
| Blobimport
| XRepoSync => sql_transaction,
};
Ok(sql_transaction)
}
async fn find_next_update_log_id(txn: SqlTransaction) -> Result<(SqlTransaction, u64)> {
let (txn, max_id_entries) = FindMaxBookmarkLogId::query_with_transaction(txn)
.compat()
@ -766,7 +721,15 @@ impl SqlBookmarksTransactionPayload {
.compat()
.await?
.0;
txn = Self::log_bundle_replay_data(next_id, log_entry.reason.clone(), txn).await?;
if let Some(data) = &log_entry.bundle_replay_data {
txn = AddBundleReplayData::query_with_transaction(
txn,
&[(&next_id, &data.bundle_handle, &data.commit_timestamps_json)],
)
.compat()
.await?
.0;
}
next_id += 1;
}
Ok(txn)
@ -926,6 +889,7 @@ impl BookmarkTransaction for SqlBookmarksTransaction {
new_cs: ChangesetId,
old_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.check_not_seen(bookmark)?;
self.payload.updates.insert(
@ -934,7 +898,7 @@ impl BookmarkTransaction for SqlBookmarksTransaction {
);
self.payload.log.insert(
bookmark.clone(),
NewUpdateLogEntry::new(Some(old_cs), Some(new_cs), reason)?,
NewUpdateLogEntry::new(Some(old_cs), Some(new_cs), reason, bundle_replay)?,
);
Ok(())
}
@ -944,6 +908,7 @@ impl BookmarkTransaction for SqlBookmarksTransaction {
bookmark: &BookmarkName,
new_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.check_not_seen(bookmark)?;
self.payload.creates.insert(
@ -952,7 +917,7 @@ impl BookmarkTransaction for SqlBookmarksTransaction {
);
self.payload.log.insert(
bookmark.clone(),
NewUpdateLogEntry::new(None, Some(new_cs), reason)?,
NewUpdateLogEntry::new(None, Some(new_cs), reason, bundle_replay)?,
);
Ok(())
}
@ -962,12 +927,13 @@ impl BookmarkTransaction for SqlBookmarksTransaction {
bookmark: &BookmarkName,
new_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.check_not_seen(bookmark)?;
self.payload.force_sets.insert(bookmark.clone(), new_cs);
self.payload.log.insert(
bookmark.clone(),
NewUpdateLogEntry::new(None, Some(new_cs), reason)?,
NewUpdateLogEntry::new(None, Some(new_cs), reason, bundle_replay)?,
);
Ok(())
}
@ -977,12 +943,13 @@ impl BookmarkTransaction for SqlBookmarksTransaction {
bookmark: &BookmarkName,
old_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.check_not_seen(bookmark)?;
self.payload.deletes.insert(bookmark.clone(), old_cs);
self.payload.log.insert(
bookmark.clone(),
NewUpdateLogEntry::new(Some(old_cs), None, reason)?,
NewUpdateLogEntry::new(Some(old_cs), None, reason, bundle_replay)?,
);
Ok(())
}
@ -991,12 +958,13 @@ impl BookmarkTransaction for SqlBookmarksTransaction {
&mut self,
bookmark: &BookmarkName,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.check_not_seen(bookmark)?;
self.payload.force_deletes.insert(bookmark.clone());
self.payload.log.insert(
bookmark.clone(),
NewUpdateLogEntry::new(None, None, reason)?,
NewUpdateLogEntry::new(None, None, reason, bundle_replay)?,
);
Ok(())
}
@ -1113,23 +1081,6 @@ impl BookmarkTransaction for SqlBookmarksTransaction {
}
}
/// Reconstruct optional `BundleReplayData` from its two storage columns.
///
/// The columns must agree: both present (the JSON timestamps are parsed), or
/// both absent (no replay data). One without the other indicates corrupt data.
fn get_bundle_replay_data(
    bundle_handle: Option<String>,
    commit_timestamps: Option<String>,
) -> Result<Option<BundleReplayData>> {
    let (bundle_handle, commit_timestamps) = match (bundle_handle, commit_timestamps) {
        (None, None) => return Ok(None),
        (Some(handle), Some(timestamps)) => (handle, timestamps),
        _ => bail!("inconsistent replay data"),
    };
    // Timestamps are stored as a JSON map of changeset id -> timestamp.
    let commit_timestamps = serde_json::from_str(&commit_timestamps)?;
    Ok(Some(BundleReplayData {
        bundle_handle,
        commit_timestamps,
    }))
}
#[cfg(test)]
mod test {
use super::*;
@ -1149,10 +1100,6 @@ mod test {
#[fbinit::compat_test]
async fn test_update_kind_compatibility(fb: FacebookInit) -> Result<()> {
let data = BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
};
let ctx = CoreContext::test_mock(fb);
let store = SqlBookmarks::with_sqlite_in_memory().unwrap();
let scratch_name = create_bookmark_name("book1");
@ -1191,7 +1138,12 @@ mod test {
// Using 'create' to replace a scratch bookmark should fail.
let mut txn = store.create_transaction(ctx.clone(), REPO_ZERO);
txn.create(&scratch_name, ONES_CSID, data.clone())?;
txn.create(
&scratch_name,
ONES_CSID,
BookmarkUpdateReason::TestMove,
None,
)?;
assert!(!txn.commit().await?);
// Using 'update_scratch' to update a publishing bookmark should fail.
@ -1206,17 +1158,35 @@ mod test {
// Using 'update' to update a publishing bookmark should succeed.
let mut txn = store.create_transaction(ctx.clone(), REPO_ZERO);
txn.update(&publishing_name, TWOS_CSID, ONES_CSID, data.clone())?;
txn.update(
&publishing_name,
TWOS_CSID,
ONES_CSID,
BookmarkUpdateReason::TestMove,
None,
)?;
assert!(txn.commit().await?);
// Using 'update' to update a pull-default bookmark should succeed.
let mut txn = store.create_transaction(ctx.clone(), REPO_ZERO);
txn.update(&pull_default_name, TWOS_CSID, ONES_CSID, data.clone())?;
txn.update(
&pull_default_name,
TWOS_CSID,
ONES_CSID,
BookmarkUpdateReason::TestMove,
None,
)?;
assert!(txn.commit().await?);
// Using 'update' to update a scratch bookmark should fail.
let mut txn = store.create_transaction(ctx.clone(), REPO_ZERO);
txn.update(&scratch_name, TWOS_CSID, ONES_CSID, data.clone())?;
txn.update(
&scratch_name,
TWOS_CSID,
ONES_CSID,
BookmarkUpdateReason::TestMove,
None,
)?;
assert!(!txn.commit().await?);
// Using 'update_scratch' to update a scratch bookmark should succeed.

File diff suppressed because it is too large Load Diff

View File

@ -19,7 +19,7 @@ use futures::stream::{self, BoxStream, StreamExt, TryStreamExt};
use mononoke_types::{ChangesetId, RepositoryId};
use stats::prelude::*;
use crate::log::BookmarkUpdateReason;
use crate::log::{BookmarkUpdateReason, BundleReplay};
use crate::transaction::{BookmarkTransaction, BookmarkTransactionHook};
use crate::Bookmarks;
@ -313,9 +313,11 @@ impl BookmarkTransaction for CachedBookmarksTransaction {
new_cs: ChangesetId,
old_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.dirty = true;
self.transaction.update(bookmark, new_cs, old_cs, reason)
self.transaction
.update(bookmark, new_cs, old_cs, reason, bundle_replay)
}
fn create(
@ -323,9 +325,11 @@ impl BookmarkTransaction for CachedBookmarksTransaction {
bookmark: &BookmarkName,
new_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.dirty = true;
self.transaction.create(bookmark, new_cs, reason)
self.transaction
.create(bookmark, new_cs, reason, bundle_replay)
}
fn force_set(
@ -333,9 +337,11 @@ impl BookmarkTransaction for CachedBookmarksTransaction {
bookmark: &BookmarkName,
new_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.dirty = true;
self.transaction.force_set(bookmark, new_cs, reason)
self.transaction
.force_set(bookmark, new_cs, reason, bundle_replay)
}
fn delete(
@ -343,18 +349,22 @@ impl BookmarkTransaction for CachedBookmarksTransaction {
bookmark: &BookmarkName,
old_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.dirty = true;
self.transaction.delete(bookmark, old_cs, reason)
self.transaction
.delete(bookmark, old_cs, reason, bundle_replay)
}
fn force_delete(
&mut self,
bookmark: &BookmarkName,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
self.dirty = true;
self.transaction.force_delete(bookmark, reason)
self.transaction
.force_delete(bookmark, reason, bundle_replay)
}
fn update_scratch(
@ -471,9 +481,8 @@ mod tests {
transaction
.force_delete(
&BookmarkName::new("".to_string()).unwrap(),
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
BookmarkUpdateReason::TestMove,
None,
)
.unwrap();
@ -538,6 +547,7 @@ mod tests {
_new_cs: ChangesetId,
_old_cs: ChangesetId,
_reason: BookmarkUpdateReason,
_bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
Ok(())
}
@ -547,6 +557,7 @@ mod tests {
_key: &BookmarkName,
_new_cs: ChangesetId,
_reason: BookmarkUpdateReason,
_bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
Ok(())
}
@ -556,6 +567,7 @@ mod tests {
_key: &BookmarkName,
_new_cs: ChangesetId,
_reason: BookmarkUpdateReason,
_bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
Ok(())
}
@ -565,6 +577,7 @@ mod tests {
_key: &BookmarkName,
_old_cs: ChangesetId,
_reason: BookmarkUpdateReason,
_bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
Ok(())
}
@ -573,6 +586,7 @@ mod tests {
&mut self,
_key: &BookmarkName,
_reason: BookmarkUpdateReason,
_bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()> {
Ok(())
}

View File

@ -23,7 +23,10 @@ pub use bookmarks_types::{
Freshness,
};
pub use cache::CachedBookmarks;
pub use log::{BookmarkUpdateLog, BookmarkUpdateLogEntry, BookmarkUpdateReason, BundleReplayData};
pub use log::{
BookmarkUpdateLog, BookmarkUpdateLogEntry, BookmarkUpdateReason, BundleReplay,
RawBundleReplayData,
};
pub use transaction::{BookmarkTransaction, BookmarkTransactionError, BookmarkTransactionHook};
pub trait Bookmarks: Send + Sync + 'static {

View File

@ -5,16 +5,14 @@
* GNU General Public License version 2.
*/
use std::collections::HashMap;
use std::fmt;
use anyhow::{bail, Result};
use anyhow::{anyhow, Result};
use bookmarks_types::{BookmarkName, Freshness};
use context::CoreContext;
use futures::future::BoxFuture;
use futures::stream::BoxStream;
use mercurial_types::HgChangesetId;
use mononoke_types::{ChangesetId, RawBundle2Id, RepositoryId, Timestamp};
use mononoke_types::{ChangesetId, RepositoryId, Timestamp};
use sql::mysql_async::prelude::{ConvIr, FromValue};
use sql::mysql_async::{FromValueError, Value};
@ -30,13 +28,15 @@ pub struct BookmarkUpdateLogEntry {
pub bookmark_name: BookmarkName,
/// Previous position of bookmark if it's known. It might not be known if a bookmark was
/// force set or if a bookmark didn't exist
pub to_changeset_id: Option<ChangesetId>,
/// New position of a bookmark. It can be None if the bookmark was deleted
pub from_changeset_id: Option<ChangesetId>,
/// New position of a bookmark. It can be None if the bookmark was deleted
pub to_changeset_id: Option<ChangesetId>,
/// Reason for a bookmark update
pub reason: BookmarkUpdateReason,
/// When update happened
pub timestamp: Timestamp,
/// Raw bundle replay data
pub bundle_replay_data: Option<RawBundleReplayData>,
}
pub trait BookmarkUpdateLog: Send + Sync + 'static {
@ -102,29 +102,29 @@ pub trait BookmarkUpdateLog: Send + Sync + 'static {
}
/// Describes why a bookmark was moved
#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum BookmarkUpdateReason {
Pushrebase {
/// For now, let the bundle handle be not specified.
/// We may change it later
bundle_replay_data: Option<BundleReplayData>,
},
Push {
/// For now, let the bundle handle be not specified.
/// We may change it later
bundle_replay_data: Option<BundleReplayData>,
},
/// Bookmark was updated by a pushrebase.
Pushrebase,
/// Bookmark was updated by a plain push.
Push,
/// Bookmark was updated by blobimport.
Blobimport,
/// Bookmark was moved manually i.e. via mononoke_admin tool
ManualMove,
/// Bookmark was moved by test code.
///
/// Only used for tests, should never be used in production
TestMove {
bundle_replay_data: Option<BundleReplayData>,
},
/// Used during sync from a large repo into small repo.
Backsyncer {
bundle_replay_data: Option<BundleReplayData>,
},
TestMove,
/// Bookmark was moved during a back-sync from a large repo into a small repo.
Backsyncer,
/// Bookmark was moved during a sync from a small repo into a large repo.
XRepoSync,
}
@ -133,85 +133,30 @@ impl std::fmt::Display for BookmarkUpdateReason {
use BookmarkUpdateReason::*;
let s = match self {
Pushrebase { .. } => "pushrebase",
Push { .. } => "push",
Pushrebase => "pushrebase",
Push => "push",
Blobimport => "blobimport",
ManualMove => "manualmove",
TestMove { .. } => "testmove",
Backsyncer { .. } => "backsyncer",
XRepoSync { .. } => "xreposync",
TestMove => "testmove",
Backsyncer => "backsyncer",
XRepoSync => "xreposync",
};
write!(f, "{}", s)
}
}
impl BookmarkUpdateReason {
pub fn update_bundle_replay_data(
self,
bundle_replay_data: Option<BundleReplayData>,
) -> Result<Self> {
use BookmarkUpdateReason::*;
match self {
Pushrebase { .. } => Ok(Pushrebase { bundle_replay_data }),
Push { .. } => Ok(Push { bundle_replay_data }),
Blobimport | ManualMove | XRepoSync => match bundle_replay_data {
Some(..) => bail!("internal error: bundle replay data can not be specified"),
None => Ok(self),
},
TestMove { .. } => Ok(TestMove { bundle_replay_data }),
Backsyncer { .. } => Ok(Backsyncer { bundle_replay_data }),
}
}
pub fn into_bundle_replay_data(self) -> Option<BundleReplayData> {
use BookmarkUpdateReason::*;
match self {
Pushrebase { bundle_replay_data }
| Push { bundle_replay_data }
| TestMove { bundle_replay_data }
| Backsyncer { bundle_replay_data } => bundle_replay_data,
Blobimport | ManualMove | XRepoSync => None,
}
}
pub fn get_bundle_replay_data(&self) -> Option<&BundleReplayData> {
use BookmarkUpdateReason::*;
match self {
Pushrebase {
ref bundle_replay_data,
}
| Push {
ref bundle_replay_data,
}
| TestMove {
ref bundle_replay_data,
}
| Backsyncer {
ref bundle_replay_data,
} => bundle_replay_data.as_ref(),
Blobimport | ManualMove | XRepoSync => None,
}
}
}
impl ConvIr<BookmarkUpdateReason> for BookmarkUpdateReason {
fn new(v: Value) -> Result<Self, FromValueError> {
use BookmarkUpdateReason::*;
match v {
Value::Bytes(ref b) if b == &b"pushrebase" => Ok(BookmarkUpdateReason::Pushrebase {
bundle_replay_data: None,
}),
Value::Bytes(ref b) if b == &b"push" => Ok(BookmarkUpdateReason::Push {
bundle_replay_data: None,
}),
Value::Bytes(ref b) if b == &b"blobimport" => Ok(BookmarkUpdateReason::Blobimport),
Value::Bytes(ref b) if b == &b"manualmove" => Ok(BookmarkUpdateReason::ManualMove),
Value::Bytes(ref b) if b == &b"testmove" => Ok(BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
}),
Value::Bytes(ref b) if b == &b"backsyncer" => Ok(BookmarkUpdateReason::Backsyncer {
bundle_replay_data: None,
}),
Value::Bytes(ref b) if b == &b"xreposync" => Ok(BookmarkUpdateReason::XRepoSync),
Value::Bytes(ref b) if b == &b"pushrebase" => Ok(Pushrebase),
Value::Bytes(ref b) if b == &b"push" => Ok(Push),
Value::Bytes(ref b) if b == &b"blobimport" => Ok(Blobimport),
Value::Bytes(ref b) if b == &b"manualmove" => Ok(ManualMove),
Value::Bytes(ref b) if b == &b"testmove" => Ok(TestMove),
Value::Bytes(ref b) if b == &b"backsyncer" => Ok(Backsyncer),
Value::Bytes(ref b) if b == &b"xreposync" => Ok(XRepoSync),
v => Err(FromValueError(v)),
}
}
@ -231,35 +176,49 @@ impl FromValue for BookmarkUpdateReason {
impl From<BookmarkUpdateReason> for Value {
fn from(bookmark_update_reason: BookmarkUpdateReason) -> Self {
use BookmarkUpdateReason::*;
match bookmark_update_reason {
BookmarkUpdateReason::Pushrebase { .. } => Value::Bytes(b"pushrebase".to_vec()),
BookmarkUpdateReason::Push { .. } => Value::Bytes(b"push".to_vec()),
BookmarkUpdateReason::Blobimport { .. } => Value::Bytes(b"blobimport".to_vec()),
BookmarkUpdateReason::ManualMove { .. } => Value::Bytes(b"manualmove".to_vec()),
BookmarkUpdateReason::TestMove { .. } => Value::Bytes(b"testmove".to_vec()),
BookmarkUpdateReason::Backsyncer { .. } => Value::Bytes(b"backsyncer".to_vec()),
BookmarkUpdateReason::XRepoSync { .. } => Value::Bytes(b"xreposync".to_vec()),
Pushrebase => Value::Bytes(b"pushrebase".to_vec()),
Push => Value::Bytes(b"push".to_vec()),
Blobimport => Value::Bytes(b"blobimport".to_vec()),
ManualMove => Value::Bytes(b"manualmove".to_vec()),
TestMove => Value::Bytes(b"testmove".to_vec()),
Backsyncer => Value::Bytes(b"backsyncer".to_vec()),
XRepoSync => Value::Bytes(b"xreposync".to_vec()),
}
}
}
/// Encapsulation of the data required to replay a Mercurial bundle.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BundleReplayData {
pub struct RawBundleReplayData {
pub bundle_handle: String,
pub commit_timestamps: HashMap<HgChangesetId, Timestamp>,
pub commit_timestamps_json: String,
}
impl BundleReplayData {
pub fn new(raw_bundle2_id: RawBundle2Id) -> Self {
Self {
bundle_handle: raw_bundle2_id.to_hex().as_str().to_owned(),
commit_timestamps: HashMap::new(),
impl RawBundleReplayData {
    /// Assemble replay data from its optional storage columns.
    ///
    /// Both columns present yields `Some`, both absent yields `None`, and a
    /// mismatch (only one present) is an error, since the two columns are
    /// always written together.
    pub fn maybe_new(
        bundle_handle: Option<String>,
        commit_timestamps_json: Option<String>,
    ) -> Result<Option<Self>> {
        match (bundle_handle, commit_timestamps_json) {
            (None, None) => Ok(None),
            (Some(bundle_handle), Some(commit_timestamps_json)) => Ok(Some(Self {
                bundle_handle,
                commit_timestamps_json,
            })),
            _ => Err(anyhow!("inconsistent replay data")),
        }
    }
}
pub fn with_timestamps(mut self, commit_timestamps: HashMap<HgChangesetId, Timestamp>) -> Self {
self.commit_timestamps = commit_timestamps;
self
/// A provider of bundle replay data that can be stored alongside a bookmark
/// update log entry.
pub trait BundleReplay: Sync {
/// Convert to the raw (storage) representation of the replay data.
fn to_raw(&self) -> Result<RawBundleReplayData>;
}
// The raw storage form trivially converts to itself.
impl BundleReplay for RawBundleReplayData {
fn to_raw(&self) -> Result<RawBundleReplayData> {
Ok(self.clone())
}
}

View File

@ -15,7 +15,7 @@ use mononoke_types::ChangesetId;
use sql::Transaction;
use thiserror::Error;
use crate::log::BookmarkUpdateReason;
use crate::log::{BookmarkUpdateReason, BundleReplay};
#[derive(Debug, Error)]
pub enum BookmarkTransactionError {
@ -52,6 +52,7 @@ pub trait BookmarkTransaction: Send + Sync + 'static {
new_cs: ChangesetId,
old_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()>;
/// Adds create() operation to the transaction set.
@ -62,6 +63,7 @@ pub trait BookmarkTransaction: Send + Sync + 'static {
bookmark: &BookmarkName,
new_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()>;
/// Adds force_set() operation to the transaction set.
@ -72,6 +74,7 @@ pub trait BookmarkTransaction: Send + Sync + 'static {
bookmark: &BookmarkName,
new_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()>;
/// Adds delete operation to the transaction set.
@ -81,12 +84,17 @@ pub trait BookmarkTransaction: Send + Sync + 'static {
bookmark: &BookmarkName,
old_cs: ChangesetId,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()>;
/// Adds force_delete operation to the transaction set.
/// Deletes bookmark unconditionally.
fn force_delete(&mut self, bookmark: &BookmarkName, reason: BookmarkUpdateReason)
-> Result<()>;
fn force_delete(
&mut self,
bookmark: &BookmarkName,
reason: BookmarkUpdateReason,
bundle_replay: Option<&dyn BundleReplay>,
) -> Result<()>;
/// Adds a scratch bookmark update operation to the transaction set.
/// Updates the changeset referenced by the bookmark, if it is already a scratch bookmark.

View File

@ -341,14 +341,16 @@ fn handle_set<'a>(
&bookmark,
new_bcs.get_changeset_id(),
old_bcs_id,
BookmarkUpdateReason::ManualMove
BookmarkUpdateReason::ManualMove,
None,
));
}
None => {
try_boxfuture!(transaction.create(
&bookmark,
new_bcs.get_changeset_id(),
BookmarkUpdateReason::ManualMove
BookmarkUpdateReason::ManualMove,
None,
));
}
}
@ -386,7 +388,8 @@ fn handle_delete<'a>(
try_boxfuture!(transaction.delete(
&bookmark,
bcs_id,
BookmarkUpdateReason::ManualMove
BookmarkUpdateReason::ManualMove,
None,
));
transaction
.commit()

View File

@ -302,7 +302,7 @@ async fn update_large_repo_bookmarks(
))?;
info!(ctx.logger(), "setting {} {}", large_bookmark, large_cs_id);
book_txn.force_set(&large_bookmark, large_cs_id, reason)?;
book_txn.force_set(&large_bookmark, large_cs_id, reason, None)?;
} else {
warn!(
ctx.logger(),
@ -323,7 +323,7 @@ async fn update_large_repo_bookmarks(
))?;
let reason = BookmarkUpdateReason::XRepoSync;
info!(ctx.logger(), "deleting {}", large_bookmark);
book_txn.force_delete(&large_bookmark, reason)?;
book_txn.force_delete(&large_bookmark, reason, None)?;
}
NoSyncOutcome { target_bookmark } => {
warn!(

View File

@ -17,6 +17,7 @@ use dbbookmarks::SqlBookmarks;
use fbinit::FacebookInit;
use futures::stream::StreamExt;
use futures::{compat::Future01CompatExt, future};
use mercurial_bundle_replay_data::BundleReplayData;
use mononoke_hg_sync_job_helper_lib::save_bundle_to_file;
use mononoke_types::{BonsaiChangeset, ChangesetId, RepositoryId};
use mutable_counters::{MutableCounters, SqlMutableCounters};
@ -375,16 +376,15 @@ async fn fetch_bundle(
let log_entry = get_entry_by_id(ctx, repo.get_repoid(), bookmarks, id).await?;
let bundle_handle = &log_entry
.reason
.get_bundle_replay_data()
let bundle_replay_data: BundleReplayData = log_entry
.bundle_replay_data
.ok_or_else(|| Error::msg("no bundle found"))?
.bundle_handle;
.try_into()?;
save_bundle_to_file(
&ctx,
repo.blobstore(),
bundle_handle,
bundle_replay_data.bundle2_id,
output_file,
true, /* create */
)

View File

@ -83,14 +83,8 @@ async fn create_initial_commit(ctx: CoreContext, repo: &BlobRepo) -> ChangesetId
.unwrap();
let mut txn = repo.update_bookmark_transaction(ctx.clone());
txn.force_set(
&bookmark,
bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)
.unwrap();
txn.force_set(&bookmark, bcs_id, BookmarkUpdateReason::TestMove, None)
.unwrap();
txn.commit().await.unwrap();
bcs_id
}
@ -124,14 +118,8 @@ async fn create_empty_commit(ctx: CoreContext, repo: &BlobRepo) -> ChangesetId {
.unwrap();
let mut txn = repo.update_bookmark_transaction(ctx.clone());
txn.force_set(
&bookmark,
bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)
.unwrap();
txn.force_set(&bookmark, bcs_id, BookmarkUpdateReason::TestMove, None)
.unwrap();
txn.commit().await.unwrap();
bcs_id
}
@ -334,14 +322,8 @@ async fn update_master_file(ctx: CoreContext, repo: &BlobRepo) -> ChangesetId {
.unwrap();
let mut txn = repo.update_bookmark_transaction(ctx.clone());
txn.force_set(
&bookmark,
bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)
.unwrap();
txn.force_set(&bookmark, bcs_id, BookmarkUpdateReason::TestMove, None)
.unwrap();
txn.commit().await.unwrap();
bcs_id
}
@ -543,14 +525,8 @@ async fn megarepo_copy_file(
.unwrap();
let mut txn = repo.update_bookmark_transaction(ctx.clone());
txn.force_set(
&bookmark,
bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)
.unwrap();
txn.force_set(&bookmark, bcs_id, BookmarkUpdateReason::TestMove, None)
.unwrap();
txn.commit().await.unwrap();
bcs_id
}
@ -912,14 +888,8 @@ async fn update_linear_1_file(ctx: CoreContext, repo: &BlobRepo) -> ChangesetId
.unwrap();
let mut txn = repo.update_bookmark_transaction(ctx.clone());
txn.force_set(
&bookmark,
bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)
.unwrap();
txn.force_set(&bookmark, bcs_id, BookmarkUpdateReason::TestMove, None)
.unwrap();
txn.commit().await.unwrap();
bcs_id

View File

@ -109,9 +109,8 @@ where
txn.force_set(
&bookmark_name,
target_bcs.get_changeset_id(),
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
BookmarkUpdateReason::TestMove,
None,
)
.unwrap();
txn.commit().await.unwrap();

View File

@ -879,9 +879,8 @@ fn test_file_hooks_with_blob_store(fb: FacebookInit) {
txn.force_set(
&BookmarkName::new("master").unwrap(),
bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
BookmarkUpdateReason::TestMove,
None,
)
.unwrap();
txn.commit().await.unwrap();

View File

@ -99,7 +99,7 @@ async fn create_bookmark(
"Setting bookmark {:?} to point to {:?}", bookmark, bcs_id
);
let mut transaction = repo.update_bookmark_transaction(ctx.clone());
transaction.force_set(&bookmark, bcs_id, BookmarkUpdateReason::ManualMove)?;
transaction.force_set(&bookmark, bcs_id, BookmarkUpdateReason::ManualMove, None)?;
let commit_result = transaction.commit().await?;

View File

@ -0,0 +1,14 @@
[package]
name = "mercurial_bundle_replay_data"
edition = "2018"
version = "0.1.0"
authors = ['Facebook']
license = "GPLv2+"
include = ["src/**/*.rs"]
[dependencies]
bookmarks = { path = "../../bookmarks" }
mercurial_types = { path = "../types" }
mononoke_types = { path = "../../mononoke_types" }
anyhow = "1.0"
serde_json = "1.0"

View File

@ -0,0 +1,74 @@
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
//! Encapsulation of data required to replay Mercurial bundles in the bookmark
//! update log.
use std::collections::HashMap;
use std::convert::TryFrom;
use anyhow::Result;
use bookmarks::{BundleReplay, RawBundleReplayData};
use mercurial_types::HgChangesetId;
use mononoke_types::{RawBundle2Id, Timestamp};
/// Data required to replay a Mercurial bundle recorded in the bookmark
/// update log.
pub struct BundleReplayData {
    /// Id of the raw bundle2 that was originally pushed.
    pub bundle2_id: RawBundle2Id,
    /// Timestamp to apply to each Mercurial changeset when the bundle is
    /// replayed (e.g. for changesets rewritten by pushrebase).  May be
    /// empty when no per-commit timestamps were captured.
    pub timestamps: HashMap<HgChangesetId, Timestamp>,
}
impl BundleReplayData {
pub fn new(bundle2_id: RawBundle2Id) -> Self {
BundleReplayData {
bundle2_id,
timestamps: HashMap::new(),
}
}
pub fn new_with_timestamps(
bundle2_id: RawBundle2Id,
timestamps: HashMap<HgChangesetId, Timestamp>,
) -> Self {
BundleReplayData {
bundle2_id,
timestamps,
}
}
}
impl BundleReplay for BundleReplayData {
    /// Serialize into the raw, string-based form stored alongside bookmark
    /// update log entries.
    fn to_raw(&self) -> Result<RawBundleReplayData> {
        let bundle_handle = self.bundle2_id.to_hex().to_string();
        let commit_timestamps_json = serde_json::to_string(&self.timestamps)?;
        Ok(RawBundleReplayData {
            bundle_handle,
            commit_timestamps_json,
        })
    }
}
impl TryFrom<&RawBundleReplayData> for BundleReplayData {
    type Error = anyhow::Error;

    /// Parse the raw, string-based form back into structured replay data.
    /// Fails if the bundle handle is not a valid `RawBundle2Id` or the
    /// timestamps JSON does not deserialize.
    // NOTE(review): `RawBundle2Id::from_str` appears to need
    // `use std::str::FromStr;` in scope unless an inherent `from_str`
    // exists — confirm against mononoke_types (blank lines of this new
    // file were dropped by the diff renderer, so the import may exist).
    fn try_from(raw: &RawBundleReplayData) -> Result<Self> {
        Ok(Self {
            bundle2_id: RawBundle2Id::from_str(&raw.bundle_handle)?,
            timestamps: serde_json::from_str(&raw.commit_timestamps_json)?,
        })
    }
}
impl TryFrom<RawBundleReplayData> for BundleReplayData {
    type Error = anyhow::Error;

    /// Owned-value convenience wrapper; delegates to the by-reference
    /// conversion.
    fn try_from(raw: RawBundleReplayData) -> Result<Self> {
        Self::try_from(&raw)
    }
}

View File

@ -34,9 +34,8 @@ async fn init_repo(ctx: &CoreContext) -> Result<(RepoContext, BTreeMap<String, C
txn.force_set(
&BookmarkName::new("trunk")?,
changesets["C"],
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
BookmarkUpdateReason::TestMove,
None,
)?;
txn.commit().await?;
@ -90,24 +89,9 @@ async fn move_bookmark(fb: FacebookInit) -> Result<()> {
assert_eq!(
entries,
vec![
(
Some(changesets["G"]),
BookmarkUpdateReason::Pushrebase {
bundle_replay_data: None
},
),
(
Some(changesets["E"]),
BookmarkUpdateReason::Pushrebase {
bundle_replay_data: None
},
),
(
Some(changesets["C"]),
BookmarkUpdateReason::TestMove {
bundle_replay_data: None
},
),
(Some(changesets["G"]), BookmarkUpdateReason::Pushrebase),
(Some(changesets["E"]), BookmarkUpdateReason::Pushrebase),
(Some(changesets["C"]), BookmarkUpdateReason::TestMove),
]
);

View File

@ -36,13 +36,8 @@ fn delete_all_publishing_bookmarks(rt: &mut Runtime, ctx: CoreContext, repo: Blo
let mut txn = repo.update_bookmark_transaction(ctx);
for (bookmark, _) in bookmarks {
txn.force_delete(
bookmark.name(),
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)
.unwrap();
txn.force_delete(bookmark.name(), BookmarkUpdateReason::TestMove, None)
.unwrap();
}
assert!(rt.block_on(txn.commit().compat()).unwrap());
@ -60,14 +55,8 @@ fn set_bookmark(
.unwrap()
.unwrap();
let mut txn = repo.update_bookmark_transaction(ctx);
txn.force_set(
&book,
head,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)
.unwrap();
txn.force_set(&book, head, BookmarkUpdateReason::TestMove, None)
.unwrap();
assert!(rt.block_on(txn.commit().compat()).unwrap());
}

View File

@ -16,6 +16,7 @@ context = { path = "../server/context" }
derived_data = { path = "../derived_data" }
derived_data_filenodes = { path = "../derived_data/filenodes" }
manifest = { path = "../manifest" }
mercurial_bundle_replay_data = { path = "../mercurial/bundle_replay_data" }
mercurial_types = { path = "../mercurial/types" }
metaconfig_types = { path = "../metaconfig/types" }
mononoke_types = { path = "../mononoke_types" }

View File

@ -53,7 +53,7 @@ use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use blobrepo_utils::convert_diff_result_into_file_change_for_diamond_merge;
use blobstore::Loadable;
use bookmarks::{BookmarkName, BookmarkUpdateReason, BundleReplayData};
use bookmarks::{BookmarkName, BookmarkUpdateReason, BundleReplay};
use cloned::cloned;
use context::CoreContext;
use derived_data::BonsaiDerived;
@ -67,6 +67,7 @@ use futures_ext::{BoxFuture, FutureExt as Futures01FutureExt, StreamExt as Futur
use futures_old::{stream, Future, Stream};
use manifest::{bonsai_diff, BonsaiDiffFileChange, ManifestOps};
use maplit::hashmap;
use mercurial_bundle_replay_data::BundleReplayData;
use mercurial_types::{HgChangesetId, HgFileNodeId, HgManifestId, MPath};
use metaconfig_types::PushrebaseFlags;
use mononoke_types::{
@ -171,8 +172,25 @@ impl HgReplayData {
self.cs_id_convertor = cs_id_convertor;
}
pub fn get_raw_bundle2_id(&self) -> RawBundle2Id {
self.bundle2_id.clone()
pub async fn to_bundle_replay_data(
&self,
rebased_changesets: Option<&RebasedChangesets>,
) -> Result<BundleReplayData> {
let bundle2_id = self.bundle2_id.clone();
if let Some(rebased_changesets) = rebased_changesets {
let timestamps = rebased_changesets.iter().map({
|(cs_id, (_, timestamp))| async move {
let hg_cs_id = (self.cs_id_convertor)(*cs_id).compat().await?;
Ok::<_, Error>((hg_cs_id, *timestamp))
}
});
let timestamps = try_join_all(timestamps).await?.into_iter().collect();
Ok(BundleReplayData::new_with_timestamps(
bundle2_id, timestamps,
))
} else {
Ok(BundleReplayData::new(bundle2_id))
}
}
}
@ -1220,14 +1238,35 @@ async fn try_move_bookmark(
let bookmark_name = &bookmark.bookmark;
let mut txn = repo.update_bookmark_transaction(ctx);
let reason = create_bookmark_update_reason(maybe_hg_replay_data, &rebased_changesets).await?;
let bundle_replay_data = match maybe_hg_replay_data {
Some(hg_replay_data) => Some(
hg_replay_data
.to_bundle_replay_data(Some(&rebased_changesets))
.await?,
),
None => None,
};
let bundle_replay = bundle_replay_data
.as_ref()
.map(|data| data as &dyn BundleReplay);
match old_value {
Some(old_value) => {
txn.update(&bookmark_name, new_value, old_value, reason)?;
txn.update(
&bookmark_name,
new_value,
old_value,
BookmarkUpdateReason::Pushrebase,
bundle_replay,
)?;
}
None => {
txn.create(&bookmark_name, new_value, reason)?;
txn.create(
&bookmark_name,
new_value,
BookmarkUpdateReason::Pushrebase,
bundle_replay,
)?;
}
}
@ -1255,42 +1294,6 @@ async fn try_move_bookmark(
Ok(ret)
}
async fn create_bookmark_update_reason(
maybe_hg_replay_data: &Option<HgReplayData>,
rebased_changesets: &RebasedChangesets,
) -> Result<BookmarkUpdateReason, Error> {
let hg_replay_data = match maybe_hg_replay_data {
Some(hg_replay_data) => hg_replay_data,
None => {
return Ok(BookmarkUpdateReason::Pushrebase {
bundle_replay_data: None,
});
}
};
let HgReplayData {
bundle2_id,
cs_id_convertor,
} = hg_replay_data;
let bundle_replay_data = BundleReplayData::new(*bundle2_id);
let timestamps = rebased_changesets.iter().map({
|(id_old, (_, timestamp))| async move {
let hg_cs_id = cs_id_convertor(*id_old).compat().await?;
Result::<_, Error>::Ok((hg_cs_id, *timestamp))
}
});
let timestamps = try_join_all(timestamps).await?.into_iter().collect();
let reason = BookmarkUpdateReason::Pushrebase {
bundle_replay_data: Some(bundle_replay_data.with_timestamps(timestamps)),
};
Ok(reason)
}
#[cfg(test)]
mod tests {
use super::*;
@ -1388,13 +1391,7 @@ mod tests {
.ok_or(Error::msg(format_err!("Head not found: {:?}", cs_id)))?;
let mut txn = repo.update_bookmark_transaction(ctx);
txn.force_set(
&book,
head,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)?;
txn.force_set(&book, head, BookmarkUpdateReason::TestMove, None)?;
txn.commit().await?;
Ok(())
}

View File

@ -22,6 +22,7 @@ globalrev_pushrebase_hook = { path = "../../bonsai_globalrev_mapping/globalrev_p
hooks = { path = "../../hooks" }
limits = { path = "../../../../configerator/structs/scm/mononoke/loadshedding" }
live_commit_sync_config = { path = "../../commit_rewriting/live_commit_sync_config" }
mercurial_bundle_replay_data = { path = "../../mercurial/bundle_replay_data" }
mercurial_bundles = { path = "../../mercurial/bundles" }
mercurial_mutation = { path = "../../mercurial/mutation" }
mercurial_revlog = { path = "../../mercurial/revlog" }

View File

@ -18,8 +18,7 @@ use bonsai_git_mapping::{
extract_git_sha1_from_bonsai_extra, BonsaiGitMapping, BonsaiGitMappingEntry,
};
use bookmarks::{
BookmarkName, BookmarkTransaction, BookmarkTransactionHook, BookmarkUpdateReason,
BundleReplayData,
BookmarkName, BookmarkTransaction, BookmarkTransactionHook, BookmarkUpdateReason, BundleReplay,
};
use context::CoreContext;
use futures::{
@ -32,6 +31,7 @@ use futures_stats::TimedFutureExt;
use git_mapping_pushrebase_hook::GitMappingPushrebaseHook;
use globalrev_pushrebase_hook::GlobalrevPushrebaseHook;
use maplit::hashset;
use mercurial_bundle_replay_data::BundleReplayData;
use metaconfig_types::{BookmarkAttrs, InfinitepushParams, PushParams, PushrebaseParams};
use mononoke_types::{BonsaiChangeset, ChangesetId, RawBundle2Id};
use pushrebase::{self, PushrebaseHook};
@ -187,9 +187,8 @@ async fn run_push(
Some(bp) => Some(bp.part_id),
None => None,
};
let reason = BookmarkUpdateReason::Push {
bundle_replay_data: maybe_raw_bundle2_id.map(BundleReplayData::new),
};
let reason = BookmarkUpdateReason::Push;
let bundle_replay_data = maybe_raw_bundle2_id.map(BundleReplayData::new);
let maybe_bookmark_push = match maybe_bookmark_push {
Some(bookmark_push) => {
@ -232,7 +231,15 @@ async fn run_push(
let maybe_bookmark = maybe_bookmark_push.clone().map(|bp| bp.name);
let maybe_bookmark_push = maybe_bookmark_push.map(BookmarkPush::PlainPush);
save_bookmark_pushes_to_db(ctx, repo, reason, vec![maybe_bookmark_push], txn_hook).await?;
save_bookmark_pushes_to_db(
ctx,
repo,
reason,
&bundle_replay_data,
vec![maybe_bookmark_push],
txn_hook,
)
.await?;
let bookmark_ids = maybe_bookmark_id.into_iter().collect();
log_commits_to_scribe(
ctx,
@ -492,9 +499,8 @@ async fn run_infinitepush(
}
let bookmark = if let Some(bookmark_push) = maybe_bookmark_push {
let reason = BookmarkUpdateReason::Push {
bundle_replay_data: maybe_raw_bundle2_id.map(BundleReplayData::new),
};
let reason = BookmarkUpdateReason::Push;
let bundle_replay_data = maybe_raw_bundle2_id.map(BundleReplayData::new);
let maybe_bonsai_bookmark_push = filter_or_check_infinitepush_allowed(
ctx,
repo,
@ -509,6 +515,7 @@ async fn run_infinitepush(
ctx,
repo,
reason,
&bundle_replay_data,
vec![maybe_bonsai_bookmark_push
.clone()
.map(BookmarkPush::Infinitepush)],
@ -640,10 +647,9 @@ async fn run_bookmark_only_pushrebase(
} = action;
let part_id = bookmark_push.part_id;
let reason = BookmarkUpdateReason::Pushrebase {
// Since this a bookmark-only pushrebase, there are no changeset timestamps
bundle_replay_data: maybe_raw_bundle2_id.map(|id| BundleReplayData::new(id)),
};
let reason = BookmarkUpdateReason::Pushrebase;
    // Since this is a bookmark-only pushrebase, there are no changeset timestamps
let bundle_replay_data = maybe_raw_bundle2_id.map(BundleReplayData::new);
let bookmark_push = check_plain_bookmark_push_allowed(
ctx,
@ -670,7 +676,15 @@ async fn run_bookmark_only_pushrebase(
}
let maybe_bookmark_push = Some(BookmarkPush::PlainPush(bookmark_push));
save_bookmark_pushes_to_db(ctx, repo, reason, vec![maybe_bookmark_push], txn_hook).await?;
save_bookmark_pushes_to_db(
ctx,
repo,
reason,
&bundle_replay_data,
vec![maybe_bookmark_push],
txn_hook,
)
.await?;
Ok(UnbundleBookmarkOnlyPushRebaseResponse {
bookmark_push_part_id: part_id,
})
@ -776,11 +790,11 @@ async fn force_pushrebase(
let maybe_target_bcs = bookmark_push.new.clone();
let target_bcs = maybe_target_bcs
.ok_or_else(|| Error::msg("new changeset is required for force pushrebase"))?;
let reason = BookmarkUpdateReason::Pushrebase {
bundle_replay_data: maybe_hg_replay_data
.as_ref()
.map(|hg_replay_data| hg_replay_data.get_raw_bundle2_id())
.map(BundleReplayData::new),
let reason = BookmarkUpdateReason::Pushrebase;
let bundle_replay_data = if let Some(hg_replay_data) = &maybe_hg_replay_data {
Some(hg_replay_data.to_bundle_replay_data(None).await?)
} else {
None
};
let maybe_bookmark_push = check_plain_bookmark_push_allowed(
@ -795,7 +809,15 @@ async fn force_pushrebase(
.await
.map(|bp| Some(BookmarkPush::PlainPush(bp)))?;
save_bookmark_pushes_to_db(ctx, repo, reason, vec![maybe_bookmark_push], None).await?;
save_bookmark_pushes_to_db(
ctx,
repo,
reason,
&bundle_replay_data,
vec![maybe_bookmark_push],
None,
)
.await?;
// Note that this push did not do any actual rebases, so we do not
// need to provide any actual mapping, an empty Vec will do
@ -807,6 +829,7 @@ async fn save_bookmark_pushes_to_db<'a>(
ctx: &'a CoreContext,
repo: &'a BlobRepo,
reason: BookmarkUpdateReason,
bundle_replay_data: &'a Option<BundleReplayData>,
bonsai_bookmark_pushes: Vec<Option<BookmarkPush<ChangesetId>>>,
txn_hook: Option<BookmarkTransactionHook>,
) -> Result<(), Error> {
@ -820,7 +843,7 @@ async fn save_bookmark_pushes_to_db<'a>(
let mut txn = repo.update_bookmark_transaction(ctx.clone());
for bp in bonsai_bookmark_pushes.into_iter().flatten() {
add_bookmark_to_transaction(&mut txn, bp, reason.clone())?;
add_bookmark_to_transaction(&mut txn, bp, reason, bundle_replay_data)?;
}
let ok = if let Some(txn_hook) = txn_hook {
@ -946,14 +969,20 @@ fn add_bookmark_to_transaction(
txn: &mut Box<dyn BookmarkTransaction>,
bookmark_push: BookmarkPush<ChangesetId>,
reason: BookmarkUpdateReason,
bundle_replay_data: &Option<BundleReplayData>,
) -> Result<()> {
match bookmark_push {
BookmarkPush::PlainPush(PlainBookmarkPush { new, old, name, .. }) => match (new, old) {
(Some(new), Some(old)) => txn.update(&name, new, old, reason),
(Some(new), None) => txn.create(&name, new, reason),
(None, Some(old)) => txn.delete(&name, old, reason),
_ => Ok(()),
},
BookmarkPush::PlainPush(PlainBookmarkPush { new, old, name, .. }) => {
let bundle_replay = bundle_replay_data
.as_ref()
.map(|data| data as &dyn BundleReplay);
match (new, old) {
(Some(new), Some(old)) => txn.update(&name, new, old, reason, bundle_replay),
(Some(new), None) => txn.create(&name, new, reason, bundle_replay),
(None, Some(old)) => txn.delete(&name, old, reason, bundle_replay),
_ => Ok(()),
}
}
BookmarkPush::Infinitepush(InfiniteBookmarkPush { name, new, old, .. }) => match (new, old)
{
(new, Some(old)) => txn.update_scratch(&name, new, old),

View File

@ -139,9 +139,8 @@ pub async fn set_bookmark(
txn.force_set(
&bookmark,
bcs_id.unwrap(),
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
BookmarkUpdateReason::TestMove,
None,
)
.unwrap();
txn.commit().await.unwrap();
@ -1647,9 +1646,8 @@ pub mod many_diamonds {
txn.force_set(
&BookmarkName::new("master").unwrap(),
last_bcs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
BookmarkUpdateReason::TestMove,
None,
)
.unwrap();
txn.commit().await.unwrap();

View File

@ -301,13 +301,7 @@ impl UpdateBookmarkContext {
let cs_id = resolve_cs_id(&self.ctx, &self.repo, cs_ident).await?;
let mut book_txn = self.repo.update_bookmark_transaction(self.ctx);
book_txn.force_set(
&bookmark,
cs_id,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)?;
book_txn.force_set(&bookmark, cs_id, BookmarkUpdateReason::TestMove, None)?;
book_txn.commit().await?;
Ok(bookmark)
}
@ -320,12 +314,7 @@ impl UpdateBookmarkContext {
};
let mut book_txn = self.repo.update_bookmark_transaction(self.ctx);
book_txn.force_delete(
&bookmark,
BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
)?;
book_txn.force_delete(&bookmark, BookmarkUpdateReason::TestMove, None)?;
book_txn.commit().await?;
Ok(())
}

View File

@ -66,9 +66,7 @@ fn main(fb: FacebookInit) -> Result<()> {
let name = matches.value_of(BOOKMARK).unwrap().to_string();
let reason = match matches.is_present(BLOBIMPORT) {
true => BookmarkUpdateReason::Blobimport,
false => BookmarkUpdateReason::TestMove {
bundle_replay_data: None,
},
false => BookmarkUpdateReason::TestMove,
};
let bookmark = BookmarkName::new(name).unwrap();
@ -81,6 +79,7 @@ fn main(fb: FacebookInit) -> Result<()> {
&bookmark,
ChangesetId::from_str(&sub_m.value_of(ID).unwrap().to_string()).unwrap(),
reason,
None,
)
.unwrap();
}
@ -90,6 +89,7 @@ fn main(fb: FacebookInit) -> Result<()> {
ChangesetId::from_str(&sub_m.value_of(TO_ID).unwrap().to_string()).unwrap(),
ChangesetId::from_str(&sub_m.value_of(FROM_ID).unwrap().to_string()).unwrap(),
reason,
None,
)
.unwrap();
}

View File

@ -16,6 +16,7 @@ cmdlib = { path = "../cmdlib" }
context = { path = "../server/context" }
hooks = { path = "../hooks" }
hooks_content_stores = { path = "../hooks/content-stores" }
mercurial_bundle_replay_data = { path = "../mercurial/bundle_replay_data" }
mercurial_bundles = { path = "../mercurial/bundles" }
mercurial_types = { path = "../mercurial/types" }
metaconfig_types = { path = "../metaconfig/types" }

View File

@ -13,11 +13,12 @@ use bookmarks::{BookmarkName, BookmarkUpdateLogEntry};
use bytes::Bytes;
use context::CoreContext;
use futures::compat::Future01CompatExt;
use mercurial_bundle_replay_data::BundleReplayData;
use mercurial_types::HgChangesetId;
use mononoke_types::{hash::Blake2, ChangesetId, RawBundle2Id, Timestamp};
use mononoke_types::{ChangesetId, RawBundle2Id, Timestamp};
use slog::info;
use std::collections::HashMap;
use std::str::FromStr;
use std::convert::TryInto;
use std::time::Duration;
use tokio::process::Command;
@ -100,14 +101,12 @@ pub struct ReplaySpec<'a> {
impl ReplaySpec<'static> {
pub fn from_bookmark_update_log_entry(entry: BookmarkUpdateLogEntry) -> Result<Self, Error> {
let replay_data = entry
.reason
.into_bundle_replay_data()
.ok_or_else(|| format_err!("Entry has replay data"))?;
let replay_data: BundleReplayData = entry
.bundle_replay_data
.ok_or_else(|| format_err!("Entry has replay data"))?
.try_into()?;
let bundle = BundleHandle::blob(
Blake2::from_str(&replay_data.bundle_handle).map(RawBundle2Id::new)?,
);
let bundle = BundleHandle::blob(replay_data.bundle2_id);
let target = entry
.to_changeset_id
@ -116,7 +115,7 @@ impl ReplaySpec<'static> {
Ok(ReplaySpec {
bundle,
pushrebase_spec: PushrebaseSpec {
timestamps: replay_data.commit_timestamps,
timestamps: replay_data.timestamps,
onto: entry.bookmark_name,
onto_rev: entry.from_changeset_id.map(OntoRev::Bonsai),
target: Target::bonsai(target),