view: return &HashSet instead of Iterator

We want to be able to do fast `.contains()` checks on the result, so
`Iterator` was a bad return type. We should probably hide the exact
type (currently `HashSet` for both the readonly and mutable views),
but we can do that later. I actually thought I'd want to use
`.contains()` for indicating public-phase commits in the log output,
but of course I also want to indicate ancestors as public, so a plain
membership check is not enough there. This still seems like a step
(mostly) in the right direction.

Martin von Zweigbergk 2021-01-16 12:15:06 -08:00
parent 79eecb6119
commit 30939ca686
8 changed files with 65 additions and 96 deletions
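
As a quick, self-contained illustration of what the new return type buys callers (the `CommitId` and `ReadonlyView` below are simplified stand-ins, not the real jujube_lib definitions):

```rust
use std::collections::HashSet;

// Hypothetical stand-ins, just to show the signature change in isolation.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct CommitId(Vec<u8>);

struct ReadonlyView {
    head_ids: HashSet<CommitId>,
}

impl ReadonlyView {
    // Old shape: hand out a boxed iterator; callers had to collect() into a
    // temporary set before they could do a membership check.
    fn heads_iter<'a>(&'a self) -> Box<dyn Iterator<Item = &'a CommitId> + 'a> {
        Box::new(self.head_ids.iter())
    }

    // New shape: borrow the set itself; contains() is O(1) and iteration is
    // still available through the set's own iter().
    fn heads(&self) -> &HashSet<CommitId> {
        &self.head_ids
    }
}

fn main() {
    let commit = CommitId(vec![1, 2, 3]);
    let view = ReadonlyView {
        head_ids: vec![commit.clone()].into_iter().collect(),
    };

    // Before: collect into a temporary HashSet just to check membership.
    let heads: HashSet<_> = view.heads_iter().cloned().collect();
    assert!(heads.contains(&commit));

    // After: check membership directly on the borrowed set.
    assert!(view.heads().contains(&commit));
    assert_eq!(view.heads().iter().count(), 1);
}
```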

@@ -31,8 +31,8 @@ use crate::store_wrapper::StoreWrapper;

 pub trait View {
     fn checkout(&self) -> &CommitId;
-    fn heads<'a>(&'a self) -> Box<dyn Iterator<Item = &'a CommitId> + 'a>;
-    fn public_heads<'a>(&'a self) -> Box<dyn Iterator<Item = &'a CommitId> + 'a>;
+    fn heads(&self) -> &HashSet<CommitId>;
+    fn public_heads(&self) -> &HashSet<CommitId>;
     fn git_refs(&self) -> &BTreeMap<String, CommitId>;
     fn op_store(&self) -> Arc<dyn OpStore>;
     fn base_op_head_id(&self) -> &OperationId;

@@ -323,12 +323,12 @@ impl View for ReadonlyView {
         &self.data.checkout
     }

-    fn heads<'a>(&'a self) -> Box<dyn Iterator<Item = &'a CommitId> + 'a> {
-        Box::new(self.data.head_ids.iter())
+    fn heads(&self) -> &HashSet<CommitId> {
+        &self.data.head_ids
     }

-    fn public_heads<'a>(&'a self) -> Box<dyn Iterator<Item = &'a CommitId> + 'a> {
-        Box::new(self.data.public_head_ids.iter())
+    fn public_heads(&self) -> &HashSet<CommitId> {
+        &self.data.public_head_ids
     }

     fn git_refs(&self) -> &BTreeMap<String, CommitId> {

@@ -418,12 +418,12 @@ impl View for MutableView {
         &self.data.checkout
     }

-    fn heads<'a>(&'a self) -> Box<dyn Iterator<Item = &'a CommitId> + 'a> {
-        Box::new(self.data.head_ids.iter())
+    fn heads(&self) -> &HashSet<CommitId> {
+        &self.data.head_ids
     }

-    fn public_heads<'a>(&'a self) -> Box<dyn Iterator<Item = &'a CommitId> + 'a> {
-        Box::new(self.data.public_head_ids.iter())
+    fn public_heads(&self) -> &HashSet<CommitId> {
+        &self.data.public_head_ids
     }

     fn git_refs(&self) -> &BTreeMap<String, CommitId> {

@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-use std::collections::HashSet;
 use std::path::PathBuf;

 use tempfile::TempDir;

@@ -129,9 +128,8 @@ fn test_bad_locking_children(use_git: bool) {
         &merged_path,
     );
     let merged_repo = ReadonlyRepo::load(&settings, merged_path).unwrap();
-    let heads: HashSet<_> = merged_repo.view().heads().cloned().collect();
-    assert!(heads.contains(child1.id()));
-    assert!(heads.contains(child2.id()));
+    assert!(merged_repo.view().heads().contains(child1.id()));
+    assert!(merged_repo.view().heads().contains(child2.id()));
     let op_head_id = merged_repo.view().base_op_head_id().clone();
     let op_head = merged_repo
         .view()

@@ -63,7 +63,7 @@ fn test_commit_parallel(use_git: bool) {
     Arc::get_mut(&mut repo).unwrap().reload();
     // One commit per thread plus the commit from the initial checkout on top of the
     // root commit
-    assert_eq!(repo.view().heads().count(), 101);
+    assert_eq!(repo.view().heads().len(), 101);

     // One operation for initializing the repo (containing the root id and the
     // initial working copy commit).

@@ -94,7 +94,7 @@ fn test_commit_parallel_instances(use_git: bool) {
     // One commit per thread plus the commit from the initial checkout on top of the
     // root commit
     let repo = ReadonlyRepo::load(&settings, repo.working_copy_path().clone()).unwrap();
-    assert_eq!(repo.view().heads().count(), 101);
+    assert_eq!(repo.view().heads().len(), 101);

     // One operation for initializing the repo (containing the root id and the
     // initial working copy commit).

@@ -67,10 +67,9 @@ fn test_import_refs() {
     let git_repo = repo.store().git_repo().unwrap();

     let mut tx = repo.start_transaction("test");
-    let heads_before: HashSet<_> = repo.view().heads().cloned().collect();
+    let heads_before: HashSet<_> = repo.view().heads().clone();
     jujube_lib::git::import_refs(&mut tx, &git_repo).unwrap_or_default();
     let view = tx.as_repo().view();
-    let heads_after: HashSet<_> = view.heads().cloned().collect();
     let expected_heads: HashSet<_> = heads_before
         .union(&hashset!(
             commit_id(&commit3),

@@ -79,9 +78,8 @@ fn test_import_refs() {
         ))
         .cloned()
         .collect();
-    assert_eq!(heads_after, expected_heads);
-    let public_heads_after: HashSet<_> = view.public_heads().cloned().collect();
-    assert_eq!(public_heads_after, hashset!(commit_id(&commit5)));
+    assert_eq!(*view.heads(), expected_heads);
+    assert_eq!(*view.public_heads(), hashset!(commit_id(&commit5)));
     assert_eq!(view.git_refs().len(), 4);
     assert_eq!(
         view.git_refs().get("refs/heads/main"),

@@ -113,7 +111,7 @@ fn test_import_refs_reimport() {
     let commit3 = empty_git_commit(&git_repo, "refs/heads/feature1", &[&commit2]);
     let commit4 = empty_git_commit(&git_repo, "refs/heads/feature2", &[&commit2]);

-    let heads_before: HashSet<_> = repo.view().heads().cloned().collect();
+    let heads_before = repo.view().heads().clone();
     let mut tx = repo.start_transaction("test");
     jujube_lib::git::import_refs(&mut tx, &git_repo).unwrap_or_default();
     tx.commit();

@@ -128,7 +126,6 @@ fn test_import_refs_reimport() {

     jujube_lib::git::import_refs(&mut tx, &git_repo).unwrap_or_default();
     let view = tx.as_repo().view();
-    let heads_after: HashSet<_> = view.heads().cloned().collect();
     // TODO: commit3 and commit4 should probably be removed
     let expected_heads: HashSet<_> = heads_before
         .union(&hashset!(

@@ -138,7 +135,7 @@ fn test_import_refs_reimport() {
         ))
         .cloned()
         .collect();
-    assert_eq!(heads_after, expected_heads);
+    assert_eq!(*view.heads(), expected_heads);
     assert_eq!(view.git_refs().len(), 2);
     assert_eq!(
         view.git_refs().get("refs/heads/main"),

@@ -277,12 +274,11 @@ fn test_import_refs_empty_git_repo() {
     std::fs::create_dir(&jj_repo_dir).unwrap();
     let repo = ReadonlyRepo::init_external_git(&settings, jj_repo_dir, git_repo_dir);

-    let heads_before: HashSet<_> = repo.view().heads().cloned().collect();
+    let heads_before = repo.view().heads().clone();
     let mut tx = repo.start_transaction("test");
     jujube_lib::git::import_refs(&mut tx, &git_repo).unwrap_or_default();
     let view = tx.as_repo().view();
-    let heads_after: HashSet<_> = view.heads().cloned().collect();
-    assert_eq!(heads_before, heads_after);
+    assert_eq!(*view.heads(), heads_before);
     assert_eq!(view.git_refs().len(), 0);
     tx.discard();
 }

@@ -298,10 +294,9 @@ fn test_init() {
     let initial_commit_id = commit_id(&initial_git_commit);
     std::fs::create_dir(&jj_repo_dir).unwrap();
     let repo = ReadonlyRepo::init_external_git(&settings, jj_repo_dir.clone(), git_repo_dir);
-    let heads: HashSet<_> = repo.view().heads().cloned().collect();
     // The refs were *not* imported -- it's the caller's responsibility to import
     // any refs they care about.
-    assert!(!heads.contains(&initial_commit_id));
+    assert!(!repo.view().heads().contains(&initial_commit_id));
 }

 #[test]

@@ -323,14 +318,16 @@ fn test_fetch_success() {

     // The new commit is not visible before git::fetch().
     let jj_repo = ReadonlyRepo::load(&settings, jj_repo_dir.clone()).unwrap();
-    let heads: HashSet<_> = jj_repo.view().heads().cloned().collect();
-    assert!(!heads.contains(&commit_id(&new_git_commit)));
+    assert!(!jj_repo.view().heads().contains(&commit_id(&new_git_commit)));

     // The new commit is visible after git::fetch().
     let mut tx = jj_repo.start_transaction("test");
     git::fetch(&mut tx, &clone_git_repo, "origin").unwrap();
-    let heads: HashSet<_> = tx.as_repo().view().heads().cloned().collect();
-    assert!(heads.contains(&commit_id(&new_git_commit)));
+    assert!(tx
+        .as_repo()
+        .view()
+        .heads()
+        .contains(&commit_id(&new_git_commit)));

     tx.discard();
 }

@@ -16,7 +16,6 @@ use jujube_lib::commit_builder::CommitBuilder;
 use jujube_lib::repo::Repo;
 use jujube_lib::store::CommitId;
 use jujube_lib::testutils;
-use std::collections::HashSet;
 use std::path::Path;
 use std::sync::Arc;
 use test_case::test_case;

@@ -107,12 +106,8 @@ fn test_concurrent_operations(use_git: bool) {
 }

 fn assert_heads(repo: &impl Repo, expected: Vec<&CommitId>) {
-    let actual: HashSet<_> = {
-        let locked_heads = repo.view();
-        locked_heads.heads().cloned().collect()
-    };
     let expected = expected.iter().cloned().cloned().collect();
-    assert_eq!(actual, expected);
+    assert_eq!(*repo.view().heads(), expected);
 }

 #[test_case(false ; "local store")]

@@ -18,7 +18,6 @@ use jujube_lib::repo_path::FileRepoPath;
 use jujube_lib::store::{Conflict, ConflictId, ConflictPart, TreeValue};
 use jujube_lib::store_wrapper::StoreWrapper;
 use jujube_lib::testutils;
-use std::collections::HashSet;
 use std::sync::Arc;
 use test_case::test_case;

@@ -319,15 +318,12 @@ fn test_add_head_success(use_git: bool) {
     tx.discard();

     let mut tx = repo.start_transaction("test");
-    let heads: HashSet<_> = tx.as_repo().view().heads().cloned().collect();
-    assert!(!heads.contains(new_commit.id()));
+    assert!(!tx.as_repo().view().heads().contains(new_commit.id()));
     tx.add_head(&new_commit);
-    let heads: HashSet<_> = tx.as_repo().view().heads().cloned().collect();
-    assert!(heads.contains(new_commit.id()));
+    assert!(tx.as_repo().view().heads().contains(new_commit.id()));
     tx.commit();
     Arc::get_mut(&mut repo).unwrap().reload();
-    let heads: HashSet<_> = repo.view().heads().cloned().collect();
-    assert!(heads.contains(new_commit.id()));
+    assert!(repo.view().heads().contains(new_commit.id()));
 }

 #[test_case(false ; "local store")]

@@ -351,8 +347,7 @@ fn test_add_head_ancestor(use_git: bool) {

     let mut tx = repo.start_transaction("test");
     tx.add_head(&commit1);
-    let heads: HashSet<_> = tx.as_repo().view().heads().cloned().collect();
-    assert!(!heads.contains(commit1.id()));
+    assert!(!tx.as_repo().view().heads().contains(commit1.id()));
     tx.discard();
 }

@@ -376,17 +371,16 @@ fn test_remove_head(use_git: bool) {
     Arc::get_mut(&mut repo).unwrap().reload();

     let mut tx = repo.start_transaction("test");
-    let heads: HashSet<_> = tx.as_repo().view().heads().cloned().collect();
-    assert!(heads.contains(commit3.id()));
+    assert!(tx.as_repo().view().heads().contains(commit3.id()));
     tx.remove_head(&commit3);
-    let heads: HashSet<_> = tx.as_repo().view().heads().cloned().collect();
+    let heads = tx.as_repo().view().heads().clone();
     assert!(!heads.contains(commit3.id()));
     assert!(!heads.contains(commit2.id()));
     assert!(!heads.contains(commit1.id()));
     tx.commit();

     Arc::get_mut(&mut repo).unwrap().reload();
-    let heads: HashSet<_> = repo.view().heads().cloned().collect();
+    let heads = repo.view().heads().clone();
     assert!(!heads.contains(commit3.id()));
     assert!(!heads.contains(commit2.id()));
     assert!(!heads.contains(commit1.id()));

@@ -413,17 +407,17 @@ fn test_remove_head_ancestor_git_ref(use_git: bool) {
     Arc::get_mut(&mut repo).unwrap().reload();

     let mut tx = repo.start_transaction("test");
-    let heads: HashSet<_> = tx.as_repo().view().heads().cloned().collect();
+    let heads = tx.as_repo().view().heads().clone();
     assert!(heads.contains(commit3.id()));
     tx.remove_head(&commit3);
-    let heads: HashSet<_> = tx.as_repo().view().heads().cloned().collect();
+    let heads = tx.as_repo().view().heads().clone();
     assert!(!heads.contains(commit3.id()));
     assert!(!heads.contains(commit2.id()));
     assert!(heads.contains(commit1.id()));
     tx.commit();

     Arc::get_mut(&mut repo).unwrap().reload();
-    let heads: HashSet<_> = repo.view().heads().cloned().collect();
+    let heads = repo.view().heads().clone();
     assert!(!heads.contains(commit3.id()));
     assert!(!heads.contains(commit2.id()));
     assert!(heads.contains(commit1.id()));

@@ -443,15 +437,12 @@ fn test_add_public_head(use_git: bool) {
     Arc::get_mut(&mut repo).unwrap().reload();

     let mut tx = repo.start_transaction("test");
-    let public_heads: HashSet<_> = tx.as_repo().view().public_heads().cloned().collect();
-    assert!(!public_heads.contains(commit1.id()));
+    assert!(!tx.as_repo().view().public_heads().contains(commit1.id()));
     tx.add_public_head(&commit1);
-    let public_heads: HashSet<_> = tx.as_repo().view().public_heads().cloned().collect();
-    assert!(public_heads.contains(commit1.id()));
+    assert!(tx.as_repo().view().public_heads().contains(commit1.id()));
     tx.commit();
     Arc::get_mut(&mut repo).unwrap().reload();
-    let public_heads: HashSet<_> = repo.view().public_heads().cloned().collect();
-    assert!(public_heads.contains(commit1.id()));
+    assert!(repo.view().public_heads().contains(commit1.id()));
 }

 #[test_case(false ; "local store")]

@@ -472,15 +463,12 @@ fn test_add_public_head_ancestor(use_git: bool) {
     Arc::get_mut(&mut repo).unwrap().reload();

     let mut tx = repo.start_transaction("test");
-    let public_heads: HashSet<_> = tx.as_repo().view().public_heads().cloned().collect();
-    assert!(!public_heads.contains(commit1.id()));
+    assert!(!tx.as_repo().view().public_heads().contains(commit1.id()));
     tx.add_public_head(&commit1);
-    let public_heads: HashSet<_> = tx.as_repo().view().public_heads().cloned().collect();
-    assert!(!public_heads.contains(commit1.id()));
+    assert!(!tx.as_repo().view().public_heads().contains(commit1.id()));
     tx.commit();
     Arc::get_mut(&mut repo).unwrap().reload();
-    let public_heads: HashSet<_> = repo.view().public_heads().cloned().collect();
-    assert!(!public_heads.contains(commit1.id()));
+    assert!(!repo.view().public_heads().contains(commit1.id()));
 }

 #[test_case(false ; "local store")]

@@ -498,13 +486,10 @@ fn test_remove_public_head(use_git: bool) {
     Arc::get_mut(&mut repo).unwrap().reload();

     let mut tx = repo.start_transaction("test");
-    let public_heads: HashSet<_> = tx.as_repo().view().public_heads().cloned().collect();
-    assert!(public_heads.contains(commit1.id()));
+    assert!(tx.as_repo().view().public_heads().contains(commit1.id()));
     tx.remove_public_head(&commit1);
-    let public_heads: HashSet<_> = tx.as_repo().view().public_heads().cloned().collect();
-    assert!(!public_heads.contains(commit1.id()));
+    assert!(!tx.as_repo().view().public_heads().contains(commit1.id()));
     tx.commit();
     Arc::get_mut(&mut repo).unwrap().reload();
-    let public_heads: HashSet<_> = repo.view().public_heads().cloned().collect();
-    assert!(!public_heads.contains(commit1.id()));
+    assert!(!repo.view().public_heads().contains(commit1.id()));
 }

@@ -13,8 +13,8 @@
 // limitations under the License.

 use jujube_lib::repo::Repo;
-use jujube_lib::store::CommitId;
 use jujube_lib::testutils;
+use maplit::hashset;
 use test_case::test_case;

 #[test_case(false ; "local store")]

@@ -23,10 +23,8 @@ fn test_heads_empty(use_git: bool) {
     let settings = testutils::user_settings();
     let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

-    let heads = repo.view();
     let wc = repo.working_copy_locked();
-    let all_heads: Vec<CommitId> = heads.heads().cloned().collect();
-    assert_eq!(all_heads, vec![wc.current_commit_id()]);
+    assert_eq!(*repo.view().heads(), hashset! {wc.current_commit_id()});
 }

 #[test_case(false ; "local store")]

@@ -46,18 +44,15 @@ fn test_heads_fork(use_git: bool) {
         .set_parents(vec![initial.id().clone()])
         .write_to_transaction(&mut tx);

-    let heads = tx.as_repo().view();
     let wc = repo.working_copy_locked();
-    let mut actual_all_heads: Vec<CommitId> = heads.heads().cloned().collect();
-    actual_all_heads.sort();
-    let mut expected_all_heads = vec![
-        wc.current_commit_id(),
-        child1.id().clone(),
-        child2.id().clone(),
-    ];
-    expected_all_heads.sort();
-    assert_eq!(actual_all_heads, expected_all_heads);
-    drop(heads);
+    assert_eq!(
+        *tx.as_repo().view().heads(),
+        hashset! {
+            wc.current_commit_id(),
+            child1.id().clone(),
+            child2.id().clone(),
+        }
+    );
     tx.discard();
 }

@@ -81,13 +76,10 @@ fn test_heads_merge(use_git: bool) {
         .set_parents(vec![child1.id().clone(), child2.id().clone()])
         .write_to_transaction(&mut tx);

-    let heads = tx.as_repo().view();
     let wc = repo.working_copy_locked();
-    let mut actual_all_heads: Vec<CommitId> = heads.heads().cloned().collect();
-    actual_all_heads.sort();
-    let mut expected_all_heads = vec![wc.current_commit_id(), merge.id().clone()];
-    expected_all_heads.sort();
-    assert_eq!(actual_all_heads, expected_all_heads);
-    drop(heads);
+    assert_eq!(
+        *tx.as_repo().view().heads(),
+        hashset! {wc.current_commit_id(), merge.id().clone()}
+    );
     tx.discard();
 }

@@ -145,6 +145,7 @@ fn resolve_single_rev(
     let heads: HashSet<Commit> = repo
         .view()
         .heads()
+        .iter()
         .map(|commit_id| repo.store().get_commit(commit_id).unwrap())
         .collect();
     let heads = skip_uninteresting_heads(repo, heads);

@@ -256,7 +257,7 @@ fn update_checkout_after_rewrite(ui: &mut Ui, tx: &mut Transaction) {
     // Filter out heads that already existed.
     // TODO: Filter out *commits* that already existed (so we get updated to an
     // appropriate new non-head)
-    let old_heads: HashSet<_> = tx.base_repo().view().heads().cloned().collect();
+    let old_heads = tx.base_repo().view().heads().clone();
     let new_checkout_candidates: HashSet<_> = new_checkout_candidates
         .difference(&old_heads)
         .cloned()

@@ -1034,6 +1035,7 @@ fn cmd_log(
     let mut heads: HashSet<_> = repo
         .view()
         .heads()
+        .iter()
         .map(|id| repo.store().get_commit(id).unwrap())
         .collect();
     if !sub_matches.is_present("all") {