Fix clippy lints (1/N)

Summary: This fixes a batch of clippy lints. In particular, I haven't yet fixed the mutable_key ones, because we may want to disable that rule instead.
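
For context, the lint in question is presumably clippy::mutable_key_type, which fires when a HashMap or HashSet key type contains interior mutability. It is often a false positive when the mutable part is deliberately excluded from Eq/Hash, which is why disabling it can be the better call. A minimal sketch of the pattern it flags (illustrative only, not code from this diff):

use std::cell::Cell;
use std::collections::HashSet;

struct Key {
    id: u32,
    hits: Cell<u32>, // interior mutability; ignored by Eq/Hash below
}

impl PartialEq for Key {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for Key {}
impl std::hash::Hash for Key {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.id.hash(state);
    }
}

fn main() {
    // clippy::mutable_key_type flags this set because Key has interior
    // mutability, even though hits never takes part in hashing.
    let mut set = HashSet::new();
    set.insert(Key { id: 1, hits: Cell::new(0) });
    assert!(set.contains(&Key { id: 1, hits: Cell::new(9) }));
}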

Differential Revision: D43979960

fbshipit-source-id: b5d0da42480f9a30bba38d31dca60a2fb810a0b3
Yan Soares Couto 2023-03-10 09:41:25 -08:00 committed by Facebook GitHub Bot
parent 3595428d86
commit 72eb05a1bd
9 changed files with 24 additions and 26 deletions

@@ -118,7 +118,7 @@ where
     fn consume_chunk(&mut self, amt: usize) {
         if amt > 0 {
             let chunk_size = match &self.state {
-                ReadingChunk(ref chunk_size) => *chunk_size,
+                ReadingChunk(chunk_size) => *chunk_size,
                 _ => panic!("Trying to consume bytes while internally not reading chunk yet"),
             };
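
The change above drops a redundant ref: when the scrutinee is a reference (&self.state), match ergonomics already bind fields by reference, so ref adds nothing. A self-contained sketch of the same pattern, with a stand-in State enum:

enum State {
    ReadingChunk(usize),
    Done,
}

fn chunk_size(state: &State) -> usize {
    // Matching on &State binds chunk_size as &usize automatically;
    // writing ReadingChunk(ref chunk_size) is equivalent but noisier.
    match state {
        State::ReadingChunk(chunk_size) => *chunk_size,
        State::Done => 0,
    }
}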

@@ -221,21 +221,21 @@ impl PartHeader {
         aparams.sort();
         // param sizes
-        for (ref key, ref val) in &mparams {
+        for (key, val) in &mparams {
             out_buf.put_u8(key.len() as u8);
             out_buf.put_u8(val.len() as u8);
         }
-        for (ref key, ref val) in &aparams {
+        for (key, val) in &aparams {
             out_buf.put_u8(key.len() as u8);
             out_buf.put_u8(val.len() as u8);
         }
         // the actual params themselves
-        for (ref key, ref val) in &mparams {
+        for (key, val) in &mparams {
             out_buf.put_slice(key.as_bytes());
             out_buf.put_slice(val);
         }
-        for (ref key, ref val) in &aparams {
+        for (key, val) in &aparams {
             out_buf.put_slice(key.as_bytes());
             out_buf.put_slice(val);
         }

@@ -360,7 +360,7 @@ fn bundle2caps() -> String {
     let mut encodedcaps = vec![];
-    for &(ref key, ref value) in &caps {
+    for (key, value) in &caps {
         let encodedkey = key.to_string();
         if !value.is_empty() {
             let encodedvalue = value.join(",");
@@ -948,9 +948,9 @@ impl RepoClient {
                 ctx.perf_counters()
                     .add_to_counter(PerfCounterType::GetpackResponseSize, len);
-                STATS::total_fetched_file_size.add_value(len as i64);
+                STATS::total_fetched_file_size.add_value(len);
                 if ctx.session().is_quicksand() {
-                    STATS::quicksand_fetched_file_size.add_value(len as i64);
+                    STATS::quicksand_fetched_file_size.add_value(len);
                 }
             }
         })
@@ -1318,7 +1318,7 @@ impl HgCommands for RepoClient {
         // Heads are all the commits that has a publishing bookmarks
         // that points to it.
         self.get_publishing_bookmarks_maybe_stale(ctx)
-            .map(|map| map.into_iter().map(|(_, hg_cs_id)| hg_cs_id).collect())
+            .map(|map| map.into_values().collect())
             .compat()
             .timeout(default_timeout())
             .flatten_err()
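
The into_values() rewrite above replaces a hand-rolled key-discarding iterator; HashMap::into_values (stable since Rust 1.54) states the intent directly and is the shape clippy's map-iteration lints push toward. A minimal sketch:

use std::collections::HashMap;

fn main() {
    let map = HashMap::from([("a", 1), ("b", 2)]);

    // Before: map.into_iter().map(|(_, v)| v).collect()
    // After: say what you mean.
    let mut values: Vec<i32> = map.into_values().collect();
    values.sort();
    assert_eq!(values, vec![1, 2]);
}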

@@ -73,7 +73,7 @@ pub(crate) fn split_changegroup(
         .try_filter_map({
             let mut seen_path = None;
             move |part| {
-                if let &Some(ref seen_path) = &seen_path {
+                if let Some(seen_path) = &seen_path {
                     match &part {
                         &Part::CgChunk(Section::Filelog(ref path), _)
                         | &Part::SectionEnd(Section::Filelog(ref path)) => {
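
The if let rewrite above is the same match-ergonomics cleanup in another guise: &Some(ref x) against &seen_path dereferences in the pattern only to re-borrow the binding, which Some(x) against &seen_path does implicitly. A standalone sketch:

fn main() {
    let seen_path: Option<String> = Some("a/b".to_string());

    // Before: if let &Some(ref p) = &seen_path { ... }
    // After: matching &Option<String> binds p as &String automatically.
    if let Some(p) = &seen_path {
        assert_eq!(p, "a/b");
    }
}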

@@ -99,7 +99,7 @@ pub(crate) async fn enforce_file_changes_rate_limits<
         }
     };
-    let max_value = limit.raw_config.limit as f64;
+    let max_value = limit.raw_config.limit;
     let interval = limit.window.as_secs() as u32;
     let counter = GlobalTimeWindowCounterBuilder::build(
@@ -212,7 +212,7 @@ async fn enforce_commit_rate_limits_on_commits<'a, I: Iterator<Item = &'a Bonsai
             limit_name: COMMITS_PER_AUTHOR_LIMIT_NAME.to_string(),
             limit: limit.clone(),
             entity: author,
-            value: count as f64,
+            value: count,
         }),
         Err(_) => {
             ctx.scuba()
@@ -255,7 +255,7 @@ fn dispatch_counter_checks_and_bumps<'a>(
     counters: Vec<(BoxGlobalTimeWindowCounter, String, u64)>,
     enforced: bool,
 ) -> impl Iterator<Item = BoxFuture<'a, Result<(), (String, f64)>>> + 'a {
-    let max_value = limit.raw_config.limit as f64;
+    let max_value = limit.raw_config.limit;
     let interval = limit.window.as_secs() as u32;
     counters.into_iter().map(move |(counter, author, bump)| {
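
Dropping "as f64" here (and "as i64" in the getpack hunk earlier) implies the expressions already have the target type, making the casts no-ops; that is what clippy::unnecessary_cast reports. A hedged sketch, with a hypothetical RawConfig whose field is assumed to already be f64:

struct RawConfig {
    limit: f64, // assumed: already f64, so a cast would be a no-op
}

fn max_value(raw_config: &RawConfig) -> f64 {
    // clippy::unnecessary_cast would flag `raw_config.limit as f64`.
    raw_config.limit
}

fn main() {
    let cfg = RawConfig { limit: 100.0 };
    assert_eq!(max_value(&cfg), 100.0);
}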

@@ -197,9 +197,7 @@ impl NewBlobs {
                 hash: nodehash,
             };
-            if let Some(&(ref manifest_content, ref p1, ref p2, ref blobfuture)) =
-                manifests.get(&key)
-            {
+            if let Some((manifest_content, p1, p2, blobfuture)) = manifests.get(&key) {
                 counters.manifests_count += 1;
                 entries.push(
                     blobfuture
@@ -248,7 +246,7 @@ fn get_manifest_parent_content(
     p: Option<HgNodeHash>,
 ) -> Option<&ManifestContent> {
     p.and_then(|p| manifests.get(&HgNodeKey { path, hash: p }))
-        .map(|&(ref content, ..)| content)
+        .map(|(content, ..)| content)
 }
 fn is_entry_present_in_parent(

@@ -90,7 +90,7 @@ impl IntersectNodeStream {
         } else {
             self.inputs
                 .iter()
-                .map(|&(_, ref state)| match state {
+                .map(|(_, state)| match state {
                     &Ok(Async::Ready(None)) => true,
                     _ => false,
                 })
@@ -123,11 +123,11 @@ impl Stream for IntersectNodeStream {
         // Return any errors
         {
-            if self.inputs.iter().any(|&(_, ref state)| state.is_err()) {
+            if self.inputs.iter().any(|(_, state)| state.is_err()) {
                 let inputs = std::mem::take(&mut self.inputs);
                 let (_, err) = inputs
                     .into_iter()
-                    .find(|&(_, ref state)| state.is_err())
+                    .find(|(_, state)| state.is_err())
                     .unwrap();
                 return Err(err.unwrap_err());
             }

@@ -50,7 +50,7 @@ impl UnionNodeStream {
     }
     fn gc_finished_inputs(&mut self) {
-        self.inputs.retain(|&(_, ref state)| {
+        self.inputs.retain(|(_, state)| {
             if let Ok(Async::Ready(None)) = *state {
                 false
             } else {
@@ -111,13 +111,13 @@ impl Stream for UnionNodeStream {
         // Return any errors
         {
-            if self.inputs.iter().any(|&(_, ref state)| state.is_err()) {
+            if self.inputs.iter().any(|(_, state)| state.is_err()) {
                 let inputs = std::mem::take(&mut self.inputs);
-                let (_, err) = inputs
+                let err = inputs
                     .into_iter()
-                    .find(|&(_, ref state)| state.is_err())
+                    .find_map(|(_, state)| state.err())
                     .unwrap();
-                return Err(err.unwrap_err());
+                return Err(err);
             }
         }
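
The union-stream version goes a step further than the intersect-stream hunk: rather than find(...) on the whole tuple followed by unwrap_err(), it uses find_map with Result::err to pull out the first error directly. A small sketch of the idiom:

fn first_error(inputs: Vec<(usize, Result<u32, String>)>) -> Option<String> {
    // Result::err turns Result<T, E> into Option<E>, so find_map yields
    // the first error without a separate unwrap_err() afterwards.
    inputs.into_iter().find_map(|(_, state)| state.err())
}

fn main() {
    let inputs = vec![(0, Ok(1)), (1, Err("boom".to_string())), (2, Ok(3))];
    assert_eq!(first_error(inputs), Some("boom".to_string()));
}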

@@ -82,7 +82,7 @@ impl<'a> SegmentedChangelog for ReadOnlySegmentedChangelog<'a> {
             );
         }
         let constraints = FirstAncestorConstraint::KnownUniversally {
-            heads: DagIdSet::from_spans(master_head_dag_ids.into_iter().map(|(_k, v)| v)),
+            heads: DagIdSet::from_spans(master_head_dag_ids.into_values()),
         };
         let cs_to_vlocation: HashMap<ChangesetId, Result<Option<Location<_>>>> = cs_to_dag_id
             .into_iter()