update Rust to 1.57.0

Summary: Config changes + source fixes to upgrade Rust to 1.57.0

Reviewed By: dtolnay

Differential Revision: D32852266

fbshipit-source-id: b7fc82e3cd82ec8611b68778d9c996e417a65725
Authored by Matt Hammerly on 2021-12-14 10:32:37 -08:00; committed by Facebook GitHub Bot
parent 8aa7b1cc88
commit dacc5fbd89
21 changed files with 29 additions and 3 deletions
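
Nearly all of the source fixes below follow one pattern: after the toolchain bump, rustc 1.57 evidently reports more struct fields as "never read" under the dead_code lint, and rather than delete fields that are intentionally kept around, this change annotates each flagged field with #[allow(dead_code)]. A minimal, self-contained sketch of that pattern (hypothetical types, not taken from this diff):

```rust
// Sketch only: `flags` is written but never read anywhere, so rustc warns
// that the field is never read. Scoping #[allow(dead_code)] to the one field
// keeps the rest of the struct (and the crate) covered by the lint.
struct Metadata {
    size: u64,
    #[allow(dead_code)]
    flags: u32,
}

fn main() {
    let m = Metadata { size: 4096, flags: 0 };
    // Only `size` is read; `flags` stays unread on purpose.
    println!("size = {}", m.size);
}
```

Putting the attribute on the individual field, as the hunks below do, keeps the exception as narrow as possible; the lint still fires for any other field that later becomes unused.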

View File

@@ -18,7 +18,7 @@ jobs:
     - name: Install Rust Stable
       uses: actions-rs/toolchain@v1
       with:
-        toolchain: 1.56.0
+        toolchain: 1.57.0
         default: true
         profile: minimal
     - name: Install Python 3.8

View File

@@ -16,7 +16,7 @@ jobs:
     - name: Install Rust Stable
       uses: actions-rs/toolchain@v1
       with:
-        toolchain: 1.56.0
+        toolchain: 1.57.0
         default: true
         profile: minimal
     - name: Install Python 3.8

View File

@@ -164,6 +164,7 @@ struct CommitRevlogDataShowArgs {
 struct CommitRevlogDataCheckArgs {
     #[structopt(help = "Input CBOR file (stdin is used if omitted)")]
     input: Option<PathBuf>,
+    #[allow(dead_code)]
     #[structopt(long, short, help = "Only look at the first N entries")]
     limit: Option<usize>,
 }
@@ -173,6 +174,7 @@ struct CommitRevlogDataCheckArgs {
 struct CommitLocationToHashArgs {
     #[structopt(help = "Input CBOR file (stdin is used if omitted)")]
     input: Option<PathBuf>,
+    #[allow(dead_code)]
     #[structopt(long, short, help = "Output file (stdout used if omitted)")]
     output: Option<PathBuf>,
     #[structopt(long, short, help = "Look at items starting with index start")]
@@ -186,6 +188,7 @@ struct CommitLocationToHashArgs {
 struct CommitHashToLocationArgs {
     #[structopt(help = "Input CBOR file (stdin is used if omitted)")]
     input: Option<PathBuf>,
+    #[allow(dead_code)]
     #[structopt(long, short, help = "Output file (stdout used if omitted)")]
     output: Option<PathBuf>,
     #[structopt(long, short, help = "Look at items starting with index start")]
@@ -206,6 +209,7 @@ struct CloneArgs {
 struct BookmarkArgs {
     #[structopt(help = "Input CBOR file (stdin is used if omitted)")]
     input: Option<PathBuf>,
+    #[allow(dead_code)]
     #[structopt(long, short, help = "Output file (stdout used if omitted)")]
     output: Option<PathBuf>,
     #[structopt(long, short, help = "Only look at the first N entries")]

View File

@@ -24,6 +24,7 @@ use crate::{
 #[derive(Clone, Debug)]
 pub struct StoreFile {
     key: Option<Key>,
+    #[allow(dead_code)]
     parents: Option<Parents>,
     entry_metadata: Option<Metadata>,
@@ -41,6 +42,7 @@ enum StoreFileKind {
     /// access to another store, unlike an LfsPointer.
     File {
         stripped_content: Option<Bytes>,
+        #[allow(dead_code)]
         copied_from: Option<Key>,
     },

View File

@@ -16,8 +16,10 @@ use crate::{datastore::Metadata, indexedlogdatastore::Entry};
 #[derive(Clone, Debug)]
 pub struct StoreTree {
     key: Option<Key>,
+    #[allow(dead_code)]
     parents: Option<Parents>,
     raw_content: Option<Bytes>,
+    #[allow(dead_code)]
     entry_metadata: Option<Metadata>,
 }

View File

@@ -51,6 +51,7 @@ pub struct ChaosBlobstore<T> {
     blobstore: T,
     sample_threshold_read: f32,
     sample_threshold_write: f32,
+    #[allow(dead_code)]
     options: ChaosOptions,
 }

View File

@@ -109,6 +109,7 @@ impl RedactedBlobs {
 #[derive(Debug)]
 struct InnerConfig {
+    #[allow(dead_code)]
     raw_config: Arc<RedactionSets>,
     map: Arc<HashMap<String, RedactedMetadata>>,
 }

View File

@@ -208,7 +208,7 @@ macro_rules! blobstore_test_impl {
                 let state = $state;
                 let factory = $new_cb;
                 // This is really just checking that the constructed type is Sized
-                Box::new(factory(state, PutBehaviour::Overwrite)?);
+                let _ = Box::new(factory(state, PutBehaviour::Overwrite)?);
                 Ok(())
             }
         }
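
The hunk above is the one source fix that is not an #[allow(dead_code)] attribute: the bare Box::new(...) statement exists only to check that the factory's constructed type is Sized (per the comment), and binding the result to `_` makes the deliberate discard explicit, presumably to silence a new unused-value warning on the 1.57 toolchain. A hedged sketch of the same idiom, using a hypothetical must-use function rather than the macro above:

```rust
// Calling a #[must_use] function as a bare statement warns that the result
// is unused; `let _ = ...` still evaluates the call but marks the discard as
// intentional, so no warning is emitted and behavior is unchanged.
#[must_use]
fn build_widget() -> Box<u32> {
    Box::new(42)
}

fn main() {
    // build_widget();        // would warn: unused return value
    let _ = build_widget();   // same call, discard is explicit: no warning
}
```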

View File

@@ -166,6 +166,7 @@ pub trait BlobstoreSyncQueue: Send + Sync {
 #[derive(Clone)]
 pub struct SqlBlobstoreSyncQueue {
     write_connection: Arc<Connection>,
+    #[allow(dead_code)]
     read_connection: Connection,
     read_master_connection: Connection,
     write_sender: Arc<

View File

@@ -56,6 +56,7 @@ enum Mode {
 struct AliasVerification {
     logger: Logger,
     blobrepo: BlobRepo,
+    #[allow(dead_code)]
     repoid: RepositoryId,
     mode: Mode,
     err_cnt: Arc<AtomicUsize>,

View File

@@ -105,6 +105,7 @@ where
 }

 #[cfg(test)]
+#[allow(dead_code)]
 mod test {
     use super::*;

View File

@@ -66,6 +66,7 @@ const CHUNK_HEADER3_LEN: usize = 20 + 20 + 20 + 20 + 20 + 2 + 4;
 #[derive(Debug)]
 pub struct CgUnpacker {
+    #[allow(dead_code)]
     logger: Logger,
     state: State,
     version: CgVersion,

View File

@@ -21,6 +21,7 @@ use super::{HgDataContext, HgDataId, HgRepoContext};
 #[derive(Clone)]
 pub struct HgTreeContext {
+    #[allow(dead_code)]
     repo: HgRepoContext,
     envelope: HgManifestEnvelope,
 }

View File

@@ -30,6 +30,7 @@ pub struct DarkstormGlobalrevSyncer {
 #[derive(Clone)]
 struct HgsqlConnection {
+    #[allow(dead_code)]
     connection: Connection,
 }

View File

@@ -57,6 +57,7 @@ impl Caches {
 pub struct SqlPhasesStore {
     pub(crate) write_connection: Connection,
     pub(crate) read_connection: Connection,
+    #[allow(dead_code)]
     pub(crate) read_master_connection: Connection,
     pub(crate) caches: Arc<Caches>,
 }

View File

@@ -87,6 +87,7 @@ impl SqlPushrebaseMutationMapping {
 #[derive(Clone)]
 pub struct SqlPushrebaseMutationMappingConnection {
+    #[allow(dead_code)]
     write_connection: Connection,
     read_connection: Connection,
     read_master_connection: Connection,

View File

@@ -97,14 +97,18 @@ pub struct MononokeRateLimitConfig {
     pub region_weight: f64,
     pub rate_limits: Vec<RateLimit>,
     pub load_shed_limits: Vec<LoadShedLimit>,
+    #[allow(dead_code)]
     commits_per_author: RateLimitBody,
+    #[allow(dead_code)]
     total_file_changes: Option<RateLimitBody>,
 }

 #[derive(Debug, Clone)]
 pub struct RateLimit {
     pub body: RateLimitBody,
+    #[allow(dead_code)]
     target: Option<Target>,
+    #[allow(dead_code)]
     metric: Metric,
 }

View File

@@ -53,6 +53,7 @@ pub struct MononokeRepo {
     repo: Arc<Repo>,
     bookmark_attrs: BookmarkAttrs,
     streaming_clone: SqlStreamingCloneConfig,
+    #[allow(dead_code)]
     mutable_counters: Arc<dyn MutableCounters>,
     // Reverse filler queue for recording accepted infinitepush bundles
     // This field is `None` if we don't want recording to happen

View File

@@ -836,6 +836,7 @@ struct Bundle2Resolver<'r> {
     ctx: &'r CoreContext,
     repo: &'r BlobRepo,
     infinitepush_writes_allowed: bool,
+    #[allow(dead_code)]
     pushrebase_flags: PushrebaseFlags,
 }

View File

@@ -68,6 +68,7 @@ impl CatGroup {
 #[derive(Clone)]
 pub struct CatsSection<'a> {
     groups: Vec<CatGroup>,
+    #[allow(dead_code)]
     config: &'a dyn Config,
 }

View File

@@ -414,6 +414,7 @@ fn get_required_config<T: FromConfigValue>(
 /// been appropriately parsed and validated.
 #[derive(Debug)]
 pub(crate) struct Config {
+    #[allow(dead_code)]
     pub(crate) repo_name: String,
     pub(crate) server_url: Url,
     pub(crate) cert: Option<PathBuf>,