This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Prepare for latest clippy (nightly 09-10-2022) #12466

Merged · 1 commit · Oct 18, 2022
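The changes below are mechanical and target two patterns that the newer clippy nightly flags: iterating a map only to discard the key, which becomes `values()` / `into_values()`, and bounding a value with chained `max`/`min`, which becomes `clamp`. A minimal standalone sketch of both rewrites follows; it is written for this summary rather than taken from the PR, and the lint names (`iter_kv_map`, `manual_clamp`) are my best guess for this nightly.

use std::collections::HashMap;

fn main() {
    let map: HashMap<&str, u32> = HashMap::from([("a", 1), ("b", 2)]);

    // Before: iterate key-value pairs only to throw the key away.
    let _old: Vec<u32> = map.iter().map(|(_k, v)| *v).collect();
    // After: `values()` (or `into_values()` when the map is consumed) says it directly.
    let new: Vec<u32> = map.values().copied().collect();
    assert_eq!(new.len(), 2);

    // Before: chained `max`/`min` to keep a value inside a range.
    let x: u32 = 250;
    let _old_bounded = x.max(10).min(100);
    // After: `clamp(min, max)`; note that `clamp` panics if `min > max`.
    assert_eq!(x.clamp(10, 100), 100);
}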
2 changes: 1 addition & 1 deletion client/api/src/in_mem.rs
@@ -516,7 +516,7 @@ where
) -> sp_blockchain::Result<Block::Hash> {
check_genesis_storage(&storage)?;

-let child_delta = storage.children_default.iter().map(|(_storage_key, child_content)| {
+let child_delta = storage.children_default.values().map(|child_content| {
(
&child_content.child_info,
child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))),
2 changes: 1 addition & 1 deletion client/consensus/slots/src/lib.rs
@@ -1118,7 +1118,7 @@ mod test {

// But lets assert all distances, which we expect to grow linearly until `max_interval + 1`
let expected_intervals: Vec<_> =
-(0..497).map(|i| (i / 2).max(1).min(expected_distance)).collect();
+(0..497).map(|i| (i / 2).clamp(1, expected_distance)).collect();

assert_eq!(intervals, expected_intervals);
}
2 changes: 1 addition & 1 deletion client/db/src/bench.rs
@@ -116,7 +116,7 @@ impl<B: BlockT> BenchmarkingState<B> {
state.add_whitelist_to_tracker();

state.reopen()?;
-let child_delta = genesis.children_default.iter().map(|(_storage_key, child_content)| {
+let child_delta = genesis.children_default.values().map(|child_content| {
(
&child_content.child_info,
child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))),
2 changes: 1 addition & 1 deletion client/db/src/lib.rs
@@ -798,7 +798,7 @@ impl<Block: BlockT> BlockImportOperation<Block> {
return Err(sp_blockchain::Error::InvalidState)
}

-let child_delta = storage.children_default.iter().map(|(_storage_key, child_content)| {
+let child_delta = storage.children_default.values().map(|child_content| {
(
&child_content.child_info,
child_content.data.iter().map(|(k, v)| (&k[..], Some(&v[..]))),
3 changes: 1 addition & 2 deletions client/network/sync/src/lib.rs
@@ -1434,8 +1434,7 @@ where
if let SyncMode::LightState { skip_proofs, .. } = &self.mode {
if self.state_sync.is_none() && !self.peers.is_empty() && self.queue_blocks.is_empty() {
// Finalized a recent block.
-let mut heads: Vec<_> =
-self.peers.iter().map(|(_, peer)| peer.best_number).collect();
+let mut heads: Vec<_> = self.peers.values().map(|peer| peer.best_number).collect();
heads.sort();
let median = heads[heads.len() / 2];
if number + STATE_SYNC_FINALITY_THRESHOLD.saturated_into() >= median {
2 changes: 1 addition & 1 deletion client/state-db/src/pruning.rs
@@ -117,7 +117,7 @@ impl<BlockHash: Hash, Key: Hash, D: MetaDb> DeathRowQueue<BlockHash, Key, D> {
window_size: u32,
) -> Result<DeathRowQueue<BlockHash, Key, D>, Error<D::Error>> {
// limit the cache capacity from 1 to `DEFAULT_MAX_BLOCK_CONSTRAINT`
-let cache_capacity = window_size.max(1).min(DEFAULT_MAX_BLOCK_CONSTRAINT) as usize;
+let cache_capacity = window_size.clamp(1, DEFAULT_MAX_BLOCK_CONSTRAINT) as usize;
let mut cache = VecDeque::with_capacity(cache_capacity);
trace!(target: "state-db", "Reading pruning journal for the database-backed queue. Pending #{}", base);
// Load block from db
6 changes: 3 additions & 3 deletions client/transaction-pool/src/graph/pool.rs
@@ -160,7 +160,7 @@ impl<B: ChainApi> Pool<B> {
) -> Result<Vec<Result<ExtrinsicHash<B>, B::Error>>, B::Error> {
let xts = xts.into_iter().map(|xt| (source, xt));
let validated_transactions = self.verify(at, xts, CheckBannedBeforeVerify::Yes).await?;
-Ok(self.validated_pool.submit(validated_transactions.into_iter().map(|(_, tx)| tx)))
+Ok(self.validated_pool.submit(validated_transactions.into_values()))
}

/// Resubmit the given extrinsics to the pool.
@@ -174,7 +174,7 @@ impl<B: ChainApi> Pool<B> {
) -> Result<Vec<Result<ExtrinsicHash<B>, B::Error>>, B::Error> {
let xts = xts.into_iter().map(|xt| (source, xt));
let validated_transactions = self.verify(at, xts, CheckBannedBeforeVerify::No).await?;
-Ok(self.validated_pool.submit(validated_transactions.into_iter().map(|(_, tx)| tx)))
+Ok(self.validated_pool.submit(validated_transactions.into_values()))
}

/// Imports one unverified extrinsic to the pool
@@ -341,7 +341,7 @@ impl<B: ChainApi> Pool<B> {
at,
known_imported_hashes,
pruned_hashes,
-reverified_transactions.into_iter().map(|(_, xt)| xt).collect(),
+reverified_transactions.into_values().collect(),
)
}

8 changes: 4 additions & 4 deletions frame/transaction-payment/src/lib.rs
@@ -228,11 +228,11 @@ where

if positive {
let excess = first_term.saturating_add(second_term).saturating_mul(previous);
-previous.saturating_add(excess).max(min_multiplier).min(max_multiplier)
+previous.saturating_add(excess).clamp(min_multiplier, max_multiplier)
} else {
// Defensive-only: first_term > second_term. Safe subtraction.
let negative = first_term.saturating_sub(second_term).saturating_mul(previous);
-previous.saturating_sub(negative).max(min_multiplier).min(max_multiplier)
+previous.saturating_sub(negative).clamp(min_multiplier, max_multiplier)
}
}
}
@@ -713,8 +713,8 @@ where
let max_block_weight = max_block_weight.ref_time();
let info_weight = info.weight.ref_time();

-let bounded_weight = info_weight.max(1).min(max_block_weight);
-let bounded_length = (len as u64).max(1).min(max_block_length);
+let bounded_weight = info_weight.clamp(1, max_block_weight);
+let bounded_length = (len as u64).clamp(1, max_block_length);

let max_tx_per_block_weight = max_block_weight / bounded_weight;
let max_tx_per_block_length = max_block_length / bounded_length;
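In the second hunk above, both quantities are bounded to at least 1 because the very next lines divide the block limits by them; the clamp protects against a division by zero for a zero-weight or zero-length extrinsic. A rough standalone sketch with invented numbers:

fn main() {
    // Invented limits for illustration; not the real runtime constants.
    let max_block_weight: u64 = 2_000_000_000;
    let info_weight: u64 = 0;

    // Without the lower bound of 1, the division below would panic on a zero weight.
    let bounded_weight = info_weight.clamp(1, max_block_weight);
    let max_tx_per_block_weight = max_block_weight / bounded_weight;
    assert_eq!(max_tx_per_block_weight, 2_000_000_000);
}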
4 changes: 2 additions & 2 deletions primitives/runtime/src/generic/era.rs
@@ -63,7 +63,7 @@ impl Era {
/// does not exceed `BlockHashCount` parameter passed to `system` module, since that
/// prunes old blocks and renders transactions immediately invalid.
pub fn mortal(period: u64, current: u64) -> Self {
-let period = period.checked_next_power_of_two().unwrap_or(1 << 16).max(4).min(1 << 16);
+let period = period.checked_next_power_of_two().unwrap_or(1 << 16).clamp(4, 1 << 16);
let phase = current % period;
let quantize_factor = (period >> 12).max(1);
let quantized_phase = phase / quantize_factor * quantize_factor;
@@ -105,7 +105,7 @@ impl Encode for Era {
Self::Immortal => output.push_byte(0),
Self::Mortal(period, phase) => {
let quantize_factor = (*period as u64 >> 12).max(1);
-let encoded = (period.trailing_zeros() - 1).max(1).min(15) as u16 |
+let encoded = (period.trailing_zeros() - 1).clamp(1, 15) as u16 |
((phase / quantize_factor) << 4) as u16;
encoded.encode_to(output);
},
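For reference, the arithmetic touched by the two era hunks, reduced to a standalone sketch over plain integers (the same formulas, not the actual `sp_runtime` types): the period is rounded up to a power of two and clamped to [4, 2^16], the phase is quantized to 12 bits of precision, and the encoded `u16` packs `log2(period) - 1` into the low four bits with the quantized phase above it.

fn mortal(period: u64, current: u64) -> (u64, u64) {
    // Round the period up to a power of two and clamp it to [4, 65536].
    let period = period.checked_next_power_of_two().unwrap_or(1 << 16).clamp(4, 1 << 16);
    let phase = current % period;
    // Only 12 bits are available for the phase, so long periods are quantized.
    let quantize_factor = (period >> 12).max(1);
    (period, phase / quantize_factor * quantize_factor)
}

fn encode(period: u64, phase: u64) -> u16 {
    let quantize_factor = (period >> 12).max(1);
    // Low 4 bits: log2(period) - 1, clamped to 1..=15; upper 12 bits: quantized phase index.
    (period.trailing_zeros() as u64 - 1).clamp(1, 15) as u16 |
        ((phase / quantize_factor) << 4) as u16
}

fn main() {
    let (period, phase) = mortal(64, 42);
    assert_eq!((period, phase), (64, 42));
    assert_eq!(encode(period, phase), 5 | (42 << 4));
}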
14 changes: 6 additions & 8 deletions primitives/runtime/src/offchain/storage_lock.rs
@@ -77,8 +77,8 @@ const STORAGE_LOCK_DEFAULT_EXPIRY_DURATION: Duration = Duration::from_millis(20_
const STORAGE_LOCK_DEFAULT_EXPIRY_BLOCKS: u32 = 4;

/// Time between checks if the lock is still being held in milliseconds.
-const STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MIN: Duration = Duration::from_millis(100);
-const STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MAX: Duration = Duration::from_millis(10);
+const STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MIN: Duration = Duration::from_millis(10);
+const STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MAX: Duration = Duration::from_millis(100);
Member: So it was actually bogus?

Member Author: Yes


/// Lockable item for use with a persisted storage lock.
///
@@ -137,10 +137,9 @@ impl Lockable for Time {
let remainder: Duration = now.diff(deadline);
// do not snooze the full duration, but instead snooze max 100ms
// it might get unlocked in another thread
-use core::cmp::{max, min};
-let snooze = max(
-min(remainder, STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MAX),
+let snooze = remainder.clamp(
STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MIN,
+STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MAX,
);
sp_io::offchain::sleep_until(now.add(snooze));
}
@@ -239,10 +238,9 @@ impl<B: BlockNumberProvider> Lockable for BlockAndTime<B> {
fn snooze(deadline: &Self::Deadline) {
let now = offchain::timestamp();
let remainder: Duration = now.diff(&(deadline.timestamp));
-use core::cmp::{max, min};
-let snooze = max(
-min(remainder, STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MAX),
+let snooze = remainder.clamp(
STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MIN,
+STORAGE_LOCK_PER_CHECK_ITERATION_SNOOZE_MAX,
);
sp_io::offchain::sleep_until(now.add(snooze));
}
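Regarding the "So it was actually bogus?" exchange above: with the old, swapped constants (MIN = 100ms, MAX = 10ms), the expression `max(min(remainder, SNOOZE_MAX), SNOOZE_MIN)` always came out as 100ms regardless of how little time remained. The swap is also a prerequisite for the rewrite, since `Ord::clamp` panics when the lower bound exceeds the upper one. A quick standalone check of the old and new behaviour:

use std::time::Duration;

fn main() {
    let old_min = Duration::from_millis(100);
    let old_max = Duration::from_millis(10);
    let remainder = Duration::from_millis(3);

    // Old logic with the swapped constants: the inner `min` caps at 10ms,
    // then the outer `max` lifts the result back up to 100ms every time.
    let snooze = remainder.min(old_max).max(old_min);
    assert_eq!(snooze, Duration::from_millis(100));

    // Corrected constants with `clamp`: a short remainder is only raised to 10ms.
    let snooze = remainder.clamp(Duration::from_millis(10), Duration::from_millis(100));
    assert_eq!(snooze, Duration::from_millis(10));
}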
4 changes: 2 additions & 2 deletions primitives/state-machine/src/in_memory_backend.rs
@@ -157,8 +157,8 @@ where
fn from((inners, state_version): (Storage, StateVersion)) -> Self {
let mut inner: HashMap<Option<ChildInfo>, BTreeMap<StorageKey, StorageValue>> = inners
.children_default
-.into_iter()
-.map(|(_k, c)| (Some(c.child_info), c.data))
+.into_values()
+.map(|c| (Some(c.child_info), c.data))
.collect();
inner.insert(None, inners.top);
(inner, state_version).into()
2 changes: 1 addition & 1 deletion primitives/state-machine/src/overlayed_changes/mod.rs
@@ -474,7 +474,7 @@ impl OverlayedChanges {
pub fn children(
&self,
) -> impl Iterator<Item = (impl Iterator<Item = (&StorageKey, &OverlayedValue)>, &ChildInfo)> {
-self.children.iter().map(|(_, v)| (v.0.changes(), &v.1))
+self.children.values().map(|v| (v.0.changes(), &v.1))
}

/// Get an iterator over all top changes as been by the current transaction.
2 changes: 1 addition & 1 deletion primitives/trie/src/recorder.rs
@@ -109,7 +109,7 @@ impl<H: Hasher> Recorder<H> {
/// Returns the [`StorageProof`].
pub fn to_storage_proof(&self) -> StorageProof {
let recorder = self.inner.lock();
-StorageProof::new(recorder.accessed_nodes.iter().map(|(_, v)| v.clone()))
+StorageProof::new(recorder.accessed_nodes.values().cloned())
}

/// Returns the estimated encoded size of the proof.
2 changes: 1 addition & 1 deletion test-utils/runtime/client/src/lib.rs
@@ -134,7 +134,7 @@ impl substrate_test_client::GenesisInit for GenesisParameters {
.insert(sp_core::storage::well_known_keys::CODE.to_vec(), code.clone());
}

-let child_roots = storage.children_default.iter().map(|(_sk, child_content)| {
+let child_roots = storage.children_default.values().map(|child_content| {
let state_root =
<<<runtime::Block as BlockT>::Header as HeaderT>::Hashing as HashT>::trie_root(
child_content.data.clone().into_iter().collect(),
5 changes: 2 additions & 3 deletions utils/frame/benchmarking-cli/src/pallet/command.rs
@@ -268,9 +268,8 @@ impl PalletCmd {

for s in 0..self.steps {
// This is the value we will be testing for component `name`
-let component_value = ((lowest as f32 + step_size * s as f32) as u32)
-.min(highest)
-.max(lowest);
+let component_value =
+((lowest as f32 + step_size * s as f32) as u32).clamp(lowest, highest);

// Select the max value for all the other components.
let c: Vec<(BenchmarkParameter, u32)> = components
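The rewritten expression picks evenly spaced test values for one component and clamps them back into `[lowest, highest]` so float rounding cannot step outside the range. A rough illustration with invented numbers, assuming `step_size` is the even spacing `(highest - lowest) / (steps - 1)`, which this hunk does not show:

fn main() {
    let (lowest, highest, steps): (u32, u32, u32) = (4, 100, 5);
    // Assumed spacing; the actual definition of `step_size` is outside this hunk.
    let step_size = (highest - lowest) as f32 / (steps - 1) as f32;

    let values: Vec<u32> = (0..steps)
        .map(|s| ((lowest as f32 + step_size * s as f32) as u32).clamp(lowest, highest))
        .collect();

    assert_eq!(values, vec![4, 28, 52, 76, 100]);
}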