Skip to content
This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Commit

Permalink
Easy PR: Fix warnings from latest nightly (#14195)
Browse files Browse the repository at this point in the history
* Remove unneeded `mut` bindings

* Remove needless borrows
  • Loading branch information
gilescope authored and Ank4n committed Jul 8, 2023
1 parent 369fc77 commit 58881a8
Show file tree
Hide file tree
Showing 13 changed files with 31 additions and 33 deletions.
2 changes: 1 addition & 1 deletion bin/node/bench/src/tempdb.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ impl TempDatabase {
},
DatabaseType::ParityDb => Arc::new(ParityDbWrapper({
let mut options = parity_db::Options::with_columns(self.0.path(), 1);
let mut column_options = &mut options.columns[0];
let column_options = &mut options.columns[0];
column_options.ref_counted = true;
column_options.preimage = true;
column_options.uniform = true;
Expand Down
6 changes: 3 additions & 3 deletions client/db/src/parity_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,16 +54,16 @@ pub fn open<H: Clone + AsRef<[u8]>>(
];

for i in compressed {
let mut column = &mut config.columns[i as usize];
let column = &mut config.columns[i as usize];
column.compression = parity_db::CompressionType::Lz4;
}

let mut state_col = &mut config.columns[columns::STATE as usize];
let state_col = &mut config.columns[columns::STATE as usize];
state_col.ref_counted = true;
state_col.preimage = true;
state_col.uniform = true;

let mut tx_col = &mut config.columns[columns::TRANSACTION as usize];
let tx_col = &mut config.columns[columns::TRANSACTION as usize];
tx_col.ref_counted = true;
tx_col.preimage = true;
tx_col.uniform = true;
Expand Down
4 changes: 2 additions & 2 deletions client/db/src/pinned_blocks_cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ impl<Block: BlockT> PinnedBlocksCache<Block> {
/// Attach body to an existing cache item
pub fn insert_body(&mut self, hash: Block::Hash, extrinsics: Option<Vec<Block::Extrinsic>>) {
match self.cache.peek_mut(&hash) {
Some(mut entry) => {
Some(entry) => {
entry.body = Some(extrinsics);
log::trace!(
target: LOG_TARGET,
Expand All @@ -192,7 +192,7 @@ impl<Block: BlockT> PinnedBlocksCache<Block> {
justifications: Option<Justifications>,
) {
match self.cache.peek_mut(&hash) {
Some(mut entry) => {
Some(entry) => {
entry.justifications = Some(justifications);
log::trace!(
target: LOG_TARGET,
Expand Down
2 changes: 1 addition & 1 deletion client/network/src/protocol/notifications/handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -467,7 +467,7 @@ impl ConnectionHandler for NotifsHandler {
match event {
ConnectionEvent::FullyNegotiatedInbound(inbound) => {
let (mut in_substream_open, protocol_index) = inbound.protocol;
let mut protocol_info = &mut self.protocols[protocol_index];
let protocol_info = &mut self.protocols[protocol_index];

match protocol_info.state {
State::Closed { pending_opening } => {
Expand Down
2 changes: 1 addition & 1 deletion client/network/sync/src/state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ where
} else {
values.key_values
};
let mut entry = self.state.entry(values.state_root).or_default();
let entry = self.state.entry(values.state_root).or_default();
if entry.0.len() > 0 && entry.1.len() > 1 {
// Already imported child_trie with same root.
// Warning this will not work with parallel download.
Expand Down
4 changes: 2 additions & 2 deletions client/statement-store/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ impl Index {
let priority = Priority(statement.priority().unwrap_or(0));
self.entries.insert(hash, (account, priority, statement.data_len()));
self.total_size += statement.data_len();
let mut account_info = self.accounts.entry(account).or_default();
let account_info = self.accounts.entry(account).or_default();
account_info.data_size += statement.data_len();
if let Some(channel) = statement.channel() {
account_info.channels.insert(channel, ChannelEntry { hash, priority });
Expand Down Expand Up @@ -530,7 +530,7 @@ impl Store {

let mut config = parity_db::Options::with_columns(&path, col::COUNT);

let mut statement_col = &mut config.columns[col::STATEMENTS as usize];
let statement_col = &mut config.columns[col::STATEMENTS as usize];
statement_col.ref_counted = false;
statement_col.preimage = true;
statement_col.uniform = true;
Expand Down
10 changes: 5 additions & 5 deletions frame/bounties/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -361,7 +361,7 @@ pub mod pallet {
) -> DispatchResult {
let max_amount = T::SpendOrigin::ensure_origin(origin)?;
Bounties::<T, I>::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult {
let mut bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;
let bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;
ensure!(
bounty.value <= max_amount,
pallet_treasury::Error::<T, I>::InsufficientPermission
Expand Down Expand Up @@ -396,7 +396,7 @@ pub mod pallet {

let curator = T::Lookup::lookup(curator)?;
Bounties::<T, I>::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult {
let mut bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;
let bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;
ensure!(
bounty.value <= max_amount,
pallet_treasury::Error::<T, I>::InsufficientPermission
Expand Down Expand Up @@ -444,7 +444,7 @@ pub mod pallet {
.or_else(|_| T::RejectOrigin::ensure_origin(origin).map(|_| None))?;

Bounties::<T, I>::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult {
let mut bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;
let bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;

let slash_curator = |curator: &T::AccountId,
curator_deposit: &mut BalanceOf<T, I>| {
Expand Down Expand Up @@ -527,7 +527,7 @@ pub mod pallet {
let signer = ensure_signed(origin)?;

Bounties::<T, I>::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult {
let mut bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;
let bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;

match bounty.status {
BountyStatus::CuratorProposed { ref curator } => {
Expand Down Expand Up @@ -571,7 +571,7 @@ pub mod pallet {
let beneficiary = T::Lookup::lookup(beneficiary)?;

Bounties::<T, I>::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult {
let mut bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;
let bounty = maybe_bounty.as_mut().ok_or(Error::<T, I>::InvalidIndex)?;

// Ensure no active child bounties before processing the call.
ensure!(
Expand Down
8 changes: 4 additions & 4 deletions frame/child-bounties/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@ pub mod pallet {
parent_bounty_id,
child_bounty_id,
|maybe_child_bounty| -> DispatchResult {
let mut child_bounty =
let child_bounty =
maybe_child_bounty.as_mut().ok_or(BountiesError::<T>::InvalidIndex)?;

// Ensure child-bounty is in expected state.
Expand Down Expand Up @@ -396,7 +396,7 @@ pub mod pallet {
parent_bounty_id,
child_bounty_id,
|maybe_child_bounty| -> DispatchResult {
let mut child_bounty =
let child_bounty =
maybe_child_bounty.as_mut().ok_or(BountiesError::<T>::InvalidIndex)?;

// Ensure child-bounty is in expected state.
Expand Down Expand Up @@ -473,7 +473,7 @@ pub mod pallet {
parent_bounty_id,
child_bounty_id,
|maybe_child_bounty| -> DispatchResult {
let mut child_bounty =
let child_bounty =
maybe_child_bounty.as_mut().ok_or(BountiesError::<T>::InvalidIndex)?;

let slash_curator = |curator: &T::AccountId,
Expand Down Expand Up @@ -591,7 +591,7 @@ pub mod pallet {
parent_bounty_id,
child_bounty_id,
|maybe_child_bounty| -> DispatchResult {
let mut child_bounty =
let child_bounty =
maybe_child_bounty.as_mut().ok_or(BountiesError::<T>::InvalidIndex)?;

// Ensure child-bounty is in active state.
Expand Down
2 changes: 1 addition & 1 deletion frame/nomination-pools/benchmarking/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -609,7 +609,7 @@ frame_benchmarking::benchmarks! {
let (depositor, pool_account) = create_pool_account::<T>(0, min_create_bond, None);
BondedPools::<T>::mutate(&1, |maybe_pool| {
// Force the pool into an invalid state
maybe_pool.as_mut().map(|mut pool| pool.points = min_create_bond * 10u32.into());
maybe_pool.as_mut().map(|pool| pool.points = min_create_bond * 10u32.into());
});

let caller = account("caller", 0, USER_SEED);
Expand Down
4 changes: 1 addition & 3 deletions frame/staking/src/pallet/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1006,9 +1006,7 @@ pub mod pallet {

// Note: in case there is no current era it is fine to bond one era more.
let era = Self::current_era().unwrap_or(0) + T::BondingDuration::get();
if let Some(mut chunk) =
ledger.unlocking.last_mut().filter(|chunk| chunk.era == era)
{
if let Some(chunk) = ledger.unlocking.last_mut().filter(|chunk| chunk.era == era) {
// To keep the chunk count down, we only keep one chunk per era. Since
// `unlocking` is a FiFo queue, if a chunk exists for `era` we know that it will
// be the last one.
Expand Down
14 changes: 7 additions & 7 deletions frame/support/src/storage/generator/double_map.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ use crate::{
Never,
};
use codec::{Decode, Encode, EncodeLike, FullCodec, FullEncode};
use sp_std::{borrow::Borrow, prelude::*};
use sp_std::prelude::*;

/// Generator for `StorageDoubleMap` used by `decl_storage`.
///
Expand Down Expand Up @@ -78,7 +78,7 @@ pub trait StorageDoubleMap<K1: FullEncode, K2: FullEncode, V: FullCodec> {
KArg1: EncodeLike<K1>,
{
let storage_prefix = storage_prefix(Self::module_prefix(), Self::storage_prefix());
let key_hashed = k1.borrow().using_encoded(Self::Hasher1::hash);
let key_hashed = k1.using_encoded(Self::Hasher1::hash);

let mut final_key = Vec::with_capacity(storage_prefix.len() + key_hashed.as_ref().len());

Expand All @@ -95,8 +95,8 @@ pub trait StorageDoubleMap<K1: FullEncode, K2: FullEncode, V: FullCodec> {
KArg2: EncodeLike<K2>,
{
let storage_prefix = storage_prefix(Self::module_prefix(), Self::storage_prefix());
let key1_hashed = k1.borrow().using_encoded(Self::Hasher1::hash);
let key2_hashed = k2.borrow().using_encoded(Self::Hasher2::hash);
let key1_hashed = k1.using_encoded(Self::Hasher1::hash);
let key2_hashed = k2.using_encoded(Self::Hasher2::hash);

let mut final_key = Vec::with_capacity(
storage_prefix.len() + key1_hashed.as_ref().len() + key2_hashed.as_ref().len(),
Expand Down Expand Up @@ -198,7 +198,7 @@ where
KArg2: EncodeLike<K2>,
VArg: EncodeLike<V>,
{
unhashed::put(&Self::storage_double_map_final_key(k1, k2), &val.borrow())
unhashed::put(&Self::storage_double_map_final_key(k1, k2), &val)
}

fn remove<KArg1, KArg2>(k1: KArg1, k2: KArg2)
Expand Down Expand Up @@ -336,8 +336,8 @@ where
let old_key = {
let storage_prefix = storage_prefix(Self::module_prefix(), Self::storage_prefix());

let key1_hashed = key1.borrow().using_encoded(OldHasher1::hash);
let key2_hashed = key2.borrow().using_encoded(OldHasher2::hash);
let key1_hashed = key1.using_encoded(OldHasher1::hash);
let key2_hashed = key2.using_encoded(OldHasher2::hash);

let mut final_key = Vec::with_capacity(
storage_prefix.len() + key1_hashed.as_ref().len() + key2_hashed.as_ref().len(),
Expand Down
4 changes: 2 additions & 2 deletions frame/support/src/storage/generator/map.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ pub trait StorageMap<K: FullEncode, V: FullCodec> {
KeyArg: EncodeLike<K>,
{
let storage_prefix = storage_prefix(Self::module_prefix(), Self::storage_prefix());
let key_hashed = key.borrow().using_encoded(Self::Hasher::hash);
let key_hashed = key.using_encoded(Self::Hasher::hash);

let mut final_key = Vec::with_capacity(storage_prefix.len() + key_hashed.as_ref().len());

Expand Down Expand Up @@ -327,7 +327,7 @@ impl<K: FullEncode, V: FullCodec, G: StorageMap<K, V>> storage::StorageMap<K, V>
fn migrate_key<OldHasher: StorageHasher, KeyArg: EncodeLike<K>>(key: KeyArg) -> Option<V> {
let old_key = {
let storage_prefix = storage_prefix(Self::module_prefix(), Self::storage_prefix());
let key_hashed = key.borrow().using_encoded(OldHasher::hash);
let key_hashed = key.using_encoded(OldHasher::hash);

let mut final_key =
Vec::with_capacity(storage_prefix.len() + key_hashed.as_ref().len());
Expand Down
2 changes: 1 addition & 1 deletion primitives/npos-elections/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,7 @@ pub fn to_support_map<AccountId: IdentifierT>(
// build support struct.
for StakedAssignment { who, distribution } in assignments.iter() {
for (c, weight_extended) in distribution.iter() {
let mut support = supports.entry(c.clone()).or_default();
let support = supports.entry(c.clone()).or_default();
support.total = support.total.saturating_add(*weight_extended);
support.voters.push((who.clone(), *weight_extended));
}
Expand Down

0 comments on commit 58881a8

Please sign in to comment.