Commit
*: run clippy for new toolchain
The biggest changes here are

- replace .into_iter() with .iter() where possible
- use inclusive ranges where possible

The first is a good lint. The second I'm less sure about: the lint
says inclusive ranges may be more readable, but it's easy to imagine
cases where they are not. I've just done as the lint says, though;
the sketches below illustrate both patterns.

Signed-off-by: Brian Anderson <[email protected]>
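For reference, a minimal standalone sketch of the first change (not taken from the diff; the function and values are invented for illustration). On a borrowed slice or &Vec, .into_iter() can only yield references anyway, so .iter() states the by-reference iteration directly, which is what the new clippy warning asks for:

    fn store_ids(peers: &[u64]) -> Vec<u64> {
        // `peers` is already borrowed, so `.into_iter()` would yield `&u64`
        // exactly like `.iter()`; writing `.iter()` makes that explicit.
        peers.iter().cloned().collect()
    }

    fn main() {
        assert_eq!(store_ids(&[1, 2, 3]), vec![1, 2, 3]);
    }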

brson committed Jan 7, 2019
1 parent 25996ef commit 0f9004e
Showing 48 changed files with 85 additions and 89 deletions.
12 changes: 6 additions & 6 deletions Makefile
@@ -45,12 +45,12 @@ pre-clippy: unset-override

clippy: pre-clippy
@cargo clippy --all --all-targets -- \
-A module_inception -A needless_pass_by_value -A cyclomatic_complexity \
-A unreadable_literal -A should_implement_trait -A verbose_bit_mask \
-A implicit_hasher -A large_enum_variant -A new_without_default \
-A new_without_default_derive -A neg_cmp_op_on_partial_ord \
-A too_many_arguments -A excessive_precision -A collapsible_if \
-A blacklisted_name
-A clippy::module_inception -A clippy::needless_pass_by_value -A clippy::cyclomatic_complexity \
-A clippy::unreadable_literal -A clippy::should_implement_trait -A clippy::verbose_bit_mask \
-A clippy::implicit_hasher -A clippy::large_enum_variant -A clippy::new_without_default \
-A clippy::new_without_default_derive -A clippy::neg_cmp_op_on_partial_ord \
-A clippy::too_many_arguments -A clippy::excessive_precision -A clippy::collapsible_if \
-A clippy::blacklisted_name

dev: format clippy
@env FAIL_POINT=1 make test
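The Makefile change above tracks the switch to tool-scoped lint names on the newer toolchain: each -A flag now names the lint as clippy::lint_name, and the per-item attributes later in this diff get the same prefix. A small standalone sketch of the attribute form (the function below is made up for illustration, not code from this repository):

    // Old spelling, which newer toolchains warn about or reject:
    //   #[cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))]
    // New spelling, matching the -A clippy::... flags above:
    #[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))]
    fn start(a: u8, b: u8, c: u8, d: u8, e: u8, f: u8, g: u8, h: u8) -> u32 {
        // Eight parameters exceed clippy's default threshold for
        // too_many_arguments, so the allow above silences that warning.
        [a, b, c, d, e, f, g, h].iter().map(|&x| u32::from(x)).sum()
    }

    fn main() {
        assert_eq!(start(1, 1, 1, 1, 1, 1, 1, 1), 8);
    }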
2 changes: 1 addition & 1 deletion components/test_raftstore/src/cluster.rs
@@ -297,7 +297,7 @@ impl<T: Simulator> Cluster<T> {
.map(|region| {
region
.get_peers()
.into_iter()
.iter()
.map(|p| p.get_store_id())
.collect()
})
2 changes: 1 addition & 1 deletion components/test_storage/src/assert_storage.rs
@@ -210,7 +210,7 @@ impl<E: Engine> AssertionStorage<E> {
}

pub fn batch_get_ok(&self, keys: &[&[u8]], ts: u64, expect: Vec<&[u8]>) {
let keys: Vec<Key> = keys.into_iter().map(|x| Key::from_raw(x)).collect();
let keys: Vec<Key> = keys.iter().map(|x| Key::from_raw(x)).collect();
let result: Vec<Vec<u8>> = self
.store
.batch_get(self.ctx.clone(), &keys, ts)
2 changes: 1 addition & 1 deletion src/config.rs
@@ -812,7 +812,7 @@ pub mod log_level_serde {
.ok_or_else(|| D::Error::invalid_value(Unexpected::Str(&string), &"a valid log level"))
}

#[cfg_attr(feature = "cargo-clippy", allow(trivially_copy_pass_by_ref))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::trivially_copy_pass_by_ref))]
pub fn serialize<S>(value: &Level, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
2 changes: 1 addition & 1 deletion src/coprocessor/codec/batch/column.rs
@@ -233,7 +233,7 @@ impl BatchColumn {
///
/// Panics if `field_type` doesn't match current column's type.
#[inline]
#[cfg_attr(feature = "cargo-clippy", allow(cast_lossless))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::cast_lossless))]
pub fn push_datum(
&mut self,
mut raw_datum: &[u8],
2 changes: 1 addition & 1 deletion src/coprocessor/codec/batch/rows.rs
@@ -347,7 +347,7 @@ mod tests {
fn raw_row_from_datums(datums: impl AsRef<[Option<Datum>]>, comparable: bool) -> Vec<Vec<u8>> {
datums
.as_ref()
.into_iter()
.iter()
.map(|some_datum| {
let mut ret = Vec::new();
if some_datum.is_some() {
2 changes: 1 addition & 1 deletion src/coprocessor/codec/chunk/column.rs
@@ -355,7 +355,7 @@ impl Column {
col.fixed_len * col.length
} else {
col.var_offsets.clear();
for _ in 0..length + 1 {
for _ in 0..=length {
col.var_offsets.push(number::decode_i32_le(buf)? as usize);
}
col.var_offsets[col.length]
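This is the first of the inclusive-range rewrites; the later changes in decimal.rs, peer_storage.rs, and the tests follow the same pattern. A standalone sketch with made-up values, showing that the two spellings visit the same indices:

    fn main() {
        let length = 4usize;

        // 0..length + 1 and 0..=length cover exactly the same indices.
        let exclusive: Vec<usize> = (0..length + 1).collect();
        let inclusive: Vec<usize> = (0..=length).collect();
        assert_eq!(exclusive, inclusive);

        // The reversed form used in do_div_mod is equivalent as well.
        assert!((0..=length).rev().eq((0..length + 1).rev()));
    }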
20 changes: 10 additions & 10 deletions src/coprocessor/codec/mysql/decimal.rs
@@ -653,7 +653,7 @@ fn do_div_mod(
}
}
let mut carry = 0;
for (r_idx, l_idx) in (r_start..r_stop).rev().zip((0..l_idx + r_len + 1).rev()) {
for (r_idx, l_idx) in (r_start..r_stop).rev().zip((0..=l_idx + r_len).rev()) {
let x = guess * i64::from(rhs.word_buf[r_idx]);
let hi = x / i64::from(WORD_BASE);
let lo = x - hi * i64::from(WORD_BASE);
@@ -668,7 +668,7 @@ fn do_div_mod(
if carry > 0 {
guess -= 1;
let mut carry = 0;
for (r_idx, l_idx) in (r_start..r_stop).rev().zip((0..l_idx + r_len + 1).rev()) {
for (r_idx, l_idx) in (r_start..r_stop).rev().zip((0..=l_idx + r_len).rev()) {
add(buf[l_idx], rhs.word_buf[r_idx], &mut carry, &mut buf[l_idx]);
}
}
@@ -1211,7 +1211,7 @@ impl Decimal {
// e.g ceiling 3.0001 to scale 1, gets 3.1
let idx = to_idx + frac_word_cnt as i8 - frac_words_to;
if idx > to_idx {
res.word_buf[(to_idx + 1) as usize..(idx as usize + 1)]
res.word_buf[(to_idx + 1) as usize..=(idx as usize)]
.iter()
.any(|c| *c != 0)
} else {
@@ -1441,18 +1441,18 @@ impl Decimal {
word_shift = new_front / DIGITS_PER_WORD as isize;
let to = ((beg / DIGITS_PER_WORD) as isize - word_shift) as usize;
let barier = (((end - 1) / DIGITS_PER_WORD) as isize - word_shift) as usize;
for i in to..barier + 1 {
for i in to..=barier {
res.word_buf[i] = res.word_buf[i + word_shift as usize];
}
for i in barier + 1..barier + word_shift as usize + 1 {
for i in barier + 1..=barier + word_shift as usize {
res.word_buf[i] = 0;
}
word_shift = -word_shift;
} else {
word_shift = (1 - new_front) / DIGITS_PER_WORD as isize;
let to = (((end - 1) / DIGITS_PER_WORD) as isize + word_shift) as usize;
let barier = ((beg / DIGITS_PER_WORD) as isize + word_shift) as usize;
for i in (barier..to + 1).rev() {
for i in (barier..=to).rev() {
res.word_buf[i] = res.word_buf[i - word_shift as usize];
}
for i in barier - word_shift as usize..barier {
@@ -1472,7 +1472,7 @@ impl Decimal {
0
};
if new_point_word > end_word {
for i in end_word + 1..new_point_word + 1 {
for i in end_word + 1..=new_point_word {
res.word_buf[i as usize] = 0;
}
} else {
@@ -1616,7 +1616,7 @@ impl Decimal {
let mut inner_idx = 0;
let mut word_idx = int_word_cnt as usize;
let mut word = 0;
for c in bs[int_idx - int_cnt as usize..int_idx].into_iter().rev() {
for c in bs[int_idx - int_cnt as usize..int_idx].iter().rev() {
word += u32::from(c - b'0') * TEN_POW[inner_idx];
inner_idx += 1;
if inner_idx == DIGITS_PER_WORD as usize {
@@ -1715,7 +1715,7 @@ macro_rules! enable_conv_for_int {
($s:ty, $t:ty) => {
impl From<$s> for Decimal {
fn from(t: $s) -> Decimal {
#[cfg_attr(feature = "cargo-clippy", allow(cast_lossless))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::cast_lossless))]
(t as $t).into()
}
}
@@ -2297,7 +2297,7 @@ mod tests {
}

#[test]
#[cfg_attr(feature = "cargo-clippy", allow(approx_constant))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::approx_constant))]
fn test_f64() {
let cases = vec![
("12345", 12345f64),
4 changes: 2 additions & 2 deletions src/coprocessor/codec/mysql/time/mod.rs
@@ -77,7 +77,7 @@ pub fn zero_datetime(tz: Tz) -> Time {
Time::new(zero_time(tz), TimeType::DateTime, mysql::DEFAULT_FSP).unwrap()
}

#[cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))]
#[inline]
fn ymd_hms_nanos<T: TimeZone>(
tz: T,
@@ -1086,7 +1086,7 @@ mod tests {
}

#[test]
#[cfg_attr(feature = "cargo-clippy", allow(zero_prefixed_literal))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::zero_prefixed_literal))]
fn test_parse_datetime_system_timezone() {
// Basically, we check whether the parse result is the same when construcing using local.
let tables = vec![
2 changes: 1 addition & 1 deletion src/coprocessor/codec/table.rs
@@ -120,7 +120,7 @@ pub fn encode_row(row: Vec<Datum>, col_ids: &[i64]) -> Result<Vec<u8>> {
));
}
let mut values = Vec::with_capacity(cmp::max(row.len() * 2, 1));
for (&id, col) in col_ids.into_iter().zip(row) {
for (&id, col) in col_ids.iter().zip(row) {
values.push(Datum::I64(id));
let fc = flatten(col)?;
values.push(fc);
4 changes: 2 additions & 2 deletions src/coprocessor/dag/expr/builtin_cast.rs
@@ -753,7 +753,7 @@ mod tests {
fn test_cast_as_int() {
let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test()));
let t = Time::parse_utc_datetime("2012-12-12 12:00:23", 0).unwrap();
#[cfg_attr(feature = "cargo-clippy", allow(inconsistent_digit_grouping))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::inconsistent_digit_grouping))]
let time_int = 2012_12_12_12_00_23i64;
let duration_t = Duration::parse(b"12:00:23", 0).unwrap();
let cases = vec![
@@ -878,7 +878,7 @@ mod tests {
fn test_cast_as_real() {
let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test()));
let t = Time::parse_utc_datetime("2012-12-12 12:00:23", 0).unwrap();
#[cfg_attr(feature = "cargo-clippy", allow(inconsistent_digit_grouping))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::inconsistent_digit_grouping))]
let int_t = 2012_12_12_12_00_23u64;
let duration_t = Duration::parse(b"12:00:23", 0).unwrap();
let cases = vec![
2 changes: 1 addition & 1 deletion src/coprocessor/dag/expr/builtin_string.rs
@@ -373,7 +373,7 @@ impl ScalarFunc {
}
}

#[cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::wrong_self_convention))]
#[inline]
pub fn from_base64<'a, 'b: 'a>(
&'b self,
2 changes: 1 addition & 1 deletion src/coprocessor/endpoint.rs
@@ -831,7 +831,7 @@ mod tests {
}

#[test]
#[cfg_attr(feature = "cargo-clippy", allow(needless_range_loop))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::needless_range_loop))]
fn test_error_streaming_response() {
let pd_worker = FutureWorker::new("test-pd-worker");
let engine = TestEngineBuilder::new().build().unwrap();
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -27,7 +27,7 @@
#![feature(range_contains)]
// Currently this raises some false positives, so we allow it:
// https://github.com/rust-lang-nursery/rust-clippy/issues/2638
#![cfg_attr(feature = "cargo-clippy", allow(nonminimal_bool))]
#![cfg_attr(feature = "cargo-clippy", allow(clippy::nonminimal_bool))]

extern crate alloc;
extern crate backtrace;
2 changes: 1 addition & 1 deletion src/pd/pd.rs
@@ -492,7 +492,7 @@ impl<T: PdClient> Runner<T> {

if pd_region
.get_peers()
.into_iter()
.iter()
.all(|p| p.get_id() != peer.get_id())
{
// Peer is not a member of this Region anymore. Probably it's removed out.
2 changes: 1 addition & 1 deletion src/pd/util.rs
@@ -433,7 +433,7 @@ pub fn try_connect_leader(
let mut resp = None;
// Try to connect to other members, then the previous leader.
'outer: for m in members
.into_iter()
.iter()
.filter(|m| *m != previous_leader)
.chain(&[previous_leader.clone()])
{
2 changes: 1 addition & 1 deletion src/raftstore/store/fsm/batch.rs
@@ -468,7 +468,7 @@ mod tests {
}
}

#[cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::type_complexity))]
fn new_runner(
cap: usize,
) -> (
2 changes: 1 addition & 1 deletion src/raftstore/store/fsm/peer.rs
@@ -1630,7 +1630,7 @@ impl<T: Transport, C: PdClient> Store<T, C> {
};
}

#[cfg_attr(feature = "cargo-clippy", allow(if_same_then_else))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::if_same_then_else))]
pub fn on_raft_gc_log_tick(&mut self, event_loop: &mut EventLoop<Self>) {
// As leader, we would not keep caches for the peers that didn't response heartbeat in the
// last few seconds. That happens probably because another TiKV is down. In this case if we
2 changes: 1 addition & 1 deletion src/raftstore/store/fsm/store.rs
@@ -120,7 +120,7 @@
}

impl<T: Transport, C: PdClient> Store<T, C> {
#[cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))]
pub fn new(
ch: StoreChannel,
meta: metapb::Store,
2 changes: 1 addition & 1 deletion src/raftstore/store/peer.rs
@@ -2360,7 +2360,7 @@ mod tests {
}
}

#[cfg_attr(feature = "cargo-clippy", allow(useless_vec))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::useless_vec))]
#[test]
fn test_request_inspector() {
struct DummyInspector {
10 changes: 5 additions & 5 deletions src/raftstore/store/peer_storage.rs
@@ -809,7 +809,7 @@ impl PeerStorage {
}

// Delete any previously appended log entries which never committed.
for i in (last_index + 1)..(prev_last_index + 1) {
for i in (last_index + 1)..=prev_last_index {
ready_ctx
.raft_wb
.delete(&keys::raft_log_key(self.get_region_id(), i))?;
@@ -1258,7 +1258,7 @@ pub fn clear_meta(
first_index = keys::raft_log_index(key).unwrap();
Ok(false)
})?;
for id in first_index..last_index + 1 {
for id in first_index..=last_index {
raft_wb.delete(&keys::raft_log_key(region_id, id))?;
}
raft_wb.delete(&keys::raft_state_key(region_id))?;
@@ -1975,14 +1975,14 @@ mod tests {
let cap = MAX_CACHE_CAPACITY as u64;

// result overflow
entries = (3..cap + 1).map(|i| new_entry(i + 5, 8)).collect();
entries = (3..=cap).map(|i| new_entry(i + 5, 8)).collect();
append_ents(&mut store, &entries);
exp_res.remove(0);
exp_res.extend_from_slice(&entries);
validate_cache(&store, &exp_res);

// input overflow
entries = (0..cap + 1).map(|i| new_entry(i + cap + 6, 8)).collect();
entries = (0..=cap).map(|i| new_entry(i + cap + 6, 8)).collect();
append_ents(&mut store, &entries);
exp_res = entries[entries.len() - cap as usize..].to_vec();
validate_cache(&store, &exp_res);
@@ -2000,7 +2000,7 @@ mod tests {
assert!(store.cache.cache.capacity() < cap as usize);

// append shrink
entries = (0..cap + 1).map(|i| new_entry(i, 8)).collect();
entries = (0..=cap).map(|i| new_entry(i, 8)).collect();
append_ents(&mut store, &entries);
assert!(store.cache.cache.capacity() >= cap as usize);
append_ents(&mut store, &[new_entry(6, 8)]);
4 changes: 2 additions & 2 deletions src/raftstore/store/region_snapshot.rs
@@ -470,7 +470,7 @@ mod tests {
assert!(v4.is_err());
}

#[cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::type_complexity))]
#[test]
fn test_seek_and_seek_prev() {
let path = TempDir::new("test-raftstore").unwrap();
@@ -574,7 +574,7 @@ mod tests {
check_seek_result(&snap, Some(b"a00"), Some(b"a15"), &seek_table);
}

#[cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::type_complexity))]
#[test]
fn test_iterate() {
let path = TempDir::new("test-raftstore").unwrap();
4 changes: 2 additions & 2 deletions src/raftstore/store/util.rs
@@ -1410,7 +1410,7 @@ mod tests {
let path_str = path.path().to_str().unwrap();

let cfs_opts = ALL_CFS
.into_iter()
.iter()
.map(|cf| CFOptions::new(cf, ColumnFamilyOptions::new()))
.collect();
let db = new_engine_opt(path_str, DBOptions::new(), cfs_opts).unwrap();
@@ -1467,7 +1467,7 @@ mod tests {
let path_str = path.path().to_str().unwrap();

let cfs_opts = ALL_CFS
.into_iter()
.iter()
.map(|cf| {
let mut cf_opts = ColumnFamilyOptions::new();
cf_opts.set_level_zero_file_num_compaction_trigger(1);
2 changes: 1 addition & 1 deletion src/raftstore/store/worker/split_check.rs
@@ -93,7 +93,7 @@ impl<'a> MergedIterator<'a> {
) -> Result<MergedIterator<'a>> {
let mut iters = Vec::with_capacity(cfs.len());
let mut heap = BinaryHeap::with_capacity(cfs.len());
for (pos, cf) in cfs.into_iter().enumerate() {
for (pos, cf) in cfs.iter().enumerate() {
let iter_opt =
IterOption::new(Some(start_key.to_vec()), Some(end_key.to_vec()), fill_cache);
let mut iter = db.new_iterator_cf(cf, iter_opt)?;
2 changes: 1 addition & 1 deletion src/server/debug.rs
@@ -2020,7 +2020,7 @@ mod tests {
let path = TempDir::new("test_mvcc_checker").expect("");
let path_str = path.path().to_str().unwrap();
let cfs_opts = ALL_CFS
.into_iter()
.iter()
.map(|cf| CFOptions::new(cf, ColumnFamilyOptions::new()))
.collect();
let db = Arc::new(new_engine_opt(path_str, DBOptions::new(), cfs_opts).unwrap());
4 changes: 2 additions & 2 deletions src/server/node.rs
@@ -131,7 +131,7 @@
}
}

#[cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))]
pub fn start<T>(
&mut self,
event_loop: EventLoop<Store<T, C>>,
@@ -322,7 +322,7 @@
Err(box_err!("check cluster bootstrapped failed"))
}

#[cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))]
fn start_store<T>(
&mut self,
mut event_loop: EventLoop<Store<T, C>>,
(Diff truncated; the remaining changed files are not shown here.)
