Commit
Merge pull request #1547 from nyurik/cast-lossless
Fix clippy::cast_lossless
Byron authored Aug 24, 2024
2 parents 030d880 + b5eb8fd · commit c3a7dcf
Showing 25 changed files with 52 additions and 47 deletions.
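
The changes below all follow one mechanical pattern flagged by `clippy::cast_lossless`: a widening `as` cast is replaced with the equivalent `From`/`Into` conversion, which states the intent explicitly and can never truncate. A minimal sketch of the pattern, with illustrative names that do not come from this diff:

    fn main() {
        // `as` compiles for widening and truncating casts alike, so the reader
        // cannot tell from the call site whether data can be lost.
        let num_deltas: u16 = 3;
        let lossy_style = num_deltas as u64;

        // `u64::from` is only implemented for conversions that cannot lose
        // information, so this call stops compiling if the types ever change
        // in a way that would make the conversion lossy.
        let lossless_style = u64::from(num_deltas);

        assert_eq!(lossy_style, lossless_style);
    }

The lint belongs to clippy's allow-by-default `pedantic` group, so it only fires when enabled explicitly, for example with `cargo clippy -- -W clippy::cast_lossless`.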
2 changes: 1 addition & 1 deletion gitoxide-core/src/repository/odb.rs
@@ -133,7 +133,7 @@ pub fn statistics(
find::Header::Packed(packed) => {
self.packed_objects += 1;
self.packed_delta_objects += usize::from(packed.num_deltas > 0);
self.total_delta_chain_length += packed.num_deltas as u64;
self.total_delta_chain_length += u64::from(packed.num_deltas);
self.count(packed.kind, packed.object_size);
}
}
4 changes: 2 additions & 2 deletions gix-archive/src/write.rs
@@ -64,7 +64,7 @@ where
out,
match compression_level {
None => flate2::Compression::default(),
Some(level) => flate2::Compression::new(level as u32),
Some(level) => flate2::Compression::new(u32::from(level)),
},
);
let mut ar = tar::Builder::new(gz);
@@ -126,7 +126,7 @@ where
NextFn: FnMut(&mut Stream) -> Result<Option<Entry<'_>>, gix_worktree_stream::entry::Error>,
{
let compression_level = match opts.format {
Format::Zip { compression_level } => compression_level.map(|lvl| lvl as i64),
Format::Zip { compression_level } => compression_level.map(i64::from),
_other => return write_stream(stream, next_entry, out, opts),
};

2 changes: 1 addition & 1 deletion gix-commitgraph/src/file/init.rs
@@ -99,7 +99,7 @@ impl File {
let base_graph_count = data[ofs];
ofs += 1;

let chunks = gix_chunk::file::Index::from_bytes(&data, ofs, chunk_count as u32)?;
let chunks = gix_chunk::file::Index::from_bytes(&data, ofs, u32::from(chunk_count))?;

let base_graphs_list_offset = chunks
.validated_usize_offset_by_id(BASE_GRAPHS_LIST_CHUNK_ID, |chunk_range| {
8 changes: 4 additions & 4 deletions gix-features/src/decode.rs
@@ -9,13 +9,13 @@ pub fn leb64_from_read(mut r: impl Read) -> Result<(u64, usize), std::io::Error>
let mut i = 0;
r.read_exact(&mut b)?;
i += 1;
let mut value = b[0] as u64 & 0x7f;
let mut value = u64::from(b[0]) & 0x7f;
while b[0] & 0x80 != 0 {
r.read_exact(&mut b)?;
i += 1;
debug_assert!(i <= 10, "Would overflow value at 11th iteration");
value += 1;
value = (value << 7) + (b[0] as u64 & 0x7f);
value = (value << 7) + (u64::from(b[0]) & 0x7f);
}
Ok((value, i))
}
@@ -26,13 +26,13 @@ pub fn leb64(d: &[u8]) -> (u64, usize) {
let mut i = 0;
let mut c = d[i];
i += 1;
let mut value = c as u64 & 0x7f;
let mut value = u64::from(c) & 0x7f;
while c & 0x80 != 0 {
c = d[i];
i += 1;
debug_assert!(i <= 10, "Would overflow value at 11th iteration");
value += 1;
value = (value << 7) + (c as u64 & 0x7f);
value = (value << 7) + (u64::from(c) & 0x7f);
}
(value, i)
}
4 changes: 2 additions & 2 deletions gix-index/src/entry/flags.rs
@@ -115,7 +115,7 @@ pub(crate) mod at_rest {

impl Flags {
pub fn to_memory(self) -> super::Flags {
super::Flags::from_bits_retain(self.bits() as u32)
super::Flags::from_bits_retain(u32::from(self.bits()))
}
}

@@ -135,7 +135,7 @@ pub(crate) mod at_rest {
)
}
pub fn to_flags(self) -> Option<super::Flags> {
super::Flags::from_bits((self.bits() as u32) << 16)
super::Flags::from_bits(u32::from(self.bits()) << 16)
}
}

2 changes: 1 addition & 1 deletion gix-index/src/entry/mode.rs
@@ -69,7 +69,7 @@ impl Mode {

impl From<gix_object::tree::EntryMode> for Mode {
fn from(value: gix_object::tree::EntryMode) -> Self {
Self::from_bits_truncate(value.0 as u32)
Self::from_bits_truncate(u32::from(value.0))
}
}

2 changes: 1 addition & 1 deletion gix-index/src/entry/stat.rs
@@ -21,7 +21,7 @@ impl Stat {
check_stat, use_nsec, ..
}: Options,
) -> bool {
match timestamp.unix_seconds().cmp(&(self.mtime.secs as i64)) {
match timestamp.unix_seconds().cmp(&i64::from(self.mtime.secs)) {
Ordering::Less => true,
Ordering::Equal if use_nsec && check_stat => timestamp.nanoseconds() <= self.mtime.nsecs,
Ordering::Equal => true,
2 changes: 1 addition & 1 deletion gix-object/src/tree/ref_iter.rs
@@ -99,7 +99,7 @@ fn mode_from_decimal(i: &[u8]) -> Option<(u32, &[u8])> {
if *b < b'0' || *b > b'7' {
return None;
}
mode = (mode << 3) + (b - b'0') as u32;
mode = (mode << 3) + u32::from(b - b'0');
spacer_pos += 1;
}
if i.len() < spacer_pos {
2 changes: 1 addition & 1 deletion gix-odb/src/store_impls/dynamic/prefix.rs
@@ -91,7 +91,7 @@ where
*snapshot = self.store.load_all_indices()?;
let mut obj_count = 0;
for index in &snapshot.indices {
obj_count += index.num_objects() as u64;
obj_count += u64::from(index.num_objects());
}
*count = Some(obj_count);
Ok(obj_count)
16 changes: 8 additions & 8 deletions gix-pack/src/data/delta.rs
@@ -6,7 +6,7 @@ pub fn decode_header_size(d: &[u8]) -> (u64, usize) {
let mut consumed = 0;
for cmd in d.iter() {
consumed += 1;
size |= (*cmd as u64 & 0x7f) << i;
size |= (u64::from(*cmd) & 0x7f) << i;
i += 7;
if *cmd & 0x80 == 0 {
break;
@@ -23,31 +23,31 @@ pub fn apply(base: &[u8], mut target: &mut [u8], data: &[u8]) {
cmd if cmd & 0b1000_0000 != 0 => {
let (mut ofs, mut size): (u32, u32) = (0, 0);
if cmd & 0b0000_0001 != 0 {
ofs = data[i] as u32;
ofs = u32::from(data[i]);
i += 1;
}
if cmd & 0b0000_0010 != 0 {
ofs |= (data[i] as u32) << 8;
ofs |= u32::from(data[i]) << 8;
i += 1;
}
if cmd & 0b0000_0100 != 0 {
ofs |= (data[i] as u32) << 16;
ofs |= u32::from(data[i]) << 16;
i += 1;
}
if cmd & 0b0000_1000 != 0 {
ofs |= (data[i] as u32) << 24;
ofs |= u32::from(data[i]) << 24;
i += 1;
}
if cmd & 0b0001_0000 != 0 {
size = data[i] as u32;
size = u32::from(data[i]);
i += 1;
}
if cmd & 0b0010_0000 != 0 {
size |= (data[i] as u32) << 8;
size |= u32::from(data[i]) << 8;
i += 1;
}
if cmd & 0b0100_0000 != 0 {
size |= (data[i] as u32) << 16;
size |= u32::from(data[i]) << 16;
i += 1;
}
if size == 0 {
8 changes: 4 additions & 4 deletions gix-pack/src/data/entry/decode.rs
@@ -104,13 +104,13 @@ fn streaming_parse_header_info(read: &mut dyn io::Read) -> Result<(u8, u64, usiz
let mut c = byte[0];
let mut i = 1;
let type_id = (c >> 4) & 0b0000_0111;
let mut size = c as u64 & 0b0000_1111;
let mut size = u64::from(c) & 0b0000_1111;
let mut s = 4;
while c & 0b1000_0000 != 0 {
read.read_exact(&mut byte)?;
c = byte[0];
i += 1;
size += ((c & 0b0111_1111) as u64) << s;
size += u64::from(c & 0b0111_1111) << s;
s += 7;
}
Ok((type_id, size, i))
@@ -122,12 +122,12 @@ fn parse_header_info(data: &[u8]) -> (u8, u64, usize) {
let mut c = data[0];
let mut i = 1;
let type_id = (c >> 4) & 0b0000_0111;
let mut size = c as u64 & 0b0000_1111;
let mut size = u64::from(c) & 0b0000_1111;
let mut s = 4;
while c & 0b1000_0000 != 0 {
c = data[i];
i += 1;
size += ((c & 0b0111_1111) as u64) << s;
size += u64::from(c & 0b0111_1111) << s;
s += 7;
}
(type_id, size, i)
2 changes: 1 addition & 1 deletion gix-pack/src/data/input/entry.rs
@@ -25,7 +25,7 @@ impl input::Entry {
}
/// The amount of bytes this entry may consume in a pack data file
pub fn bytes_in_pack(&self) -> u64 {
self.header_size as u64 + self.compressed_size
u64::from(self.header_size) + self.compressed_size
}

/// Update our CRC value by recalculating it from our header and compressed data.
2 changes: 1 addition & 1 deletion gix-pack/src/data/input/lookup_ref_delta_objects.rs
@@ -67,7 +67,7 @@ where
let previous_header_size = entry.header_size;
entry.header_size = entry.header.size(entry.decompressed_size) as u16;

let change = entry.header_size as i64 - previous_header_size as i64;
let change = i64::from(entry.header_size) - i64::from(previous_header_size);
entry.crc32 = Some(entry.compute_crc32());
self.track_change(entry.pack_offset, pack_offset, change, None);
}
6 changes: 3 additions & 3 deletions gix-pack/src/index/access.rs
@@ -37,7 +37,7 @@ impl index::File {
let (ofs, oid) = c.split_at(N32_SIZE);
Entry {
oid: gix_hash::ObjectId::from_bytes_or_panic(oid),
pack_offset: crate::read_u32(ofs) as u64,
pack_offset: u64::from(crate::read_u32(ofs)),
crc32: None,
}
}),
@@ -97,7 +97,7 @@ impl index::File {
}
index::Version::V1 => {
let start = V1_HEADER_SIZE + index * (N32_SIZE + self.hash_len);
crate::read_u32(&self.data[start..][..N32_SIZE]) as u64
u64::from(crate::read_u32(&self.data[start..][..N32_SIZE]))
}
}
}
@@ -202,7 +202,7 @@ impl index::File {
let from = pack64_offset + (ofs32 ^ N32_HIGH_BIT) as usize * N64_SIZE;
crate::read_u64(&self.data[from..][..N64_SIZE])
} else {
ofs32 as u64
u64::from(ofs32)
}
}
}
2 changes: 1 addition & 1 deletion gix-pack/src/index/traverse/reduce.rs
@@ -122,7 +122,7 @@ where
self.entries_seen,
elapsed_s,
objects_per_second,
gix_features::progress::bytesize::ByteSize(self.stats.average.object_size * objects_per_second as u64)
gix_features::progress::bytesize::ByteSize(self.stats.average.object_size * u64::from(objects_per_second))
));
Ok(self.stats)
}
6 changes: 4 additions & 2 deletions gix-pack/src/index/traverse/with_index.rs
@@ -206,12 +206,14 @@ fn digest_statistics(traverse::Outcome { roots, children }: traverse::Outcome<En
res.total_compressed_entries_size += item.data.compressed_size;
res.total_decompressed_entries_size += item.data.decompressed_size;
res.total_object_size += item.data.object_size;
*res.objects_per_chain_length.entry(item.data.level as u32).or_insert(0) += 1;
*res.objects_per_chain_length
.entry(u32::from(item.data.level))
.or_insert(0) += 1;

average.decompressed_size += item.data.decompressed_size;
average.compressed_size += item.data.compressed_size as usize;
average.object_size += item.data.object_size;
average.num_deltas += item.data.level as u32;
average.num_deltas += u32::from(item.data.level);
use gix_object::Kind::*;
match item.data.object_kind {
Blob => res.num_blobs += 1,
2 changes: 1 addition & 1 deletion gix-pack/src/index/write/mod.rs
@@ -130,7 +130,7 @@ impl crate::index::File {

decompressed_progress.inc_by(decompressed_size as usize);

let entry_len = header_size as u64 + compressed_size;
let entry_len = u64::from(header_size) + compressed_size;
pack_entries_end = pack_offset + entry_len;

let crc32 = crc32.expect("crc32 to be computed by the iterator. Caller assures correct configuration.");
4 changes: 2 additions & 2 deletions gix-pack/src/multi_index/access.rs
@@ -121,10 +121,10 @@ impl File {
let from = offsets_64 + (ofs32 ^ HIGH_BIT) as usize * 8;
crate::read_u64(&self.data[from..][..8])
} else {
ofs32 as u64
u64::from(ofs32)
}
} else {
ofs32 as u64
u64::from(ofs32)
};
(pack_index, pack_offset)
}
2 changes: 1 addition & 1 deletion gix-pack/src/multi_index/chunk.rs
@@ -238,7 +238,7 @@ pub mod large_offsets {
if entry.pack_offset > LARGE_OFFSET_THRESHOLD {
num_large_offsets += 1;
}
if entry.pack_offset > u32::MAX as crate::data::Offset {
if entry.pack_offset > crate::data::Offset::from(u32::MAX) {
needs_large_offsets = true;
}
}
2 changes: 1 addition & 1 deletion gix-pack/src/multi_index/init.rs
@@ -93,7 +93,7 @@ impl TryFrom<&Path> for File {
(version, object_hash, num_chunks, num_indices)
};

let chunks = gix_chunk::file::Index::from_bytes(&data, Self::HEADER_LEN, num_chunks as u32)?;
let chunks = gix_chunk::file::Index::from_bytes(&data, Self::HEADER_LEN, u32::from(num_chunks))?;

let index_names = chunks.data_by_id(&data, chunk::index_names::ID)?;
let index_names = chunk::index_names::from_bytes(index_names, num_indices)?;
2 changes: 1 addition & 1 deletion gix-pack/tests/pack/index.rs
@@ -438,7 +438,7 @@ fn pack_lookup() -> Result<(), Box<dyn std::error::Error>> {
.compressed
.expect("bytes present in default configuration of streaming iter")
.len() as u64,
next_offset - entry.pack_offset - entry.header_size as u64,
next_offset - entry.pack_offset - u64::from(entry.header_size),
"we get the compressed bytes region after the head to the next entry"
);
}
2 changes: 1 addition & 1 deletion gix-status/src/index_as_worktree/function.rs
@@ -427,7 +427,7 @@ impl<'index> State<'_, 'index> {
let file_size_bytes = if cfg!(windows) && metadata.is_symlink() {
// symlinks on Windows seem to have a length of zero, so just pretend
// they have the correct length to avoid short-cutting, and enforce a full buffer check.
entry.stat.size as u64
u64::from(entry.stat.size)
} else {
metadata.len()
};
2 changes: 1 addition & 1 deletion gix-status/src/index_as_worktree/traits.rs
@@ -115,7 +115,7 @@ impl CompareBlobs for FastEq {
// make sure to account for racily smudged entries here so that they don't always keep
// showing up as modified even after their contents have changed again, to a potentially
// unmodified state. That means that we want to ignore stat.size == 0 for non_empty_blobs.
if entry.stat.size as u64 != worktree_file_size && (entry.id.is_empty_blob() || entry.stat.size != 0) {
if u64::from(entry.stat.size) != worktree_file_size && (entry.id.is_empty_blob() || entry.stat.size != 0) {
return Ok(Some(()));
}
HashEq
9 changes: 6 additions & 3 deletions gix-status/tests/status/index_as_worktree.rs
@@ -595,8 +595,11 @@ fn racy_git() {
// This case doesn't happen in the realworld (except for file corruption) but
// makes sure we are actually hitting the right codepath.
index.entries_mut()[0].stat.mtime.secs = timestamp;
set_file_mtime(worktree.join("content"), FileTime::from_unix_time(timestamp as i64, 0))
.expect("changing filetime works");
set_file_mtime(
worktree.join("content"),
FileTime::from_unix_time(i64::from(timestamp), 0),
)
.expect("changing filetime works");
let mut recorder = Recorder::default();

let count = Arc::new(AtomicUsize::new(0));
@@ -649,7 +652,7 @@ fn racy_git() {
// Now we also backdate the index timestamp to match the artificially created
// mtime above this is now a realistic realworld race-condition which should trigger racy git
// and cause proper output.
index.set_timestamp(FileTime::from_unix_time(timestamp as i64, 0));
index.set_timestamp(FileTime::from_unix_time(i64::from(timestamp), 0));
let mut recorder = Recorder::default();
let out = index_as_worktree(
&index,
4 changes: 2 additions & 2 deletions gix/src/object/tree/diff/mod.rs
@@ -152,8 +152,8 @@ impl<'a, 'repo> Platform<'a, 'repo> {
.flatten()
{
files_changed += 1;
lines_added += counts.insertions as u64;
lines_removed += counts.removals as u64;
lines_added += u64::from(counts.insertions);
lines_removed += u64::from(counts.removals);
}

resource_cache.clear_resource_cache_keep_allocation();
