300 fix build warnings (#301)
* fix build warnings

* Update mod.rs
archeoss authored Dec 14, 2023
1 parent 41670bf commit 05a2a30
Showing 10 changed files with 22 additions and 22 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -10,6 +10,7 @@ Pearl changelog


#### Fixed
+- Fixed build warnings (#300)
- Fix the build by adding yanked aHash implementation (#302)

#### Updated
1 change: 0 additions & 1 deletion src/blob/index/mod.rs
@@ -1,5 +1,4 @@
use crate::{
prelude::*,
storage::{BlobRecordTimestamp, ReadResult},
};

4 changes: 2 additions & 2 deletions src/error.rs
@@ -1,7 +1,7 @@
use crate::prelude::*;

/// The error type for `Storage` operations.
-#[derive(Debug, Error)]
+#[derive(Debug, ThisError)]
pub struct Error {
kind: Kind,
}
@@ -221,4 +221,4 @@ impl IntoBincodeIfUnexpectedEofTrait for anyhow::Error {
}
return self;
}
-}
\ No newline at end of file
+}
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -102,7 +102,7 @@ mod prelude {
Arc,
},
};
-pub(crate) use thiserror::Error;
+pub(crate) use thiserror::Error as ThisError;
pub(crate) use tokio::{
fs::{read_dir, DirEntry},
sync::{RwLock, Semaphore},
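
The prelude change above aliases the `thiserror::Error` derive macro to `ThisError`, so it no longer shares a name with the crate's own `Error` struct when the prelude is glob-imported. A minimal standalone sketch of the same pattern, assuming a prelude module and an illustrative error message rather than Pearl's actual layout:

```rust
// Minimal sketch of the aliasing pattern from this commit; module layout
// and message text are illustrative, not Pearl's actual code.
mod prelude {
    // Re-export the derive macro under a different name so that a local
    // `Error` type can coexist with it without ambiguity.
    pub(crate) use thiserror::Error as ThisError;
}

use prelude::*;

/// Crate-local error type; its name no longer clashes with the derive macro.
#[derive(Debug, ThisError)]
#[error("storage error: {kind}")]
pub struct Error {
    kind: String,
}

fn main() {
    let err = Error { kind: "example".into() };
    println!("{err}"); // storage error: example
}
```
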
16 changes: 8 additions & 8 deletions src/tools/blob_reader.rs
@@ -65,7 +65,7 @@ impl BlobReader {
self.position += bincode::serialized_size(&header)?;
header.validate().map_err(|err| {
self.latest_wrong_header = Some(header.clone());
-Error::record_header_validation_error(err.to_string())
+ToolsError::record_header_validation_error(err.to_string())
})?;
self.latest_wrong_header = None;

@@ -82,7 +82,7 @@ impl BlobReader {
let record = Record { header, meta, data };
let record = record
.validate()
-.map_err(|err| Error::record_validation_error(err.to_string()))?;
+.map_err(|err| ToolsError::record_validation_error(err.to_string()))?;
Ok(record)
}

@@ -91,14 +91,14 @@ impl BlobReader {
let header = self
.latest_wrong_header
.as_ref()
-.ok_or_else(|| Error::skip_record_data_error("wrong header not found"))?;
+.ok_or_else(|| ToolsError::skip_record_data_error("wrong header not found"))?;
let position = self
.position
.checked_add(header.data_size())
.and_then(|x| x.checked_add(header.meta_size()))
-.ok_or_else(|| Error::skip_record_data_error("position overflow"))?;
+.ok_or_else(|| ToolsError::skip_record_data_error("position overflow"))?;
if position >= self.len {
-return Err(Error::skip_record_data_error("position is bigger than file size").into());
+return Err(ToolsError::skip_record_data_error("position is bigger than file size").into());
}
self.file.seek(SeekFrom::Start(position))?;
debug!("Skipped {} bytes", position - self.position);
@@ -113,9 +113,9 @@ impl BlobReader {
match self.read_single_record() {
Ok(record) => Ok(record),
Err(error) => {
-match error.downcast_ref::<Error>() {
-Some(Error::RecordValidation(_)) => {}
-Some(Error::RecordHeaderValidation(_)) => self.skip_wrong_record_data()?,
+match error.downcast_ref::<ToolsError>() {
+Some(ToolsError::RecordValidation(_)) => {}
+Some(ToolsError::RecordHeaderValidation(_)) => self.skip_wrong_record_data()?,
_ => return Err(error),
}
warn!("Record read error, trying read next record: {}", error);
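
The `read_record` hunk above recovers from validation failures by downcasting the `anyhow::Error` back to the concrete tools error. A self-contained sketch of that downcast-and-recover flow, using a hypothetical single-variant stand-in instead of the real `ToolsError`:

```rust
use thiserror::Error;

/// Stand-in for ToolsError's header-validation case (illustrative only).
#[derive(Debug, Error)]
#[error("record header validation error: {0}")]
struct RecordHeaderValidation(String);

// Mirrors the shape of read_record: on failure, inspect the anyhow::Error
// and recover only when it is the known, skippable validation error.
fn read_record(raw: anyhow::Result<Vec<u8>>) -> anyhow::Result<Vec<u8>> {
    match raw {
        Ok(record) => Ok(record),
        Err(error) => match error.downcast_ref::<RecordHeaderValidation>() {
            // Recoverable: the real reader skips the bad record's data here
            // before retrying the read.
            Some(_) => Ok(Vec::new()),
            None => Err(error),
        },
    }
}

fn main() {
    let bad: anyhow::Result<Vec<u8>> =
        Err(RecordHeaderValidation("bad magic".into()).into());
    assert!(read_record(bad).is_ok());
}
```
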
4 changes: 2 additions & 2 deletions src/tools/blob_writer.rs
@@ -67,7 +67,7 @@ impl BlobWriter {
let mut reader = BlobReader::from_file(file)?;
let header = reader.read_header()?;
if header != *written_header {
-return Err(Error::blob_header_validation_error(
+return Err(ToolsError::blob_header_validation_error(
"validation of written blob header failed",
)
.into());
@@ -97,7 +97,7 @@ impl BlobWriter {
for record in cache.iter() {
let written_record = reader.read_single_record()?;
if record != &written_record {
-return Err(Error::record_validation_error(
+return Err(ToolsError::record_validation_error(
"Written and cached records is not equal",
)
.into());
4 changes: 2 additions & 2 deletions src/tools/error.rs
@@ -3,7 +3,7 @@ use thiserror::Error

/// Error type
#[derive(Debug, Error)]
-pub enum Error {
+pub enum ToolsError {
/// Failed to validate record header
#[error("record header validation error: {0}")]
RecordHeaderValidation(String),
@@ -30,7 +30,7 @@ pub enum Error {
UnsupportedKeySize(u16),
}

-impl Error {
+impl ToolsError {
pub(crate) fn record_header_validation_error(message: impl Into<String>) -> Self {
Self::RecordHeaderValidation(message.into())
}
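
The tools error type keeps its thiserror-based shape and only changes its name to `ToolsError`, so it can no longer be confused with the crate-level `Error`. A rough sketch of the enum-plus-constructor pattern visible in this diff, trimmed to two variants and with a hypothetical call site:

```rust
use thiserror::Error;

/// Trimmed-down sketch of the renamed tools error type; the second variant's
/// message text is an assumption, not copied from Pearl.
#[derive(Debug, Error)]
pub enum ToolsError {
    /// Failed to validate record header
    #[error("record header validation error: {0}")]
    RecordHeaderValidation(String),
    /// Key size the tools cannot handle
    #[error("unsupported key size: {0}")]
    UnsupportedKeySize(u16),
}

impl ToolsError {
    pub(crate) fn record_header_validation_error(message: impl Into<String>) -> Self {
        Self::RecordHeaderValidation(message.into())
    }
}

// Hypothetical call site, matching how the diff converts ToolsError into
// an anyhow::Error with `.into()` at each return point.
fn check_magic(valid: bool) -> anyhow::Result<()> {
    if !valid {
        return Err(ToolsError::record_header_validation_error("bad magic").into());
    }
    Ok(())
}

fn main() {
    assert!(check_magic(true).is_ok());
    assert!(check_magic(false).is_err());
    println!("{}", ToolsError::UnsupportedKeySize(3));
}
```
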
4 changes: 2 additions & 2 deletions src/tools/migration.rs
@@ -7,7 +7,7 @@ impl Record {
match (source, target) {
(source, target) if source >= target => Ok(self),
(0, 1) => self.mirgate_v0_to_v1(),
-(source, target) => Err(Error::unsupported_migration(source, target).into()),
+(source, target) => Err(ToolsError::unsupported_migration(source, target).into()),
}
}

@@ -40,7 +40,7 @@ impl BlobHeader {
match (source_version, target_version) {
(source, target) if source >= target => Ok(self),
(0, 1) => self.mirgate_v0_to_v1(),
-(source, target) => Err(Error::unsupported_migration(source, target).into()),
+(source, target) => Err(ToolsError::unsupported_migration(source, target).into()),
}
}

4 changes: 2 additions & 2 deletions src/tools/utils.rs
@@ -144,7 +144,7 @@ where
let res = index.get_records_headers(index.blob_size()).await?;
AnyResult::<_>::Ok(res.0)
}
-_ => return Err(Error::index_header_validation_error("unsupported header version").into()),
+_ => return Err(ToolsError::index_header_validation_error("unsupported header version").into()),
}?;
let headers = headers
.into_iter()
@@ -163,7 +163,7 @@ pub async fn read_index(path: &Path) -> AnyResult<BTreeMap<Vec<u8>, Vec<record::
32 => index_from_file::<ArrayKey<32>>(&header, path).await,
64 => index_from_file::<ArrayKey<64>>(&header, path).await,
128 => index_from_file::<ArrayKey<128>>(&header, path).await,
-size => return Err(Error::unsupported_key_size(size).into()),
+size => return Err(ToolsError::unsupported_key_size(size).into()),
}?;
for (_, headers) in headers.iter() {
for header in headers {
4 changes: 2 additions & 2 deletions src/tools/validation.rs
@@ -28,7 +28,7 @@ where
header.blob_size()
};
let headers = if header.version() < HEADER_VERSION {
-return Err(Error::index_header_validation_error(format!(
+return Err(ToolsError::index_header_validation_error(format!(
"Index version is outdated. Passed version: {}, latest version: {}",
header.version(),
HEADER_VERSION
@@ -43,7 +43,7 @@
AnyResult::<_>::Ok(res.0)
})??
} else {
-return Err(Error::index_header_validation_error("unknown header version").into());
+return Err(ToolsError::index_header_validation_error("unknown header version").into());
};
for (_, headers) in headers {
for header in headers {
