Skip to content

Commit

Permalink
Move clippy config to Cargo.toml (#93)
Browse files Browse the repository at this point in the history
  • Loading branch information
benbrandt authored Feb 3, 2024
1 parent 1b529aa commit 1b38bc8
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 32 deletions.
14 changes: 14 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,20 @@ harness = false
tokenizers = ["dep:tokenizers"]
tiktoken-rs = ["dep:tiktoken-rs"]

[lints.rust]
future_incompatible = "warn"
missing_debug_implementations = "warn"
missing_docs = "warn"
nonstandard_style = "warn"
rust_2018_compatibility = "warn"
rust_2018_idioms = "warn"
rust_2021_compatibility = "warn"
unused = "warn"

[lints.clippy]
cargo = "warn"
pedantic = "warn"

# Tokenizers and indirect deps can cause slow runtime
[profile.dev.package."*"]
opt-level = 1
Expand Down
2 changes: 2 additions & 0 deletions benches/chunk_size.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
#![allow(missing_docs)]

use std::fs;

use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
Expand Down
12 changes: 0 additions & 12 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,4 @@
#![doc = include_str!("../README.md")]
#![warn(
clippy::cargo,
clippy::pedantic,
future_incompatible,
missing_debug_implementations,
missing_docs,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused
)]
#![cfg_attr(docsrs, feature(doc_auto_cfg, doc_cfg))]

use core::{
Expand Down
12 changes: 0 additions & 12 deletions tests/text_splitter.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,3 @@
#![warn(
clippy::pedantic,
future_incompatible,
missing_debug_implementations,
missing_docs,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused
)]

use text_splitter::TextSplitter;

#[test]
Expand Down
16 changes: 8 additions & 8 deletions tests/text_splitter_snapshots.rs
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ fn characters_default() {
let chunks = splitter.chunks(&text, chunk_size).collect::<Vec<_>>();

assert_eq!(chunks.join(""), text);
for chunk in chunks.iter() {
for chunk in &chunks {
assert!(Characters.chunk_size(chunk, &chunk_size).fits().is_le());
}
insta::assert_yaml_snapshot!(chunks);
Expand All @@ -78,7 +78,7 @@ fn characters_trim() {
let splitter = TextSplitter::default().with_trim_chunks(true);
let chunks = splitter.chunks(&text, chunk_size).collect::<Vec<_>>();

for chunk in chunks.iter() {
for chunk in &chunks {
assert!(Characters.chunk_size(chunk, &chunk_size).fits().is_le());
}
insta::assert_yaml_snapshot!(chunks);
Expand All @@ -96,7 +96,7 @@ fn characters_range() {
let chunks = splitter.chunks(&text, range.clone()).collect::<Vec<_>>();

assert_eq!(chunks.join(""), text);
for chunk in chunks.iter() {
for chunk in &chunks {
assert!(Characters.chunk_size(chunk, &range).fits().is_le());
}
insta::assert_yaml_snapshot!(chunks);
Expand All @@ -113,7 +113,7 @@ fn characters_range_trim() {
let splitter = TextSplitter::default().with_trim_chunks(true);
let chunks = splitter.chunks(&text, range.clone()).collect::<Vec<_>>();

for chunk in chunks.iter() {
for chunk in &chunks {
assert!(Characters.chunk_size(chunk, &range).fits().is_le());
}
insta::assert_yaml_snapshot!(chunks);
Expand All @@ -136,7 +136,7 @@ fn huggingface_default() {
let chunks = splitter.chunks(&text, chunk_size).collect::<Vec<_>>();

assert_eq!(chunks.join(""), text);
for chunk in chunks.iter() {
for chunk in &chunks {
assert!(HUGGINGFACE_TOKENIZER
.chunk_size(chunk, &chunk_size)
.fits()
Expand All @@ -157,7 +157,7 @@ fn huggingface_trim() {
let splitter = TextSplitter::new(&*HUGGINGFACE_TOKENIZER).with_trim_chunks(true);
let chunks = splitter.chunks(&text, chunk_size).collect::<Vec<_>>();

for chunk in chunks.iter() {
for chunk in &chunks {
assert!(HUGGINGFACE_TOKENIZER
.chunk_size(chunk, &chunk_size)
.fits()
Expand All @@ -182,7 +182,7 @@ fn tiktoken_default() {
let chunks = splitter.chunks(&text, chunk_size).collect::<Vec<_>>();

assert_eq!(chunks.join(""), text);
for chunk in chunks.iter() {
for chunk in &chunks {
assert!(TIKTOKEN_TOKENIZER
.chunk_size(chunk, &chunk_size)
.fits()
Expand All @@ -203,7 +203,7 @@ fn tiktoken_trim() {
let splitter = TextSplitter::new(&*TIKTOKEN_TOKENIZER).with_trim_chunks(true);
let chunks = splitter.chunks(&text, chunk_size).collect::<Vec<_>>();

for chunk in chunks.iter() {
for chunk in &chunks {
assert!(TIKTOKEN_TOKENIZER
.chunk_size(chunk, &chunk_size)
.fits()
Expand Down

0 comments on commit 1b38bc8

Please sign in to comment.