Fudge factor for reductions to prefer better reductions even if gzip estimation says otherwise
kornelski committed Jan 23, 2019
1 parent 1f9ed83 commit 4d1c1f0
Showing 3 changed files with 22 additions and 18 deletions.
28 changes: 16 additions & 12 deletions src/evaluate.rs
@@ -19,7 +19,7 @@ use std::thread;
 /// Collect image versions and pick one that compresses best
 pub struct Evaluator {
     /// images are sent to the thread for evaluation
-    eval_send: Option<SyncSender<(Arc<PngImage>, bool)>>,
+    eval_send: Option<SyncSender<(Arc<PngImage>, f32, bool)>>,
     // the thread helps evaluate images asynchronously
     eval_thread: thread::JoinHandle<Option<PngData>>,
 }
@@ -43,24 +43,26 @@ impl Evaluator {
 
     /// Set baseline image. It will be used only to measure minimum compression level required
     pub fn set_baseline(&self, image: Arc<PngImage>) {
-        self.try_image_inner(image, false)
+        self.try_image_inner(image, 1.0, false)
     }
 
     /// Check if the image is smaller than others
-    pub fn try_image(&self, image: Arc<PngImage>) {
-        self.try_image_inner(image, true)
+    /// Bias is a value in 0..=1 range. Compressed size is multiplied by
+    /// this fraction when comparing to the best, so 0.95 allows 5% larger size.
+    pub fn try_image(&self, image: Arc<PngImage>, bias: f32) {
+        self.try_image_inner(image, bias, true)
     }
 
-    fn try_image_inner(&self, image: Arc<PngImage>, is_reduction: bool) {
-        self.eval_send.as_ref().expect("not finished yet").send((image, is_reduction)).expect("send")
+    fn try_image_inner(&self, image: Arc<PngImage>, bias: f32, is_reduction: bool) {
+        self.eval_send.as_ref().expect("not finished yet").send((image, bias, is_reduction)).expect("send")
     }
 
     /// Main loop of evaluation thread
-    fn evaluate_images(from_channel: Receiver<(Arc<PngImage>, bool)>) -> Option<PngData> {
+    fn evaluate_images(from_channel: Receiver<(Arc<PngImage>, f32, bool)>) -> Option<PngData> {
         let best_candidate_size = AtomicMin::new(None);
         let best_result: Mutex<Option<(PngData, _, _)>> = Mutex::new(None);
         // ends when sender is dropped
-        for (nth, (image, is_reduction)) in from_channel.iter().enumerate() {
+        for (nth, (image, bias, is_reduction)) in from_channel.iter().enumerate() {
             #[cfg(feature = "parallel")]
             let filters_iter = STD_FILTERS.par_iter().with_max_len(1);
             #[cfg(not(feature = "parallel"))]
@@ -74,16 +76,18 @@ impl Evaluator {
                     STD_WINDOW,
                     &best_candidate_size,
                 ) {
+                    println!("RES {:?}/{:?}/{} = {}B vs {:?}", image.ihdr.bit_depth, image.ihdr.color_type, f, idat_data.len(), best_candidate_size.get());
                     let mut res = best_result.lock().unwrap();
-                    if best_candidate_size.get().map_or(true, |best_len| {
+                    if best_candidate_size.get().map_or(true, |old_best_len| {
+                        let new_len = (idat_data.len() as f64 * bias as f64) as usize;
                         // a tie-breaker is required to make evaluation deterministic
                         if let Some(res) = res.as_ref() {
                             // choose smallest compressed, or if compresses the same, smallest uncompressed, or cheaper filter
                             let old_img = &res.0.raw;
-                            let new = (idat_data.len(), image.data.len(), image.ihdr.bit_depth, f, nth);
-                            let old = (best_len, old_img.data.len(), old_img.ihdr.bit_depth, res.1, res.2);
+                            let new = (new_len, image.data.len(), image.ihdr.bit_depth, f, nth);
+                            let old = (old_best_len, old_img.data.len(), old_img.ihdr.bit_depth, res.1, res.2);
                             new < old
-                        } else if best_len > idat_data.len() {
+                        } else if new_len < old_best_len {
                             true
                         } else {
                             false
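For context, here is a minimal standalone sketch (not part of this commit) of the biased comparison that evaluate.rs now performs: a candidate's compressed size is multiplied by its bias before being compared with the current best, so a bias below 1.0 lets a preferred reduction win even when the gzip estimate says it is slightly larger. The function name and byte sizes below are illustrative, not taken from the repository.

// Illustrative only: mirrors the `new_len < old_best_len` check added above.
fn accept_candidate(idat_len: usize, best_len: Option<usize>, bias: f32) -> bool {
    match best_len {
        // no best candidate yet, so anything is accepted
        None => true,
        // discount the candidate's size by the bias, then require it to beat the best
        Some(best) => ((idat_len as f64 * bias as f64) as usize) < best,
    }
}

fn main() {
    // bias 0.95: a candidate about 5% larger than the best still wins
    assert!(accept_candidate(1040, Some(1000), 0.95)); // 1040 * 0.95 ≈ 988 < 1000
    assert!(!accept_candidate(1060, Some(1000), 0.95)); // 1060 * 0.95 ≈ 1007, rejected
    // bias 1.0: plain "strictly smaller" comparison
    assert!(!accept_candidate(1000, Some(1000), 1.0));
}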
10 changes: 5 additions & 5 deletions src/lib.rs
@@ -747,7 +747,7 @@ fn perform_reductions(mut png: Arc<PngImage>, opts: &Options, deadline: &Deadlin
     if let Some(interlacing) = opts.interlace {
         if let Some(reduced) = png.change_interlacing(interlacing) {
             png = Arc::new(reduced);
-            eval.try_image(png.clone());
+            eval.try_image(png.clone(), 0.);
         }
         if deadline.passed() {
             return;
@@ -757,7 +757,7 @@ fn perform_reductions(mut png: Arc<PngImage>, opts: &Options, deadline: &Deadlin
     if opts.palette_reduction {
         if let Some(reduced) = reduced_palette(&png) {
             png = Arc::new(reduced);
-            eval.try_image(png.clone());
+            eval.try_image(png.clone(), 0.85);
             if opts.verbosity == Some(1) {
                 report_reduction(&png);
             }
@@ -772,11 +772,11 @@ fn perform_reductions(mut png: Arc<PngImage>, opts: &Options, deadline: &Deadlin
             let previous = png.clone();
             let bits = reduced.ihdr.bit_depth;
             png = Arc::new(reduced);
-            eval.try_image(png.clone());
+            eval.try_image(png.clone(), 1.0);
             if (bits == BitDepth::One || bits == BitDepth::Two) && previous.ihdr.bit_depth != BitDepth::Four {
                 // Also try 16-color mode for all lower bits images, since that may compress better
                 if let Some(reduced) = reduce_bit_depth(&previous, 4) {
-                    eval.try_image(Arc::new(reduced));
+                    eval.try_image(Arc::new(reduced), 0.95);
                 }
             }
             if opts.verbosity == Some(1) {
@@ -791,7 +791,7 @@ fn perform_reductions(mut png: Arc<PngImage>, opts: &Options, deadline: &Deadlin
     if opts.color_type_reduction {
         if let Some(reduced) = reduce_color_type(&png) {
             png = Arc::new(reduced);
-            eval.try_image(png.clone());
+            eval.try_image(png.clone(), 0.9);
             if opts.verbosity == Some(1) {
                 report_reduction(&png);
             }
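Read together with the comparison above, the bias values passed in lib.rs determine how much larger each reduced image may compress to and still be preferred: 0.9 for color type reduction accepts results up to roughly 11% larger than the current best, 0.85 for palette reduction accepts roughly 17% larger, 1.0 keeps a strict smaller-than rule for bit depth reduction, and 0. makes the explicitly requested interlacing change always pass the size check. A rough sanity check of that arithmetic, with made-up byte sizes:

fn main() {
    let best = 1000usize; // hypothetical current best IDAT size in bytes
    // bias 0.9 (color type reduction): 1100 * 0.9 = 990 < 1000, so ~10% larger is still accepted
    assert!(((1100f64 * 0.9) as usize) < best);
    // bias 0. (requested interlacing): the discounted size is always 0, so it always wins
    assert!(((5000f64 * 0.0) as usize) < best);
    // bias 1.0 (bit depth reduction): only a strictly smaller result wins
    assert!(!(((1000f64 * 1.0) as usize) < best));
}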
2 changes: 1 addition & 1 deletion src/reduction/alpha.rs
@@ -19,7 +19,7 @@ pub fn try_alpha_reductions(png: Arc<PngImage>, alphas: &HashSet<AlphaOptim>, ev
     let alphas_iter = alphas.iter();
     alphas_iter
         .filter_map(|&alpha| filtered_alpha_channel(&png, *alpha))
-        .for_each(|image| eval.try_image(Arc::new(image)));
+        .for_each(|image| eval.try_image(Arc::new(image), 0.99));
 }
 
 pub fn filtered_alpha_channel(png: &PngImage, optim: AlphaOptim) -> Option<PngImage> {
