Skip to content

Commit

Permalink
Async reduction evaluator
Browse files Browse the repository at this point in the history
  • Loading branch information
kornelski committed Jan 14, 2019
1 parent 90f8e9d commit 930602f
Show file tree
Hide file tree
Showing 7 changed files with 182 additions and 109 deletions.
99 changes: 99 additions & 0 deletions src/evaluate.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
//! Check if a reduction makes file smaller, and keep best reductions.
//! Works asynchronously when possible
use atomicmin::AtomicMin;
use deflate;
use png::PngData;
use png::PngImage;
use png::STD_COMPRESSION;
use png::STD_FILTERS;
use png::STD_STRATEGY;
use png::STD_WINDOW;
#[cfg(feature = "parallel")]
use rayon::prelude::*;
use std::sync::mpsc::*;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;

/// Collect image versions and pick one that compresses best
pub struct Evaluator {
    /// Candidate images are sent over this bounded channel to the worker thread.
    /// The tuple is `(image, is_reduction, more_filters)` — see `try_image_inner`.
    /// Wrapped in `Option` so the sender can be dropped (disconnecting the channel)
    /// to tell the worker that no more candidates are coming.
    eval_send: Option<SyncSender<(Arc<PngImage>, bool, bool)>>,
    /// Worker thread that evaluates images asynchronously; joining it yields the
    /// best reduction found, or `None` if nothing beat the baseline.
    eval_thread: thread::JoinHandle<Option<PngData>>,
}

impl Evaluator {
pub fn new() -> Self {
// queue size ensures we're not using too much memory for pending reductions
let (tx, rx) = sync_channel(4);
Self {
eval_send: Some(tx),
eval_thread: thread::spawn(move || Self::evaluate_images(rx)),
}
}

/// Wait for all evaluations to finish and return smallest reduction
/// Or `None` if all reductions were worse than baseline.
pub fn get_result(mut self) -> Option<PngData> {
let _ = self.eval_send.take(); // disconnect the sender, breaking the loop in the thread
self.eval_thread.join().expect("eval thread")
}

/// Set baseline image. It will be used only to measure minimum compression level required
pub fn set_baseline(&self, image: Arc<PngImage>) {
self.try_image_inner(image, false, true)
}

/// Check if the image is smaller than others
/// If more_filters is false, only filter 0 is going to be used.
/// If more_filters is true, all STD_FILTERS (0 & 5) are going to be used.
/// Filter 5 is needed for alpha reduction, but rarely useful for palette.
pub fn try_image(&self, image: Arc<PngImage>, more_filters: bool) {
self.try_image_inner(image, true, more_filters)
}

fn try_image_inner(&self, image: Arc<PngImage>, is_reduction: bool, more_filters: bool) {
self.eval_send.as_ref().expect("not finished yet").send((image, is_reduction, more_filters)).expect("send")
}

/// Main loop of evaluation thread
fn evaluate_images(from_channel: Receiver<(Arc<PngImage>, bool, bool)>) -> Option<PngData> {
let best_candidate_size = AtomicMin::new(None);
let best_result = Mutex::new(None);
// ends when sender is dropped
for (image, is_reduction, more_filters) in from_channel.iter() {
let filters = if more_filters {&STD_FILTERS[..]} else {&STD_FILTERS[..1]};
#[cfg(feature = "parallel")]
let filters_iter = filters.par_iter().with_max_len(1);
#[cfg(not(feature = "parallel"))]
let filters_iter = filters.iter();

filters_iter.for_each(|f| {
if let Ok(idat_data) = deflate::deflate(
&image.filter_image(*f),
STD_COMPRESSION,
STD_STRATEGY,
STD_WINDOW,
&best_candidate_size,
) {
let mut res = best_result.lock().unwrap();
if best_candidate_size.get().map_or(true, |len| len >= idat_data.len()) {
best_candidate_size.set_min(idat_data.len());
*res = if is_reduction {
Some(PngData {
idat_data,
raw: Arc::clone(&image),
})
} else {
None
};
}
}
});
}
best_result.into_inner().expect("filters should be done")
}
}

107 changes: 55 additions & 52 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,19 +16,20 @@ use reduction::*;
use atomicmin::AtomicMin;
use crc::crc32;
use deflate::inflate;
use evaluate::Evaluator;
use image::{DynamicImage, GenericImageView, ImageFormat, Pixel};
use png::PngImage;
use png::PngData;
#[cfg(feature = "parallel")]
use rayon::prelude::*;
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::borrow::Cow;
use std::fs::{copy, File};
use std::io::{stdin, stdout, BufWriter, Read, Write};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{Duration, Instant};
use std::sync::Arc;

pub use colors::AlphaOptim;
pub use deflate::Deflaters;
Expand All @@ -39,6 +40,7 @@ mod atomicmin;
mod colors;
mod deflate;
mod error;
mod evaluate;
mod filters;
mod headers;
mod interlace;
Expand Down Expand Up @@ -522,9 +524,16 @@ fn optimize_png(png: &mut PngData, original_data: &[u8], opts: &Options) -> PngR
}
}

let reduction_occurred = if let Some(reduced) = perform_reductions(&png.raw, opts, &deadline) {
png.raw = reduced;
png.idat_data.clear(); // this field is out of date and needs to be replaced
// This will collect all versions of images and pick one that compresses best
let eval = Evaluator::new();
// Usually we want transformations that are smaller than the unmodified original,
// but if we're interlacing, we have to accept a possible file size increase.
if opts.interlace.is_none() {
eval.set_baseline(png.raw.clone());
}
perform_reductions(png.raw.clone(), opts, &deadline, &eval);
let reduction_occurred = if let Some(result) = eval.get_result() {
*png = result;
true
} else {
false
Expand Down Expand Up @@ -731,66 +740,59 @@ fn optimize_png(png: &mut PngData, original_data: &[u8], opts: &Options) -> PngR
Err(PngError::new("The resulting image is corrupted"))
}

fn if_owned(cow: Cow<PngImage>) -> Option<PngImage> {
match cow {
Cow::Owned(png) => Some(png),
_ => None,
}
}
fn perform_reductions(mut png: Arc<PngImage>, opts: &Options, deadline: &Deadline, eval: &Evaluator) {

fn perform_reductions(png: &PngImage, opts: &Options, deadline: &Deadline) -> Option<PngImage> {
let mut reduced = Cow::Borrowed(png);
// must be done first to evaluate rest with the correct interlacing
if let Some(interlacing) = opts.interlace {
if let Some(reduced) = png.change_interlacing(interlacing) {
png = Arc::new(reduced);
eval.try_image(png.clone(), false);
}
if deadline.passed() {
return;
}
}

if opts.palette_reduction {
if let Some(r) = reduced_palette(&reduced) {
reduced = Cow::Owned(r);
if let Some(reduced) = reduced_palette(&png) {
png = Arc::new(reduced);
eval.try_image(png.clone(), false);
if opts.verbosity == Some(1) {
report_reduction(&reduced);
report_reduction(&png);
}
}
}

if deadline.passed() {
return if_owned(reduced);
if deadline.passed() {
return;
}
}

if opts.bit_depth_reduction {
if let Some(r) = reduce_bit_depth(&reduced) {
reduced = Cow::Owned(r);
if let Some(reduced) = reduce_bit_depth(&png) {
png = Arc::new(reduced);
eval.try_image(png.clone(), false);
if opts.verbosity == Some(1) {
report_reduction(&reduced);
report_reduction(&png);
}
}
}

if deadline.passed() {
return if_owned(reduced);
if deadline.passed() {
return;
}
}

if opts.color_type_reduction {
if let Some(r) = reduce_color_type(&reduced) {
reduced = Cow::Owned(r);
if let Some(reduced) = reduce_color_type(&png) {
png = Arc::new(reduced);
eval.try_image(png.clone(), false);
if opts.verbosity == Some(1) {
report_reduction(&reduced);
report_reduction(&png);
}
}
}

if let Some(interlacing) = opts.interlace {
if let Some(r) = reduced.change_interlacing(interlacing) {
reduced = Cow::Owned(r);
if deadline.passed() {
return;
}
}

if deadline.passed() {
return if_owned(reduced);
}

if let Some(r) = try_alpha_reduction(&reduced, &opts.alphas) {
reduced = Cow::Owned(r);
}

if_owned(reduced)
try_alpha_reductions(png, &opts.alphas, eval);
}

/// Keep track of processing timeout
Expand Down Expand Up @@ -846,29 +848,30 @@ fn report_reduction(png: &PngImage) {

/// Strip headers from the `PngData` object, as requested by the passed `Options`
fn perform_strip(png: &mut PngData, opts: &Options) {
let raw = Arc::make_mut(&mut png.raw);
match opts.strip {
// Strip headers
Headers::None => (),
Headers::Keep(ref hdrs) => {
png.raw.aux_headers.retain(|chunk, _| {
raw.aux_headers.retain(|chunk, _| {
std::str::from_utf8(chunk)
.ok()
.map_or(false, |name| hdrs.contains(name))
});
}
Headers::Strip(ref hdrs) => for hdr in hdrs {
png.raw.aux_headers.remove(hdr.as_bytes());
raw.aux_headers.remove(hdr.as_bytes());
},
Headers::Safe => {
const PRESERVED_HEADERS: [[u8; 4]; 9] = [
*b"cHRM", *b"gAMA", *b"iCCP", *b"sBIT", *b"sRGB", *b"bKGD", *b"hIST", *b"pHYs",
*b"sPLT",
];
png.raw.aux_headers
raw.aux_headers
.retain(|hdr, _| PRESERVED_HEADERS.contains(hdr));
}
Headers::All => {
png.raw.aux_headers = HashMap::new();
raw.aux_headers = HashMap::new();
}
}

Expand All @@ -881,18 +884,18 @@ fn perform_strip(png: &mut PngData, opts: &Options) {
};

if may_replace_iccp {
if png.raw.aux_headers.get(b"sRGB").is_some() {
if raw.aux_headers.get(b"sRGB").is_some() {
// Files aren't supposed to have both chunks, so we chose to honor sRGB
png.raw.aux_headers.remove(b"iCCP");
} else if let Some(intent) = png.raw
raw.aux_headers.remove(b"iCCP");
} else if let Some(intent) = raw
.aux_headers
.get(b"iCCP")
.and_then(|iccp| srgb_rendering_intent(iccp))
{
// sRGB-like profile can be safely replaced with
// an sRGB chunk with the same rendering intent
png.raw.aux_headers.remove(b"iCCP");
png.raw.aux_headers.insert(*b"sRGB", vec![intent]);
raw.aux_headers.remove(b"iCCP");
raw.aux_headers.insert(*b"sRGB", vec![intent]);
}
}
}
Expand Down
26 changes: 14 additions & 12 deletions src/png/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,13 @@ use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::iter::Iterator;
use std::path::Path;
use std::sync::Arc;

pub(crate) const STD_COMPRESSION: u8 = 8;
pub(crate) const STD_COMPRESSION: u8 = 6;
/// Must use normal compression, as faster ones (Huffman/RLE-only) are not representative
pub(crate) const STD_STRATEGY: u8 = 0;
pub(crate) const STD_WINDOW: u8 = 15;
/// OK to use a bit smaller window for evaluation
pub(crate) const STD_WINDOW: u8 = 13;
pub(crate) const STD_FILTERS: [u8; 2] = [0, 5];

pub(crate) mod scan_lines;
Expand All @@ -42,7 +44,8 @@ pub struct PngImage {
/// Contains all data relevant to a PNG image
#[derive(Debug, Clone)]
pub struct PngData {
pub raw: PngImage,
/// Uncompressed image data
pub raw: Arc<PngImage>,
/// The filtered and compressed data of the IDAT chunk
pub idat_data: Vec<u8>,
}
Expand Down Expand Up @@ -122,19 +125,19 @@ impl PngData {
aux_headers.remove(b"tRNS"),
)?;

let mut png_data = Self {
idat_data: idat_headers,
raw: PngImage {
ihdr: ihdr_header,
data: raw_data,
let mut raw = PngImage {
ihdr: ihdr_header,
data: raw_data,
palette,
transparency_pixel,
aux_headers,
}
};
png_data.raw.data = png_data.raw.unfilter_image();
raw.data = raw.unfilter_image();
// Return the PngData
Ok(png_data)
Ok(Self {
idat_data: idat_headers,
raw: Arc::new(raw),
})
}

/// Handle transparency header
Expand Down Expand Up @@ -162,7 +165,6 @@ impl PngData {
}
}


/// Format the `PngData` struct into a valid PNG bytestream
pub fn output(&self) -> Vec<u8> {
// PNG header
Expand Down
Loading

0 comments on commit 930602f

Please sign in to comment.