fast resize, now higher quality; keep aspect ratio

This commit is contained in:
Vinzenz Schroeter 2025-03-02 01:26:09 +01:00
parent 0521e103ec
commit a1fa13b6e5
6 changed files with 160 additions and 60 deletions

Cargo.lock generated
View file

@@ -557,6 +557,15 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b"
[[package]]
name = "document-features"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d"
dependencies = [
"litrs",
]
[[package]]
name = "either"
version = "1.14.0"
@@ -607,6 +616,20 @@ dependencies = [
"zune-inflate",
]
[[package]]
name = "fast_image_resize"
version = "5.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b55264ccc579fc127eebf6c6c1841d0c160d79a44c8f6f97047b7bc4a9c0d1a5"
dependencies = [
"bytemuck",
"cfg-if",
"document-features",
"image",
"num-traits",
"thiserror 1.0.69",
]
[[package]]
name = "fdeflate"
version = "0.3.7"
@@ -1025,6 +1048,12 @@ dependencies = [
"system-deps",
]
[[package]]
name = "litrs"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5"
[[package]]
name = "lock_api"
version = "0.4.12"
@@ -1690,6 +1719,7 @@ version = "0.3.0"
dependencies = [
"clap",
"env_logger",
"fast_image_resize",
"image",
"log",
"scap",

View file

@@ -19,3 +19,4 @@ env_logger = "0.11"
log = "0.4"
scap = "0.0.8"
image = "0.25.5"
fast_image_resize = { version = "5.1.2", features = ["image"] }
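The "image" feature is what lets the new resizer work on image::DynamicImage buffers directly. A minimal sketch of that usage, mirroring the pipeline code further down (the helper name and sizes are illustrative, not part of the commit):

use fast_image_resize::{ResizeOptions, Resizer};
use image::DynamicImage;

// Hypothetical helper: scale `src` to the given size with the default
// resize options, keeping the source color type.
fn downscale(src: &DynamicImage, width: u32, height: u32) -> DynamicImage {
    let mut dst = DynamicImage::new(width, height, src.color());
    let mut resizer = Resizer::new();
    resizer
        .resize(src, &mut dst, &ResizeOptions::default())
        .expect("image resize failed");
    dst
}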

View file

@@ -2,29 +2,39 @@ use crate::{
cli::ImageProcessingOptions,
ledwand_dither::{blur, histogram_correction, median_brightness, ostromoukhov_dither, sharpen},
};
use image::{
imageops::{resize, FilterType},
DynamicImage, ImageBuffer, Luma,
};
use fast_image_resize::{ResizeOptions, Resizer};
use image::{DynamicImage, GrayImage};
use log::{debug, trace};
use servicepoint::{Bitmap, Grid, PIXEL_HEIGHT, PIXEL_WIDTH, TILE_HEIGHT, TILE_SIZE};
use std::time::Instant;
use std::{default::Default, time::Instant};
#[derive(Debug)]
pub struct ImageProcessingPipeline {
options: ImageProcessingOptions,
resizer: Resizer,
render_size: (usize, usize),
}
const SPACER_HEIGHT: usize = TILE_SIZE / 2;
const PIXEL_HEIGHT_INCLUDING_SPACERS: usize = SPACER_HEIGHT * (TILE_HEIGHT - 1) + PIXEL_HEIGHT;
impl ImageProcessingPipeline {
pub fn new(options: ImageProcessingOptions) -> Self {
debug!("Creating image pipeline: {:?}", options);
Self { options }
let spacers_height = if options.no_spacers {
0
} else {
SPACER_HEIGHT * (TILE_HEIGHT - 1)
};
Self {
options,
resizer: Resizer::new(),
render_size: (PIXEL_WIDTH, PIXEL_HEIGHT + spacers_height),
}
}
pub fn process(&self, frame: DynamicImage) -> Bitmap {
pub fn process(&mut self, frame: DynamicImage) -> Bitmap {
let start_time = Instant::now();
let frame = self.resize_grayscale(frame);
@@ -35,34 +45,31 @@ impl ImageProcessingPipeline {
result = Self::remove_spacers(result);
}
trace!("image processing took {:?}", start_time.elapsed());
trace!("pipeline took {:?}", start_time.elapsed());
result
}
fn resize_grayscale(&self, frame: DynamicImage) -> ImageBuffer<Luma<u8>, Vec<u8>> {
// TODO: keep aspect ratio
// TODO: make it work for non-maximum sizes
fn resize_grayscale(&mut self, frame: DynamicImage) -> GrayImage {
let start_time = Instant::now();
let frame = frame.grayscale().to_luma8();
let (scaled_width, scaled_height) = self.fit_size((frame.width(), frame.height()));
let mut dst_image = DynamicImage::new(scaled_width, scaled_height, frame.color());
let target_height = if self.options.no_spacers {
PIXEL_HEIGHT
} else {
PIXEL_HEIGHT_INCLUDING_SPACERS
};
self.resizer
.resize(&frame, &mut dst_image, &ResizeOptions::default())
.expect("image resize failed");
resize(
&frame,
PIXEL_WIDTH as u32,
target_height as u32,
FilterType::Nearest,
)
trace!("resizing took {:?}", start_time.elapsed());
let start_time = Instant::now();
let result = dst_image.into_luma8();
trace!("grayscale took {:?}", start_time.elapsed());
result
}
fn grayscale_processing(
&self,
mut frame: ImageBuffer<Luma<u8>, Vec<u8>>,
) -> ImageBuffer<Luma<u8>, Vec<u8>> {
fn grayscale_processing(&self, mut frame: GrayImage) -> GrayImage {
let start_time = Instant::now();
if !self.options.no_hist {
histogram_correction(&mut frame);
}
@@ -78,35 +85,78 @@ impl ImageProcessingPipeline {
sharpen(&orig, &mut frame);
std::mem::swap(&mut frame, &mut orig);
}
trace!("image processing took {:?}", start_time.elapsed());
orig
}
fn grayscale_to_bitmap(&self, orig: ImageBuffer<Luma<u8>, Vec<u8>>) -> Bitmap {
if self.options.no_dither {
fn grayscale_to_bitmap(&self, orig: GrayImage) -> Bitmap {
let start_time = Instant::now();
let result = if self.options.no_dither {
let cutoff = median_brightness(&orig);
let bits = orig.iter().map(move |x| x > &cutoff).collect();
Bitmap::from_bitvec(orig.width() as usize, bits)
} else {
ostromoukhov_dither(orig, u8::MAX / 2)
}
};
trace!("bitmap conversion took {:?}", start_time.elapsed());
result
}
fn remove_spacers(bitmap: Bitmap) -> Bitmap {
let mut result = Bitmap::max_sized();
fn remove_spacers(source: Bitmap) -> Bitmap {
let start_time = Instant::now();
let full_tile_rows_with_spacers = source.height() / (TILE_SIZE + SPACER_HEIGHT);
let remaining_pixel_rows = source.height() % (TILE_SIZE + SPACER_HEIGHT);
let total_spacer_height = full_tile_rows_with_spacers * SPACER_HEIGHT
+ remaining_pixel_rows.saturating_sub(TILE_SIZE);
let height_without_spacers = source.height() - total_spacer_height;
trace!(
"spacers take up {total_spacer_height}, resulting in height {height_without_spacers}"
);
let mut result = Bitmap::new(source.width(), height_without_spacers);
let mut source_y = 0;
for result_y in 0..result.height() {
if result_y != 0 && result_y % TILE_SIZE == 0 {
source_y += 4;
}
for x in 0..result.width() {
result.set(x, result_y, bitmap.get(x, source_y));
result.set(x, result_y, source.get(x, source_y));
}
if result_y != 0 && result_y % TILE_SIZE == 0 {
source_y += SPACER_HEIGHT;
}
source_y += 1;
}
trace!("removing spacers took {:?}", start_time.elapsed());
result
}
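// Illustrative walkthrough of the spacer arithmetic above, assuming a 448x160
// display (TILE_SIZE = 8, SPACER_HEIGHT = 4) and a 236 pixel tall source that
// still includes the spacers:
//   full_tile_rows_with_spacers = 236 / (8 + 4) = 19
//   remaining_pixel_rows        = 236 % (8 + 4) = 8
//   total_spacer_height         = 19 * 4 + (8 - 8) = 76
//   height_without_spacers      = 236 - 76 = 160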
fn fit_size(&self, source: (u32, u32)) -> (u32, u32) {
let (source_width, source_height) = source;
let (target_width, target_height) = self.render_size;
debug_assert_eq!(target_width % TILE_SIZE, 0);
let width_scale = target_width as f32 / source_width as f32;
let height_scale = target_height as f32 / source_height as f32;
let scale = f32::min(width_scale, height_scale);
let height = (source_height as f32 * scale) as u32;
let mut width = (source_width as f32 * scale) as u32;
if width % TILE_SIZE as u32 != 0 {
// because we do not have many pixels, round up even if it is a worse fit
width += 8 - width % 8;
}
let result = (width, height);
trace!(
"scaling {:?} to {:?} to fit {:?}",
source,
result,
self.render_size
);
result
}
}
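For a sense of what fit_size computes, here is a worked example as a small standalone snippet, assuming PIXEL_WIDTH = 448 and PIXEL_HEIGHT = 160 as on the full-size display, so render_size = (448, 236) with spacers enabled; the numbers are illustrative, not part of the commit:

// A 600x472 source: the height is the limiting dimension.
let (source_w, source_h) = (600u32, 472u32);
let scale = f32::min(448.0 / source_w as f32, 236.0 / source_h as f32); // 0.5
let height = (source_h as f32 * scale) as u32; // 236
let mut width = (source_w as f32 * scale) as u32; // 300
// 300 is not a multiple of the 8 pixel tile width, so round up to 304.
if width % 8 != 0 {
    width += 8 - width % 8;
}
assert_eq!((width, height), (304, 236));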

View file

@@ -174,11 +174,11 @@ pub(crate) fn ostromoukhov_dither(source: GrayImage, bias: u8) -> Bitmap {
for y in 0..height as usize {
let start = y * width as usize;
if y % 2 == 0 {
for x in 0..width as usize {
for x in start..start + width as usize {
ostromoukhov_dither_pixel(
&mut source,
&mut destination,
start + x,
x,
width as usize,
y == (height - 1) as usize,
1,
@@ -186,11 +186,11 @@ pub(crate) fn ostromoukhov_dither(source: GrayImage, bias: u8) -> Bitmap {
);
}
} else {
for x in (0..width as usize).rev() {
for x in (start..start + width as usize).rev() {
ostromoukhov_dither_pixel(
&mut source,
&mut destination,
start + x,
x,
width as usize,
y == (height - 1) as usize,
-1,
@@ -213,17 +213,9 @@ fn ostromoukhov_dither_pixel(
direction: isize,
bias: u8,
) {
let old_pixel = source[position];
let destination_value = old_pixel > bias;
let (destination_value, error) = gray_to_bit(source[position], bias);
destination.set(position, destination_value);
let error = if destination_value {
255 - old_pixel
} else {
old_pixel
};
let mut diffuse = |to: usize, mat: i16| {
let diffuse_value = source[to] as i16 + mat;
source[to] = diffuse_value.clamp(u8::MIN.into(), u8::MAX.into()) as u8;
@@ -245,6 +237,16 @@ fn ostromoukhov_dither_pixel(
}
}
fn gray_to_bit(old_pixel: u8, bias: u8) -> (bool, u8) {
let destination_value = old_pixel > bias;
let error = if destination_value {
255 - old_pixel
} else {
old_pixel
};
(destination_value, error)
}
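// Illustrative examples for gray_to_bit: old_pixel = 200 with bias = 127 gives
// (true, 55), i.e. the pixel turns on and the leftover error 255 - 200 = 55 is
// diffused to the neighbours; old_pixel = 100 gives (false, 100), so the full
// brightness is carried over as error.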
const ERROR_DIFFUSION_MATRIX: [[i16; 3]; 256] = [
[0, 1, 0],
[1, 0, 0],

View file

@@ -44,7 +44,7 @@ fn pixels_image(
processing_options: ImageProcessingOptions,
) {
let image = image::open(&options.file_name).expect("failed to open image file");
let pipeline = ImageProcessingPipeline::new(processing_options);
let mut pipeline = ImageProcessingPipeline::new(processing_options);
let bitmap = pipeline.process(image);
connection
.send(Command::BitmapLinearWin(

View file

@@ -3,14 +3,14 @@ use crate::{
image_processing::ImageProcessingPipeline,
};
use image::{DynamicImage, ImageBuffer, Rgb, Rgba};
use log::{error, info, warn};
use log::{debug, error, info, trace, warn};
use scap::{
capturer::{Capturer, Options},
frame::convert_bgra_to_rgb,
frame::Frame,
};
use servicepoint::{Command, CompressionCode, Connection, Origin, FRAME_PACING};
use std::time::Duration;
use std::time::{Duration, Instant};
pub fn stream_window(
connection: &Connection,
@@ -23,20 +23,27 @@ pub fn stream_window(
None => return,
};
let pipeline = ImageProcessingPipeline::new(processing_options);
let mut pipeline = ImageProcessingPipeline::new(processing_options);
info!("now starting to stream images");
loop {
let frame = capturer.get_next_frame().expect("failed to capture frame");
let start = Instant::now();
let frame = capture_frame(&capturer);
let frame = frame_to_image(frame);
let bitmap = pipeline.process(frame);
trace!("bitmap ready to send in: {:?}", start.elapsed());
connection
.send(Command::BitmapLinearWin(
Origin::ZERO,
bitmap.clone(),
CompressionCode::Uncompressed,
CompressionCode::default(),
))
.expect("failed to send frame to display");
debug!("frame time: {:?}", start.elapsed());
}
}
@@ -66,8 +73,16 @@ fn start_capture(options: &StreamScreenOptions) -> Option<Capturer> {
Some(capturer)
}
fn capture_frame(capturer: &Capturer) -> Frame {
let start_time = Instant::now();
let result = capturer.get_next_frame().expect("failed to capture frame");
trace!("capture took: {:?}", start_time.elapsed());
result
}
fn frame_to_image(frame: Frame) -> DynamicImage {
match frame {
let start_time = Instant::now();
let result = match frame {
Frame::BGRx(frame) => bgrx_to_rgb(frame.width, frame.height, frame.data),
Frame::RGBx(frame) => DynamicImage::from(
ImageBuffer::<Rgba<_>, _>::from_raw(
@@ -84,7 +99,9 @@ fn frame_to_image(frame: Frame) -> DynamicImage {
),
Frame::BGRA(frame) => bgrx_to_rgb(frame.width, frame.height, frame.data),
Frame::YUVFrame(_) | Frame::XBGR(_) => panic!("unsupported frame format"),
}
};
trace!("conversion to image took: {:?}", start_time.elapsed());
result
}
fn bgrx_to_rgb(width: i32, height: i32, data: Vec<u8>) -> DynamicImage {