Refactor, working pan, cleanup

This commit is contained in:
Dawid Pietrykowski 2025-04-07 22:06:01 +02:00
parent 99f1c8098b
commit 5c321ea9e6
4 changed files with 199 additions and 993 deletions

View File

@ -1,5 +1,5 @@
use crate::egui_tools::EguiRenderer;
use egui::{Event, Key};
use egui::{Event, Key, PointerButton};
use egui_wgpu::wgpu::SurfaceError;
use egui_wgpu::{ScreenDescriptor, wgpu};
use imflow::store::ImageStore;
@ -58,7 +58,6 @@ fn setup_texture(
wgpu::RenderPipeline,
wgpu::Buffer,
) {
// Create your texture (one-time setup)
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("Image texture"),
size: wgpu::Extent3d {
@ -74,7 +73,6 @@ fn setup_texture(
view_formats: &[],
});
// Create texture view and sampler
let texture_view = texture.create_view(&wgpu::TextureViewDescriptor::default());
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
address_mode_u: wgpu::AddressMode::ClampToEdge,
@ -86,7 +84,6 @@ fn setup_texture(
..Default::default()
});
// Create bind group layout for the texture
let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("Texture Bind Group Layout"),
entries: &[
@ -145,7 +142,7 @@ fn setup_texture(
},
],
});
// Define vertex buffer layout
let vertex_buffer_layout = wgpu::VertexBufferLayout {
array_stride: 5 * std::mem::size_of::<f32>() as wgpu::BufferAddress,
step_mode: wgpu::VertexStepMode::Vertex,
@ -165,13 +162,11 @@ fn setup_texture(
],
};
// Create shader modules
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: Some("Texture Shader"),
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(include_str!("shader.wgsl"))),
});
// Create the render pipeline (simplified, you'd need to define vertex buffers, etc.)
let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("Texture Render Pipeline"),
layout: Some(
@ -418,7 +413,7 @@ impl App {
self.pan_zoom(0.0, 0.0, 0.0);
}
pub fn pan_zoom(&mut self, zoom_delta: f32, pan_x: f32, pan_y: f32) {
fn update_transform(&mut self) {
let state = self.state.as_mut().unwrap();
let image_aspect_ratio =
@ -432,10 +427,6 @@ impl App {
} else {
scale_y = window_aspect_ratio / image_aspect_ratio;
}
state.transform_data.zoom = (state.transform_data.zoom + zoom_delta).clamp(1.0, 20.0);
state.transform_data.pan_x += pan_x;
state.transform_data.pan_y += pan_y;
let transform = create_transform_matrix(&state.transform_data, scale_x, scale_y);
state.queue.write_buffer(
&state.transform_buffer,
@ -450,6 +441,25 @@ impl App {
);
}
/// Restores the view to its default state (no zoom, no pan) and
/// re-uploads the transform matrix to the GPU via `update_transform`.
pub fn reset_transform(&mut self) {
    {
        // Reset every transform component to its identity value.
        let transform = &mut self.state.as_mut().unwrap().transform_data;
        transform.zoom = 1.0;
        transform.pan_x = 0.0;
        transform.pan_y = 0.0;
    }
    self.update_transform();
}
/// Applies incremental view changes: `zoom_delta` is added to the current
/// zoom level (clamped to the [1.0, 20.0] range) and `pan_x`/`pan_y` are
/// accumulated into the pan offsets, after which the transform matrix is
/// re-uploaded to the GPU.
pub fn pan_zoom(&mut self, zoom_delta: f32, pan_x: f32, pan_y: f32) {
    {
        let transform = &mut self.state.as_mut().unwrap().transform_data;
        transform.zoom = (transform.zoom + zoom_delta).clamp(1.0, 20.0);
        transform.pan_x += pan_x;
        transform.pan_y += pan_y;
    }
    self.update_transform();
}
fn handle_redraw(&mut self) {
// Attempt to handle minimizing window
if let Some(window) = self.window.as_ref() {
@ -496,7 +506,7 @@ impl App {
.device
.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
// Add this render pass to clear the screen with green
// Clear buffer with black
{
let _ = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: None,
@ -506,7 +516,7 @@ impl App {
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color {
r: 0.0,
g: 0.0, // Green
g: 0.0,
b: 0.0,
a: 1.0,
}),
@ -520,7 +530,6 @@ impl App {
}
{
// Define vertices for your quad
#[repr(C)]
#[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
struct Vertex {
@ -528,7 +537,7 @@ impl App {
tex_coords: [f32; 2],
}
// Define a quad (two triangles)
// Quad (two triangles)
let vertices = [
// Position (x, y, z), Texture coords (u, v)
Vertex {
@ -549,10 +558,8 @@ impl App {
}, // top right
];
// Create indices for drawing two triangles
let indices: [u16; 6] = [0, 1, 2, 2, 1, 3];
// Create vertex buffer
let vertex_buffer =
state
.device
@ -562,7 +569,6 @@ impl App {
usage: wgpu::BufferUsages::VERTEX,
});
// Create index buffer
let index_buffer = state
.device
.create_buffer_init(&wgpu::util::BufferInitDescriptor {
@ -577,7 +583,6 @@ impl App {
view: &surface_view,
resolve_target: None,
ops: wgpu::Operations {
// Use Load instead of Clear so we don't erase the green background
load: wgpu::LoadOp::Load,
store: wgpu::StoreOp::Store,
},
@ -593,14 +598,14 @@ impl App {
// Bind the vertex buffer
render_pass.set_vertex_buffer(0, vertex_buffer.slice(..));
// Draw using the index buffer (more efficient)
// Draw using the index buffer
render_pass.set_index_buffer(index_buffer.slice(..), wgpu::IndexFormat::Uint16);
render_pass.draw_indexed(0..6, 0, 0..1);
}
let rating = state.store.get_current_rating();
let path = state.store.current_image_path.clone();
let filename = path.file_name().unwrap();
let filename = path.path.file_name().unwrap();
let window = self.window.as_ref().unwrap();
{
state.egui_renderer.begin_frame(window);
@ -621,7 +626,6 @@ impl App {
.size(10.0)
.strong(),
);
// ui.add_space(10.0);
});
});
@ -663,19 +667,15 @@ impl ApplicationHandler for App {
event_loop.exit();
}
WindowEvent::RedrawRequested => {
// let start = time::Instant::now();
self.handle_redraw();
// println!("Updated in: {}ms", start.elapsed().as_millis());
// Extract the events by cloning them from the input context
let (events, keys_down) = self
let (events, keys_down, pointer) = self
.state
.as_ref()
.unwrap()
.egui_renderer
.context()
.input(|i| (i.events.clone(), i.keys_down.clone()));
.input(|i| (i.events.clone(), i.keys_down.clone(), i.pointer.clone()));
// Now use the extracted events outside the closure
events.iter().for_each(|e| {
if let Event::Key { key, pressed, .. } = e {
if !*pressed {
@ -712,13 +712,20 @@ impl ApplicationHandler for App {
}
} else if let Event::MouseWheel { delta, .. } = e {
self.pan_zoom(delta.y * 0.2, 0.0, 0.0);
} else if let Event::PointerMoved(pos) = e {
if keys_down.contains(&Key::Tab) {
self.pan_zoom(0.0, pos.x * 0.00001, pos.y * 0.00001);
} else if let Event::PointerButton {
button, pressed, ..
} = e
{
if *pressed && *button == PointerButton::Secondary {
self.reset_transform();
}
}
});
if pointer.primary_down() && pointer.is_moving() {
self.pan_zoom(0.0, pointer.delta().x * 0.001, pointer.delta().y * -0.001);
}
self.window.as_ref().unwrap().request_redraw();
}
WindowEvent::Resized(new_size) => {

View File

@ -1,31 +1,39 @@
use image::DynamicImage;
use image::ImageDecoder;
use image::RgbaImage;
use image::imageops::FilterType;
use image::metadata::Orientation;
use itertools::Itertools;
use jpegxl_rs::Endianness;
use jpegxl_rs::decode::Data;
use jpegxl_rs::decode::PixelFormat;
use jpegxl_rs::decode::Pixels;
use jpegxl_rs::decoder_builder;
use libheif_rs::{HeifContext, LibHeif, RgbChroma};
use rexiv2::Metadata;
use zune_image::codecs::jpeg::JpegDecoder;
use zune_image::codecs::jpeg_xl::JxlDecoder;
use zune_image::codecs::qoi::zune_core::colorspace::ColorSpace;
use zune_image::codecs::qoi::zune_core::options::DecoderOptions;
use zune_image::traits::DecoderTrait;
use std::fs;
use std::fs::File;
use std::fs::read;
use std::io::BufReader;
use std::io::Cursor;
use std::io::Read;
use std::mem;
use std::path::PathBuf;
use std::time::Instant;
/// Supported image container formats, selected from the file extension.
///
/// `Copy` is free for a fieldless enum; `Debug` enables diagnostics; and
/// `Ord` completes the pre-existing `PartialOrd`/`Eq` pair (deriving
/// `PartialOrd` without `Ord` on a total-ordered type is a known lint —
/// clippy `derive_ord_xor_partial_ord`).
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum ImageFormat {
    Jpg,
    Jxl,
    Heif,
}
/// A discovered image file: its filesystem location plus the decoder
/// format inferred from its file extension.
#[derive(Clone, Eq, Hash, PartialEq, PartialOrd)]
pub struct ImageData {
    // Path to the image file on disk.
    pub path: PathBuf,
    // Which decoder pipeline to use for this file.
    pub format: ImageFormat,
}
pub struct ImflowImageBuffer {
pub width: usize,
pub height: usize,
@ -33,18 +41,8 @@ pub struct ImflowImageBuffer {
pub rating: i32,
}
// pub fn create_iced_handle(width: u32, height: u32, rgba: Vec<u8>) -> Handle {
// Handle::from_rgba(width, height, rgba)
// }
pub fn get_rating(filename: &PathBuf) -> i32 {
// // Use xmp-toolkit for video files
// if is_video(&filename) {
// return Ok(read_rating_xmp(filename.clone()).unwrap_or(0));
// }
// Use rexiv2 for image files
let meta = Metadata::new_from_path(filename);
pub fn get_rating(image: &ImageData) -> i32 {
let meta = Metadata::new_from_path(&image.path);
match meta {
Ok(meta) => {
let rating = meta.get_tag_numeric("Xmp.xmp.Rating");
@ -54,14 +52,8 @@ pub fn get_rating(filename: &PathBuf) -> i32 {
}
}
pub fn get_orientation(filename: &PathBuf) -> u8 {
// // Use xmp-toolkit for video files
// if is_video(&filename) {
// return Ok(read_rating_xmp(filename.clone()).unwrap_or(0));
// }
// Use rexiv2 for image files
let meta = Metadata::new_from_path(filename);
pub fn get_orientation(image: &ImageData) -> u8 {
let meta = Metadata::new_from_path(&image.path);
match meta {
Ok(meta) => meta.get_orientation() as u8,
Err(e) => panic!("{:?}", e),
@ -82,98 +74,116 @@ fn swap_wh<T>(width: T, height: T, orientation: Orientation) -> (T, T) {
(width, height)
}
pub fn load_image(path: &PathBuf) -> ImflowImageBuffer {
/// Infers the decoder format for `path` from its file extension,
/// compared case-insensitively.
///
/// Returns `None` when `path` is not a regular file, has no extension,
/// has a non-UTF-8 extension, or the extension is not a supported
/// format. (The previous version called `.unwrap()` on the extension
/// and would panic on extension-less files picked up by a directory
/// scan; `?` on the `Option`s makes those cases a clean `None`.)
fn get_format(path: &PathBuf) -> Option<ImageFormat> {
    if !path.is_file() {
        return None;
    }
    let ext = path.extension()?.to_ascii_lowercase();
    match ext.to_str()? {
        "heic" | "heif" => Some(ImageFormat::Heif),
        "jpg" | "jpeg" => Some(ImageFormat::Jpg),
        "jxl" => Some(ImageFormat::Jxl),
        _ => None,
    }
}
pub fn load_image(image: &ImageData) -> ImflowImageBuffer {
let total_start = Instant::now();
if is_heif(path) {
let img = load_heif(path, false);
let total_time = total_start.elapsed();
println!("Total HEIF loading time: {:?}", total_time);
return img;
}
let width: usize;
let height: usize;
let rating = get_rating(path);
if is_jxl(path) {
let file = read(path).unwrap();
use jpegxl_rs::ThreadsRunner;
let runner = ThreadsRunner::default();
let decoder = decoder_builder()
.parallel_runner(&runner)
.pixel_format(PixelFormat {
num_channels: 4,
endianness: Endianness::Big,
align: 8,
})
.build()
.unwrap();
let (metadata, buffer) = decoder.decode_with::<u8>(&file).unwrap();
width = metadata.width as usize;
height = metadata.height as usize;
let rgba_buffer = unsafe {
Vec::from_raw_parts(
buffer.as_ptr() as *mut u32,
buffer.len() / 4,
buffer.len() / 4,
)
};
std::mem::forget(buffer);
println!("Total loading time: {:?}", total_start.elapsed());
let rating = get_rating(path);
ImflowImageBuffer {
width,
height,
rgba_buffer,
rating,
match image.format {
ImageFormat::Heif => {
let img = load_heif(image, false);
let total_time = total_start.elapsed();
println!("Total HEIF loading time: {:?}", total_time);
img
}
} else {
let mut buffer: Vec<u8>;
let options = DecoderOptions::new_fast().jpeg_set_out_colorspace(ColorSpace::RGBA);
let file = read(path.clone()).unwrap();
let mut decoder = JpegDecoder::new(&file);
decoder.set_options(options);
ImageFormat::Jxl => {
let rating = get_rating(image);
decoder.decode_headers().unwrap();
let info = decoder.info().unwrap();
width = info.width as usize;
height = info.height as usize;
buffer = vec![0; width * height * 4];
decoder.decode_into(buffer.as_mut_slice()).unwrap();
let file = read(image.path.clone()).unwrap();
use jpegxl_rs::ThreadsRunner;
let runner = ThreadsRunner::default();
let decoder = decoder_builder()
.parallel_runner(&runner)
.pixel_format(PixelFormat {
num_channels: 4,
endianness: Endianness::Big,
align: 8,
})
.build()
.unwrap();
let orientation_start = Instant::now();
// TODO: Optimize rotation
let orientation =
Orientation::from_exif(get_orientation(path)).unwrap_or(Orientation::NoTransforms);
let image = RgbaImage::from_raw(width as u32, height as u32, buffer).unwrap();
let mut dynamic_image = DynamicImage::from(image);
dynamic_image.apply_orientation(orientation);
let buffer = dynamic_image.as_rgba8().unwrap();
let (width, height) = swap_wh(width, height, orientation);
let orientation_time = orientation_start.elapsed();
let (metadata, buffer) = decoder.decode_with::<u8>(&file).unwrap();
let width = metadata.width as usize;
let height = metadata.height as usize;
// Reinterpret to avoid copying
let rgba_buffer = unsafe {
Vec::from_raw_parts(
buffer.as_ptr() as *mut u32,
buffer.len() / 4,
buffer.len() / 4,
)
};
std::mem::forget(dynamic_image);
let total_time = total_start.elapsed();
println!("Orientation time: {:?}", orientation_time);
println!("Total loading time: {:?}", total_time);
ImflowImageBuffer {
width,
height,
rgba_buffer,
rating,
let rgba_buffer = unsafe {
Vec::from_raw_parts(
buffer.as_ptr() as *mut u32,
buffer.len() / 4,
buffer.len() / 4,
)
};
std::mem::forget(buffer);
println!("Total JXL loading time: {:?}", total_start.elapsed());
ImflowImageBuffer {
width,
height,
rgba_buffer,
rating,
}
}
ImageFormat::Jpg => {
let rating = get_rating(image);
let mut buffer: Vec<u8>;
let options = DecoderOptions::new_fast().jpeg_set_out_colorspace(ColorSpace::RGBA);
let file = read(image.path.clone()).unwrap();
let mut decoder = JpegDecoder::new(&file);
decoder.set_options(options);
decoder.decode_headers().unwrap();
let info = decoder.info().unwrap();
let width = info.width as usize;
let height = info.height as usize;
buffer = vec![0; width * height * 4];
decoder.decode_into(buffer.as_mut_slice()).unwrap();
let orientation_start = Instant::now();
// TODO: Optimize rotation
let orientation =
Orientation::from_exif(get_orientation(image)).unwrap_or(Orientation::NoTransforms);
let image = RgbaImage::from_raw(width as u32, height as u32, buffer).unwrap();
let mut dynamic_image = DynamicImage::from(image);
dynamic_image.apply_orientation(orientation);
let buffer = dynamic_image.as_rgba8().unwrap();
let (width, height) = swap_wh(width, height, orientation);
let orientation_time = orientation_start.elapsed();
// Reinterpret to avoid copying
let rgba_buffer = unsafe {
Vec::from_raw_parts(
buffer.as_ptr() as *mut u32,
buffer.len() / 4,
buffer.len() / 4,
)
};
std::mem::forget(dynamic_image);
let total_time = total_start.elapsed();
println!("Orientation time: {:?}", orientation_time);
println!("Total loading time: {:?}", total_time);
ImflowImageBuffer {
width,
height,
rgba_buffer,
rating,
}
}
}
}
@ -192,18 +202,23 @@ pub fn image_to_rgba_buffer(img: DynamicImage) -> Vec<u32> {
vec
}
pub fn load_available_images(dir: PathBuf) -> Vec<PathBuf> {
let mut files: Vec<PathBuf> = fs::read_dir(dir)
pub fn load_available_images(dir: PathBuf) -> Vec<ImageData> {
fs::read_dir(dir)
.unwrap()
.map(|f| f.unwrap().path())
.filter(is_image)
.collect();
files.sort();
files
.map(|f| f.unwrap().path().to_path_buf())
.sorted()
.filter_map(|path| {
if let Some(format) = get_format(&path) {
Some(ImageData { path, format })
} else {
None
}
})
.collect::<Vec<ImageData>>()
}
pub fn get_embedded_thumbnail(path: PathBuf) -> Option<Vec<u8>> {
let meta = rexiv2::Metadata::new_from_path(path);
pub fn get_embedded_thumbnail(image: &ImageData) -> Option<Vec<u8>> {
let meta = Metadata::new_from_path(&image.path);
match meta {
Ok(meta) => {
if let Some(previews) = meta.get_preview_images() {
@ -217,44 +232,8 @@ pub fn get_embedded_thumbnail(path: PathBuf) -> Option<Vec<u8>> {
}
}
/// Returns true when `path` is an existing regular file whose extension
/// (compared case-insensitively) is one of the supported image types.
/// NOTE(review): like the original, this panics if an existing file has
/// no extension at all — callers currently only pass directory-scan
/// results, but worth confirming.
fn is_image(path: &PathBuf) -> bool {
    if !path.is_file() {
        return false;
    }
    let ext = path.extension().unwrap().to_ascii_lowercase();
    matches!(ext.to_str().unwrap(), "jpg" | "jxl" | "heic" | "heif")
}
/// Returns true when the file extension (case-insensitive) marks a
/// HEIF/HEIC container. Like the original, panics if `path` has no
/// extension; it does not check that the file exists.
fn is_heif(path: &PathBuf) -> bool {
    let ext = path.extension().unwrap().to_ascii_lowercase();
    matches!(ext.to_str().unwrap(), "heif" | "heic")
}
/// Returns true when the file extension (case-insensitive) marks a
/// JPEG XL file. Like the original, panics if `path` has no extension;
/// it does not check that the file exists.
fn is_jxl(path: &PathBuf) -> bool {
    let ext = path.extension().unwrap().to_ascii_lowercase();
    matches!(ext.to_str().unwrap(), "jxl" | "jpgxl")
}
pub fn load_thumbnail(path: &PathBuf) -> ImflowImageBuffer {
if is_heif(path) {
pub fn load_thumbnail(path: &ImageData) -> ImflowImageBuffer {
if path.format == ImageFormat::Heif {
return load_heif(path, true);
}
match load_thumbnail_exif(path) {
@ -263,8 +242,8 @@ pub fn load_thumbnail(path: &PathBuf) -> ImflowImageBuffer {
}
}
pub fn load_thumbnail_exif(path: &PathBuf) -> Option<ImflowImageBuffer> {
match get_embedded_thumbnail(path.clone()) {
pub fn load_thumbnail_exif(path: &ImageData) -> Option<ImflowImageBuffer> {
match get_embedded_thumbnail(path) {
Some(thumbnail) => {
let decoder = image::ImageReader::new(Cursor::new(thumbnail))
.with_guessed_format()
@ -296,8 +275,8 @@ pub fn load_thumbnail_exif(path: &PathBuf) -> Option<ImflowImageBuffer> {
}
}
pub fn load_thumbnail_full(path: &PathBuf) -> ImflowImageBuffer {
let file = BufReader::new(File::open(path).unwrap());
pub fn load_thumbnail_full(path: &ImageData) -> ImflowImageBuffer {
let file = BufReader::new(File::open(path.path.clone()).unwrap());
let reader = image::ImageReader::new(file);
let image = reader
.with_guessed_format()
@ -318,9 +297,9 @@ pub fn load_thumbnail_full(path: &PathBuf) -> ImflowImageBuffer {
}
}
pub fn load_heif(path: &PathBuf, resize: bool) -> ImflowImageBuffer {
pub fn load_heif(path: &ImageData, resize: bool) -> ImflowImageBuffer {
let lib_heif = LibHeif::new();
let ctx = HeifContext::read_from_file(path.to_str().unwrap()).unwrap();
let ctx = HeifContext::read_from_file(path.path.to_str().unwrap()).unwrap();
let handle = ctx.primary_image_handle().unwrap();
// assert_eq!(handle.width(), 1652);
// assert_eq!(handle.height(), 1791);
@ -370,23 +349,3 @@ pub fn load_heif(path: &PathBuf, resize: bool) -> ImflowImageBuffer {
rating,
}
}
// fn load_jxl(path: &PathBuf) -> ImflowImageBuffer {
// let file = BufReader::new(File::open(path).unwrap());
// let decoder = JxlDecoder::try_new(file, DecoderOptions::new_fast()).unwrap();
// // let reader = image::ImageReader::new(file);
// let image = decoder
// .decode()
// .unwrap();
// let width = image.width() as usize;
// let height = image.height() as usize;
// let buffer = image_to_rgba_buffer(image);
// let rating = get_rating(path.into());
// ImflowImageBuffer {
// width,
// height,
// rgba_buffer: buffer,
// rating,
// }
// }

View File

@ -1,321 +1,5 @@
// use std::fs::{self};
// use std::path::{Path, PathBuf};
// use std::collections::HashMap;
// use iced::widget::image::FilterMethod;
// use iced::widget::{
// Column, Container, button, center, checkbox, column, container, row, slider, text,
// };
// use iced::{Center, Element, Fill, Length, Subscription, Task, keyboard};
use std::path::PathBuf;
use clap::Parser;
use minifb::{Key, Window, WindowOptions};
use imflow::image::ImflowImageBuffer;
use imflow::store::ImageStore;
// use winit::{
// application::ApplicationHandler,
// event::WindowEvent,
// event_loop::{ActiveEventLoop, ControlFlow, EventLoop},
// window::{Window, WindowId},
// };
// struct State {
// window: Arc<Window>,
// device: wgpu::Device,
// queue: wgpu::Queue,
// size: winit::dpi::PhysicalSize<u32>,
// surface: wgpu::Surface<'static>,
// surface_format: wgpu::TextureFormat,
// }
// impl State {
// async fn new(window: Arc<Window>) -> State {
// let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor::default());
// let adapter = instance
// .request_adapter(&wgpu::RequestAdapterOptions::default())
// .await
// .unwrap();
// let (device, queue) = adapter
// .request_device(&wgpu::DeviceDescriptor::default(), None)
// .await
// .unwrap();
// let size = window.inner_size();
// let surface = instance.create_surface(window.clone()).unwrap();
// let cap = surface.get_capabilities(&adapter);
// let surface_format = cap.formats[0];
// let state = State {
// window,
// device,
// queue,
// size,
// surface,
// surface_format,
// };
// // Configure surface for the first time
// state.configure_surface();
// state
// }
// fn get_window(&self) -> &Window {
// &self.window
// }
// fn configure_surface(&self) {
// let surface_config = wgpu::SurfaceConfiguration {
// usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
// format: self.surface_format,
// // Request compatibility with the sRGB-format texture view were going to create later.
// view_formats: vec![self.surface_format.add_srgb_suffix()],
// alpha_mode: wgpu::CompositeAlphaMode::Auto,
// width: self.size.width,
// height: self.size.height,
// desired_maximum_frame_latency: 2,
// present_mode: wgpu::PresentMode::AutoVsync,
// };
// self.surface.configure(&self.device, &surface_config);
// }
// fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
// self.size = new_size;
// // reconfigure the surface
// self.configure_surface();
// }
// // fn render(&mut self) {
// // // Create texture view
// // let surface_texture = self
// // .surface
// // .get_current_texture()
// // .expect("failed to acquire next swapchain texture");
// // let texture_view = surface_texture
// // .texture
// // .create_view(&wgpu::TextureViewDescriptor {
// // // Without add_srgb_suffix() the image we will be working with
// // // might not be "gamma correct".
// // format: Some(self.surface_format.add_srgb_suffix()),
// // ..Default::default()
// // });
// // // Renders a GREEN screen
// // let mut encoder = self.device.create_command_encoder(&Default::default());
// // // Create the renderpass which will clear the screen.
// // let renderpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
// // label: None,
// // color_attachments: &[Some(wgpu::RenderPassColorAttachment {
// // view: &texture_view,
// // resolve_target: None,
// // ops: wgpu::Operations {
// // load: wgpu::LoadOp::Clear(wgpu::Color::GREEN),
// // store: wgpu::StoreOp::Store,
// // },
// // })],
// // depth_stencil_attachment: None,
// // timestamp_writes: None,
// // occlusion_query_set: None,
// // });
// // // If you wanted to call any drawing commands, they would go here.
// // // End the renderpass.
// // drop(renderpass);
// // // Submit the command in the queue to execute
// // self.queue.submit([encoder.finish()]);
// // self.window.pre_present_notify();
// // surface_texture.present();
// // }
// fn render(&mut self) {
// let mmap = map_file("test_images/20240811-194516_DSC02274.JPG").unwrap();
// println!("mapped file");
// let img = read_zune_image(mmap.deref()).unwrap();
// let width = img.dimensions().0 as u32;
// let height = img.dimensions().1 as u32;
// println!("loaded");
// let flat = flatten_zune_image(&img);
// println!("flattened");
// let rgb_bytes = flat[0].as_slice();
// // Assuming `self.rgb_bytes` is your buffer containing RGB data.
// let texture_extent = wgpu::Extent3d {
// width: width,
// height: height,
// depth_or_array_layers: 1,
// };
// // Create a wgpu texture
// let texture = self.device.create_texture(&wgpu::TextureDescriptor {
// label: Some("RGB Texture"),
// size: texture_extent,
// mip_level_count: 1,
// sample_count: 1,
// dimension: wgpu::TextureDimension::D2,
// format: wgpu::TextureFormat::Rgba8Unorm, // It's better to use RGBA with proper padding
// usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
// view_formats: &[],
// });
// // Upload your RGB data into the texture
// self.queue.write_texture(
// wgpu::TexelCopyTextureInfo{
// texture: &texture,
// mip_level: 0,
// origin: wgpu::Origin3d::ZERO,
// aspect: wgpu::TextureAspect::All,
// },
// &rgb_bytes,
// wgpu::TexelCopyBufferLayout {
// offset: 0,
// bytes_per_row: Some(4 * width), // Assuming padded row length
// rows_per_image: Some(height),
// },
// texture_extent,
// );
// // Create a texture view
// let surface_texture = self
// .surface
// .get_current_texture()
// .expect("failed to acquire next swapchain texture");
// let texture_view = surface_texture.texture.create_view(&wgpu::TextureViewDescriptor {
// format: Some(self.surface_format.add_srgb_suffix()),
// ..Default::default()
// });
// let rgb_texture_view = texture.create_view(&wgpu::TextureViewDescriptor::default());
// let mut encoder = self.device.create_command_encoder(&Default::default());
// // Create the renderpass
// let mut renderpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
// label: None,
// color_attachments: &[Some(wgpu::RenderPassColorAttachment {
// view: &texture_view,
// resolve_target: None,
// ops: wgpu::Operations {
// load: wgpu::LoadOp::Clear(wgpu::Color::BLACK),
// store: wgpu::StoreOp::Store,
// },
// })],
// depth_stencil_attachment: None,
// timestamp_writes: None,
// occlusion_query_set: None,
// });
// // Bind and draw
// // renderpass.set_pipeline(&self.pipeline); // Ensure self.pipeline is your render pipeline setup
// // renderpass.set_bind_group(0, &self.texture_bind_group, &[]); // Assuming you have a bind group which holds the texture
// renderpass.draw(0..3, 0..1); // Draws a triangle to cover the viewport, modify as needed for quads
// // End the renderpass
// drop(renderpass);
// // Submit the command buffer
// // self.queue.submit(iter::once(encoder.finish()));
// self.window.pre_present_notify();
// surface_texture.present();
// }
// }
// #[derive(Default)]
// struct App {
// state: Option<State>,
// }
// impl ApplicationHandler for App {
// fn resumed(&mut self, event_loop: &ActiveEventLoop) {
// // Create window object
// let window = Arc::new(
// event_loop
// .create_window(Window::default_attributes())
// .unwrap(),
// );
// let state = pollster::block_on(State::new(window.clone()));
// self.state = Some(state);
// window.request_redraw();
// }
// fn window_event(&mut self, event_loop: &ActiveEventLoop, _id: WindowId, event: WindowEvent) {
// let state = self.state.as_mut().unwrap();
// match event {
// WindowEvent::CloseRequested => {
// println!("The close button was pressed; stopping");
// event_loop.exit();
// }
// WindowEvent::RedrawRequested => {
// state.render();
// // Emits a new redraw requested event.
// state.get_window().request_redraw();
// }
// WindowEvent::Resized(size) => {
// // Reconfigures the size of the surface. We do not re-render
// // here as this event is always followed up by redraw request.
// state.resize(size);
// }
// _ => (),
// }
// }
// }
// pub fn main() -> iced::Result {
// tracing_subscriber::fmt::init();
// iced::application("Game of Life - Iced", GameOfLife::update, GameOfLife::view)
// .subscription(GameOfLife::subscription)
// .theme(|_| Theme::Dark)
// .antialiasing(true)
// .centered()
// .window_size(Size::new(1500.0, 1000.0))
// .run()
// }
// fn main() {
// let mut window = match Window::new("Test", 640, 400, WindowOptions::default()) {
// Ok(win) => win,
// Err(err) => {
// println!("Unable to create window {}", err);
// return;
// }
// }
// }
// fn main() {
// // wgpu uses `log` for all of our logging, so we initialize a logger with the `env_logger` crate.
// //
// // To change the log level, set the `RUST_LOG` environment variable. See the `env_logger`
// // documentation for more information.
// env_logger::init();
// let event_loop = EventLoop::new().unwrap();
// // When the current loop iteration finishes, immediately begin a new
// // iteration regardless of whether or not new events are available to
// // process. Preferred for applications that want to render as fast as
// // possible, like games.
// event_loop.set_control_flow(ControlFlow::Poll);
// // When the current loop iteration finishes, suspend the thread until
// // another event arrives. Helps keeping CPU utilization low if nothing
// // is happening, which is preferred if the application might be idling in
// // the background.
// // event_loop.set_control_flow(ControlFlow::Wait);
// let mut app = App::default();
// event_loop.run_app(&mut app).unwrap();
// }
//
use eframe::egui;
use egui::{ColorImage, Image, TextureHandle, TextureOptions};
use std::path::PathBuf;
mod app;
mod egui_tools;
@ -339,78 +23,6 @@ async fn run(path: PathBuf) {
let mut app = app::App::new(path);
event_loop.run_app(&mut app).expect("Failed to run app");
// let path = args.path.unwrap_or("./test_images".into());
// let mut state = ImageStore::new(path);
// let mut waiting = true;
// window.set_key_repeat_delay(0.1);
// window.set_key_repeat_rate(0.1);
// show_image(&mut window, state.get_thumbnail());
// while window.is_open() && !window.is_key_down(Key::Escape) {
// window.update();
// state.check_loaded_images();
// if window.is_key_pressed(Key::Right, minifb::KeyRepeat::Yes) {
// state.next_image(1);
// if let Some(full) = state.get_current_image() {
// show_image(&mut window, full);
// } else {
// show_image(&mut window, state.get_thumbnail());
// waiting = true;
// }
// } else if window.is_key_pressed(Key::Left, minifb::KeyRepeat::Yes) {
// state.next_image(-1);
// if let Some(full) = state.get_current_image() {
// show_image(&mut window, full);
// } else {
// show_image(&mut window, state.get_thumbnail());
// waiting = true;
// }
// }
// if waiting {
// if let Some(image) = state.get_current_image() {
// waiting = false;
// show_image(&mut window, &image);
// }
// }
// }
}
/// eframe application state: the backing image store plus the GPU
/// texture currently being displayed. (Removed the commented-out
/// `image: Image` field left over from an earlier experiment.)
struct MyApp {
    // Source of images on disk; not yet read inside `update`.
    store: ImageStore,
    // The texture shown in the central panel.
    texture: TextureHandle,
}
impl MyApp {
fn new(store: ImageStore, texture: TextureHandle) -> Self {
Self { store, texture }
}
}
impl eframe::App for MyApp {
    /// Renders the UI each frame: a heading followed by the currently
    /// loaded texture in the central panel.
    ///
    /// Cleanup: deleted the large body of commented-out exploratory code
    /// (image-button experiments, dead `// ui.add(img)` calls) and renamed
    /// the unused `frame` parameter to `_frame` to silence the warning.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        egui::CentralPanel::default().show(ctx, |ui| {
            ui.heading("This is an image:");
            ui.image(&self.texture);
        });
    }
}
#[derive(Parser, Debug)]
@ -418,376 +30,3 @@ impl eframe::App for MyApp {
struct Args {
path: Option<PathBuf>,
}
// fn init_app() {
// let mut store = ImageStore::new("./test_images".into());
// let mut imbuf = store.get_current_image().unwrap();
// let width = imbuf.width;
// let height = imbuf.height;
// let mut buffer = imbuf.argb_buffer.clone();
// // Reinterpret to avoid copying
// let buffer_u8 = unsafe {
// Vec::from_raw_parts(
// buffer.as_mut_ptr() as *mut u8,
// buffer.len() * 4,
// buffer.capacity() * 4,
// )
// };
// std::mem::forget(buffer);
// let color_image = ColorImage::from_rgba_unmultiplied([width, height], &buffer_u8);
// let texture = cc
// .egui_ctx
// .load_texture("img", color_image, TextureOptions::LINEAR);
// Ok(Box::new(MyApp::new(store, texture)))
// }
// fn main() {
// let native_options = eframe::NativeOptions {
// viewport: egui::ViewportBuilder::default().with_inner_size((400.0, 400.0)),
// ..eframe::NativeOptions::default()
// };
// eframe::run_native(
// "aaa",
// native_options,
// Box::new(|cc| {
// // Initialize image loaders
// egui_extras::install_image_loaders(&cc.egui_ctx);
// let mut store = ImageStore::new("./test_images".into());
// let mut imbuf = store.get_current_image().unwrap();
// let width = imbuf.width;
// let height = imbuf.height;
// let mut buffer = imbuf.argb_buffer.clone();
// // Reinterpret to avoid copying
// let buffer_u8 = unsafe {
// Vec::from_raw_parts(
// buffer.as_mut_ptr() as *mut u8,
// buffer.len() * 4,
// buffer.capacity() * 4,
// )
// };
// std::mem::forget(buffer);
// let color_image = ColorImage::from_rgba_unmultiplied([width, height], &buffer_u8);
// let texture = cc
// .egui_ctx
// .load_texture("img", color_image, TextureOptions::LINEAR);
// Ok(Box::new(MyApp::new(store, texture)))
// }),
// )
// .unwrap();
// // eframe::run_native(Box::new(MyApp::default()), options);
// let args = Args::parse();
// const WIDTH: usize = 2000;
// const HEIGHT: usize = 1000;
// let mut window = Window::new(
// "Test - ESC to exit",
// WIDTH,
// HEIGHT,
// WindowOptions::default(),
// )
// .unwrap_or_else(|e| {
// panic!("{}", e);
// });
// window.set_target_fps(120);
// let path = args.path.unwrap_or("./test_images".into());
// let mut state = ImageStore::new(path);
// let mut waiting = true;
// window.set_key_repeat_delay(0.1);
// window.set_key_repeat_rate(0.1);
// show_image(&mut window, state.get_thumbnail());
// while window.is_open() && !window.is_key_down(Key::Escape) {
// window.update();
// state.check_loaded_images();
// if window.is_key_pressed(Key::Right, minifb::KeyRepeat::Yes) {
// state.next_image(1);
// if let Some(full) = state.get_current_image() {
// show_image(&mut window, full);
// } else {
// show_image(&mut window, state.get_thumbnail());
// waiting = true;
// }
// } else if window.is_key_pressed(Key::Left, minifb::KeyRepeat::Yes) {
// state.next_image(-1);
// if let Some(full) = state.get_current_image() {
// show_image(&mut window, full);
// } else {
// show_image(&mut window, state.get_thumbnail());
// waiting = true;
// }
// }
// if waiting {
// if let Some(image) = state.get_current_image() {
// waiting = false;
// show_image(&mut window, &image);
// }
// }
// }
// }
// fn show_image(window: &mut Window, image: &ImflowImageBuffer) {
// window
// .update_with_buffer(&image.argb_buffer, image.width, image.height)
// .unwrap();
// }
// struct MainApp {
// is_playing: bool,
// queued_ticks: usize,
// speed: usize,
// next_speed: Option<usize>,
// version: usize,
// image_filter_method: FilterMethod,
// current_image: Option<PathBuf>,
// width: u32,
// available_images: Vec<PathBuf>,
// current_image_id: usize,
// loaded_images: HashMap<PathBuf, iced::widget::image::Handle>,
// }
// #[derive(Debug, Clone)]
// enum Message {
// TogglePlayback,
// ToggleGrid(bool),
// Clear,
// SpeedChanged(f32),
// Tick,
// Next(i32),
// ImageWidthChanged(u32),
// ImageUseNearestToggled(bool),
// }
// impl MainApp {
// fn new() -> Self {
// let mut dir: Vec<PathBuf> = fs::read_dir(Path::new("./test_images"))
// .unwrap()
// .map(|f| f.unwrap().path())
// .collect();
// dir.sort();
// let mut res = Self {
// is_playing: false,
// queued_ticks: 0,
// speed: 5,
// next_speed: None,
// version: 0,
// image_filter_method: FilterMethod::Nearest,
// width: 1400,
// current_image: Some(dir.first().unwrap().clone()),
// available_images: dir,
// current_image_id: 0,
// loaded_images: HashMap::new(),
// };
// let _ = res.update(Message::Next(0));
// res
// }
// fn update(&mut self, message: Message) -> Task<Message> {
// match message {
// Message::Tick => {
// self.queued_ticks = (self.queued_ticks + 1).min(self.speed);
// // if let Some(task) = self.grid.tick(self.queued_ticks) {
// // if let Some(speed) = self.next_speed.take() {
// // self.speed = speed;
// // }
// // self.queued_ticks = 0;
// // let version = self.version;
// // // return Task::perform(task, Message::Grid.with(version));
// // }
// }
// Message::TogglePlayback => {
// self.is_playing = !self.is_playing;
// }
// Message::ToggleGrid(show_grid_lines) => {
// // self.grid.toggle_lines(show_grid_lines);
// }
// Message::Clear => {
// // self.grid.clear();
// self.version += 1;
// }
// Message::SpeedChanged(speed) => {
// if self.is_playing {
// self.next_speed = Some(speed.round() as usize);
// } else {
// self.speed = speed.round() as usize;
// }
// }
// Message::ImageWidthChanged(image_width) => {
// self.width = image_width;
// }
// Message::ImageUseNearestToggled(use_nearest) => {
// self.image_filter_method = if use_nearest {
// FilterMethod::Nearest
// } else {
// FilterMethod::Linear
// };
// }
// Message::Next(change) => {
// let elements = self.available_images.len() as i32;
// let new_id = (self.current_image_id as i32 + change).clamp(0, elements - 1);
// println!(
// "updated id: {} from {} total {}",
// new_id, self.current_image_id, elements
// );
// self.current_image_id = new_id as usize;
// let path = self
// .available_images
// .get(self.current_image_id)
// .unwrap()
// .clone();
// self.current_image = Some(path.clone());
// if !self.loaded_images.contains_key(&path.to_path_buf()) {
// // self.loaded_images.insert(
// // path.to_path_buf(),
// // load_thumbnail(path.to_str().unwrap(), Approach::ImageRs).unwrap(),
// // );
// }
// }
// }
// Task::none()
// }
// fn subscription(&self) -> Subscription<Message> {
// keyboard::on_key_press(|key, _modifiers| match key {
// keyboard::Key::Named(keyboard::key::Named::ArrowRight) => Some(Message::Next(1)),
// keyboard::Key::Named(keyboard::key::Named::ArrowLeft) => Some(Message::Next(-1)),
// _ => None,
// })
// }
// fn view(&self) -> Element<'_, Message> {
// let version = self.version;
// let selected_speed = self.next_speed.unwrap_or(self.speed);
// let controls = view_controls(
// self.is_playing,
// true,
// // self.grid.are_lines_visible(),
// selected_speed,
// // self.grid.preset(),
// );
// let content = column![
// // image("/media/nfs/sphotos/Images/24-08-11-Copenhagen/24-08-12/20240812-175614_DSC03844.JPG").into(),
// // self.grid.view().map(Message::Grid.with(version)),
// self.image(),
// controls,
// ]
// .height(Fill);
// container(content).width(Fill).height(Fill).into()
// // image("/media/nfs/sphotos/Images/24-08-11-Copenhagen/24-08-12/20240812-175614_DSC03844.JPG").into()
// }
// fn image(&self) -> Column<Message> {
// let width = self.width;
// let filter_method = self.image_filter_method;
// Self::container("Image")
// .push("An image that tries to keep its aspect ratio.")
// .push(self.ferris(
// width,
// filter_method,
// self.current_image.as_ref().unwrap().as_ref(),
// ))
// .push(slider(100..=1500, width, Message::ImageWidthChanged))
// .push(text!("Width: {width} px").width(Fill).align_x(Center))
// .push(
// checkbox(
// "Use nearest interpolation",
// filter_method == FilterMethod::Nearest,
// )
// .on_toggle(Message::ImageUseNearestToggled),
// )
// .align_x(Center)
// }
// fn container(title: &str) -> Column<'_, Message> {
// column![text(title).size(50)].spacing(20)
// }
// fn ferris<'a>(
// &self,
// width: u32,
// filter_method: iced::widget::image::FilterMethod,
// path: &Path,
// ) -> Container<'a, Message> {
// if self.loaded_images.get(path).is_none() {
// return center(text("loading"));
// }
// let img = iced::widget::image::Image::new(self.loaded_images.get(path).unwrap());
// center(
// // This should go away once we unify resource loading on native
// // platforms
// img.filter_method(filter_method)
// .width(Length::Fixed(width as f32)),
// )
// }
// }
// impl Default for MainApp {
// fn default() -> Self {
// Self::new()
// }
// }
// fn view_controls<'a>(
// is_playing: bool,
// is_grid_enabled: bool,
// speed: usize,
// // preset: Preset,
// ) -> Element<'a, Message> {
// let playback_controls = row![
// button(if is_playing { "Pause" } else { "Play" }).on_press(Message::TogglePlayback),
// button("Previous")
// .on_press(Message::Next(-1))
// .style(button::secondary),
// button("Next")
// .on_press(Message::Next(1))
// .style(button::secondary),
// ]
// .spacing(10);
// let speed_controls = row![
// slider(1.0..=1000.0, speed as f32, Message::SpeedChanged),
// text!("x{speed}").size(16),
// ]
// .align_y(Center)
// .spacing(10);
// row![
// playback_controls,
// speed_controls,
// // checkbox("Grid", is_grid_enabled).on_toggle(Message::ToggleGrid),
// // row![
// // pick_list(preset::ALL, Some(preset), Message::PresetPicked),
// // button("Clear")
// // .on_press(Message::Clear)
// // .style(button::danger)
// // ]
// // .spacing(10)
// ]
// .padding(10)
// .spacing(20)
// .align_y(Center)
// .into()
// }

View File

@ -1,4 +1,4 @@
use crate::image::load_thumbnail;
use crate::image::{ImageData, load_thumbnail};
use crate::image::{ImflowImageBuffer, load_available_images, load_image};
use rexiv2::Metadata;
use std::collections::HashMap;
@ -12,21 +12,21 @@ const PRELOAD_NEXT_IMAGE_N: usize = 16;
pub struct ImageStore {
pub(crate) current_image_id: usize,
pub(crate) loaded_images: HashMap<PathBuf, ImflowImageBuffer>,
pub(crate) loaded_images_thumbnails: HashMap<PathBuf, ImflowImageBuffer>,
pub(crate) available_images: Vec<PathBuf>,
pub current_image_path: PathBuf,
pub(crate) loaded_images: HashMap<ImageData, ImflowImageBuffer>,
pub(crate) loaded_images_thumbnails: HashMap<ImageData, ImflowImageBuffer>,
pub(crate) available_images: Vec<ImageData>,
pub current_image_path: ImageData,
pub(crate) pool: ThreadPool,
pub(crate) loader_rx: mpsc::Receiver<(PathBuf, ImflowImageBuffer)>,
pub(crate) loader_tx: mpsc::Sender<(PathBuf, ImflowImageBuffer)>,
pub(crate) currently_loading: HashSet<PathBuf>,
pub(crate) loader_rx: mpsc::Receiver<(ImageData, ImflowImageBuffer)>,
pub(crate) loader_tx: mpsc::Sender<(ImageData, ImflowImageBuffer)>,
pub(crate) currently_loading: HashSet<ImageData>,
}
impl ImageStore {
pub fn new(path: PathBuf) -> Self {
let current_image_id: usize = 0;
let mut loaded_images: HashMap<PathBuf, ImflowImageBuffer> = HashMap::new();
let mut loaded_thumbnails: HashMap<PathBuf, ImflowImageBuffer> = HashMap::new();
let mut loaded_images: HashMap<ImageData, ImflowImageBuffer> = HashMap::new();
let mut loaded_thumbnails: HashMap<ImageData, ImflowImageBuffer> = HashMap::new();
let available_images = load_available_images(path);
let new_path = available_images[0].clone();
@ -73,11 +73,12 @@ impl ImageStore {
}
pub fn set_rating(&mut self, rating: i32) {
let meta = Metadata::new_from_path(self.current_image_path.clone());
let meta = Metadata::new_from_path(self.current_image_path.path.clone());
match meta {
Ok(meta) => {
meta.set_tag_numeric("Xmp.xmp.Rating", rating).unwrap();
meta.save_to_file(self.current_image_path.clone()).unwrap();
meta.save_to_file(self.current_image_path.path.clone())
.unwrap();
}
Err(e) => panic!("{:?}", e),
}
@ -117,7 +118,7 @@ impl ImageStore {
}
}
pub fn request_load(&mut self, path: PathBuf) {
pub fn request_load(&mut self, path: ImageData) {
if self.loaded_images.contains_key(&path) || self.currently_loading.contains(&path) {
return;
}
@ -154,7 +155,7 @@ impl ImageStore {
self.loaded_images.get(&self.current_image_path)
}
pub fn get_image(&self, path: &PathBuf) -> Option<&ImflowImageBuffer> {
pub fn get_image(&self, path: &ImageData) -> Option<&ImflowImageBuffer> {
self.loaded_images.get(path)
}