477 lines
16 KiB
Rust
477 lines
16 KiB
Rust
use std::fs;
|
|
use std::fs::File;
|
|
use std::io;
|
|
use std::io::BufReader;
|
|
use std::path::Path;
|
|
use std::path::PathBuf;
|
|
use std::sync::Arc;
|
|
|
|
use google_photoslibrary1 as photos;
|
|
use image::imageops;
|
|
use image::DynamicImage;
|
|
use image::GenericImage;
|
|
use image::GenericImageView;
|
|
use image::ImageBuffer;
|
|
use image::ImageFormat;
|
|
use image::ImageResult;
|
|
use jpeg_decoder::Decoder;
|
|
use log::error;
|
|
use log::info;
|
|
use log::warn;
|
|
use photos::schemas::Album;
|
|
use photos::schemas::MediaItem;
|
|
use rocksdb::Direction;
|
|
use rocksdb::IteratorMode;
|
|
use rocksdb::DB;
|
|
|
|
// Schema generation tag baked into every cache key. Bump this whenever the
// cached-value format changes; `Library::clean_db` deletes every entry whose
// key is not prefixed with the current generation.
// Note: `'static` is implied for consts, so the redundant lifetime is dropped.
const LIBRARY_GENERATION: &str = "14";
|
|
|
|
/// On-disk mirror of a Google Photos library: downloaded originals, JSON
/// album metadata, and a RocksDB cache of generated thumbnails.
#[derive(Clone)]
pub struct Library {
    /// Root directory of the library on disk.
    root: PathBuf,
    /// `root/images/originals` — downloaded originals, keyed by media item id.
    originals_dir: PathBuf,
    /// Thumbnail cache opened at `root/cache`; `Arc` keeps `Clone` cheap.
    cache_db: Arc<DB>,
}
|
|
|
|
pub fn load_image<P>(
|
|
path: P,
|
|
width_hint: Option<u32>,
|
|
height_hint: Option<u32>,
|
|
) -> Result<DynamicImage, Box<dyn std::error::Error>>
|
|
where
|
|
P: AsRef<Path>,
|
|
{
|
|
// TODO(wathiede): fall back to image::load_image when jpeg decoding fails.
|
|
let file = File::open(path).expect("failed to open file");
|
|
let mut decoder = Decoder::new(BufReader::new(file));
|
|
let (w, h) = match (width_hint, height_hint) {
|
|
(Some(w), Some(h)) => {
|
|
let got = decoder.scale(w as u16, h as u16)?;
|
|
//info!("Hinted at {}x{}, got {}x{}", w, h, got.0, got.1);
|
|
(got.0 as u32, got.1 as u32)
|
|
}
|
|
// TODO(wathiede): handle partial hints by grabbing info and then computing the absent
|
|
// dimenison.
|
|
_ => {
|
|
decoder.read_info()?;
|
|
let info = decoder.info().unwrap();
|
|
(info.width as u32, info.height as u32)
|
|
}
|
|
};
|
|
let pixels = decoder.decode().expect("failed to decode image");
|
|
Ok(DynamicImage::ImageRgb8(
|
|
ImageBuffer::from_raw(w, h, pixels).expect("pixels to small for given dimensions"),
|
|
))
|
|
}
|
|
|
|
/// Selects the resampling strategy used by `resize` / `resize_to_fill`.
#[derive(Clone, Copy, Debug)]
pub enum FilterType {
    /// One of the `image` crate's built-in resampling filters.
    Builtin(imageops::FilterType),
    /// Hand-rolled nearest-neighbor sampling (`resize_to_fill_nearest`);
    /// note it is only implemented for the fill path.
    Nearest,
}
|
|
|
|
/// fill_size computes the largest rectangle that fits in src with the aspect ratio of dst.
///
/// Both callers crop this rectangle out of `src` before scaling it down to
/// exactly `dst`, so the result always has `dst`'s aspect ratio.
fn fill_size(src: (u32, u32), dst: (u32, u32)) -> (u32, u32) {
    debug_assert!(src.0 >= dst.0);
    debug_assert!(src.1 >= dst.1);
    let x_scale = src.0 as f32 / dst.0 as f32;
    let y_scale = src.1 as f32 / dst.1 as f32;
    // The smaller of the two scale factors is the largest uniform scale at
    // which dst's aspect ratio still fits inside src: the tight axis fills,
    // the other axis will be cropped by the caller.
    let scale = if x_scale > y_scale { y_scale } else { x_scale };
    ((dst.0 as f32 * scale) as u32, (dst.1 as f32 * scale) as u32)
}
|
|
|
|
fn resize_to_fill_nearest(w: u32, h: u32, img: &DynamicImage) -> DynamicImage {
|
|
let mut dst = DynamicImage::new_rgb8(w, h);
|
|
let (src_w, src_h) = img.dimensions();
|
|
let (crop_w, crop_h) = fill_size((src_w, src_h), (w, h));
|
|
let off_x = (src_w - crop_w) / 2;
|
|
let off_y = (src_h - crop_h) / 2;
|
|
let src = img.view(off_x, off_y, crop_w, crop_h);
|
|
let x_scale = crop_w as f32 / w as f32;
|
|
let y_scale = crop_h as f32 / h as f32;
|
|
|
|
for y in 0..h {
|
|
for x in 0..w {
|
|
let x_idx = (x as f32 * x_scale).round() as u32;
|
|
let y_idx = (y as f32 * y_scale).round() as u32;
|
|
dst.put_pixel(x, y, src.get_pixel(x_idx, y_idx))
|
|
}
|
|
}
|
|
dst
|
|
}
|
|
|
|
pub fn resize(
|
|
img: &DynamicImage,
|
|
dimensions: (Option<u32>, Option<u32>),
|
|
filter: FilterType,
|
|
) -> DynamicImage {
|
|
let (w, h) = dimensions;
|
|
let (orig_w, orig_h) = img.dimensions();
|
|
let (w, h) = match (w, h) {
|
|
(Some(w), Some(h)) => (w, h),
|
|
(Some(w), None) => (w, orig_h * w / orig_w),
|
|
(None, Some(h)) => (orig_w * h / orig_h, h),
|
|
(None, None) => (orig_w, orig_h),
|
|
};
|
|
match filter {
|
|
FilterType::Builtin(filter) => img.resize(w, h, filter),
|
|
FilterType::Nearest => unimplemented!(), //resize_to_fill_nearest(w, h, img),
|
|
}
|
|
}
|
|
|
|
pub fn resize_to_fill(
|
|
img: &DynamicImage,
|
|
dimensions: (Option<u32>, Option<u32>),
|
|
filter: FilterType,
|
|
) -> DynamicImage {
|
|
let (w, h) = dimensions;
|
|
let (orig_w, orig_h) = img.dimensions();
|
|
let (w, h) = match (w, h) {
|
|
(Some(w), Some(h)) => (w, h),
|
|
(Some(w), None) => (w, orig_h * w / orig_w),
|
|
(None, Some(h)) => (orig_w * h / orig_h, h),
|
|
(None, None) => (orig_w, orig_h),
|
|
};
|
|
match filter {
|
|
FilterType::Builtin(filter) => img.resize_to_fill(w, h, filter),
|
|
FilterType::Nearest => resize_to_fill_nearest(w, h, img),
|
|
}
|
|
}
|
|
|
|
/// Encodes `img` as JPEG and returns the encoded bytes.
///
/// # Errors
/// Propagates any error from the JPEG encoder.
pub fn save_to_jpeg_bytes(img: &DynamicImage) -> ImageResult<Vec<u8>> {
    let mut buf = Vec::new();
    img.write_to(&mut buf, ImageFormat::Jpeg)?;
    Ok(buf)
}
|
|
|
|
impl Library {
    /// Opens (or creates) a photo library rooted at `root`.
    ///
    /// Opens the RocksDB cache at `root/cache`, purges cache entries written
    /// under older schema generations, and creates the originals directory
    /// if it does not exist yet.
    pub fn new(root: PathBuf) -> Result<Library, Box<dyn std::error::Error>> {
        let db = DB::open_default(root.join("cache"))?;
        let cache_db = Arc::new(db);
        let lib = Library {
            originals_dir: root.join("images").join("originals"),
            cache_db,
            root,
        };
        // Drop cache entries from any generation other than LIBRARY_GENERATION.
        let cnt = lib.clean_db()?;
        if cnt != 0 {
            info!("Deleted {} entries", cnt);
        }
        if !lib.originals_dir.exists() {
            info!(
                "create originals dir {}",
                &lib.originals_dir.to_string_lossy()
            );
            fs::create_dir_all(&lib.originals_dir)?;
        }
        Ok(lib)
    }
    // Removes all data in the database from older schema.
    pub fn clean_db(&self) -> Result<usize, rocksdb::Error> {
        Library::gc(LIBRARY_GENERATION, &self.cache_db)
    }
    /// Deletes every key in `db` that is not prefixed with `generation` + "/",
    /// returning the number of keys removed.
    fn gc(generation: &str, db: &DB) -> Result<usize, rocksdb::Error> {
        let gen = format!("{}/", generation);
        // '0' is the next character after '/', so iterator's starting there would be after the
        // last `gen` entry.
        let next_gen = format!("{}0", generation);
        let mut del_cnt = 0;
        // Two passes: walk backwards from the generation prefix to remove keys
        // that sort before it, then forwards from just past the prefix to
        // remove keys that sort after it. Keys carrying the current prefix are
        // skipped by the starts_with check in both passes.
        for (k, _v) in db.iterator(IteratorMode::From(gen.as_bytes(), Direction::Reverse)) {
            if !k.starts_with(gen.as_bytes()) {
                info!("deleting stale key: {}", String::from_utf8_lossy(&k));
                db.delete(k)?;
                del_cnt += 1;
            }
        }
        for (k, _v) in db.iterator(IteratorMode::From(next_gen.as_bytes(), Direction::Forward)) {
            if !k.starts_with(gen.as_bytes()) {
                info!("deleting stale key: {}", String::from_utf8_lossy(&k));
                db.delete(k)?;
                del_cnt += 1;
            }
        }
        Ok(del_cnt)
    }
    /// Serializes `albums` as JSON to `root/albums.json` (read back by
    /// [`Library::albums`]).
    pub fn create_album_index(&self, albums: &Vec<Album>) -> io::Result<()> {
        // Serialize it to a JSON string.
        let j = serde_json::to_string(albums)?;

        let path = self.root.join("albums.json");
        info!("saving {}", path.to_string_lossy());
        fs::write(path, j)
    }
    /// Serializes `media_items` as JSON to `root/<album_id>/album.json`,
    /// creating the album directory if needed (read back by [`Library::album`]).
    pub fn create_album<P: AsRef<Path>>(
        &self,
        album_id: P,
        media_items: &Vec<MediaItem>,
    ) -> io::Result<()> {
        let album_dir = self.root.join(album_id);
        if !album_dir.exists() {
            info!("making album directory {}", album_dir.to_string_lossy());
            fs::create_dir_all(&album_dir)?;
        }
        let j = serde_json::to_string(&media_items)?;
        let path = album_dir.join("album.json");
        info!("saving {}", path.to_string_lossy());
        fs::write(path, j)
    }
    /// Loads the album index previously written by `create_album_index`.
    pub fn albums(&self) -> Result<Vec<Album>, Box<dyn std::error::Error>> {
        let albums_path = self.root.join("albums.json");
        info!("loading {}", albums_path.to_string_lossy());
        let bytes = fs::read(albums_path)?;
        Ok(serde_json::from_slice(&bytes)?)
    }
    /// Loads the media-item list for one album, written by `create_album`.
    pub fn album(&self, album_id: &str) -> Result<Vec<MediaItem>, Box<dyn std::error::Error>> {
        let album_path = self.root.join(album_id).join("album.json");
        let bytes = fs::read(album_path)?;
        Ok(serde_json::from_slice(&bytes)?)
    }
    /// Downloads the original for `media_items_id` from `base_url` into the
    /// shared originals directory, skipping the fetch if the file already
    /// exists. Writes to a temporary `.download` path and renames into place
    /// so an interrupted download never leaves a partial file at the final
    /// path. Returns the path to the original either way.
    pub fn download_image(
        &self,
        filename: &str,
        media_items_id: &str,
        base_url: &str,
    ) -> Result<PathBuf, Box<dyn std::error::Error>> {
        // Put images from all albums in common directory.
        let image_path = self.originals_dir.join(media_items_id);
        if image_path.exists() {
            info!(
                "Skipping already downloaded {} @ {}",
                &filename,
                image_path.to_string_lossy()
            );
        } else {
            let download_path = image_path.with_extension("download");
            // NOTE(review): "=d" appears to be the Google Photos baseUrl
            // suffix requesting the full download variant — confirm against
            // the Photos Library API docs.
            let url = format!("{}=d", base_url);
            let mut r = reqwest::blocking::get(&url)?;
            let mut w = File::create(&download_path)?;
            info!("Downloading {}", &url);
            let _n = io::copy(&mut r, &mut w)?;
            info!(
                "Rename {} -> {}",
                download_path.to_string_lossy(),
                image_path.to_string_lossy()
            );
            fs::rename(download_path, &image_path)?;
        }
        Ok(image_path)
    }
    /// Returns the path of the downloaded original for `media_items_id`, or
    /// `None` if it has not been downloaded yet.
    pub fn original(&self, media_items_id: &str) -> Option<PathBuf> {
        let path = self.originals_dir.join(media_items_id);
        if path.exists() {
            Some(path)
        } else {
            None
        }
    }
    // TODO(wathiede): make this a macro like format! to skip the second string create and copy.
    /// Prefixes `key` with `generation` + "/" to form a cache-DB key.
    fn generational_key(generation: &str, key: &str) -> String {
        format!("{}/{}", generation, key)
    }

    /// Decodes the original for `media_items_id`, resizes it (cropping to
    /// fill when `fill` is set, fitting within otherwise), and returns the
    /// result encoded as JPEG bytes.
    ///
    /// # Errors
    /// `io::ErrorKind::NotFound` when no original exists; otherwise any
    /// decode, resize, or encode error.
    pub fn generate_thumbnail(
        &self,
        media_items_id: &str,
        dimensions: (Option<u32>, Option<u32>),
        filter: FilterType,
        fill: bool,
    ) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
        match self.original(&media_items_id) {
            None => {
                warn!("Couldn't find original {}", &media_items_id);
                Err(io::Error::new(io::ErrorKind::NotFound, format!("{}", media_items_id)).into())
            }
            Some(path) => {
                // Pass the target dimensions as hints so the JPEG decoder can
                // downscale during decode rather than decoding at full size.
                let orig_img = load_image(&path, dimensions.0, dimensions.1)?;
                //.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
                let img = if fill {
                    resize_to_fill(&orig_img, dimensions, filter)
                } else {
                    resize(&orig_img, dimensions, filter)
                };
                let buf = save_to_jpeg_bytes(&img)
                    .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
                Ok(buf)
            }
        }
    }
    /// Returns JPEG thumbnail bytes for `media_items_id`, consulting the
    /// RocksDB cache first and generating + caching on a miss.
    ///
    /// All failures (missing original, decode/encode error, DB error) are
    /// logged and collapsed into `None` rather than propagated.
    pub fn thumbnail(
        &self,
        media_items_id: &str,
        dimensions: (Option<u32>, Option<u32>),
        fill: bool,
    ) -> Option<Vec<u8>> {
        // Cache key encodes the schema generation, media item id, and the
        // requested dimensions so each size is cached independently.
        fn cache_key(media_items_id: &str, dimensions: (Option<u32>, Option<u32>)) -> String {
            let dim = match dimensions {
                (Some(w), Some(h)) => format!("-w={}-h={}", w, h),
                (Some(w), None) => format!("-w={}", w),
                (None, Some(h)) => format!("-h={}", h),
                (None, None) => "".to_string(),
            };
            Library::generational_key(LIBRARY_GENERATION, &format!("{}{}", media_items_id, dim))
        }
        let key = cache_key(media_items_id, dimensions);
        let db = self.cache_db.clone();
        match db.get(key.as_bytes()) {
            // Cache hit, return bytes as-is.
            Ok(Some(bytes)) => Some(bytes),
            // Cache miss, fill cache and return.
            Ok(None) => {
                info!("cache MISS {}", key);
                let bytes = match self.generate_thumbnail(
                    media_items_id,
                    dimensions,
                    FilterType::Builtin(imageops::FilterType::Lanczos3),
                    fill,
                ) {
                    Ok(bytes) => bytes,
                    Err(e) => {
                        error!("Failed to generate thumbnail for {}: {}", media_items_id, e);
                        return None;
                    }
                };
                match db.put(key.as_bytes(), &bytes) {
                    Ok(_) => Some(bytes),
                    Err(e) => {
                        error!("Failed to put bytes to {}: {}", key, e);
                        None
                    }
                }
            }
            // RocksDB error.
            Err(e) => {
                error!("Failed to search DB for {}: {}", key, e);
                None
            }
        }
    }
}
|
|
|
|
#[cfg(test)]
mod test {
    use super::*;
    use tempdir::TempDir;

    /// Computes per-channel MSE (and PSNR) between two images.
    ///
    /// Primarily a debugging aid: the hard assertion is left commented out
    /// below so differing filters don't fail the build; uncomment to inspect.
    fn compare_images(lhs: DynamicImage, rhs: DynamicImage) {
        let lhs = lhs.to_rgb();
        let rhs = rhs.to_rgb();
        // Based on https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio#Definition
        let mut mse: [i64; 3] = [0, 0, 0];
        for (l, r) in lhs.pixels().zip(rhs.pixels()) {
            let image::Rgb(l_pix) = l;
            let image::Rgb(r_pix) = r;
            for i in 0..3 {
                let d = l_pix[i] as i64 - r_pix[i] as i64;
                mse[i] += d * d;
            }
        }
        let (w, h) = lhs.dimensions();
        let mn = (w * h) as i64;
        mse.iter_mut().for_each(|i| *i = *i / mn);
        // Underscore-prefixed: only consumed by the commented-out assert below.
        let _psnr: Vec<_> = mse
            .iter()
            .map(|i| 20. * 255_f32.log10() - 10. * (*i as f32).log10())
            .collect();
        // Uncomment to explore differences
        /*
        lhs.save("/tmp/lhs.png").expect("failed to write lhs.png");
        rhs.save("/tmp/rhs.png").expect("failed to write rhs.png");
        assert!(false, "MSE {:?} PSNR {:?} dB", mse, _psnr);
        */
    }

    #[test]
    fn fill_sizes() {
        let srcs = vec![(400, 300), (300, 400)];
        let dsts = vec![(225, 300), (300, 225), (100, 100)];
        // Expected results for each (src, dst) pair, in iteration order.
        let want = vec![
            (225, 300),
            (400, 300),
            (300, 300),
            (300, 400),
            (300, 225),
            (300, 300),
        ];
        let mut i = 0;
        for s in &srcs {
            for d in &dsts {
                let w = want[i];
                let got = fill_size(*s, *d);
                assert_eq!(
                    got, w,
                    "{}. src {:?} dst {:?} want {:?} got {:?}",
                    i, s, d, w, got
                );
                i += 1;
            }
        }
    }

    #[test]
    fn resize_to_fill_nearest() {
        let w = 256;
        let h = 256;
        const TEST_IMAGE_PATH: &str = "testdata/image.jpg";
        // Fixed: load_image takes optional dimension hints; none are needed here.
        let img = load_image(TEST_IMAGE_PATH, None, None).expect("failed to load test image");
        // Fixed: both calls must use resize_to_fill — resize() panics with
        // unimplemented!() for FilterType::Nearest, and this test compares the
        // hand-rolled nearest-neighbor *fill* against the builtin filter.
        let reference = resize_to_fill(
            &img,
            (Some(w), Some(h)),
            FilterType::Builtin(imageops::FilterType::Nearest),
        );
        let got = resize_to_fill(&img, (Some(w), Some(h)), FilterType::Nearest);
        compare_images(reference, got);
    }

    #[test]
    fn clean_db() {
        let td = TempDir::new("photosync_test").expect("failed to create temporary directory");
        eprintln!("creating database in {}", td.path().to_string_lossy());
        let db = DB::open_default(td.path()).expect("failed to open DB");
        let keys = vec!["one", "two", "three"];

        // Collects all keys currently in the DB in iteration (sorted) order.
        fn get_keys(db: &DB) -> Vec<String> {
            db.iterator(rocksdb::IteratorMode::Start)
                .map(|(k, _v)| String::from_utf8(k.to_vec()).expect("key not utf-8"))
                .collect()
        }
        // Write every key under three different generations.
        for k in &keys {
            for g in vec!["1", "2", "3"] {
                db.put(Library::generational_key(g, k), k)
                    .expect("failed to put");
            }
        }

        assert_eq!(
            get_keys(&db),
            vec![
                "1/one", "1/three", "1/two", "2/one", "2/three", "2/two", "3/one", "3/three",
                "3/two"
            ]
        );
        // GC for generation "2" must remove both older ("1/") and newer ("3/") keys.
        Library::gc("2", &db).expect("failed to GC DB");
        assert_eq!(get_keys(&db), vec!["2/one", "2/three", "2/two",]);
    }
}
|