// photosync/src/library.rs
use std::fs;
use std::io;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;

use cacher::Cacher;
use google_photoslibrary1 as photos;
use image::imageops;
use imageutils::{load_image, resize, resize_to_fill, save_to_jpeg_bytes, FilterType};
use log::error;
use log::info;
use log::warn;
use photos::schemas::Album;
use photos::schemas::MediaItem;
use rocksdb::Direction;
use rocksdb::IteratorMode;
use rocksdb::DB;

// Used to ensure the cache DB is invalidated after schema changes; bump this
// string whenever the key layout or stored value format changes.
const LIBRARY_GENERATION: &str = "14";
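
/// A local mirror of a Google Photos library.
///
/// Album metadata and downloaded originals live on disk under `root`, while
/// generated thumbnails are cached in a RocksDB database whose keys are
/// prefixed with the schema generation. An additional `Cacher` backend is
/// consulted before re-downloading originals or re-generating thumbnails.
///
/// A minimal usage sketch; `MyCache` stands in for whatever `Cacher`
/// implementation the caller supplies, and the paths are illustrative:
///
/// ```ignore
/// let cache: Arc<Mutex<Box<dyn Cacher>>> = Arc::new(Mutex::new(Box::new(MyCache::new())));
/// let lib = Library::new(PathBuf::from("/data/photosync"), cache)?;
/// if let Some(jpeg) = lib.thumbnail("some-media-item-id", (Some(320), None), false) {
///     fs::write("thumb.jpg", jpeg)?;
/// }
/// ```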
#[derive(Clone)]
pub struct Library {
    root: PathBuf,
    originals_dir: PathBuf,
    cache_db: Arc<DB>,
    image_cache: Arc<Mutex<Box<dyn Cacher>>>,
}

impl Library {
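    /// Opens (or creates) the library rooted at `root`.
    ///
    /// This opens the RocksDB thumbnail cache at `root/cache`, garbage-collects
    /// entries left over from older schema generations, and creates the
    /// `root/images/originals` directory if it does not yet exist.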
    pub fn new(
        root: PathBuf,
        image_cache: Arc<Mutex<Box<dyn Cacher>>>,
    ) -> Result<Library, Box<dyn std::error::Error>> {
        let db = DB::open_default(root.join("cache"))?;
        let cache_db = Arc::new(db);
        let lib = Library {
            originals_dir: root.join("images").join("originals"),
            cache_db,
            root,
            image_cache,
        };
        let cnt = lib.clean_db()?;
        if cnt != 0 {
            info!("Deleted {} entries", cnt);
        }
        if !lib.originals_dir.exists() {
            info!(
                "create originals dir {}",
                &lib.originals_dir.to_string_lossy()
            );
            fs::create_dir_all(&lib.originals_dir)?;
        }
        Ok(lib)
    }

    /// Removes all entries in the cache database that were written under an
    /// older schema generation.
    pub fn clean_db(&self) -> Result<usize, rocksdb::Error> {
        Library::gc(LIBRARY_GENERATION, &self.cache_db)
    }
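
    /// Deletes every key in `db` that does not belong to `generation`.
    ///
    /// Keys are stored as `"<generation>/<key>"`, so stale entries are found by
    /// iterating backwards from `"<generation>/"` and forwards from the first
    /// key past the generation's range.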
    fn gc(generation: &str, db: &DB) -> Result<usize, rocksdb::Error> {
        let gen = format!("{}/", generation);
        // '0' is the next character after '/', so an iterator starting there begins just past
        // the last `gen` entry.
        let next_gen = format!("{}0", generation);
        let mut del_cnt = 0;
        for (k, _v) in db.iterator(IteratorMode::From(gen.as_bytes(), Direction::Reverse)) {
            if !k.starts_with(gen.as_bytes()) {
                info!("deleting stale key: {}", String::from_utf8_lossy(&k));
                db.delete(k)?;
                del_cnt += 1;
            }
        }
        for (k, _v) in db.iterator(IteratorMode::From(next_gen.as_bytes(), Direction::Forward)) {
            if !k.starts_with(gen.as_bytes()) {
                info!("deleting stale key: {}", String::from_utf8_lossy(&k));
                db.delete(k)?;
                del_cnt += 1;
            }
        }
        Ok(del_cnt)
    }
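
    /// Writes the album list to `albums.json` in the library root.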
    pub fn create_album_index(&self, albums: &Vec<Album>) -> io::Result<()> {
        // Serialize it to a JSON string.
        let j = serde_json::to_string(albums)?;
        let path = self.root.join("albums.json");
        info!("saving {}", path.to_string_lossy());
        fs::write(path, j)
    }
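
    /// Creates a directory for `album_id` (if needed) and writes its media
    /// items to `album.json` inside it.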
    pub fn create_album<P: AsRef<Path>>(
        &self,
        album_id: P,
        media_items: &Vec<MediaItem>,
    ) -> io::Result<()> {
        let album_dir = self.root.join(album_id);
        if !album_dir.exists() {
            info!("making album directory {}", album_dir.to_string_lossy());
            fs::create_dir_all(&album_dir)?;
        }
        let j = serde_json::to_string(&media_items)?;
        let path = album_dir.join("album.json");
        info!("saving {}", path.to_string_lossy());
        fs::write(path, j)
    }
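
    /// Loads the album index previously written by `create_album_index`.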
    pub fn albums(&self) -> Result<Vec<Album>, Box<dyn std::error::Error>> {
        let albums_path = self.root.join("albums.json");
        info!("loading {}", albums_path.to_string_lossy());
        let bytes = fs::read(albums_path)?;
        Ok(serde_json::from_slice(&bytes)?)
    }
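
    /// Loads the media items for `album_id` from its `album.json` file.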
    pub fn album(&self, album_id: &str) -> Result<Vec<MediaItem>, Box<dyn std::error::Error>> {
        let album_path = self.root.join(album_id).join("album.json");
        let bytes = fs::read(album_path)?;
        Ok(serde_json::from_slice(&bytes)?)
    }
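
    /// Ensures the original bytes for `media_items_id` exist on disk and
    /// returns the path to them.
    ///
    /// The image cache is consulted first; on a miss the original is fetched
    /// from `base_url` (with `=d` appended to request a full-quality download)
    /// and staged in a temporary `.download` file before being renamed into
    /// place.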
    pub fn download_image(
        &self,
        filename: &str,
        media_items_id: &str,
        base_url: &str,
    ) -> Result<PathBuf, Box<dyn std::error::Error>> {
        // Put images from all albums in a common directory.
        let image_path = self.originals_dir.join(media_items_id);
        if image_path.exists() {
            info!(
                "Skipping already downloaded {} @ {}",
                &filename,
                image_path.to_string_lossy()
            );
        } else {
            let download_path = image_path.with_extension("download");
            let c = Arc::clone(&self.image_cache);
            let mut c = c.lock().unwrap();
            match c.get(media_items_id) {
                Some(bytes) => {
                    info!(
                        "saving local copy of original from cache {}",
                        media_items_id
                    );
                    fs::write(&download_path, bytes)?;
                }
                None => {
                    let url = format!("{}=d", base_url);
                    let mut r = reqwest::blocking::get(&url)?;
                    let mut buf = Vec::new();
                    info!("Downloading {}", &url);
                    r.read_to_end(&mut buf)?;
                    fs::write(&download_path, &buf)?;
                    c.set(media_items_id, &buf);
                }
            };
            info!(
                "Rename {} -> {}",
                download_path.to_string_lossy(),
                image_path.to_string_lossy()
            );
            fs::rename(download_path, &image_path)?;
        }
        Ok(image_path)
    }
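
    /// Returns the path to the downloaded original for `media_items_id`, or
    /// `None` if it has not been downloaded yet.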
    pub fn original(&self, media_items_id: &str) -> Option<PathBuf> {
        let path = self.originals_dir.join(media_items_id);
        if path.exists() {
            Some(path)
        } else {
            None
        }
    }

    // TODO(wathiede): make this a macro like format! to skip the second string create and copy.
    fn generational_key(generation: &str, key: &str) -> String {
        format!("{}/{}", generation, key)
    }
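
    /// Resizes the downloaded original for `media_items_id` to `dimensions`
    /// and encodes the result as JPEG bytes.
    ///
    /// When `fill` is true the image is resized with `resize_to_fill`,
    /// otherwise with `resize`.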
    pub fn generate_thumbnail(
        &self,
        media_items_id: &str,
        dimensions: (Option<u32>, Option<u32>),
        filter: FilterType,
        fill: bool,
    ) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
        match self.original(&media_items_id) {
            None => {
                warn!("Couldn't find original {}", &media_items_id);
                Err(io::Error::new(io::ErrorKind::NotFound, format!("{}", media_items_id)).into())
            }
            Some(path) => {
                let orig_img = load_image(&path, dimensions.0, dimensions.1)?;
                //.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
                let img = if fill {
                    resize_to_fill(&orig_img, dimensions, filter)
                } else {
                    resize(&orig_img, dimensions, filter)
                };
                let buf = save_to_jpeg_bytes(&img)
                    .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
                Ok(buf)
            }
        }
    }
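
    /// Returns JPEG thumbnail bytes for `media_items_id` at `dimensions`,
    /// generating and caching them on first use.
    ///
    /// Lookups hit the RocksDB cache first, then the image cache, and finally
    /// fall back to `generate_thumbnail`; newly produced bytes are written back
    /// to the RocksDB cache. Errors are logged and surface as `None`.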
    pub fn thumbnail(
        &self,
        media_items_id: &str,
        dimensions: (Option<u32>, Option<u32>),
        fill: bool,
    ) -> Option<Vec<u8>> {
        fn cache_key(media_items_id: &str, dimensions: (Option<u32>, Option<u32>)) -> String {
            let dim = match dimensions {
                (Some(w), Some(h)) => format!("-w={}-h={}", w, h),
                (Some(w), None) => format!("-w={}", w),
                (None, Some(h)) => format!("-h={}", h),
                (None, None) => "".to_string(),
            };
            Library::generational_key(LIBRARY_GENERATION, &format!("{}{}", media_items_id, dim))
        }
        let key = cache_key(media_items_id, dimensions);
        let db = self.cache_db.clone();
        match db.get(key.as_bytes()) {
            // Cache hit, return bytes as-is.
            Ok(Some(bytes)) => Some(bytes),
            // Cache miss, fill cache and return.
            Ok(None) => {
                // TODO(wathiede): use cache for thumbnail like download_image does.
                let c = Arc::clone(&self.image_cache);
                let mut c = c.lock().unwrap();
                let bytes = match c.get(&key) {
                    Some(bytes) => {
                        info!(
                            "saving local copy of thumbnail from cache {}",
                            media_items_id
                        );
                        bytes
                    }
                    None => {
                        info!("cache MISS {}", key);
                        let bytes = match self.generate_thumbnail(
                            media_items_id,
                            dimensions,
                            FilterType::Builtin(imageops::FilterType::Lanczos3),
                            fill,
                        ) {
                            Ok(bytes) => bytes,
                            Err(e) => {
                                error!(
                                    "Failed to generate thumbnail for {}: {}",
                                    media_items_id, e
                                );
                                return None;
                            }
                        };
                        c.set(&key, &bytes);
                        bytes
                    }
                };
                match db.put(key.as_bytes(), &bytes) {
                    Ok(_) => Some(bytes),
                    Err(e) => {
                        error!("Failed to put bytes to {}: {}", key, e);
                        None
                    }
                }
            }
            // RocksDB error.
            Err(e) => {
                error!("Failed to search DB for {}: {}", key, e);
                None
            }
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use tempdir::TempDir;

    #[test]
    fn clean_db() {
        let td = TempDir::new("photosync_test").expect("failed to create temporary directory");
        eprintln!("creating database in {}", td.path().to_string_lossy());
        let db = DB::open_default(td.path()).expect("failed to open DB");
        let keys = vec!["one", "two", "three"];

        fn get_keys(db: &DB) -> Vec<String> {
            db.iterator(rocksdb::IteratorMode::Start)
                .map(|(k, _v)| String::from_utf8(k.to_vec()).expect("key not utf-8"))
                .collect()
        }

        for k in &keys {
            for g in vec!["1", "2", "3"] {
                db.put(Library::generational_key(g, k), k)
                    .expect("failed to put");
            }
        }
        assert_eq!(
            get_keys(&db),
            vec![
                "1/one", "1/three", "1/two", "2/one", "2/three", "2/two", "3/one", "3/three",
                "3/two"
            ]
        );
        Library::gc("2", &db).expect("failed to GC DB");
        assert_eq!(get_keys(&db), vec!["2/one", "2/three", "2/two"]);
    }
}