sync: download fullsize images.

This commit is contained in:
2020-02-09 21:26:03 -08:00
parent 991b5a1469
commit 249db6500e
3 changed files with 370 additions and 17 deletions

View File

@@ -1,12 +1,18 @@
use std::collections::HashMap;
use std::error::Error;
use std::fs;
use std::fs::File;
use std::io;
use std::path::PathBuf;
use google_api_auth;
use google_photoslibrary1 as photos;
use hexihasher;
use lazy_static::lazy_static;
use log::{debug, info};
use photos::schemas::{Album, MediaItem, SearchMediaItemsRequest};
use regex::Regex;
use reqwest;
use structopt::StructOpt;
use yup_oauth2::{Authenticator, InstalledFlow};
@@ -125,10 +131,15 @@ impl<'a> Iterator for SearchIter<'a> {
fn print_media_items(media_items: Vec<MediaItem>) {
for mi in &media_items {
let id = mi
.id
.as_ref()
.map_or("NO ID".to_string(), |s| s.to_string());
println!(
"{} {}",
mi.id.as_ref().unwrap_or(&"NO ID".to_string()),
mi.filename.as_ref().unwrap_or(&"NO FILENAME".to_string())
"media item: {}\n\t{}\n\t{}",
mi.filename.as_ref().unwrap_or(&"NO FILENAME".to_string()),
hexihasher::sha256(id.as_bytes()),
id,
);
}
println!("({}) items total", media_items.len());
@@ -136,12 +147,12 @@ fn print_media_items(media_items: Vec<MediaItem>) {
fn search_media_items(
client: &photos::Client,
album_id: String,
album_id: &str,
) -> Result<Vec<MediaItem>, Box<dyn Error>> {
let media_items = SearchIter::new(
&client,
SearchMediaItemsRequest {
album_id: Some(album_id.clone()),
album_id: Some(album_id.to_string()),
// 100 is the documented max.
page_size: Some(100),
..Default::default()
@@ -152,6 +163,17 @@ fn search_media_items(
Ok(media_items)
}
lazy_static! {
    /// Lazily-built lookup table from a media item's MIME type to the
    /// file extension used when writing the downloaded image to disk.
    static ref MIME_TO_EXT: HashMap<&'static str, &'static str> = {
        let mut ext_by_mime = HashMap::new();
        ext_by_mime.insert("image/gif", "gif");
        ext_by_mime.insert("image/heif", "heic");
        ext_by_mime.insert("image/jpeg", "jpg");
        ext_by_mime
    };
}
fn sync_albums(
client: &photos::Client,
title_filter: Option<Regex>,
@@ -159,13 +181,54 @@ fn sync_albums(
) -> Result<(), Box<dyn Error>> {
let albums = list_albums(client, title_filter)?;
for a in &albums {
let album_dir = output_dir.join(a.id.as_ref().expect("missing album id"));
let album_id = a.id.as_ref().expect("unset album id").to_string();
let album_dir = output_dir.join(&album_id);
if !album_dir.exists() {
info!("making album directory {}", album_dir.to_string_lossy());
fs::create_dir_all(&album_dir)?;
}
let album = search_media_items(client, a.id.as_ref().expect("unset album id").to_string())?;
let album = search_media_items(client, &album_id)?;
for (i, mi) in album.iter().enumerate() {
let mi_id = mi.id.as_ref().expect("unset media item id").to_string();
let filename = mi
.filename
.as_ref()
.map_or("NO_FILENAME".to_string(), |s| s.to_string());
// Put images from all albums in common directory.
let image_path = output_dir.join("images").join(&mi_id);
if !image_path.exists() {
fs::create_dir_all(&image_path)?;
}
let image_path = image_path.join(&filename);
if image_path.exists() {
info!(
"Skipping already downloaded {} @ {}",
&filename,
image_path.to_string_lossy()
);
} else {
let download_path = image_path.with_extension("download");
info!(
"({}/{}) Downloading {} -> {}",
i + 1,
&album.len(),
&filename,
download_path.to_string_lossy()
);
let base_url = mi.base_url.as_ref().expect("missing base_url");
let url = format!("{}=d", base_url);
let mut r = reqwest::blocking::get(&url)?;
let mut w = File::create(&download_path)?;
let _n = io::copy(&mut r, &mut w)?;
info!(
"Rename {} -> {}",
download_path.to_string_lossy(),
image_path.to_string_lossy()
);
fs::rename(download_path, &image_path)?;
}
}
let j = serde_json::to_string(&album)?;
let path = album_dir.join("album.json");
info!("saving {}", path.to_string_lossy());
@@ -230,7 +293,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
Ok(())
}
Command::SearchMediaItems { album_id } => {
print_media_items(search_media_items(&client, album_id)?);
print_media_items(search_media_items(&client, &album_id)?);
Ok(())
}
Command::Sync {