Compare commits

...

35 Commits

Author SHA1 Message Date
8633e9a6cc More cacheable Dockerfile. 2020-06-24 21:11:40 -07:00
f8d0da1af4 Latest lock. 2020-06-24 21:06:10 -07:00
625af91c20 Pin google_api_auth to known working version. 2020-06-23 21:15:10 -07:00
7cc0af4f07 Use supported nodejs and nightly rust compiler (required for rocket). 2020-06-22 21:12:42 -07:00
7d3b38af12 Version bumps. 2020-06-22 21:12:16 -07:00
06f82cc160 Remove unused params. 2020-06-22 21:12:07 -07:00
1bca4c3642 Rewrite to use s3 instead of local files and rocksdb. 2020-06-22 20:48:15 -07:00
337f5dfd49 Remove unnecessary Rocket.toml 2020-06-22 20:19:07 -07:00
e7b29509e5 Use cache for thumbnail too. 2020-06-20 22:40:08 -07:00
58064a6309 Use cacher for downloading fullsize images. 2020-06-20 22:25:29 -07:00
3402d7bcf4 Clean lint. 2020-06-20 20:08:37 -07:00
009dd1ff19 Remove deprecated file. 2020-06-20 20:05:35 -07:00
639a1c7b3a Set default handler to high rank to prevent collision. 2020-06-20 17:08:26 -07:00
76594dc0c1 Rocket based webserver now has parity with warp version. 2020-06-20 14:36:00 -07:00
80ef93f20f Start of rewrite from warp->rocket. 2020-06-20 12:50:22 -07:00
a19874fe47 Attempt to create a cacher but I can't make it work with warp. 2020-06-19 20:37:51 -07:00
4fc0bc9d01 react-slideshow: cleaner looking front page. 2020-06-17 18:11:58 -07:00
7e2cf1b956 Move image helpers to xinu.tv/imageutils 2020-04-01 18:21:22 -07:00
b61e65bd83 Pair-up portraits when in landscape mode. 2020-03-14 22:02:35 -07:00
0799708109 Fix build. 2020-03-14 20:29:15 -07:00
24240c5f68 Use day of month instead of day of week when seeding shuffle. 2020-03-14 16:30:44 -07:00
f683acf0ae Build react-slideshow when building photosync into a docker image. 2020-03-14 15:08:36 -07:00
f14dbff066 Prettier /embedz, now with sizes. 2020-03-14 15:07:58 -07:00
ba304c85c3 Remove unused logos. 2020-03-14 14:29:05 -07:00
bd3aac5bc0 Add /embedz handler to see what files are in binary. 2020-03-14 14:26:12 -07:00
737b290cc0 Docker build for slideshow app. 2020-03-14 13:37:02 -07:00
2ee2a98c7d Set production slide time to 1 minute (matches Google hub). 2020-03-14 13:36:25 -07:00
a96fe1da9d Implement ServeAndSync. 2020-03-10 18:38:45 -07:00
89037b6b24 Prefetch images for next slide. 2020-03-08 11:56:07 -07:00
9e4fdf7644 Fix UI after refactor. 2020-02-26 21:23:32 -08:00
e3182d4cf2 Refactor image handling in preparation of multi-image slides. 2020-02-26 21:17:38 -08:00
9f9c3cc00c Version bump target so newer features can be used. This allows usage of #private members. 2020-02-26 21:17:00 -08:00
0dc3c5edef Cleanup page metadata. 2020-02-26 14:19:40 -08:00
62ae230f70 Remove TODOs, and set favicon. 2020-02-26 13:51:30 -08:00
49695dd393 Preserve aspect ratio in debug thumbnails. 2020-02-26 09:21:07 -08:00
24 changed files with 2962 additions and 3483 deletions

.dockerignore (new file, +4)

@@ -0,0 +1,4 @@
Dockerfile
target
*/node_modules
*/yarn.lock

Cargo.lock (generated, 1305 lines): diff suppressed because it is too large.

Cargo.toml

@@ -9,7 +9,7 @@ edition = "2018"
 [dependencies]
 # TODO, use https://git.z.xinu.tv/wathiede/google-api-photoslibrary and figure out auth story.
 google-photoslibrary1 = { git = "https://git.z.xinu.tv/wathiede/google-api-photoslibrary" }
-google_api_auth = { git = "https://github.com/google-apis-rs/generator", features = ["with-yup-oauth2"] }
+google_api_auth = { git = "https://github.com/google-apis-rs/generator", rev="7504e31", features = ["with-yup-oauth2"] }
 hexihasher = { git = "https://git.z.xinu.tv/wathiede/hexihasher" }
 lazy_static = "1.4.0"
 log = "0.4.8"
@@ -19,13 +19,17 @@ serde_json = "1.0.46"
 stderrlog = "0.4.3"
 structopt = "0.3.9"
 yup-oauth2 = "^3.1"
-warp = "0.1"
 serde = { version = "1.0.104", features = ["derive"] }
-image = { version = "0.23.0" } #, default-features = false, features = ["jpeg"] }
+image = { version = "0.23.2" } #, default-features = false, features = ["jpeg"] }
 rust-embed = "5.2.0"
 mime_guess = "2.0.1"
-rocksdb = "0.13.0"
 jpeg-decoder = "0.1.18"
+imageutils = { git = "https://git.z.xinu.tv/wathiede/imageutils" }
+cacher = { git = "https://git.z.xinu.tv/wathiede/cacher" }
+rocket = "0.4.5"
+thiserror = "1.0.20"
+rusoto_s3 = "0.42.0"
+rusoto_core = "0.42.0"

 [dependencies.prometheus]
 features = ["process"]
@@ -44,3 +48,8 @@ harness = false
 # Build dependencies with release optimizations even in dev mode.
 [profile.dev.package."*"]
 opt-level = 3
+
+[dependencies.rocket_contrib]
+version = "0.4.5"
+default-features = false
+features = ["json"]

Dockerfile (new file, +17)

@@ -0,0 +1,17 @@
FROM rustlang/rust:nightly AS build-env
COPY ./dockerfiles/netrc /root/.netrc
RUN mkdir /root/.cargo
COPY ./dockerfiles/cargo-config /.cargo/config
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
RUN curl -sL https://deb.nodesource.com/setup_12.x | bash -
RUN apt-get update && apt-get install -y strace build-essential clang nodejs yarn
COPY ./ /src/
WORKDIR /src/react-slideshow
RUN yarn install
RUN yarn build
WORKDIR /src
RUN cargo version && cargo install --path .
FROM rust:slim
COPY --from=build-env /usr/local/cargo/bin/photosync /usr/bin/

config.dbuild (new file, +1)

@@ -0,0 +1 @@
package="app/photosync"

dockerfiles/cargo-config (new file, +2)

@@ -0,0 +1,2 @@
[net]
git-fetch-with-cli = true

dockerfiles/netrc (new file, +1)

@@ -0,0 +1 @@
machine git.z.xinu.tv login wathiede password gitgit

react-slideshow/package.json

@@ -1,7 +1,7 @@
 {
   "name": "react-slideshow",
   "version": "0.1.0",
-  "proxy": "http://localhost:4000",
+  "proxy": "http://sky.h:8000",
   "private": true,
   "dependencies": {
     "@testing-library/jest-dom": "^4.2.4",
@@ -13,7 +13,9 @@
     "@types/react-dom": "^16.9.5",
     "@types/react-router": "^5.1.4",
     "@types/react-router-dom": "^5.1.3",
+    "bootstrap": "^4.5.0",
     "react": "^16.12.0",
+    "react-bootstrap": "^1.0.1",
     "react-dom": "^16.12.0",
     "react-router-dom": "^5.1.2",
     "react-scripts": "3.4.0",

react-slideshow/public/index.html

@@ -2,14 +2,14 @@
 <html lang="en">
   <head>
     <meta charset="utf-8" />
-    <link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
+    <link rel="icon" href="https://static.xinu.tv/favicon/gallery.png" />
     <meta name="viewport" content="width=device-width, initial-scale=1" />
     <meta name="theme-color" content="#000000" />
     <meta
       name="description"
-      content="Web site created using create-react-app"
+      content="Photo gallery @ xinu.tv"
     />
-    <link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
+    <link rel="apple-touch-icon" href="https://static.xinu.tv/favicon/gallery.png" />
     <!--
       manifest.json provides metadata used when your web app is installed on a
       user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
@@ -24,7 +24,7 @@
       work correctly both with client-side routing and a non-root public URL.
       Learn how to configure a non-root public URL by running `npm run build`.
     -->
-    <title>React App</title>
+    <title>Xinu Slideshow</title>
   </head>
   <body>
     <noscript>You need to enable JavaScript to run this app.</noscript>

react-slideshow/public/logo192.png (binary file removed, 5.2 KiB; not shown)

react-slideshow/public/logo512.png (binary file removed, 9.4 KiB; not shown)

react-slideshow/public/manifest.json

@@ -3,19 +3,8 @@
   "name": "Create React App Sample",
   "icons": [
     {
-      "src": "favicon.ico",
-      "sizes": "64x64 32x32 24x24 16x16",
-      "type": "image/x-icon"
-    },
-    {
-      "src": "logo192.png",
-      "type": "image/png",
-      "sizes": "192x192"
-    },
-    {
-      "src": "logo512.png",
-      "type": "image/png",
-      "sizes": "512x512"
+      "src": "https://static.xinu.tv/favicon/gallery.png",
+      "type": "image/png"
     }
   ],
   "start_url": ".",

react-slideshow/src/App.css

@@ -2,14 +2,24 @@ body, html, #root {
   height: 100%;
 }

+.container {
+  margin-top: 2em;
+}
+
 #ui {
-  background-color: white;
+  top: 0;
+  bottom: 0;
   line-height: 3em;
-  position: 'absolute';
+  position: absolute;
   width: 100%;
+  height: 100%;
 }

 #ui .meta {
+  background-color: rgba(255, 255, 255, 0.5);
+  line-height: 3em;
   text-align: center;
 }
+
+#slide {
+  height: 100%;
+}

react-slideshow/src/App.tsx

@@ -5,6 +5,11 @@ import {
   Route,
   useParams
 } from "react-router-dom";
+import {
+  Button,
+  Card
+} from 'react-bootstrap';
+
 import Random from './rand';
 import './App.css';
@@ -17,7 +22,7 @@ type Config = {
 let CONFIG: Config;
 if (process.env.NODE_ENV === 'production') {
   CONFIG = {
-    sleepTimeSeconds: 5 * 60,
+    sleepTimeSeconds: 60,
     showUI: false,
   }
 } else {
@@ -42,8 +47,8 @@ const roundup = (v: number, mod: number) => {
  * From https://stackoverflow.com/questions/6274339/how-can-i-shuffle-an-array
  * @param {Array} a items An array containing the items.
  */
-function shuffle(a: Array<MediaItem>) {
-  let rng = new Random(new Date().getDay());
+function shuffle<T>(a: Array<T>) {
+  let rng = new Random(new Date().getDate());
   for (let i = a.length - 1; i > 0; i--) {
     const j = Math.floor(rng.nextFloat() * (i + 1));
     [a[i], a[j]] = [a[j], a[i]];
@@ -51,6 +56,70 @@ function shuffle<T>(a: Array<T>) {
   return a;
 }
class Slide {
// One or two items. For example if display is landscape we'll try to fit
// two portrait images and only one landscape.
items: Array<MediaItem>;
nextSlide?: Slide;
prevSlide?: Slide;
constructor(items: Array<MediaItem>) {
this.items = items;
}
prefetchImages() {
console.log(`prefetchImages, I have ${this.imageUrls.length} images`);
this.imageUrls.map(url => new Image().src = url);
}
get imageUrls(): Array<string> {
let w = window.innerWidth * window.devicePixelRatio;
let h = window.innerHeight * window.devicePixelRatio;
let ratio = w/h;
if (ratio > 1) {
// Landscape image
w = roundup(w, IMAGE_CHUNK);
h = Math.round(w/ratio);
} else {
// Portrait image
h = roundup(h, IMAGE_CHUNK);
w = Math.round(h/ratio);
}
//console.log(`Window size ${window.innerWidth}x${window.innerHeight} with a devicePixelRatio of ${window.devicePixelRatio} for a total size of ${w}x${h}`);
return this.items.map(img => `/api/image/${img.id}?w=${w}&h=${h}`);
}
render() {
let urls = this.imageUrls;
let frac = 100 / urls.length;
let imgs = urls.map(url => {
// TODO(wathiede): make this landscape/portrait aware.
let style: React.CSSProperties = {
height: '100%',
width: frac + '%',
backgroundColor: 'black',
backgroundImage: `url(${url})`,
backgroundRepeat: 'no-repeat',
backgroundPosition: 'center center',
backgroundSize: 'cover',
float: 'left',
};
return <div key={url} style={style}></div>;
});
// TODO(wathiede): make sure the style handles multiple items.
return <div style={{
height: '100%',
width: '100%',
}}>{imgs}</div>;
}
};
function makePairs<T>(items: Array<T>) {
const half = Math.floor(items.length/2);
console.log(`items ${items.length} half ${half}`)
let pairs = [];
for (let i = 0; i < half; i++) {
pairs.push([items[2*i], items[2*i+1]]);
}
return pairs;
}
 type MediaMetadata = {
   width: number;
   height: number;
@@ -67,9 +136,8 @@ type AlbumProps = {
 };
 type AlbumState = {
   error: any;
-  // TODO(wathiede): define a MediaItem type.
   mediaItems: Array<MediaItem> | null;
-  idx: number;
+  curSlide?: Slide,
   showUI: boolean;
   timerID: any | null;
 };
@@ -77,7 +145,6 @@ class Album extends React.Component<AlbumProps, AlbumState> {
   state: AlbumState = {
     error: null,
     mediaItems: null,
-    idx: 0,
     showUI: this.props.showUI,
     timerID: null,
   };
@@ -89,12 +156,55 @@ class Album extends React.Component<AlbumProps, AlbumState> {
     fetch(process.env.PUBLIC_URL + `/api/album/${album}`)
       .then(res => res.json())
       .then(
-        (result) => {
-          this.setState({mediaItems: result});
+        (mediaItems: Array<MediaItem>) => {
+          let w = window.innerWidth * window.devicePixelRatio;
let h = window.innerHeight * window.devicePixelRatio;
let ratio = w/h;
let landscapes = mediaItems.filter((mi) => {
let md = mi.mediaMetadata;
let ratio = md.width/md.height;
return ratio > 1;
});
let portraits = mediaItems.filter((mi) => {
let md = mi.mediaMetadata;
let ratio = md.width/md.height;
return ratio <= 1;
});
console.log(`${landscapes.length} landscape photos`);
console.log(`${portraits.length} portraits photos`);
let slides: Array<Slide>;
if (ratio > 1) {
console.log('display in landscape mode');
slides = landscapes.map((p)=>{
return new Slide([p]);
});
let pairs = makePairs(shuffle(portraits));
slides = slides.concat(pairs.map((p, i) => new Slide(p)));
} else {
console.log('display in portrait mode');
slides = portraits.map((p)=>{
return new Slide([p]);
});
// TODO(wathiede): fix Slide::render before adding landscapes
// to slides here.
}
slides = shuffle(slides);
console.log(`${slides.length} slides`);
let numSlides = slides.length;
slides.forEach((p, idx)=>{
let nextIdx = (idx+1)%numSlides;
let prevIdx = (numSlides+idx-1)%numSlides;
p.nextSlide = slides[nextIdx];
p.prevSlide = slides[prevIdx];
})
this.setState({curSlide: slides[0]});
           let {sleepTimeSeconds} = this.props;
           let timerID = setInterval(()=>{
-            let {idx} = this.state;
-            this.setState({idx: idx+1})
+            let {curSlide} = this.state;
+            this.setState({curSlide: curSlide?.nextSlide})
             console.log('timer fired');
           }, sleepTimeSeconds*1000);
           this.setState({timerID});
@@ -110,73 +220,19 @@ class Album extends React.Component<AlbumProps, AlbumState> {
   }
   render() {
     // TODO(wathiede): fade transition.
-    // TODO(wathiede): pair-up portrait orientation images.
+    let {curSlide, error, showUI} = this.state;
// TODO(wathiede): fetch an image that maintains the originals aspect ratio
let w = window.innerWidth * window.devicePixelRatio;
let h = window.innerHeight * window.devicePixelRatio;
let ratio = w/h;
if (ratio > 1) {
// Landscape image
w = roundup(w, IMAGE_CHUNK);
h = Math.round(w/ratio);
} else {
// Portrait image
h = roundup(h, IMAGE_CHUNK);
w = Math.round(h/ratio);
}
console.log(`Window size ${window.innerWidth}x${window.innerHeight} with a devicePixelRatio of ${window.devicePixelRatio} for a total size of ${w}x${h}`);
//let w = roundup(window.innerWidth*window.devicePixelRatio, IMAGE_CHUNK);
//let h = roundup(window.innerHeight*window.devicePixelRatio, IMAGE_CHUNK);
let {idx, error, mediaItems, showUI} = this.state;
     if (error !== null) {
       return <h2>Error: {JSON.stringify(error)}</h2>;
-    } else if (mediaItems !== null) {
-      let landscapes = mediaItems.filter((mi) => {
-        let md = mi.mediaMetadata;
+    } else if (curSlide) {
+      let nextSlide = curSlide?.nextSlide;
+      let prevSlide = curSlide?.prevSlide;
let ratio = md.width/md.height;
return ratio > 1;
});
let portraits = mediaItems.filter((mi) => {
let md = mi.mediaMetadata;
let ratio = md.width/md.height;
return ratio <= 1;
});
console.log(`${landscapes.length} landscape photos`);
console.log(`${portraits.length} portraits photos`);
let photos;
if (ratio > 1) {
console.log('display in landscape mode');
photos = landscapes;
} else {
console.log('display in portrait mode');
photos = portraits;
}
photos = shuffle(photos);
let numImages = photos.length;
idx = idx % numImages;
let nextIdx = (idx+1)%numImages;
let prevIdx = (numImages+idx-1)%numImages;
let image = photos[idx];
let nextImage = photos[nextIdx];
let prevImage = photos[prevIdx];
let style: React.CSSProperties = {
height: '100%',
width: '100%',
backgroundColor: 'black',
backgroundImage: `url(/api/image/${image.id}?w=${w}&h=${h})`,
backgroundRepeat: 'no-repeat',
backgroundPosition: 'center center',
backgroundSize: 'cover',
};
       let prefetchStyle: React.CSSProperties = {
-        backgroundColor: 'rgba(127, 127, 127, 0.5)',
-        backgroundPosition: 'center center',
-        bottom: 0,
-        height: '25%',
         position: 'absolute',
         width: '25%',
+        height: '25%',
+        bottom: 0,
       };
       let leftPrefetchStyle: React.CSSProperties = {
         left: 0,
@@ -189,27 +245,28 @@ class Album extends React.Component<AlbumProps, AlbumState> {
       let ui;
       if (showUI) {
         ui = <div id="ui">
-          <img
+          <div
             style={leftPrefetchStyle}
             onClick={(e)=>{
               e.stopPropagation();
-              this.setState({idx: prevIdx})
-            }}
-            src={`/api/image/${prevImage.id}?w=${w}&h=${h}`} alt="prefetch prev" />
-          <div className="meta">{image.filename}</div>
-          <img
+              this.setState({curSlide: curSlide?.prevSlide})
+            }}>{ prevSlide?.render() }</div>
+          {/* TODO(wathiede): make this work with multiple items. */}
+          <div className="meta">{curSlide?.items.map(i=>i.filename).join(' | ')}</div>
+          <div
             style={rightPrefetchStyle}
             onClick={(e)=>{
               e.stopPropagation();
-              this.setState({idx: nextIdx})
-            }}
-            src={`/api/image/${nextImage.id}?w=${w}&h=${h}`} alt="prefetch next" />
+              this.setState({curSlide: curSlide?.nextSlide})
+            }}>{ nextSlide?.render() }</div>
         </div>;
       }
-      return <div style={style} onClick={(e)=>{
+      nextSlide?.prefetchImages();
+      return <div id="slide" onClick={(e)=>{
         e.stopPropagation();
         this.setState({showUI: !showUI})
       }}>
+        { curSlide?.render() }
         { ui }
       </div>;
     } else {
@@ -243,8 +300,10 @@ class AlbumIndex extends React.Component<AlbumIndexProps, AlbumIndexState> {
       return <h2>Error: {JSON.stringify(error)}</h2>;
     } else if (albums !== null) {
       return albums.map((a) => {
+        let img_url = "https://via.placeholder.com/256x128";
         let img = <img src="https://via.placeholder.com/256x128" className="mr-3" alt="unset"/>;
         if (a.coverPhotoMediaItemId !== undefined) {
+          img_url = `/api/image/${a.coverPhotoMediaItemId}?w=512&h=512`
           img = <img src={ `/api/image/${a.coverPhotoMediaItemId}?w=256&h=256` } className="mr-3" alt={ a.title }/>
         }
@@ -252,9 +311,16 @@ class AlbumIndex extends React.Component<AlbumIndexProps, AlbumIndexState> {
           {img}
           <figcaption className="figure-caption">{ a.title || "No title" } - { a.mediaItemsCount || 0 } photos </figcaption>
         </figure>;
-        return <a key={ a.id } href={ '#' + a.id }>
-          { figure }
-        </a>
+        return <Card key={a.id} style={{width: '50%'}}>
+          <Card.Img variant="top" src={img_url} />
+          <Card.Body>
<Card.Title>{a.title}</Card.Title>
<Card.Text>
{a.mediaItemsCount || 0} photos
</Card.Text>
<Button href={'#' + a.id} variant="primary" block>Slideshow</Button>
</Card.Body>
</Card>
       });
     } else {
       return <h2>Loading...</h2>;
@@ -280,6 +346,9 @@ const App = () => {
         <AlbumIndex />
       </div>
     </Route>
+    <Route exact path="/lookup/:albumId">
+      <AlbumRoute showUI={showUI} sleepTimeSeconds={sleepTimeSeconds} />
+    </Route>
     <Route exact path="/:albumId">
       <AlbumRoute showUI={showUI} sleepTimeSeconds={sleepTimeSeconds} />
     </Route>

react-slideshow/src/index.tsx

@@ -3,6 +3,8 @@ import ReactDOM from 'react-dom';
 import './index.css';
 import App from './App';
 import * as serviceWorker from './serviceWorker';
+// Importing the Bootstrap CSS
+import 'bootstrap/dist/css/bootstrap.min.css';

 ReactDOM.render(<App />, document.getElementById('root'));

react-slideshow/src/rand.js

@@ -22,7 +22,7 @@ Random.prototype.next = function () {
 /**
  * Returns a pseudo-random floating point number in range [0, 1).
  */
-Random.prototype.nextFloat = function (opt_minOrMax, opt_max) {
+Random.prototype.nextFloat = function () {
   // We know that result of next() will be 1 to 2147483646 (inclusive).
   return (this.next() - 1) / 2147483646;
 };

react-slideshow/tsconfig.json

@@ -1,6 +1,6 @@
 {
   "compilerOptions": {
-    "target": "es5",
+    "target": "es6",
     "lib": [
       "dom",
       "dom.iterable",

react-slideshow/yarn.lock: diff suppressed because it is too large.

src/lib.rs

@@ -1,2 +1,7 @@
+#![feature(proc_macro_hygiene, decl_macro)]
+
+#[macro_use]
+extern crate rocket;
+
 pub mod library;
-pub mod web;
+pub mod rweb;

src/library.rs

@@ -1,284 +1,102 @@
-use std::fs;
-use std::fs::File;
 use std::io;
-use std::io::BufReader;
+use std::io::Read;
-use std::path::Path;
 use std::path::PathBuf;
-use std::sync::Arc;

+use cacher::s3::S3CacherError;
+use cacher::S3Cacher;
 use google_photoslibrary1 as photos;
 use image::imageops;
-use image::DynamicImage;
-use image::GenericImage;
-use image::GenericImageView;
-use image::ImageBuffer;
-use image::ImageFormat;
-use image::ImageResult;
-use jpeg_decoder::Decoder;
-use log::error;
-use log::info;
-use log::warn;
-use photos::schemas::Album;
-use photos::schemas::MediaItem;
-use rocksdb::Direction;
-use rocksdb::IteratorMode;
-use rocksdb::DB;
+use imageutils::{load_image_buffer, resize, resize_to_fill, save_to_jpeg_bytes, FilterType};
+use log::{error, info};
+use photos::schemas::{Album, MediaItem};
+use rusoto_core::RusotoError;
+use rusoto_s3::GetObjectError;
+use thiserror::Error;

 // Used to ensure DB is invalidated after schema changes.
-const LIBRARY_GENERATION: &'static str = "14";
+const LIBRARY_GENERATION: &'static str = "16";
#[derive(Error, Debug)]
pub enum LibraryError {
#[error("IO error: {0}")]
IoError(#[from] std::io::Error),
#[error("s3 error: {0}")]
S3CacherError(#[from] S3CacherError),
#[error("json error: {0}")]
JsonError(#[from] serde_json::Error),
}
 #[derive(Clone)]
 pub struct Library {
-    root: PathBuf,
-    originals_dir: PathBuf,
-    cache_db: Arc<DB>,
+    s3: S3Cacher,
 }
pub fn load_image<P>(
path: P,
width_hint: Option<u32>,
height_hint: Option<u32>,
) -> Result<DynamicImage, Box<dyn std::error::Error>>
where
P: AsRef<Path>,
{
// TODO(wathiede): fall back to image::load_image when jpeg decoding fails.
let file = File::open(path).expect("failed to open file");
let mut decoder = Decoder::new(BufReader::new(file));
let (w, h) = match (width_hint, height_hint) {
(Some(w), Some(h)) => {
let got = decoder.scale(w as u16, h as u16)?;
//info!("Hinted at {}x{}, got {}x{}", w, h, got.0, got.1);
(got.0 as u32, got.1 as u32)
}
// TODO(wathiede): handle partial hints by grabbing info and then computing the absent
// dimenison.
_ => {
decoder.read_info()?;
let info = decoder.info().unwrap();
(info.width as u32, info.height as u32)
}
};
let pixels = decoder.decode().expect("failed to decode image");
Ok(DynamicImage::ImageRgb8(
ImageBuffer::from_raw(w, h, pixels).expect("pixels to small for given dimensions"),
))
}
#[derive(Clone, Copy, Debug)]
pub enum FilterType {
Builtin(imageops::FilterType),
Nearest,
}
/// fill_size computes the largest rectangle that fits in src with the aspect ratio of dst.
fn fill_size(src: (u32, u32), dst: (u32, u32)) -> (u32, u32) {
debug_assert!(src.0 >= dst.0);
debug_assert!(src.1 >= dst.1);
let x_scale = src.0 as f32 / dst.0 as f32;
let y_scale = src.1 as f32 / dst.1 as f32;
if x_scale > y_scale {
// Height will fill, width will crop.
(
(dst.0 as f32 * y_scale) as u32,
(dst.1 as f32 * y_scale) as u32,
)
} else {
// Width will fill, height will crop.
(
(dst.0 as f32 * x_scale) as u32,
(dst.1 as f32 * x_scale) as u32,
)
}
}
fn resize_to_fill_nearest(w: u32, h: u32, img: &DynamicImage) -> DynamicImage {
let mut dst = DynamicImage::new_rgb8(w, h);
let (src_w, src_h) = img.dimensions();
let (crop_w, crop_h) = fill_size((src_w, src_h), (w, h));
let off_x = (src_w - crop_w) / 2;
let off_y = (src_h - crop_h) / 2;
let src = img.view(off_x, off_y, crop_w, crop_h);
let x_scale = crop_w as f32 / w as f32;
let y_scale = crop_h as f32 / h as f32;
for y in 0..h {
for x in 0..w {
let x_idx = (x as f32 * x_scale).round() as u32;
let y_idx = (y as f32 * y_scale).round() as u32;
dst.put_pixel(x, y, src.get_pixel(x_idx, y_idx))
}
}
dst
}
pub fn resize(
img: &DynamicImage,
dimensions: (Option<u32>, Option<u32>),
filter: FilterType,
) -> DynamicImage {
let (w, h) = dimensions;
let (orig_w, orig_h) = img.dimensions();
let (w, h) = match (w, h) {
(Some(w), Some(h)) => (w, h),
(Some(w), None) => (w, orig_h * w / orig_w),
(None, Some(h)) => (orig_w * h / orig_h, h),
(None, None) => (orig_w, orig_h),
};
match filter {
FilterType::Builtin(filter) => img.resize(w, h, filter),
FilterType::Nearest => unimplemented!(), //resize_to_fill_nearest(w, h, img),
}
}
pub fn resize_to_fill(
img: &DynamicImage,
dimensions: (Option<u32>, Option<u32>),
filter: FilterType,
) -> DynamicImage {
let (w, h) = dimensions;
let (orig_w, orig_h) = img.dimensions();
let (w, h) = match (w, h) {
(Some(w), Some(h)) => (w, h),
(Some(w), None) => (w, orig_h * w / orig_w),
(None, Some(h)) => (orig_w * h / orig_h, h),
(None, None) => (orig_w, orig_h),
};
match filter {
FilterType::Builtin(filter) => img.resize_to_fill(w, h, filter),
FilterType::Nearest => resize_to_fill_nearest(w, h, img),
}
}
pub fn save_to_jpeg_bytes(img: &DynamicImage) -> ImageResult<Vec<u8>> {
let mut buf = Vec::new();
img.write_to(&mut buf, ImageFormat::Jpeg)?;
Ok(buf)
} }
 impl Library {
-    pub fn new(root: PathBuf) -> Result<Library, Box<dyn std::error::Error>> {
-        let db = DB::open_default(root.join("cache"))?;
+    pub fn new(s3: S3Cacher) -> Result<Library, Box<dyn std::error::Error>> {
+        let lib = Library { s3 };
let cache_db = Arc::new(db);
let lib = Library {
originals_dir: root.join("images").join("originals"),
cache_db,
root,
};
let cnt = lib.clean_db()?;
if cnt != 0 {
info!("Deleted {} entries", cnt);
}
if !lib.originals_dir.exists() {
info!(
"create originals dir {}",
&lib.originals_dir.to_string_lossy()
);
fs::create_dir_all(&lib.originals_dir)?;
}
         Ok(lib)
     }

-    // Removes all data in the database from older schema.
pub fn clean_db(&self) -> Result<usize, rocksdb::Error> {
Library::gc(LIBRARY_GENERATION, &self.cache_db)
}
fn gc(generation: &str, db: &DB) -> Result<usize, rocksdb::Error> {
let gen = format!("{}/", generation);
// '0' is the next character after '/', so iterator's starting there would be after the
// last `gen` entry.
let next_gen = format!("{}0", generation);
let mut del_cnt = 0;
for (k, _v) in db.iterator(IteratorMode::From(gen.as_bytes(), Direction::Reverse)) {
if !k.starts_with(gen.as_bytes()) {
info!("deleting stale key: {}", String::from_utf8_lossy(&k));
db.delete(k)?;
del_cnt += 1;
}
}
for (k, _v) in db.iterator(IteratorMode::From(next_gen.as_bytes(), Direction::Forward)) {
if !k.starts_with(gen.as_bytes()) {
info!("deleting stale key: {}", String::from_utf8_lossy(&k));
db.delete(k)?;
del_cnt += 1;
}
}
Ok(del_cnt)
}
-    pub fn create_album_index(&self, albums: &Vec<Album>) -> io::Result<()> {
+    pub fn create_album_index(&self, albums: &Vec<Album>) -> Result<(), LibraryError> {
         // Serialize it to a JSON string.
         let j = serde_json::to_string(albums)?;
-        let path = self.root.join("albums.json");
-        info!("saving {}", path.to_string_lossy());
-        fs::write(path, j)
+        let filename = "albums.json";
+        self.s3
+            .set(&Library::generational_key(filename), j.as_ref())?;
+        Ok(())
     }

-    pub fn create_album<P: AsRef<Path>>(
+    pub fn create_album(
         &self,
-        album_id: P,
+        album_id: &str,
         media_items: &Vec<MediaItem>,
-    ) -> io::Result<()> {
-        let album_dir = self.root.join(album_id);
+    ) -> Result<(), LibraryError> {
+        let relpath = format!("{}.json", &album_id);
if !album_dir.exists() {
info!("making album directory {}", album_dir.to_string_lossy());
fs::create_dir_all(&album_dir)?;
}
         let j = serde_json::to_string(&media_items)?;
-        let path = album_dir.join("album.json");
-        info!("saving {}", path.to_string_lossy());
-        fs::write(path, j)
+        self.s3
+            .set(&Library::generational_key(&relpath), j.as_ref())?;
+        Ok(())
     }
     pub fn albums(&self) -> Result<Vec<Album>, Box<dyn std::error::Error>> {
-        let albums_path = self.root.join("albums.json");
-        info!("loading {}", albums_path.to_string_lossy());
-        let bytes = fs::read(albums_path)?;
-        Ok(serde_json::from_slice(&bytes)?)
+        let filename = "albums.json";
+        let bytes = self.s3.get(&Library::generational_key(filename))?;
+        let album: Vec<Album> = serde_json::from_slice(&bytes)?;
+        Ok(album)
     }

     pub fn album(&self, album_id: &str) -> Result<Vec<MediaItem>, Box<dyn std::error::Error>> {
-        let album_path = self.root.join(album_id).join("album.json");
-        let bytes = fs::read(album_path)?;
-        Ok(serde_json::from_slice(&bytes)?)
+        let relpath = format!("{}.json", &album_id);
+        let bytes = self.s3.get(&Library::generational_key(&relpath))?;
+        let mis: Vec<MediaItem> = serde_json::from_slice(&bytes)?;
+        Ok(mis)
     }
     pub fn download_image(
         &self,
-        filename: &str,
+        _filename: &str,
         media_items_id: &str,
         base_url: &str,
     ) -> Result<PathBuf, Box<dyn std::error::Error>> {
-        // Put images from all albums in common directory.
-        let image_path = self.originals_dir.join(media_items_id);
+        let filename = Library::generational_key(&format!("images/originals/{}", media_items_id));
+        if !self.s3.contains_key(&filename) {
if image_path.exists() {
info!(
"Skipping already downloaded {} @ {}",
&filename,
image_path.to_string_lossy()
);
} else {
let download_path = image_path.with_extension("download");
             let url = format!("{}=d", base_url);
             let mut r = reqwest::blocking::get(&url)?;
-            let mut w = File::create(&download_path)?;
+            let mut buf = Vec::new();
             info!("Downloading {}", &url);
-            let _n = io::copy(&mut r, &mut w)?;
-            info!(
-                "Rename {} -> {}",
-                download_path.to_string_lossy(),
-                image_path.to_string_lossy()
-            );
-            fs::rename(download_path, &image_path)?;
+            r.read_to_end(&mut buf)?;
+            self.s3.set(&filename, &buf)?;
         }
-        Ok(image_path)
+        Ok(filename.into())
}
pub fn original(&self, media_items_id: &str) -> Option<PathBuf> {
let path = self.originals_dir.join(media_items_id);
if path.exists() {
Some(path)
} else {
None
} }
pub fn original_buffer(&self, media_items_id: &str) -> Result<Vec<u8>, LibraryError> {
let filename = Library::generational_key(&format!("images/originals/{}", media_items_id));
let bytes = self.s3.get(&filename)?;
Ok(bytes)
} }
     // TODO(wathiede): make this a macro like format! to skip the second string create and copy.
-    fn generational_key(generation: &str, key: &str) -> String {
-        format!("{}/{}", generation, key)
+    fn generational_key(key: &str) -> String {
+        format!("{}/{}", LIBRARY_GENERATION, key)
     }

     pub fn generate_thumbnail(
@@ -288,25 +106,23 @@ impl Library {
         filter: FilterType,
         fill: bool,
     ) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
-        match self.original(&media_items_id) {
-            None => {
-                warn!("Couldn't find original {}", &media_items_id);
-                Err(io::Error::new(io::ErrorKind::NotFound, format!("{}", media_items_id)).into())
-            }
-            Some(path) => {
-                let orig_img = load_image(&path, dimensions.0, dimensions.1)?;
+        let buf = self.original_buffer(&media_items_id)?;
+        let dimension_hint = match dimensions {
+            (Some(w), Some(h)) => Some((w, h)),
+            // Partial dimensions should be handled by the caller of this function. So all
+            // other options are None.
+            _ => None,
+        };
+        let orig_img = load_image_buffer(buf, dimension_hint)?;
         //.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
         let img = if fill {
             resize_to_fill(&orig_img, dimensions, filter)
         } else {
             resize(&orig_img, dimensions, filter)
         };
-        let buf = save_to_jpeg_bytes(&img)
-            .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+        let buf = save_to_jpeg_bytes(&img).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
         Ok(buf)
     }
}
}
     pub fn thumbnail(
         &self,
         media_items_id: &str,
@@ -320,15 +136,17 @@ impl Library {
                 (None, Some(h)) => format!("-h={}", h),
                 (None, None) => "".to_string(),
             };
-            Library::generational_key(LIBRARY_GENERATION, &format!("{}{}", media_items_id, dim))
+            Library::generational_key(&format!("images/thumbnails/{}-{}", media_items_id, dim))
        }
         let key = cache_key(media_items_id, dimensions);
-        let db = self.cache_db.clone();
-        match db.get(key.as_bytes()) {
-            // Cache hit, return bytes as-is.
-            Ok(Some(bytes)) => Some(bytes),
-            // Cache miss, fill cache and return.
-            Ok(None) => {
+        match self.s3.get(&key) {
+            Ok(bytes) => return Some(bytes),
+            Err(S3CacherError::GetObjectError(RusotoError::Service(
+                GetObjectError::NoSuchKey(msg),
+            ))) => info!("Missing thumbnail {} in s3: {}", key, msg),
+            Err(e) => error!("Error fetching thumbnail {} from s3: {}", key, e),
+        };
         info!("cache MISS {}", key);
         let bytes = match self.generate_thumbnail(
             media_items_id,
@@ -342,135 +160,9 @@ impl Library {
                 return None;
             }
         };
-        match db.put(key.as_bytes(), &bytes) {
-            Ok(_) => Some(bytes),
-            Err(e) => {
-                error!("Failed to put bytes to {}: {}", key, e);
+        if let Err(e) = self.s3.set(&key, &bytes) {
+            error!("Failed to put thumbnail {}: {}", &key, e);
+        }
+        Some(bytes)
None
}
}
}
// RocksDB error.
Err(e) => {
error!("Failed to search DB for {}: {}", key, e);
None
}
}
}
}
#[cfg(test)]
mod test {
use super::*;
use tempdir::TempDir;
fn compare_images(lhs: DynamicImage, rhs: DynamicImage) {
let lhs = lhs.to_rgb();
let rhs = rhs.to_rgb();
// Based on https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio#Definition
//
let mut mse: [i64; 3] = [0, 0, 0];
for (l, r) in lhs.pixels().zip(rhs.pixels()) {
let image::Rgb(l_pix) = l;
let image::Rgb(r_pix) = r;
{
for i in 0..3 {
let d = l_pix[i] as i64 - r_pix[i] as i64;
let d2 = d * d;
mse[i] += d2;
}
}
// assert_eq!(l_pix, r_pix, "{:?} != {:?} @ {} x {} ", l_pix, r_pix, x, y);
}
let (w, h) = lhs.dimensions();
let mn = (w * h) as i64;
mse.iter_mut().for_each(|i| *i = *i / mn);
let psnr: Vec<_> = mse
.iter()
.map(|i| 20. * 255_f32.log10() - 10. * (*i as f32).log10())
.collect();
// Uncomment to explore differences
/*
lhs.save("/tmp/lhs.png").expect("failed to write lhs.png");
rhs.save("/tmp/rhs.png").expect("failed to write rhs.png");
assert!(false, "MSE {:?} PSNR {:?} dB", mse, psnr);
*/
}
#[test]
fn fill_sizes() {
let srcs = vec![(400, 300), (300, 400)];
let dsts = vec![(225, 300), (300, 225), (100, 100)];
let want = vec![
(225, 300),
(400, 300),
(300, 300),
(300, 400),
(300, 225),
(300, 300),
];
let mut i = 0;
for s in &srcs {
for d in &dsts {
let w = want[i];
dbg!(s, d, w);
let got = fill_size(*s, *d);
assert_eq!(
got, w,
"{}. src {:?} dst {:?} want {:?} got {:?}",
i, s, d, w, got
);
i += 1;
}
}
}
#[test]
fn resize_to_fill_nearest() {
let w = 256;
let h = 256;
const TEST_IMAGE_PATH: &'static str = "testdata/image.jpg";
let img = load_image(TEST_IMAGE_PATH).expect("failed to load test image");
let reference = resize(
&img,
(Some(w), Some(h)),
FilterType::Builtin(imageops::FilterType::Nearest),
);
let got = resize(&img, (Some(w), Some(h)), FilterType::Nearest);
compare_images(reference, got);
}
#[test]
fn clean_db() {
let td = TempDir::new("photosync_test").expect("failed to create temporary directory");
eprintln!("creating database in {}", td.path().to_string_lossy());
let db = DB::open_default(td.path()).expect("failed to open DB");
let keys = vec!["one", "two", "three"];
fn get_keys(db: &DB) -> Vec<String> {
db.iterator(rocksdb::IteratorMode::Start)
.map(|(k, _v)| String::from_utf8(k.to_vec()).expect("key not utf-8"))
.collect()
}
for k in &keys {
for g in vec!["1", "2", "3"] {
db.put(Library::generational_key(g, k), k)
.expect("failed to put");
}
}
assert_eq!(
get_keys(&db),
vec![
"1/one", "1/three", "1/two", "2/one", "2/three", "2/two", "3/one", "3/three",
"3/two"
]
);
Library::gc("2", &db).expect("failed to GC DB");
assert_eq!(get_keys(&db), vec!["2/one", "2/three", "2/two",]);
    }
}
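Aside (not part of the diff): a minimal, hypothetical Rust sketch of the generational cache-key scheme that src/library.rs relies on above. Every S3 key is prefixed with LIBRARY_GENERATION, so bumping the constant (here "14" to "16") orphans all previously cached albums and thumbnails and they are regenerated on demand; the exact dimension-suffix format below is an assumption for illustration.

// Illustrative sketch only; mirrors the shape of Library::generational_key and the
// thumbnail cache_key helper in the diff, with an assumed dimension format.
const LIBRARY_GENERATION: &str = "16";

fn generational_key(key: &str) -> String {
    // Prefixing every key with the generation makes old entries unreachable
    // after a schema change; no explicit invalidation pass is needed.
    format!("{}/{}", LIBRARY_GENERATION, key)
}

fn thumbnail_key(media_items_id: &str, dimensions: (Option<u32>, Option<u32>)) -> String {
    let dim = match dimensions {
        (Some(w), Some(h)) => format!("-w={}-h={}", w, h),
        (Some(w), None) => format!("-w={}", w),
        (None, Some(h)) => format!("-h={}", h),
        (None, None) => String::new(),
    };
    generational_key(&format!("images/thumbnails/{}-{}", media_items_id, dim))
}

fn main() {
    // Prints something like "16/images/thumbnails/abc123--w=1920-h=1080".
    println!("{}", thumbnail_key("abc123", (Some(1920), Some(1080))));
}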

src/main.rs

@@ -2,19 +2,46 @@ use std::collections::HashMap;
 use std::error::Error;
 use std::net::SocketAddr;
 use std::path::PathBuf;
+use std::thread;
+use std::time;

+use cacher::S3Cacher;
 use google_api_auth;
 use google_photoslibrary1 as photos;
 use hexihasher;
 use lazy_static::lazy_static;
-use log::{debug, info};
+use log::{debug, error, info};
 use photos::schemas::{Album, MediaItem, SearchMediaItemsRequest};
 use regex::Regex;
 use structopt::StructOpt;
 use yup_oauth2::{Authenticator, InstalledFlow};

 use photosync::library::Library;
-use photosync::web;
+use photosync::rweb;
fn parse_duration(src: &str) -> Result<time::Duration, std::num::ParseIntError> {
let secs = str::parse::<u64>(src)?;
Ok(time::Duration::from_secs(secs))
}
#[derive(Debug, StructOpt)]
struct Sync {
#[structopt(flatten)]
auth: Auth,
/// Optional album title to filter. Default will mirror all albums.
#[structopt(short, long)]
title_filter: Option<Regex>,
/// S3 bucket holding metadata and images.
#[structopt(long, default_value = "photosync-dev")]
s3_bucket: String,
}
#[derive(Debug, StructOpt)]
struct Serve {
/// HTTP address to listen for web requests.
#[structopt(long = "addr", default_value = "0.0.0.0:0")]
addr: SocketAddr,
}
 #[derive(Debug, StructOpt)]
 enum Command {
@@ -31,16 +58,22 @@ enum Command {
     },
     Sync {
         #[structopt(flatten)]
-        auth: Auth,
-        /// Optional album title to filter. Default will mirror all albums.
-        #[structopt(short, long)]
-        title_filter: Option<Regex>,
-        /// Directory to store sync.
-        output: PathBuf,
+        sync: Sync,
     },
     Serve {
-        /// Directory of data fetched by `sync`.
-        root: PathBuf,
+        #[structopt(flatten)]
+        serve: Serve,
/// S3 bucket holding metadata and images.
#[structopt(default_value = "photosync-dev")]
s3_bucket: String,
},
ServeAndSync {
/// Sync albums at given interval.
#[structopt(parse(try_from_str = parse_duration))]
interval: time::Duration,
#[structopt(flatten)]
sync: Sync,
         /// HTTP address to listen for web requests.
         #[structopt(long = "addr", default_value = "0.0.0.0:0")]
         addr: SocketAddr,
@@ -192,10 +225,9 @@ lazy_static! {
 fn sync_albums(
     client: &photos::Client,
-    title_filter: Option<Regex>,
-    output_dir: PathBuf,
+    title_filter: &Option<Regex>,
+    lib: &Library,
 ) -> Result<(), Box<dyn Error>> {
-    let lib = Library::new(output_dir)?;
     let albums = list_albums(client, title_filter)?;
     info!("albums {:?}", albums);
     lib.create_album_index(&albums)?;
@@ -236,12 +268,18 @@ fn print_albums(albums: Vec<Album>) {
 fn list_albums(
     client: &photos::Client,
-    title_filter: Option<Regex>,
+    title_filter: &Option<Regex>,
 ) -> Result<Vec<Album>, Box<dyn Error>> {
     Ok(client
+        .albums()
+        .list()
+        .iter_albums_with_all_fields()
+        .chain(
+            client
                 .shared_albums()
                 .list()
-        .iter_shared_albums_with_all_fields()
+                .iter_shared_albums_with_all_fields(),
+        )
         .filter_map(|a| a.ok())
         .filter(|a| {
             match (&title_filter, &a.title) {
@@ -258,8 +296,23 @@ fn list_albums(
         .collect())
 }

-pub fn serve(addr: SocketAddr, root: PathBuf) -> Result<(), Box<dyn Error>> {
-    web::run(addr, root)
+fn background_sync(
+    client: photos::Client,
+    interval: time::Duration,
+    title_filter: Option<Regex>,
+    lib: Library,
+) -> Result<(), Box<dyn Error>> {
+    thread::spawn(move || loop {
+        if let Err(err) = sync_albums(&client, &title_filter, &lib) {
+            error!("Error syncing: {}", err);
+        }
+        thread::sleep(interval);
+    });
+    Ok(())
+}
+
+pub fn serve(addr: SocketAddr, lib: Library) -> Result<(), Box<dyn Error>> {
+    rweb::run(addr, lib)
 }
 fn main() -> Result<(), Box<dyn std::error::Error>> {
@@ -273,7 +326,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
     match opt.cmd {
         Command::ListAlbums { auth, title_filter } => {
             let client = new_client(&auth.credentials, &auth.token_cache)?;
-            print_albums(list_albums(&client, title_filter)?);
+            print_albums(list_albums(&client, &title_filter)?);
             Ok(())
         }
         Command::SearchMediaItems { auth, album_id } => {
@@ -282,14 +335,44 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
             Ok(())
         }
         Command::Sync {
sync:
Sync {
                     auth,
                     title_filter,
-                    output,
+                    s3_bucket,
+                },
         } => {
let s3 = S3Cacher::new(s3_bucket.clone())?;
             let client = new_client(&auth.credentials, &auth.token_cache)?;
-            sync_albums(&client, title_filter, output)?;
+            let lib = Library::new(s3)?;
sync_albums(&client, &title_filter, &lib)?;
Ok(())
}
Command::Serve {
serve: Serve { addr },
s3_bucket,
} => {
let s3 = S3Cacher::new(s3_bucket.clone())?;
let lib = Library::new(s3)?;
serve(addr, lib)
}
Command::ServeAndSync {
interval,
sync:
Sync {
auth,
title_filter,
s3_bucket,
},
addr,
} => {
let s3 = S3Cacher::new(s3_bucket.clone())?;
let client = new_client(&auth.credentials, &auth.token_cache)?;
let lib = Library::new(s3)?;
background_sync(client, interval, title_filter, lib.clone())?;
serve(addr, lib)?;
             Ok(())
         }
-        Command::Serve { addr, root } => serve(addr, root),
     }
 }
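Aside (illustrative only, with assumed simplifications): the ServeAndSync command added above parses a sync interval in seconds and runs sync_albums on a background thread while the Rocket server blocks in the foreground. A tiny runnable sketch of that shape:

use std::{thread, time};

// Same shape as main.rs's parse_duration: seconds on the CLI become a Duration.
fn parse_duration(src: &str) -> Result<time::Duration, std::num::ParseIntError> {
    Ok(time::Duration::from_secs(src.parse::<u64>()?))
}

fn main() {
    let interval = parse_duration("2").expect("invalid interval");
    // Stand-in for background_sync(): run a few passes, sleeping between them.
    // In the real command the loop runs forever and errors are only logged.
    let syncer = thread::spawn(move || {
        for pass in 0..3 {
            println!("sync pass {}", pass); // sync_albums(...) would run here
            thread::sleep(interval);
        }
    });
    // serve(addr, lib) would block here handling HTTP requests.
    syncer.join().unwrap();
}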

src/rweb.rs (new file, +148)

@@ -0,0 +1,148 @@
use std::error::Error;
use std::io::Write;
use std::net::SocketAddr;
use std::path::PathBuf;
use google_photoslibrary1 as photos;
use log::error;
use photos::schemas::{Album, MediaItem};
use prometheus::Encoder;
use rocket::config::{Config, Environment};
use rocket::http::ContentType;
use rocket::response::status::NotFound;
use rocket::response::Content;
use rocket::State;
use rocket_contrib::json::Json;
use rust_embed::RustEmbed;
use crate::library::Library;
#[get("/metrics")]
fn metrics() -> Content<Vec<u8>> {
let mut buffer = Vec::new();
let encoder = prometheus::TextEncoder::new();
// Gather the metrics.
let metric_families = prometheus::gather();
// Encode them to send.
encoder.encode(&metric_families, &mut buffer).unwrap();
// TODO(wathiede): see if there's a wrapper like html()
Content(ContentType::Plain, buffer)
}
#[get("/")]
fn index() -> Result<Content<Vec<u8>>, NotFound<String>> {
file("index.html")
}
// This is the catch-all handler, it has a high rank so it is the last match in any tie-breaks.
#[get("/<path..>", rank = 99)]
fn path(path: PathBuf) -> Result<Content<Vec<u8>>, NotFound<String>> {
let path = path.to_str().unwrap();
let path = if path.ends_with("/") {
format!("{}index.html", path.to_string())
} else {
path.to_string()
};
file(&path)
}
fn file(path: &str) -> Result<Content<Vec<u8>>, NotFound<String>> {
match Asset::get(path) {
Some(bytes) => {
let mime = mime_guess::from_path(path).first_or_octet_stream();
let ct = ContentType::parse_flexible(mime.essence_str()).unwrap_or(ContentType::Binary);
Ok(Content(ct, bytes.into()))
}
None => Err(NotFound(path.to_string())),
}
}
#[get("/api/albums")]
fn albums(lib: State<Library>) -> Result<Json<Vec<Album>>, NotFound<String>> {
let albums = lib
.albums()
.map_err(|e| NotFound(format!("Couldn't find albums: {}", e)))?;
Ok(Json(albums))
}
#[get("/api/album/<id>")]
fn album(id: String, lib: State<Library>) -> Result<Json<Vec<MediaItem>>, NotFound<String>> {
let album = lib
.album(&id)
.map_err(|e| NotFound(format!("Couldn't find album {}: {}", id, e)))?;
Ok(Json(album))
}
#[get("/api/image/<media_items_id>?<w>&<h>&<fill>")]
fn image(
media_items_id: String,
w: Option<u32>,
h: Option<u32>,
fill: Option<bool>,
lib: State<Library>,
) -> Result<Content<Vec<u8>>, NotFound<String>> {
// TODO(wathiede): add caching headers.
match lib.thumbnail(&media_items_id, (w, h), fill.unwrap_or(false)) {
None => Err(NotFound(format!(
"Couldn't find original {}",
&media_items_id
))),
Some(bytes) => Ok(Content(ContentType::JPEG, bytes.into())),
}
}
#[derive(RustEmbed)]
#[folder = "react-slideshow/build/"]
struct Asset;
#[get("/embedz")]
fn embedz() -> Content<Vec<u8>> {
let mut w = Vec::new();
write!(
w,
r#"<html><table><tbody><tr><th>size</th><th style="text-align: left;">path</th></tr>"#
)
.unwrap();
for path in Asset::iter() {
write!(
w,
r#"<tr><td style="text-align: right;">{0}</td><td><a href="{1}">{1}</a></td</tr>"#,
Asset::get(&path).unwrap().len(),
path
)
.unwrap();
}
Content(ContentType::HTML, w)
}
pub fn run(addr: SocketAddr, lib: Library) -> Result<(), Box<dyn Error>> {
let config = Config::build(Environment::Development)
.address(addr.ip().to_string())
.port(addr.port())
.finalize()?;
let e = rocket::custom(config)
.manage(lib)
.mount(
"/",
routes![album, albums, image, embedz, metrics, index, path],
)
.launch();
match e.kind() {
rocket::error::LaunchErrorKind::Collision(v) => {
error!("Route collisions:");
for (r1, r2) in v {
error!(" R1 {}", r1);
error!(" R2 {}", r2);
}
for (r1, r2) in v {
error!(" R1 {:#?}", r1);
error!(" R2 {:#?}", r2);
}
}
_ => (),
};
return Err(e.into());
}

src/web.rs (deleted)

@@ -1,137 +0,0 @@
use std::error::Error;
use std::net::SocketAddr;
use std::path::PathBuf;
use log::warn;
use prometheus::Encoder;
use rust_embed::RustEmbed;
use serde::Deserialize;
use warp;
use warp::http::header::{HeaderMap, HeaderValue};
use warp::reject::Rejection;
use warp::Filter;
use crate::library::Library;
fn metrics() -> impl Filter<Extract = (impl warp::reply::Reply,), Error = Rejection> + Clone {
let mut text_headers = HeaderMap::new();
text_headers.insert("content-type", HeaderValue::from_static("text/plain"));
warp::path("metrics")
.map(|| {
let mut buffer = Vec::new();
let encoder = prometheus::TextEncoder::new();
// Gather the metrics.
let metric_families = prometheus::gather();
// Encode them to send.
encoder.encode(&metric_families, &mut buffer).unwrap();
// TODO(wathiede): see if there's a wrapper like html()
buffer
})
.with(warp::reply::with::headers(text_headers))
}
// TODO(wathiede): add caching for hashed files. Add at least etag for everything.
fn index(path: warp::path::FullPath) -> Result<impl warp::Reply, warp::Rejection> {
let path = path.as_str();
let path = if path.ends_with("/") {
format!("{}index.html", path.to_string())
} else {
path.to_string()
};
let path = &path[1..];
match Asset::get(path) {
Some(bytes) => {
let mime = mime_guess::from_path(path).first_or_octet_stream();
Ok(warp::http::Response::builder()
.header("Content-Type", mime.essence_str())
.body(bytes.into_owned()))
}
None => Err(warp::reject::not_found()),
}
}
fn albums(lib: Library) -> Result<impl warp::Reply, warp::Rejection> {
let albums = lib.albums().map_err(|e| {
warn!("Couldn't find albums: {}", e);
warp::reject::not_found()
})?;
Ok(warp::reply::json(&albums))
}
fn album(lib: Library, id: String) -> Result<impl warp::Reply, warp::Rejection> {
let album = lib.album(&id).map_err(|e| {
warn!("Couldn't find album {}: {}", id, e);
warp::reject::not_found()
})?;
Ok(warp::reply::json(&album))
}
#[derive(Debug, Deserialize)]
struct ImageParams {
w: Option<u32>,
h: Option<u32>,
fill: Option<bool>,
}
fn image(
lib: Library,
media_items_id: String,
params: ImageParams,
) -> Result<impl warp::Reply, warp::Rejection> {
// TODO(wathiede): add caching headers.
match lib.thumbnail(
&media_items_id,
(params.w, params.h),
params.fill.unwrap_or(false),
) {
None => {
warn!("Couldn't find original {}", &media_items_id);
Err(warp::reject::not_found())
}
Some(bytes) => Ok(warp::http::Response::builder()
.header("Content-Type", "image/jpeg")
.body(bytes)),
}
}
#[derive(RustEmbed)]
#[folder = "react-debug/build/"]
struct Asset;
pub fn run(addr: SocketAddr, root: PathBuf) -> Result<(), Box<dyn Error>> {
let lib = Library::new(root)?;
let lib = warp::any().map(move || lib.clone());
let index = warp::get2().and(warp::path::full()).and_then(index);
let albums = warp::path("albums").and(lib.clone()).and_then(albums);
let album = warp::path("album")
.and(lib.clone())
.and(warp::path::param())
.and_then(album);
let image = warp::path("image")
.and(lib.clone())
.and(warp::path::param())
.and(warp::query::<ImageParams>())
.and_then(image);
let api = albums.or(album).or(image);
let api = warp::path("api").and(api);
// Fallback, always keep this last.
let api = api.or(index);
//let api = api.with(warp::log("photosync"));
// We don't want metrics & heath checking filling up the logs, so we add this handler after
// wrapping with the log filter.
let routes = metrics().or(api);
warp::serve(routes).run(addr);
Ok(())
}

testdata/image.jpg (vendored binary removed, 3.1 MiB; not shown)