Compare commits

..

84 Commits

Author SHA1 Message Date
86805f38e3 Load slurp config from toml file 2024-09-21 12:52:08 -07:00
62b17bd6a6 Bumping version to 0.0.29 2024-09-20 08:56:58 -07:00
c0bac99d5a server: add slurp config for zsa blog 2024-09-20 08:56:45 -07:00
3b69c5e74b Bumping version to 0.0.28 2024-09-19 17:06:03 -07:00
539fd469cc server: create index when missing 2024-09-19 17:05:47 -07:00
442688c35c web: lint 2024-09-19 16:54:18 -07:00
da27f02237 Bumping version to 0.0.27 2024-09-19 16:52:35 -07:00
9460e354b7 server: cargo sqlx prepare 2024-09-19 16:52:26 -07:00
6bab128ed9 Bumping version to 0.0.26 2024-09-19 16:33:50 -07:00
3856b4ca5a server: try different cacher url 2024-09-19 16:33:40 -07:00
bef39eefa5 Bumping version to 0.0.25 2024-09-19 16:08:20 -07:00
b0366c7b4d server: try non-https to see if that works 2024-09-19 16:07:59 -07:00
ca02d84d63 Bumping version to 0.0.24 2024-09-19 16:01:55 -07:00
461d5de886 server: change internal git url 2024-09-19 16:01:41 -07:00
f8134dad7a Bumping version to 0.0.23 2024-09-19 15:53:56 -07:00
30f510bb03 server: WIP tantivy, cache slurps, use shared::compute_color, 2024-09-19 15:53:09 -07:00
e7cbf9cc45 shared: remove debug logging 2024-09-19 13:54:47 -07:00
5108213af5 web: use shared compute_color 2024-09-19 13:49:24 -07:00
d148f625ac shared: add compute_color 2024-09-19 13:48:56 -07:00
a9b8f5a88f Bumping version to 0.0.22 2024-09-16 20:00:16 -07:00
539b584d9b web: fix broken build 2024-09-16 20:00:06 -07:00
2f8d83fc4b Bumping version to 0.0.21 2024-09-16 19:52:28 -07:00
86ee1257fa web: better progress bar 2024-09-16 19:52:20 -07:00
03f1035e0e Bumping version to 0.0.20 2024-09-12 22:38:18 -07:00
bd578191a8 web: add scroll to top button and squelch some debug logging 2024-09-12 22:37:58 -07:00
d4fc2e2ef1 Bumping version to 0.0.19 2024-09-12 15:41:01 -07:00
cde30de81c web: explicitly set progress to zero when not in thread/news view 2024-09-12 15:40:42 -07:00
96be74e3ee Bumping version to 0.0.18 2024-09-12 15:32:30 -07:00
b78d34b27e web: disable bulma styling for .number 2024-09-12 15:32:18 -07:00
b4b64c33a6 Bumping version to 0.0.17 2024-09-12 10:07:00 -07:00
47b1875022 server: tweak cloudflare and prusa slurp config 2024-09-12 10:06:46 -07:00
b06cbd1381 Bumping version to 0.0.16 2024-09-12 10:03:26 -07:00
9e35f8ca6c web: fix <em> looking like a button 2024-09-12 10:01:58 -07:00
8eaefde67d Bumping version to 0.0.15 2024-09-12 09:28:14 -07:00
d5a3324837 server: slurp config for prusa blog and squelch some info logging 2024-09-12 09:27:57 -07:00
f5c90d8770 Bumping version to 0.0.14 2024-09-11 11:46:04 -07:00
825a125a62 web: redox specific styling 2024-09-11 11:45:53 -07:00
da7cf37dae Bumping version to 0.0.13 2024-09-11 11:41:27 -07:00
1985ae1f49 server: add slurp configs for facebook and redox 2024-09-11 11:41:09 -07:00
91eb3019f9 Bumping version to 0.0.12 2024-09-09 20:31:07 -07:00
66e8e00a9b web: remove dead code 2024-09-09 20:21:51 -07:00
4b8923d852 web: more accurate reading progress bar 2024-09-09 20:21:13 -07:00
baba720749 Bumping version to 0.0.11 2024-09-02 13:36:18 -07:00
1ec22599cc web: make pre blocks look like code blocks in email 2024-09-02 13:35:58 -07:00
c69017bc36 Bumping version to 0.0.10 2024-09-02 13:19:11 -07:00
48bf57fbbe web: more pleasant color scheme for code blocks in email 2024-09-02 13:18:49 -07:00
3491856784 Bumping version to 0.0.9 2024-09-01 16:17:35 -07:00
f887c15b46 web: address lint 2024-09-01 16:17:27 -07:00
7786f850d1 Bumping version to 0.0.8 2024-09-01 16:16:09 -07:00
cad778734e web: rename Msg::Reload->Refresh and create proper Reload 2024-09-01 16:15:38 -07:00
1210f7038a Bumping version to 0.0.7 2024-09-01 16:09:14 -07:00
f9ab7284a3 web: remove obsolete Makefile 2024-09-01 16:09:04 -07:00
100865c923 server: use same html cleanup idiom in nm as we do in newreader 2024-09-01 16:08:25 -07:00
b8c1710a83 dev: watch for git commits and rebuild on change 2024-09-01 16:07:22 -07:00
215b8cd41d shared: ignore dirty, if git is present we're developing
When developing, dirty can get out of sync between client and server if you're
only doing development in one.
2024-09-01 15:57:02 -07:00
487d7084c3 Bumping version to 0.0.6 2024-09-01 15:48:41 -07:00
b1e761b26f web: don't show progress bar until 400px have scrolled 2024-09-01 15:48:11 -07:00
3efe90ca21 Update release makefile 2024-09-01 15:40:19 -07:00
61649e1e04 Bumping version to 0.0.5 2024-09-01 15:38:39 -07:00
13ac352a10 Helpers to bump version number 2024-09-01 15:37:00 -07:00
5ca7a25e8d Bumping version to 0.0.4 2024-09-01 15:36:48 -07:00
7bb8ef0938 Bumping version to :?} 2024-09-01 15:36:36 -07:00
5c55a290ac Bumping version to :?} 2024-09-01 15:34:53 -07:00
4e3e1b075d Setting crate version to 0.2.0-a8c5a16 2024-09-01 15:30:37 -07:00
a8c5a164ff web: clean up version string and reload on mismatch 2024-09-01 15:02:34 -07:00
1f393f1c7f Add server and client build versions 2024-09-01 14:55:51 -07:00
fdaff70231 server: improve cloudflare and grafana image and iframe rendering 2024-09-01 11:05:07 -07:00
7218c13b9e server: address lint 2024-08-31 16:18:47 -07:00
934cb9d91b web: address lint 2024-08-31 16:11:49 -07:00
4faef5e017 web: add scrollbar for read progress 2024-08-31 16:08:06 -07:00
5c813e7350 web: style improvements for figure captions 2024-08-31 15:04:19 -07:00
fb754469ce web: let pullquotes on grafana blog be full width 2024-08-31 14:46:38 -07:00
548b5a0ab0 server: extract image title and alt attributes into figure captions 2024-08-31 14:43:04 -07:00
f77d0776c4 web: style tweaks for <em> 2024-08-31 14:42:19 -07:00
e73f70af8f Fix new post read/unread handling 2024-08-31 13:49:03 -07:00
a9e6120f81 web: don't make slashdot pull quotes italic 2024-08-31 13:36:21 -07:00
090a010a63 server: fix thread id for news posts 2024-08-31 13:23:25 -07:00
85c762a297 web: add class for mail vs news-post bodies 2024-08-31 11:54:19 -07:00
a8d5617cf2 Treat email and news posts as distinct types on the frontend and backend 2024-08-31 11:40:06 -07:00
760cec01a8 Refactor thread responses into an enum.
Lays ground work for different types of views, i.e. email, news, docs, etc.
2024-08-26 21:48:53 -07:00
446fcfe37f server: fix url for graphiql 2024-08-26 21:48:25 -07:00
71de3ef8ae server: add ability to slurp contents from site 2024-08-25 19:37:53 -07:00
d98d429b5c notmuch: add TODO 2024-08-25 19:37:37 -07:00
cf5a6fadfd server: sort dependencies 2024-08-24 09:26:52 -07:00
36 changed files with 3609 additions and 560 deletions

2294
Cargo.lock generated

File diff suppressed because it is too large Load Diff

6
Makefile Normal file
View File

@@ -0,0 +1,6 @@
# ".PHONEY" was a typo and had no effect; .PHONY is the correct special
# target so these rules run even if files named "release"/"all" exist.
.PHONY: all release
release:
	bash scripts/update-crate-version.sh
	git push
all: release

4
dev.sh
View File

@@ -1,7 +1,7 @@
# Launch the letterbox dev environment in a tmux session: one window running
# trunk for the web frontend, one running cargo watch for the server. Both
# watch ../.git so new commits trigger a rebuild (version-string mismatch
# detection depends on this).
# NOTE(review): the rendered diff interleaved the old and new send-keys lines;
# only the new "-w ../.git" variants are kept here.
cd -- "$( dirname -- "${BASH_SOURCE[0]}" )"
tmux new-session -d -s letterbox-dev
tmux rename-window web
tmux send-keys "cd web; trunk serve -w ../.git -w ../shared -w ../notmuch -w ./" C-m
tmux new-window -n server
tmux send-keys "cd server; cargo watch -c -x run -w ../.git -w ../shared -w ../notmuch -w ./" C-m
tmux attach -d -t letterbox-dev

View File

@@ -1,6 +1,6 @@
[package]
name = "notmuch"
version = "0.1.0"
version = "0.0.29"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -518,6 +518,7 @@ impl Notmuch {
}
pub fn count(&self, query: &str) -> Result<usize, NotmuchError> {
// TODO: compare speed of notmuch count for * w/ and w/o --output=threads
let res = self.run_notmuch(["count", "--output=threads", query])?;
// Strip '\n' from res.
let s = std::str::from_utf8(&res[..res.len() - 1])?;

View File

@@ -1,6 +1,6 @@
[package]
name = "procmail2notmuch"
version = "0.1.0"
version = "0.0.29"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Bump the workspace patch version and commit the result.
# The original "#!env bash" shebang is invalid: the kernel requires an
# absolute interpreter path, so the /usr/bin/env form is used instead.
set -e -x
cargo-set-version set-version --bump patch
# Read the freshly bumped version back out of server/Cargo.toml.
VERSION="$(awk -F\" '/^version/ {print $2}' server/Cargo.toml)"
# ${VERSION:?} aborts the commit with an error if the awk extraction failed,
# preventing commit messages like "Bumping version to " with no version.
git commit Cargo.lock */Cargo.toml -m "Bumping version to ${VERSION:?}"

View File

@@ -0,0 +1,62 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n site,\n title,\n summary,\n link,\n date,\n is_read,\n uid,\n id\nFROM post\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "site",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "summary",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "link",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "date",
"type_info": "Timestamp"
},
{
"ordinal": 5,
"name": "is_read",
"type_info": "Bool"
},
{
"ordinal": 6,
"name": "uid",
"type_info": "Text"
},
{
"ordinal": 7,
"name": "id",
"type_info": "Int4"
}
],
"parameters": {
"Left": []
},
"nullable": [
true,
true,
true,
true,
true,
true,
false,
false
]
},
"hash": "1b2244c9b9b64a1395d8d266f5df5352242bbe5efe481b0852e1c1d4b40584a7"
}

View File

@@ -1,34 +1,42 @@
[package]
name = "server"
version = "0.1.0"
version = "0.0.29"
edition = "2021"
default-run = "server"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
notmuch = { path = "../notmuch" }
shared = { path = "../shared" }
serde_json = "1.0.87"
thiserror = "1.0.37"
serde = { version = "1.0.147", features = ["derive"] }
log = "0.4.17"
tokio = "1.26.0"
glog = "0.1.0"
urlencoding = "2.1.3"
ammonia = "3.3.0"
anyhow = "1.0.79"
async-graphql = { version = "6.0.11", features = ["log"] }
async-graphql-rocket = "6.0.11"
rocket_cors = "0.6.0"
memmap = "0.7.0"
mailparse = "0.15.0"
ammonia = "3.3.0"
lol_html = "1.2.0"
async-trait = "0.1.81"
build-info = "0.0.38"
cacher = {git = "http://git-private.h.xinu.tv/wathiede/cacher.git"}
css-inline = "0.13.0"
anyhow = "1.0.79"
maplit = "1.0.2"
linkify = "0.10.0"
sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio", "time"] }
url = "2.5.2"
glog = "0.1.0"
html-escape = "0.2.13"
linkify = "0.10.0"
log = "0.4.17"
lol_html = "1.2.0"
mailparse = "0.15.0"
maplit = "1.0.2"
memmap = "0.7.0"
notmuch = { path = "../notmuch" }
reqwest = { version = "0.12.7", features = ["blocking"] }
rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
rocket_cors = "0.6.0"
scraper = "0.20.0"
serde = { version = "1.0.147", features = ["derive"] }
serde_json = "1.0.87"
shared = { path = "../shared" }
sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio", "time"] }
tantivy = "0.22.0"
thiserror = "1.0.37"
tokio = "1.26.0"
url = "2.5.2"
urlencoding = "2.1.3"
[build-dependencies]
build-info-build = "0.0.38"

View File

@@ -1,6 +1,8 @@
[release]
address = "0.0.0.0"
port = 9345
newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
newsreader_tantivy_db_path = "../target/database/newsreader"
[debug]
address = "0.0.0.0"
@@ -8,3 +10,45 @@ port = 9345
# Uncomment to make it production like.
#log_level = "critical"
newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
newsreader_tantivy_db_path = "../target/database/newsreader"
slurp_cache_path = "/net/nasx/x/letterbox/slurp"
[debug.slurp_site_selectors]
"atmeta.com" = [
"div.entry-content"
]
"blog.prusa3d.com" = [
"article.content .post-block"
]
"blog.cloudflare.com" = [
".author-lists .author-name-tooltip",
".post-full-content"
]
"blog.zsa.io" = [
"section.blog-article"
]
"engineering.fb.com" = [
"article"
]
"hackaday.com" = [
"div.entry-featured-image",
"div.entry-content"
]
"mitchellh.com" = [
"div.w-full"
]
"natwelch.com" = [
"article div.prose"
]
"slashdot.org" = [
"span.story-byline",
"div.p"
]
"www.redox-os.org" = [
"div.content"
]
"www.smbc-comics.com" = [
"img#cc-comic",
"div#aftercomic img"
]

5
server/build.rs Normal file
View File

@@ -0,0 +1,5 @@
// Build script for the server crate: captures build metadata (version, git
// state, timestamp) at compile time.
fn main() {
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
// and `build_info::format!` in the main program.
build_info_build::build_script();
}

10
server/sql/all-posts.sql Normal file
View File

@@ -0,0 +1,10 @@
-- Fetch every post with the columns needed to (re)build the tantivy
-- newsreader search index (see the /reindex-news-db route).
SELECT
site,
title,
summary,
link,
date,
is_read,
uid,
id
FROM post

View File

@@ -1,3 +1,6 @@
// Rocket generates a lot of warnings for handlers
// TODO: figure out why
#![allow(unreachable_patterns)]
#[macro_use]
extern crate rocket;
use std::{error::Error, io::Cursor, str::FromStr};
@@ -15,18 +18,14 @@ use rocket::{
Response, State,
};
use rocket_cors::{AllowedHeaders, AllowedOrigins};
use serde::Deserialize;
use server::{
config::Config,
error::ServerError,
graphql::{Attachment, GraphqlSchema, Mutation, QueryRoot},
nm::{attachment_bytes, cid_attachment_bytes},
};
use sqlx::postgres::PgPool;
#[derive(Deserialize)]
struct Config {
newsreader_database_url: String,
}
use tantivy::{Index, IndexWriter};
#[get("/refresh")]
async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
@@ -164,7 +163,127 @@ async fn original(
#[rocket::get("/")]
fn graphiql() -> content::RawHtml<String> {
content::RawHtml(GraphiQLSource::build().endpoint("/graphql").finish())
content::RawHtml(GraphiQLSource::build().endpoint("/api/graphql").finish())
}
/// Route that (re)creates the tantivy newsreader index from scratch.
/// DESTRUCTIVE: any existing index directory is removed first — see
/// `create_news_db_impl` for the actual work.
#[rocket::post("/create-news-db")]
fn create_news_db(config: &State<Config>) -> Result<String, Debug<ServerError>> {
create_news_db_impl(config)?;
Ok(format!(
"DB created in {}\n",
config.newsreader_tantivy_db_path
))
}
/// (Re)creates the tantivy index directory for the newsreader, wiping any
/// existing index and registering the post schema.
///
/// Called from the `/create-news-db` route and from startup when
/// `Index::open_in_dir` fails because no index exists yet — which is why the
/// initial wipe must tolerate the directory not existing at all.
fn create_news_db_impl(config: &Config) -> Result<(), ServerError> {
    // On first run the directory doesn't exist; remove_dir_all would return
    // NotFound and abort the whole creation. Ignore that case explicitly.
    match std::fs::remove_dir_all(&config.newsreader_tantivy_db_path) {
        Ok(()) => {}
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
        Err(e) => return Err(ServerError::from(e)),
    }
    std::fs::create_dir_all(&config.newsreader_tantivy_db_path).map_err(ServerError::from)?;
    use tantivy::schema::*;
    let mut schema_builder = Schema::builder();
    // STRING = indexed, untokenized; TEXT = tokenized full-text;
    // STORED = retrievable from results; FAST = columnar fast field.
    schema_builder.add_text_field("site", STRING | STORED);
    schema_builder.add_text_field("title", TEXT | STORED);
    schema_builder.add_text_field("summary", TEXT);
    schema_builder.add_text_field("link", STRING | STORED);
    schema_builder.add_date_field("date", FAST);
    schema_builder.add_bool_field("is_read", FAST);
    schema_builder.add_text_field("uid", STRING | STORED);
    schema_builder.add_i64_field("id", FAST);
    let schema = schema_builder.build();
    Index::create_in_dir(&config.newsreader_tantivy_db_path, schema).map_err(ServerError::from)?;
    Ok(())
}
/// Route that rebuilds the tantivy newsreader index from all posts in
/// Postgres. Each document's previous copy (matched by `uid`) is deleted
/// before re-adding, so the route is safe to call repeatedly.
#[rocket::post("/reindex-news-db")]
async fn reindex_news_db(
    pool: &State<PgPool>,
    config: &State<Config>,
) -> Result<String, Debug<ServerError>> {
    use tantivy::{doc, Term};
    let start_time = std::time::Instant::now();
    let pool: &PgPool = pool;
    let index =
        Index::open_in_dir(&config.newsreader_tantivy_db_path).map_err(ServerError::from)?;
    // 50 MB indexing heap — comfortably above tantivy's 15 MB minimum.
    let mut index_writer = index.writer(50_000_000).map_err(ServerError::from)?;
    let schema = index.schema();
    let site = schema.get_field("site").map_err(ServerError::from)?;
    let title = schema.get_field("title").map_err(ServerError::from)?;
    let summary = schema.get_field("summary").map_err(ServerError::from)?;
    let link = schema.get_field("link").map_err(ServerError::from)?;
    let date = schema.get_field("date").map_err(ServerError::from)?;
    let is_read = schema.get_field("is_read").map_err(ServerError::from)?;
    let uid = schema.get_field("uid").map_err(ServerError::from)?;
    let id = schema.get_field("id").map_err(ServerError::from)?;
    let rows = sqlx::query_file!("sql/all-posts.sql")
        .fetch_all(pool)
        .await
        .map_err(ServerError::from)?;
    let total = rows.len();
    for (i, r) in rows.into_iter().enumerate() {
        // Progress logging every 10k rows so long reindexes are observable.
        if i % 10_000 == 0 {
            info!(
                "{i}/{total} processed, elapsed {:.2}s",
                start_time.elapsed().as_secs_f32()
            );
        }
        // Drop any previously indexed copy of this post before re-adding it.
        let id_term = Term::from_field_text(uid, &r.uid);
        index_writer.delete_term(id_term);
        index_writer
            .add_document(doc!(
                // Placeholder strings fixed: "UNKOWN_*" -> "UNKNOWN_*" to
                // match the summary field's spelling.
                site => r.site.expect("UNKNOWN_SITE"),
                title => r.title.expect("UNKNOWN_TITLE"),
                // TODO: clean and extract text from HTML
                summary => r.summary.expect("UNKNOWN_SUMMARY"),
                link => r.link.expect("link"),
                date => tantivy::DateTime::from_primitive(r.date.expect("date")),
                is_read => r.is_read.expect("is_read"),
                uid => r.uid,
                id => r.id as i64,
            ))
            .map_err(ServerError::from)?;
    }
    index_writer.commit().map_err(ServerError::from)?;
    info!("took {:.2}s to reindex", start_time.elapsed().as_secs_f32());
    // Typo fixed: "openned" -> "opened".
    Ok(format!(
        "DB opened in {}\n",
        config.newsreader_tantivy_db_path
    ))
}
/// Debug route that runs a hard-coded query against the newsreader index and
/// returns matching documents as JSON fragments joined by spaces.
// TODO: accept the query string as a request parameter; "grapheme" is a
// placeholder used while bringing tantivy up.
#[rocket::get("/search-news-db")]
fn search_news_db(
    index: &State<tantivy::Index>,
    reader: &State<tantivy::IndexReader>,
) -> Result<String, Debug<ServerError>> {
    use tantivy::{collector::TopDocs, query::QueryParser, Document, TantivyDocument};
    let searcher = reader.searcher();
    let schema = index.schema();
    let site = schema.get_field("site").map_err(ServerError::from)?;
    let title = schema.get_field("title").map_err(ServerError::from)?;
    let summary = schema.get_field("summary").map_err(ServerError::from)?;
    // Search across site, title, and summary by default.
    let query_parser = QueryParser::for_index(&index, vec![site, title, summary]);
    let query = query_parser
        .parse_query("grapheme")
        .map_err(ServerError::from)?;
    let top_docs = searcher
        .search(&query, &TopDocs::with_limit(10))
        .map_err(ServerError::from)?;
    let mut results = vec![];
    info!("search found {} docs", top_docs.len());
    for (_score, doc_address) in top_docs {
        let retrieved_doc: TantivyDocument =
            searcher.doc(doc_address).map_err(ServerError::from)?;
        // to_json already yields a String; the format!("{}", ..) wrapper was
        // a no-op allocation.
        results.push(retrieved_doc.to_json(&schema));
    }
    Ok(results.join(" "))
}
#[rocket::get("/graphql?<query..>")]
@@ -190,6 +309,8 @@ async fn main() -> Result<(), Box<dyn Error>> {
..Default::default()
})
.unwrap();
build_info::build_info!(fn bi);
info!("Build Info: {}", shared::build_version(bi));
let allowed_origins = AllowedOrigins::all();
let cors = rocket_cors::CorsOptions {
allowed_origins,
@@ -207,6 +328,9 @@ async fn main() -> Result<(), Box<dyn Error>> {
.mount(
shared::urls::MOUNT_POINT,
routes![
create_news_db,
reindex_news_db,
search_news_db,
original,
refresh,
show_pretty,
@@ -223,14 +347,33 @@ async fn main() -> Result<(), Box<dyn Error>> {
.attach(AdHoc::config::<Config>());
let config: Config = rkt.figment().extract()?;
info!("Config:\n{config:#?}");
if !std::fs::exists(&config.slurp_cache_path)? {
info!("Creating slurp cache @ '{}'", &config.slurp_cache_path);
std::fs::create_dir_all(&config.slurp_cache_path)?;
}
let pool = PgPool::connect(&config.newsreader_database_url).await?;
let tantivy_newsreader_index = match Index::open_in_dir(&config.newsreader_tantivy_db_path) {
Ok(idx) => idx,
Err(_) => {
create_news_db_impl(&config)?;
Index::open_in_dir(&config.newsreader_tantivy_db_path)?
}
};
let tantivy_newsreader_reader = tantivy_newsreader_index.reader()?;
let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
.data(Notmuch::default())
.data(config)
.data(pool.clone())
.extension(async_graphql::extensions::Logger)
.finish();
let rkt = rkt.manage(schema).manage(pool).manage(Notmuch::default());
let rkt = rkt
.manage(schema)
.manage(pool)
.manage(Notmuch::default())
.manage(tantivy_newsreader_index)
.manage(tantivy_newsreader_reader);
//.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))
rkt.launch().await?;

23
server/src/config.rs Normal file
View File

@@ -0,0 +1,23 @@
use std::{collections::HashMap, fmt::Display, str::FromStr};
use scraper::Selector;
use serde::{de, Deserialize, Deserializer};
#[derive(Debug)]
pub struct DeSelector(pub Selector);
impl<'de> Deserialize<'de> for DeSelector {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(DeSelector(Selector::parse(&s).map_err(de::Error::custom)?))
}
}
/// Server configuration, extracted from Rocket's figment (Rocket.toml).
#[derive(Debug, Deserialize)]
pub struct Config {
// Postgres connection string for the newsreader database.
pub newsreader_database_url: String,
// Directory holding the tantivy full-text index for newsreader posts.
pub newsreader_tantivy_db_path: String,
// Directory used by FilesystemCacher for slurped page bodies.
pub slurp_cache_path: String,
// Per-host CSS selectors used to extract article content when slurping;
// keyed by (sub)string of the link's host name.
pub slurp_site_selectors: HashMap<String, Vec<DeSelector>>,
}

View File

@@ -1,6 +1,8 @@
use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error};
use mailparse::MailParseError;
use tantivy::TantivyError;
use tantivy::query::QueryParserError;
use thiserror::Error;
use crate::TransformError;
@@ -29,6 +31,10 @@ pub enum ServerError {
StringError(String),
#[error("invalid url: {0}")]
UrlParseError(#[from] url::ParseError),
#[error("tantivy error: {0}")]
TantivyError(#[from] TantivyError),
#[error("tantivy query parse error: {0}")]
QueryParseError(#[from] QueryParserError),
#[error("impossible: {0}")]
InfaillibleError(#[from] Infallible),
}

View File

@@ -8,7 +8,7 @@ use notmuch::Notmuch;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use crate::{newsreader, nm, Query};
use crate::{config::Config, newsreader, nm, Query};
/// # Number of seconds since the Epoch
pub type UnixTime = isize;
@@ -32,8 +32,26 @@ pub struct ThreadSummary {
pub tags: Vec<String>,
}
#[derive(Debug, Union)]
pub enum Thread {
Email(EmailThread),
News(NewsPost),
}
#[derive(Debug, SimpleObject)]
pub struct Thread {
pub struct NewsPost {
pub thread_id: String,
pub is_read: bool,
pub slug: String,
pub site: String,
pub title: String,
pub body: String,
pub url: String,
pub timestamp: i64,
}
#[derive(Debug, SimpleObject)]
pub struct EmailThread {
pub thread_id: String,
pub subject: String,
pub messages: Vec<Message>,
@@ -211,6 +229,10 @@ struct SearchCursor {
pub struct QueryRoot;
#[Object]
impl QueryRoot {
async fn version<'ctx>(&self, _ctx: &Context<'ctx>) -> Result<String, Error> {
build_info::build_info!(fn bi);
Ok(shared::build_version(bi))
}
async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
@@ -362,6 +384,7 @@ impl QueryRoot {
async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
let config = ctx.data_unchecked::<Config>();
let debug_content_tree = ctx
.look_ahead()
.field("messages")
@@ -370,7 +393,7 @@ impl QueryRoot {
.exists();
// TODO: look at thread_id and conditionally load newsreader
if newsreader::is_newsreader_thread(&thread_id) {
Ok(newsreader::thread(pool, thread_id).await?)
Ok(newsreader::thread(config, pool, thread_id).await?)
} else {
Ok(nm::thread(nm, thread_id, debug_content_tree).await?)
}

View File

@@ -1,41 +1,54 @@
pub mod config;
pub mod error;
pub mod graphql;
pub mod newsreader;
pub mod nm;
use std::{convert::Infallible, str::FromStr};
use std::{collections::HashMap, convert::Infallible, str::FromStr, sync::Arc};
use async_trait::async_trait;
use cacher::{Cacher, FilesystemCacher};
use css_inline::{CSSInliner, InlineError, InlineOptions};
use linkify::{LinkFinder, LinkKind};
use log::{error, info};
use lol_html::{element, errors::RewritingError, rewrite_str, text, RewriteStrSettings};
use log::{error, info, warn};
use lol_html::{
element, errors::RewritingError, html_content::ContentType, rewrite_str, text,
RewriteStrSettings,
};
use maplit::{hashmap, hashset};
use scraper::{Html, Selector};
use thiserror::Error;
use tokio::sync::Mutex;
use url::Url;
use crate::newsreader::{
extract_thread_id, is_newsreader_search, is_newsreader_thread, make_news_tag,
use crate::{
config::DeSelector,
newsreader::{extract_thread_id, is_newsreader_thread},
};
const NON_EXISTENT_SITE_NAME: &'static str = "NO-SUCH-SITE";
// TODO: figure out how to use Cow
trait Transformer {
fn should_run(&self, _html: &str) -> bool {
#[async_trait]
trait Transformer: Send + Sync {
fn should_run(&self, _addr: &Option<Url>, _html: &str) -> bool {
true
}
// TODO: should html be something like `html_escape` uses:
// <S: ?Sized + AsRef<str>>(text: &S) -> Cow<str>
fn transform(&self, html: &str) -> Result<String, TransformError>;
async fn transform(&self, addr: &Option<Url>, html: &str) -> Result<String, TransformError>;
}
// TODO: how would we make this more generic to allow good implementations of Transformer outside
// of this module?
#[derive(Error, Debug)]
pub enum TransformError {
#[error("lol-html rewrite error")]
#[error("lol-html rewrite error: {0}")]
RewritingError(#[from] RewritingError),
#[error("css inline error")]
#[error("css inline error: {0}")]
InlineError(#[from] InlineError),
#[error("failed to fetch url error: {0}")]
ReqwestError(#[from] reqwest::Error),
#[error("failed to parse HTML: {0}")]
HtmlParsingError(String),
}
struct SanitizeHtml<'a> {
@@ -43,31 +56,34 @@ struct SanitizeHtml<'a> {
base_url: &'a Option<Url>,
}
#[async_trait]
impl<'a> Transformer for SanitizeHtml<'a> {
fn transform(&self, html: &str) -> Result<String, TransformError> {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
Ok(sanitize_html(html, self.cid_prefix, self.base_url)?)
}
}
struct EscapeHtml;
#[async_trait]
impl Transformer for EscapeHtml {
fn should_run(&self, html: &str) -> bool {
fn should_run(&self, _: &Option<Url>, html: &str) -> bool {
html.contains("&")
}
fn transform(&self, html: &str) -> Result<String, TransformError> {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
Ok(html_escape::decode_html_entities(html).to_string())
}
}
struct StripHtml;
#[async_trait]
impl Transformer for StripHtml {
fn should_run(&self, html: &str) -> bool {
fn should_run(&self, _: &Option<Url>, html: &str) -> bool {
// Lame test
html.contains("<")
}
fn transform(&self, html: &str) -> Result<String, TransformError> {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
let mut text = String::new();
let element_content_handlers = vec![text!("*", |t| {
text += t.as_str();
@@ -87,8 +103,9 @@ impl Transformer for StripHtml {
struct InlineStyle;
#[async_trait]
impl Transformer for InlineStyle {
fn transform(&self, html: &str) -> Result<String, TransformError> {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
let css = concat!(
"/* chrome-default.css */\n",
include_str!("chrome-default.css"),
@@ -98,16 +115,17 @@ impl Transformer for InlineStyle {
include_str!("custom.css"),
);
let inline_opts = InlineOptions {
inline_style_tags: false,
inline_style_tags: true,
keep_style_tags: false,
keep_link_tags: false,
keep_link_tags: true,
base_url: None,
load_remote_stylesheets: false,
load_remote_stylesheets: true,
extra_css: Some(css.into()),
preallocate_node_capacity: 32,
..InlineOptions::default()
};
//info!("HTML:\n{html}");
Ok(match CSSInliner::new(inline_opts).inline(&html) {
Ok(inlined_html) => inlined_html,
Err(err) => {
@@ -118,29 +136,142 @@ impl Transformer for InlineStyle {
}
}
struct AddOutlink(Option<url::Url>);
/// Process images will extract any alt or title tags on images and place them as labels below said
/// image. It also handles data-src and data-cfsrc attributes
struct FrameImages;
impl Transformer for AddOutlink {
fn should_run(&self, html: &str) -> bool {
if let Some(link) = &self.0 {
return link.scheme().starts_with("http") && !html.contains(link.as_str());
}
false
#[async_trait]
impl Transformer for FrameImages {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
Ok(rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![
element!("img[data-src]", |el| {
let src = el
.get_attribute("data-src")
.unwrap_or("https://placehold.co/600x400".to_string());
el.set_attribute("src", &src)?;
Ok(())
}),
element!("img[data-cfsrc]", |el| {
let src = el
.get_attribute("data-cfsrc")
.unwrap_or("https://placehold.co/600x400".to_string());
el.set_attribute("src", &src)?;
Ok(())
}),
element!("img[alt], img[title]", |el| {
let src = el
.get_attribute("src")
.unwrap_or("https://placehold.co/600x400".to_string());
let alt = el.get_attribute("alt");
let title = el.get_attribute("title");
let mut frags =
vec!["<figure>".to_string(), format!(r#"<img src="{src}">"#)];
alt.map(|t| {
if !t.is_empty() {
frags.push(format!("<figcaption>Alt: {t}</figcaption>"))
}
});
title.map(|t| {
if !t.is_empty() {
frags.push(format!("<figcaption>Title: {t}</figcaption>"))
}
});
frags.push("</figure>".to_string());
el.replace(&frags.join("\n"), ContentType::Html);
Ok(())
}),
],
..RewriteStrSettings::default()
},
)?)
}
fn transform(&self, html: &str) -> Result<String, TransformError> {
if let Some(url) = &self.0 {
}
/// Appends a "View on site" link to the rendered body when the post has an
/// http(s) link that is not already present in the HTML.
struct AddOutlink;

#[async_trait]
impl Transformer for AddOutlink {
    fn should_run(&self, link: &Option<Url>, html: &str) -> bool {
        if let Some(link) = link {
            // Skip mailto:/other schemes, and don't duplicate an existing link.
            link.scheme().starts_with("http") && !html.contains(link.as_str())
        } else {
            false
        }
    }

    async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
        if let Some(link) = link {
            // NOTE(review): the diff left both the removed `url` and added
            // `link` format arguments here; only `link` belongs.
            Ok(format!(
                r#"
{html}
<div><a href="{}">View on site</a></div>
"#,
                link
            ))
        } else {
            Ok(html.to_string())
        }
    }
}
/// Transformer that replaces a feed entry's body with content slurped from
/// the linked site, extracted via per-host CSS selectors from the config.
/// Fetched pages are cached on disk through the shared `FilesystemCacher`.
struct SlurpContents<'h> {
    cacher: Arc<Mutex<FilesystemCacher>>,
    site_selectors: &'h HashMap<String, Vec<DeSelector>>,
}

impl<'h> SlurpContents<'h> {
    /// Returns the configured selectors for `link`, matching when a
    /// configured host key appears anywhere within the link's host string.
    fn get_selectors(&self, link: &Url) -> Option<&[DeSelector]> {
        let host = link.host_str()?;
        self.site_selectors
            .iter()
            .find(|(candidate, _)| host.contains(candidate.as_str()))
            .map(|(_, selectors)| selectors.as_slice())
    }
}
#[async_trait]
impl<'h> Transformer for SlurpContents<'h> {
fn should_run(&self, link: &Option<Url>, _: &str) -> bool {
if let Some(link) = link {
return self.get_selectors(link).is_some();
}
false
}
async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
let Some(link) = link else {
return Ok(html.to_string());
};
let Some(selectors) = self.get_selectors(&link) else {
return Ok(html.to_string());
};
let mut cacher = self.cacher.lock().await;
let body = if let Some(body) = cacher.get(link.as_str()) {
info!("cache hit for {link}");
String::from_utf8_lossy(&body).to_string()
} else {
let body = reqwest::get(link.as_str()).await?.text().await?;
cacher.set(link.as_str(), body.as_bytes());
body
};
let doc = Html::parse_document(&body);
let mut results = Vec::new();
for selector in selectors {
for frag in doc.select(&selector.0) {
results.push(frag.html())
// TODO: figure out how to warn if there were no hits
//warn!("couldn't find '{:?}' in {}", selector, link);
}
}
Ok(results.join(""))
}
}
pub fn linkify_html(text: &str) -> String {
let mut finder = LinkFinder::new();
let finder = finder.url_must_have_scheme(false).kinds(&[LinkKind::Url]);
@@ -174,6 +305,24 @@ pub fn sanitize_html(
cid_prefix: &str,
base_url: &Option<Url>,
) -> Result<String, TransformError> {
let inline_opts = InlineOptions {
inline_style_tags: true,
keep_style_tags: true,
keep_link_tags: false,
base_url: None,
load_remote_stylesheets: false,
extra_css: None,
preallocate_node_capacity: 32,
..InlineOptions::default()
};
let html = match CSSInliner::new(inline_opts).inline(&html) {
Ok(inlined_html) => inlined_html,
Err(err) => {
error!("failed to inline CSS: {err}");
html.to_string()
}
};
let mut element_content_handlers = vec![
// Open links in new tab
element!("a[href]", |el| {
@@ -201,6 +350,30 @@ pub fn sanitize_html(
el.set_attribute("src", &src)?;
Ok(())
}),
// Add https to href with //<domain name>
element!("link[href]", |el| {
info!("found link[href] {el:?}");
let mut href = el.get_attribute("href").expect("href was required");
if href.starts_with("//") {
warn!("adding https to {href}");
href.insert_str(0, "https:");
}
el.set_attribute("href", &href)?;
Ok(())
}),
// Add https to src with //<domain name>
element!("style[src]", |el| {
let mut src = el.get_attribute("src").expect("src was required");
if src.starts_with("//") {
src.insert_str(0, "https:");
}
el.set_attribute("src", &src)?;
Ok(())
}),
];
@@ -224,25 +397,13 @@ pub fn sanitize_html(
}),
]);
}
let inline_opts = InlineOptions {
inline_style_tags: true,
keep_style_tags: false,
keep_link_tags: false,
base_url: None,
load_remote_stylesheets: false,
extra_css: None,
preallocate_node_capacity: 32,
..InlineOptions::default()
};
let inlined_html = match CSSInliner::new(inline_opts).inline(&html) {
Ok(inlined_html) => inlined_html,
Err(err) => {
error!("failed to inline CSS: {err}");
html.to_string()
}
};
let html = rewrite_str(
&html,
RewriteStrSettings {
element_content_handlers,
..RewriteStrSettings::default()
},
)?;
// Default's don't allow style, but we want to preserve that.
// TODO: remove 'class' if rendering mails moves to a two phase process where abstract message
// types are collected, santized, and then grouped together as one big HTML doc
@@ -290,6 +451,7 @@ pub fn sanitize_html(
"hgroup",
"hr",
"i",
"iframe", // wathiede
"img",
"ins",
"kbd",
@@ -298,6 +460,7 @@ pub fn sanitize_html(
"map",
"mark",
"nav",
"noscript", // wathiede
"ol",
"p",
"pre",
@@ -351,6 +514,9 @@ pub fn sanitize_html(
"hr" => hashset![
"align", "size", "width"
],
"iframe" => hashset![
"src", "allow", "allowfullscreen"
],
"img" => hashset![
"align", "alt", "height", "src", "width"
],
@@ -386,21 +552,14 @@ pub fn sanitize_html(
],
];
let rewritten_html = rewrite_str(
&inlined_html,
RewriteStrSettings {
element_content_handlers,
..RewriteStrSettings::default()
},
)?;
let clean_html = ammonia::Builder::default()
let html = ammonia::Builder::default()
.tags(tags)
.tag_attributes(tag_attributes)
.generic_attributes(attributes)
.clean(&rewritten_html)
.clean(&html)
.to_string();
Ok(clean_html)
Ok(html)
}
fn compute_offset_limit(
@@ -479,7 +638,6 @@ impl FromStr for Query {
let mut tag = None;
let mut uid = None;
let mut remainder = Vec::new();
let site_prefix = make_news_tag("");
let mut is_notmuch = false;
let mut is_newsreader = false;
for word in s.split_whitespace() {

View File

@@ -1,21 +1,26 @@
use std::hash::{DefaultHasher, Hash, Hasher};
use std::sync::Arc;
use cacher::FilesystemCacher;
use log::info;
use maplit::hashmap;
use scraper::Selector;
use shared::compute_color;
use sqlx::postgres::PgPool;
use tokio::sync::Mutex;
use url::Url;
use crate::Query;
const TAG_PREFIX: &'static str = "News/";
const THREAD_PREFIX: &'static str = "news:";
use crate::{
compute_offset_limit,
config::Config,
error::ServerError,
graphql::{Body, Email, Html, Message, Tag, Thread, ThreadSummary},
AddOutlink, EscapeHtml, InlineStyle, SanitizeHtml, StripHtml, Transformer,
graphql::{NewsPost, Tag, Thread, ThreadSummary},
AddOutlink, EscapeHtml, FrameImages, InlineStyle, Query, SanitizeHtml, SlurpContents,
StripHtml, Transformer,
};
const TAG_PREFIX: &'static str = "News/";
const THREAD_PREFIX: &'static str = "news:";
pub fn is_newsreader_search(query: &str) -> bool {
query.contains(TAG_PREFIX)
}
@@ -89,36 +94,34 @@ pub async fn search(
.fetch_all(pool)
.await?;
Ok(rows
.into_iter()
.enumerate()
.map(|(i, r)| {
let site = r.site.unwrap_or("UNKOWN TAG".to_string());
let mut tags = vec![format!("{TAG_PREFIX}{site}")];
if !r.is_read.unwrap_or(true) {
tags.push("unread".to_string());
};
let mut title = r.title.unwrap_or("NO TITLE".to_string());
title = clean_title(&title).expect("failed to clean title");
(
i as i32 + offset,
ThreadSummary {
thread: format!("{THREAD_PREFIX}{}", r.uid),
timestamp: r
.date
.expect("post missing date")
.assume_utc()
.unix_timestamp() as isize,
date_relative: "TODO date_relative".to_string(),
matched: 0,
total: 1,
authors: r.name.unwrap_or_else(|| site.clone()),
subject: title,
tags,
},
)
})
.collect())
let mut res = Vec::new();
for (i, r) in rows.into_iter().enumerate() {
let site = r.site.unwrap_or("UNKOWN TAG".to_string());
let mut tags = vec![format!("{TAG_PREFIX}{site}")];
if !r.is_read.unwrap_or(true) {
tags.push("unread".to_string());
};
let mut title = r.title.unwrap_or("NO TITLE".to_string());
title = clean_title(&title).await.expect("failed to clean title");
res.push((
i as i32 + offset,
ThreadSummary {
thread: format!("{THREAD_PREFIX}{}", r.uid),
timestamp: r
.date
.expect("post missing date")
.assume_utc()
.unix_timestamp() as isize,
date_relative: "TODO date_relative".to_string(),
matched: 0,
total: 1,
authors: r.name.unwrap_or_else(|| site.clone()),
subject: title,
tags,
},
));
}
Ok(res)
}
pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
@@ -127,11 +130,9 @@ pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, Server
let tags = tags
.into_iter()
.map(|tag| {
let mut hasher = DefaultHasher::new();
tag.site.hash(&mut hasher);
let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
let name = format!("{TAG_PREFIX}{}", tag.site.expect("tag must have site"));
let hex = compute_color(&name);
Tag {
name,
fg_color: "white".to_string(),
@@ -143,7 +144,11 @@ pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, Server
Ok(tags)
}
pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerError> {
pub async fn thread(
config: &Config,
pool: &PgPool,
thread_id: String,
) -> Result<Thread, ServerError> {
let id = thread_id
.strip_prefix(THREAD_PREFIX)
.expect("news thread doesn't start with '{THREAD_PREFIX}'")
@@ -153,23 +158,9 @@ pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerEr
.fetch_one(pool)
.await?;
let site = r.site.unwrap_or("NO TAG".to_string());
let mut tags = vec![format!("{TAG_PREFIX}{site}")];
if r.is_read.unwrap_or(true) {
tags.push("unread".to_string());
};
let slug = r.site.unwrap_or("no-slug".to_string());
let site = r.name.unwrap_or("NO SITE".to_string());
let default_homepage = "http://no-homepage";
let homepage = Url::parse(
&r.homepage
.map(|h| {
if h.is_empty() {
default_homepage.to_string()
} else {
h
}
})
.unwrap_or(default_homepage.to_string()),
)?;
let link = &r
.link
.as_ref()
@@ -182,66 +173,50 @@ pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerEr
})
.map(|h| Url::parse(&h).ok())
.flatten();
let addr = r.link.as_ref().map(|link| {
if link.contains('@') {
link.clone()
} else {
if let Ok(url) = homepage.join(&link) {
url.to_string()
} else {
link.clone()
}
}
});
let mut body = r.summary.unwrap_or("NO SUMMARY".to_string());
// TODO: add site specific cleanups. For example:
// * Grafana does <div class="image-wrapp"><img class="lazyload>"<img src="/media/...>"</img></div>
// * Some sites appear to be HTML encoded, unencode them, i.e. imperialviolent
let mut body_tranformers: Vec<Box<dyn Transformer>> = vec![
Box::new(AddOutlink(link.clone())),
let cacher = Arc::new(Mutex::new(FilesystemCacher::new(&config.slurp_cache_path)?));
let body_tranformers: Vec<Box<dyn Transformer>> = vec![
Box::new(SlurpContents {
cacher,
site_selectors: &config.slurp_site_selectors,
}),
Box::new(FrameImages),
Box::new(AddOutlink),
Box::new(EscapeHtml),
Box::new(InlineStyle),
Box::new(SanitizeHtml {
cid_prefix: "",
base_url: &link,
}),
Box::new(InlineStyle),
];
for t in body_tranformers.iter() {
if t.should_run(&body) {
body = t.transform(&body)?;
if t.should_run(&link, &body) {
body = t.transform(&link, &body).await?;
}
}
let body = Body::Html(Html {
html: body,
content_tree: "".to_string(),
});
let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string()))?;
let from = Some(Email {
name: r.name,
addr: addr.map(|a| a.to_string()),
});
Ok(Thread {
let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
let is_read = r.is_read.unwrap_or(false);
let timestamp = r
.date
.expect("post missing date")
.assume_utc()
.unix_timestamp();
Ok(Thread::News(NewsPost {
thread_id,
subject: title.clone(),
messages: vec![Message {
id,
from,
to: Vec::new(),
cc: Vec::new(),
subject: Some(title),
timestamp: Some(
r.date
.expect("post missing date")
.assume_utc()
.unix_timestamp(),
),
headers: Vec::new(),
body,
path: "".to_string(),
attachments: Vec::new(),
tags,
}],
})
is_read,
slug,
site,
title,
body,
url: link
.as_ref()
.map(|url| url.to_string())
.unwrap_or("NO URL".to_string()),
timestamp,
}))
}
pub async fn set_read_status<'ctx>(
pool: &PgPool,
@@ -254,7 +229,7 @@ pub async fn set_read_status<'ctx>(
.await?;
Ok(true)
}
fn clean_title(title: &str) -> Result<String, ServerError> {
async fn clean_title(title: &str) -> Result<String, ServerError> {
// Make title HTML so html parsers work
let mut title = format!("<html>{title}</html>");
let title_tranformers: Vec<Box<dyn Transformer>> =
@@ -262,8 +237,8 @@ fn clean_title(title: &str) -> Result<String, ServerError> {
// Make title HTML so html parsers work
title = format!("<html>{title}</html>");
for t in title_tranformers.iter() {
if t.should_run(&title) {
title = t.transform(&title)?;
if t.should_run(&None, &title) {
title = t.transform(&None, &title).await?;
}
}
Ok(title)

View File

@@ -14,10 +14,10 @@ use crate::{
compute_offset_limit,
error::ServerError,
graphql::{
Attachment, Body, DispositionType, Email, Header, Html, Message, PlainText, Tag, Thread,
ThreadSummary, UnhandledContentType,
Attachment, Body, DispositionType, Email, EmailThread, Header, Html, Message, PlainText,
Tag, Thread, ThreadSummary, UnhandledContentType,
},
linkify_html, sanitize_html,
linkify_html, InlineStyle, SanitizeHtml, Transformer,
};
const TEXT_PLAIN: &'static str = "text/plain";
@@ -169,17 +169,29 @@ pub async fn thread(
};
Body::Html(Html {
html: format!(
r#"<p class="view-part-text-plain">{}</p>"#,
// Trim newlines to prevent excessive white space at the beginning/end of
// presenation. Leave tabs and spaces incase plain text attempts to center a
// header on the first line.
sanitize_html(
&linkify_html(&text.trim_matches('\n')),
&cid_prefix,
&base_url
)?
),
html: {
let body_tranformers: Vec<Box<dyn Transformer>> = vec![
Box::new(InlineStyle),
Box::new(SanitizeHtml {
cid_prefix: &cid_prefix,
base_url: &base_url,
}),
];
let mut html = linkify_html(&text.trim_matches('\n'));
for t in body_tranformers.iter() {
if t.should_run(&None, &html) {
html = t.transform(&None, &html).await?;
}
}
format!(
r#"<p class="view-part-text-plain">{}</p>"#,
// Trim newlines to prevent excessive white space at the beginning/end of
// presenation. Leave tabs and spaces incase plain text attempts to center a
// header on the first line.
html
)
},
content_tree: if debug_content_tree {
render_content_type_tree(&m)
} else {
@@ -187,8 +199,27 @@ pub async fn thread(
},
})
}
Body::Html(Html { html, content_tree }) => Body::Html(Html {
html: sanitize_html(&html, &cid_prefix, &base_url)?,
Body::Html(Html {
mut html,
content_tree,
}) => Body::Html(Html {
html: {
let body_tranformers: Vec<Box<dyn Transformer>> = vec![
// TODO: this breaks things like emails from calendar
//Box::new(InlineStyle),
Box::new(SanitizeHtml {
cid_prefix: &cid_prefix,
base_url: &base_url,
}),
];
for t in body_tranformers.iter() {
if t.should_run(&None, &html) {
html = t.transform(&None, &html).await?;
}
}
html
},
content_tree: if debug_content_tree {
render_content_type_tree(&m)
} else {
@@ -246,11 +277,11 @@ pub async fn thread(
.next()
.and_then(|m| m.subject.clone())
.unwrap_or("(NO SUBJECT)".to_string());
Ok(Thread {
Ok(Thread::Email(EmailThread {
thread_id,
subject,
messages,
})
}))
}
fn email_addresses(

View File

@@ -1,10 +1,11 @@
[package]
name = "shared"
version = "0.1.0"
version = "0.0.29"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
build-info = "0.0.38"
notmuch = { path = "../notmuch" }
serde = { version = "1.0.147", features = ["derive"] }

View File

@@ -1,3 +1,6 @@
use std::hash::{DefaultHasher, Hash, Hasher};
use build_info::{BuildInfo, VersionControl};
use notmuch::SearchSummary;
use serde::{Deserialize, Serialize};
@@ -33,3 +36,23 @@ pub mod urls {
}
}
}
pub fn build_version(bi: fn() -> &'static BuildInfo) -> String {
fn commit(git: &Option<VersionControl>) -> String {
let Some(VersionControl::Git(git)) = git else {
return String::new();
};
let mut s = vec!["-".to_string(), git.commit_short_id.clone()];
if let Some(branch) = &git.branch {
s.push(format!(" ({branch})"));
}
s.join("")
}
let bi = bi();
format!("v{}{}", bi.crate_info.version, commit(&bi.version_control)).to_string()
}
pub fn compute_color(data: &str) -> String {
let mut hasher = DefaultHasher::new();
data.hash(&mut hasher);
format!("#{:06x}", hasher.finish() % (1 << 24))
}

View File

@@ -1,5 +1,5 @@
[package]
version = "0.1.0"
version = "0.0.29"
name = "letterbox"
repository = "https://github.com/seed-rs/seed-quickstart"
authors = ["Bill Thiede <git@xinu.tv>"]
@@ -9,6 +9,9 @@ license = "MIT"
readme = "./README.md"
edition = "2018"
[build-dependencies]
build-info-build = "0.0.38"
[dev-dependencies]
wasm-bindgen-test = "0.3.33"
@@ -29,6 +32,7 @@ thiserror = "1.0.50"
seed_hooks = { git = "https://github.com/wathiede/styles_hooks", package = "seed_hooks", branch = "main" }
gloo-net = { version = "0.4.0", features = ["json", "serde_json"] }
human_format = "1.1.0"
build-info = "0.0.38"
[package.metadata.wasm-pack.profile.release]
wasm-opt = ['-Os']
@@ -37,7 +41,9 @@ wasm-opt = ['-Os']
version = "0.3.58"
features = [
"Clipboard",
"DomRect",
"Element",
"MediaQueryList",
"Navigator",
"Window"
"Window",
]

View File

@@ -1,8 +0,0 @@
.PHONY: all
APP=letterbox
# Build in release mode and push to minio for serving.
all:
trunk build --release
mc mirror m/$(APP)/ /tmp/$(APP)-$(shell date +%s)
mc mirror --overwrite --remove dist/ m/$(APP)/

5
web/build.rs Normal file
View File

@@ -0,0 +1,5 @@
fn main() {
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
// and `build_info::format!` in the main program.
build_info_build::build_script();
}

View File

@@ -22,4 +22,5 @@ query FrontPageQuery($query: String!, $after: String $before: String, $first: In
fgColor
unread
}
version
}

View File

@@ -290,6 +290,73 @@
"name": "Email",
"possibleTypes": null
},
{
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "threadId",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "subject",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "messages",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "Message",
"ofType": null
}
}
}
}
}
],
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"name": "EmailThread",
"possibleTypes": null
},
{
"description": "The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point).",
"enumValues": null,
@@ -791,6 +858,145 @@
"name": "Mutation",
"possibleTypes": null
},
{
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "threadId",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "isRead",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "slug",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "site",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "title",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "body",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "url",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "timestamp",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
}
}
],
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"name": "NewsPost",
"possibleTypes": null
},
{
"description": "Information about pagination in a connection",
"enumValues": null,
@@ -905,6 +1111,22 @@
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "version",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [
{
@@ -1056,7 +1278,7 @@
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"kind": "UNION",
"name": "Thread",
"ofType": null
}
@@ -1157,69 +1379,23 @@
{
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "threadId",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "subject",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "messages",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "Message",
"ofType": null
}
}
}
}
}
],
"fields": null,
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"interfaces": null,
"kind": "UNION",
"name": "Thread",
"possibleTypes": null
"possibleTypes": [
{
"kind": "OBJECT",
"name": "EmailThread",
"ofType": null
},
{
"kind": "OBJECT",
"name": "NewsPost",
"ofType": null
}
]
},
{
"description": null,

View File

@@ -1,47 +1,60 @@
query ShowThreadQuery($threadId: String!) {
thread(threadId: $threadId) {
threadId,
subject
messages {
id
subject
tags
from {
name
addr
}
to {
name
addr
}
cc {
name
addr
}
__typename ... on NewsPost{
threadId
isRead
slug
site
title
body
url
timestamp
body {
__typename
... on UnhandledContentType {
contents
contentTree
}
... on PlainText {
contents
contentTree
}
... on Html {
contents
contentTree
}
}
path
attachments {
# TODO: unread
}
__typename ... on EmailThread{
threadId,
subject
messages {
id
idx
filename
contentType
contentId
size
subject
tags
from {
name
addr
}
to {
name
addr
}
cc {
name
addr
}
timestamp
body {
__typename
... on UnhandledContentType {
contents
contentTree
}
... on PlainText {
contents
contentTree
}
... on Html {
contents
contentTree
}
}
path
attachments {
id
idx
filename
contentType
contentId
size
}
}
}
}
@@ -51,4 +64,5 @@ query ShowThreadQuery($threadId: String!) {
fgColor
unread
}
version
}

View File

@@ -1,4 +1,4 @@
DEV_HOST=localhost
DEV_PORT=9345
graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/graphql --output schema.json
graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql --output schema.json
git diff schema.json

View File

@@ -22,6 +22,7 @@
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Poppins:wght@700&display=swap" rel="stylesheet">
<link data-trunk rel="css" href="static/site-specific.css" />
</head>
<body>

View File

@@ -1,9 +1,10 @@
use std::collections::HashSet;
use graphql_client::GraphQLQuery;
use log::{error, info};
use log::{debug, error, info, warn};
use seed::{prelude::*, *};
use thiserror::Error;
use web_sys::HtmlElement;
use crate::{
api,
@@ -27,6 +28,8 @@ pub fn unread_query() -> &'static str {
// `init` describes what should happen when your app started.
pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
let version = shared::build_version(bi);
info!("Build Info: {}", version);
if url.hash().is_none() {
orders.request_url(urls::search(unread_query(), 0));
} else {
@@ -37,12 +40,20 @@ pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
// 'notmuch new' on the server periodically?
orders.stream(streams::interval(30_000, || Msg::RefreshStart));
orders.subscribe(on_url_changed);
orders.stream(streams::window_event(Ev::Scroll, |_| Msg::WindowScrolled));
build_info::build_info!(fn bi);
Model {
context: Context::None,
query: "".to_string(),
refreshing_state: RefreshingState::None,
tags: None,
read_completion_ratio: 0.,
content_el: ElRef::<HtmlElement>::default(),
versions: Version {
client: version,
server: None,
},
}
}
@@ -108,10 +119,16 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
} else {
RefreshingState::None
};
orders.perform_cmd(async move { Msg::Reload });
orders.perform_cmd(async move { Msg::Refresh });
}
Msg::Refresh => {
orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
}
Msg::Reload => {
orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
window()
.location()
.reload()
.expect("failed to reload window");
}
Msg::OnResize => (),
@@ -286,7 +303,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
.map(|t| Tag {
name: t.name,
bg_color: t.bg_color,
fg_color: t.fg_color,
unread: t.unread,
})
.collect(),
@@ -315,6 +331,9 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
pager: data.search.page_info,
selected_threads,
};
orders.send_msg(Msg::UpdateServerVersion(data.version));
// Generate signal so progress bar is reset
orders.send_msg(Msg::WindowScrolled);
}
Msg::ShowThreadRequest { thread_id } => {
@@ -336,30 +355,37 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
.map(|t| Tag {
name: t.name,
bg_color: t.bg_color,
fg_color: t.fg_color,
unread: t.unread,
})
.collect(),
);
let mut open_messages: HashSet<_> = data
.thread
.messages
.iter()
.filter(|msg| msg.tags.iter().any(|t| t == "unread"))
.map(|msg| msg.id.clone())
.collect();
if open_messages.is_empty() {
open_messages = data
.thread
.messages
.iter()
.map(|msg| msg.id.clone())
.collect();
match &data.thread {
graphql::show_thread_query::ShowThreadQueryThread::EmailThread(
ShowThreadQueryThreadOnEmailThread { messages, .. },
) => {
let mut open_messages: HashSet<_> = messages
.iter()
.filter(|msg| msg.tags.iter().any(|t| t == "unread"))
.map(|msg| msg.id.clone())
.collect();
if open_messages.is_empty() {
open_messages = messages.iter().map(|msg| msg.id.clone()).collect();
}
model.context = Context::ThreadResult {
thread: data.thread,
open_messages,
};
}
graphql::show_thread_query::ShowThreadQueryThread::NewsPost(..) => {
model.context = Context::ThreadResult {
thread: data.thread,
open_messages: HashSet::new(),
};
}
}
model.context = Context::ThreadResult {
thread: data.thread,
open_messages,
};
orders.send_msg(Msg::UpdateServerVersion(data.version));
// Generate signal so progress bar is reset
orders.send_msg(Msg::WindowScrolled);
}
Msg::ShowThreadResult(bad) => {
error!("show_thread_query error: {bad:#?}");
@@ -480,6 +506,55 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
.expect("failed to copy to clipboard");
});
}
Msg::WindowScrolled => {
if let Some(el) = model.content_el.get() {
let ih = window()
.inner_height()
.expect("window height")
.unchecked_into::<js_sys::Number>()
.value_of();
let r = el.get_bounding_client_rect();
if r.height() < ih {
// The whole content fits in the window, no scrollbar
orders.send_msg(Msg::SetProgress(0.));
return;
}
let end: f64 = r.height() - ih;
if end < 0. {
orders.send_msg(Msg::SetProgress(0.));
return;
}
// Flip Y, normally it's 0-point when the top of the content hits the top of the
// screen and goes negative from there.
let y = -r.y();
let ratio: f64 = (y / end).max(0.);
debug!(
"WindowScrolled ih {ih} end {end} ratio {ratio:.02} {}x{} @ {},{}",
r.width(),
r.height(),
r.x(),
r.y()
);
orders.send_msg(Msg::SetProgress(ratio));
} else {
orders.send_msg(Msg::SetProgress(0.));
}
}
Msg::SetProgress(ratio) => {
model.read_completion_ratio = ratio;
}
Msg::UpdateServerVersion(version) => {
if version != model.versions.client {
warn!(
"Server ({}) and client ({}) version mismatch, reloading",
version, model.versions.client
);
orders.send_msg(Msg::Reload);
}
model.versions.server = Some(version);
}
}
}
// `Model` describes our app state.
@@ -488,6 +563,15 @@ pub struct Model {
pub context: Context,
pub refreshing_state: RefreshingState,
pub tags: Option<Vec<Tag>>,
pub read_completion_ratio: f64,
pub content_el: ElRef<HtmlElement>,
pub versions: Version,
}
#[derive(Debug)]
pub struct Version {
pub client: String,
pub server: Option<String>,
}
#[derive(Error, Debug)]
@@ -521,7 +605,6 @@ pub enum Context {
pub struct Tag {
pub name: String,
pub bg_color: String,
pub fg_color: String,
pub unread: i64,
}
@@ -535,6 +618,8 @@ pub enum RefreshingState {
pub enum Msg {
Noop,
// Tell the client to refresh its state
Refresh,
// Tell the client to reload whole page from server
Reload,
// Window has changed size
OnResize,
@@ -570,6 +655,8 @@ pub enum Msg {
SelectionSetNone,
SelectionSetAll,
SelectionAddTag(String),
#[allow(dead_code)]
// TODO
SelectionRemoveTag(String),
SelectionMarkAsRead,
SelectionMarkAsUnread,
@@ -581,4 +668,8 @@ pub enum Msg {
MultiMsg(Vec<Msg>),
CopyToClipboard(String),
WindowScrolled,
SetProgress(f64),
UpdateServerVersion(String),
}

View File

@@ -1,23 +1,26 @@
use seed::{prelude::*, *};
use seed_hooks::{state_access::CloneState, topo, use_state};
use seed_hooks::topo;
use crate::{
api::urls,
graphql::show_thread_query::*,
state::{Context, Model, Msg},
view::{self, view_header, view_search_results, view_tags},
view::{self, reading_progress, view_header, view_search_results},
};
#[topo::nested]
pub(super) fn view(model: &Model) -> Node<Msg> {
log::info!("tablet::view");
let show_icon_text = true;
// Do two queries, one without `unread` so it loads fast, then a second with unread.
let content = match &model.context {
Context::None => div![h1!["Loading"]],
Context::ThreadResult {
thread,
thread: ShowThreadQueryThread::EmailThread(thread),
open_messages,
} => view::thread(thread, open_messages, show_icon_text),
} => view::thread(thread, open_messages, show_icon_text, &model.content_el),
Context::ThreadResult {
thread: ShowThreadQueryThread::NewsPost(post),
..
} => view::news_post(post, show_icon_text, &model.content_el),
Context::SearchResult {
query,
results,
@@ -35,7 +38,8 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
};
div![
C!["main-content"],
view_tags(model),
reading_progress(model.read_completion_ratio),
div![view::tags(model), view::versions(&model.versions)],
div![
view_header(&model.query, &model.refreshing_state),
content,

View File

@@ -4,23 +4,26 @@ use seed::{prelude::*, *};
use crate::{
api::urls,
graphql::front_page_query::*,
graphql::{front_page_query::*, show_thread_query::*},
state::{Context, Model, Msg},
view::{
self, human_age, pretty_authors, search_toolbar, set_title, tags_chiclet, view_header,
view_tags,
self, human_age, pretty_authors, reading_progress, search_toolbar, set_title, tags_chiclet,
view_header,
},
};
pub(super) fn view(model: &Model) -> Node<Msg> {
log::info!("tablet::view");
let show_icon_text = false;
let content = match &model.context {
Context::None => div![h1!["Loading"]],
Context::ThreadResult {
thread,
thread: ShowThreadQueryThread::EmailThread(thread),
open_messages,
} => view::thread(thread, open_messages, show_icon_text),
} => view::thread(thread, open_messages, show_icon_text, &model.content_el),
Context::ThreadResult {
thread: ShowThreadQueryThread::NewsPost(post),
..
} => view::news_post(post, show_icon_text, &model.content_el),
Context::SearchResult {
query,
results,
@@ -37,10 +40,11 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
),
};
div![
reading_progress(model.read_completion_ratio),
view_header(&model.query, &model.refreshing_state),
content,
view_header(&model.query, &model.refreshing_state),
view_tags(model),
div![view::tags(model), view::versions(&model.versions)]
]
}

View File

@@ -1,14 +1,13 @@
use std::{
collections::{hash_map::DefaultHasher, HashSet},
hash::{Hash, Hasher},
};
use std::collections::HashSet;
use chrono::{DateTime, Datelike, Duration, Local, Utc};
use human_format::{Formatter, Scales};
use itertools::Itertools;
use log::{error, info};
use log::{debug, error, info};
use seed::{prelude::*, *};
use seed_hooks::{state_access::CloneState, topo, use_state};
use shared::compute_color;
use web_sys::HtmlElement;
use crate::{
api::urls,
@@ -28,12 +27,6 @@ fn set_title(title: &str) {
seed::document().set_title(&format!("lb: {}", title));
}
fn compute_color(data: &str) -> String {
let mut hasher = DefaultHasher::new();
data.hash(&mut hasher);
format!("#{:06x}", hasher.finish() % (1 << 24))
}
fn tags_chiclet(tags: &[String], is_mobile: bool) -> impl Iterator<Item = Node<Msg>> + '_ {
tags.iter().map(move |tag| {
let hex = compute_color(tag);
@@ -394,9 +387,9 @@ macro_rules! implement_email {
}
implement_email!(
ShowThreadQueryThreadMessagesTo,
ShowThreadQueryThreadMessagesCc,
ShowThreadQueryThreadMessagesFrom
ShowThreadQueryThreadOnEmailThreadMessagesTo,
ShowThreadQueryThreadOnEmailThreadMessagesCc,
ShowThreadQueryThreadOnEmailThreadMessagesFrom
);
fn raw_text_message(contents: &str) -> Node<Msg> {
@@ -467,13 +460,13 @@ fn render_avatar(avatar: Option<String>, from: &str) -> Node<Msg> {
}
}
fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
fn render_open_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<Msg> {
let (from, from_detail) = match &msg.from {
Some(ShowThreadQueryThreadMessagesFrom {
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
name: Some(name),
addr,
}) => (name.to_string(), addr.clone()),
Some(ShowThreadQueryThreadMessagesFrom {
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
addr: Some(addr), ..
}) => (addr.to_string(), None),
_ => (String::from("UNKNOWN"), None),
@@ -516,15 +509,15 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
if i>0 { ", " }else { "" },
{
let to = match to {
ShowThreadQueryThreadMessagesTo {
ShowThreadQueryThreadOnEmailThreadMessagesTo {
name: Some(name),
addr:Some(addr),
} => format!("{name} <{addr}>"),
ShowThreadQueryThreadMessagesTo {
ShowThreadQueryThreadOnEmailThreadMessagesTo {
name: Some(name),
addr:None
} => format!("{name}"),
ShowThreadQueryThreadMessagesTo {
ShowThreadQueryThreadOnEmailThreadMessagesTo {
addr: Some(addr), ..
} => format!("{addr}"),
_ => String::from("UNKNOWN"),
@@ -553,15 +546,15 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
if i>0 { ", " }else { "" },
{
let cc = match cc {
ShowThreadQueryThreadMessagesCc {
ShowThreadQueryThreadOnEmailThreadMessagesCc {
name: Some(name),
addr:Some(addr),
} => format!("{name} <{addr}>"),
ShowThreadQueryThreadMessagesCc {
ShowThreadQueryThreadOnEmailThreadMessagesCc {
name: Some(name),
addr:None
} => format!("{name}"),
ShowThreadQueryThreadMessagesCc {
ShowThreadQueryThreadOnEmailThreadMessagesCc {
addr: Some(addr), ..
} => format!("<{addr}>"),
_ => String::from("UNKNOWN"),
@@ -609,12 +602,12 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
]
}
fn render_closed_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
fn render_closed_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<Msg> {
let from: String = match &msg.from {
Some(ShowThreadQueryThreadMessagesFrom {
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
name: Some(name), ..
}) => name.to_string(),
Some(ShowThreadQueryThreadMessagesFrom {
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
addr: Some(addr), ..
}) => addr.to_string(),
_ => String::from("UNKNOWN"),
@@ -683,7 +676,7 @@ fn render_closed_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
]
}
fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg> {
fn message_render(msg: &ShowThreadQueryThreadOnEmailThreadMessages, open: bool) -> Node<Msg> {
let expand_id = msg.id.clone();
div![
C!["message"],
@@ -705,18 +698,18 @@ fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg>
],
IF!(open =>
div![
C!["body"],
C!["body", "mail"],
match &msg.body {
ShowThreadQueryThreadMessagesBody::UnhandledContentType(
ShowThreadQueryThreadMessagesBodyOnUnhandledContentType { contents ,content_tree},
ShowThreadQueryThreadOnEmailThreadMessagesBody::UnhandledContentType(
ShowThreadQueryThreadOnEmailThreadMessagesBodyOnUnhandledContentType { contents ,content_tree},
) => div![
raw_text_message(&contents),
div![C!["error"],
view_content_tree(&content_tree),
]
],
ShowThreadQueryThreadMessagesBody::PlainText(
ShowThreadQueryThreadMessagesBodyOnPlainText {
ShowThreadQueryThreadOnEmailThreadMessagesBody::PlainText(
ShowThreadQueryThreadOnEmailThreadMessagesBodyOnPlainText {
contents,
content_tree,
},
@@ -724,8 +717,8 @@ fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg>
raw_text_message(&contents),
view_content_tree(&content_tree),
],
ShowThreadQueryThreadMessagesBody::Html(
ShowThreadQueryThreadMessagesBodyOnHtml {
ShowThreadQueryThreadOnEmailThreadMessagesBody::Html(
ShowThreadQueryThreadOnEmailThreadMessagesBodyOnHtml {
contents,
content_tree,
},
@@ -792,9 +785,10 @@ fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg>
#[topo::nested]
fn thread(
thread: &ShowThreadQueryThread,
thread: &ShowThreadQueryThreadOnEmailThread,
open_messages: &HashSet<String>,
show_icon_text: bool,
content_el: &ElRef<HtmlElement>,
) -> Node<Msg> {
// TODO(wathiede): show per-message subject if it changes significantly from top-level subject
let subject = if thread.subject.is_empty() {
@@ -867,7 +861,7 @@ fn thread(
],
],
],
messages,
div![el_ref(content_el), messages, click_to_top()],
/* TODO(wathiede): plumb in orignal id
a![
attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
@@ -976,7 +970,7 @@ pub fn view(model: &Model) -> Node<Msg> {
_ => div![C!["desktop"], desktop::view(model)],
},]
}
pub fn view_tags(model: &Model) -> Node<Msg> {
pub fn tags(model: &Model) -> Node<Msg> {
fn view_tag_li(display_name: &str, indent: usize, t: &Tag, search_unread: bool) -> Node<Msg> {
let href = if search_unread {
urls::search(&format!("is:unread tag:{}", t.name), 0)
@@ -1028,7 +1022,6 @@ pub fn view_tags(model: &Model) -> Node<Msg> {
&Tag {
name: parts[..i + 1].join("/"),
bg_color: "#fff".to_string(),
fg_color: "#000".to_string(),
unread: 0,
},
search_unread,
@@ -1075,3 +1068,161 @@ pub fn view_tags(model: &Model) -> Node<Msg> {
]
]
}
fn news_post(
post: &ShowThreadQueryThreadOnNewsPost,
show_icon_text: bool,
content_el: &ElRef<HtmlElement>,
) -> Node<Msg> {
// TODO(wathiede): show per-message subject if it changes significantly from top-level subject
let subject = &post.title;
set_title(subject);
let read_thread_id = post.thread_id.clone();
let unread_thread_id = post.thread_id.clone();
div![
C!["thread"],
h3![C!["is-size-5"], subject],
div![
C!["level", "is-mobile"],
div![
C!["level-item"],
div![
C!["buttons", "has-addons"],
button![
C!["button", "mark-read"],
attrs! {At::Title => "Mark as read"},
span![C!["icon", "is-small"], i![C!["far", "fa-envelope-open"]]],
IF!(show_icon_text=>span!["Read"]),
ev(Ev::Click, move |_| Msg::SetUnread(read_thread_id, false)),
],
button![
C!["button", "mark-unread"],
attrs! {At::Title => "Mark as unread"},
span![C!["icon", "is-small"], i![C!["far", "fa-envelope"]]],
IF!(show_icon_text=>span!["Unread"]),
ev(Ev::Click, move |_| Msg::SetUnread(unread_thread_id, true)),
],
],
],
// This would be the holder for spam buttons on emails, needed to keep layout
// consistent
div![C!["level-item"], div![]]
],
div![
C!["message"],
div![C!["header"], render_news_post_header(&post)],
div![
C!["body", "news-post", format!("site-{}", post.slug)],
el_ref(content_el),
raw![&post.body]
]
],
/* TODO(wathiede): plumb in orignal id
a![
attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
"Original"
],
*/
click_to_top(),
]
}
/// Renders the header for a news post: avatar placeholder, site name, source
/// URL with a copy-to-clipboard affordance, post age, and a read/unread
/// indicator that toggles state when clicked.
fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg> {
    let site_name = &post.site;
    let url_text = post.url.clone();
    // No avatar source for news posts yet; render_avatar falls back to a default.
    let avatar: Option<String> = None;
    let id = post.thread_id.clone();
    let is_unread = !post.is_read;
    let img = render_avatar(avatar, &site_name);
    // Envelope state mirrors the mail views: closed envelope == unread.
    let envelope_icon = if is_unread {
        "fa-envelope"
    } else {
        "fa-envelope-open"
    };
    article![
        C!["media"],
        figure![C!["media-left"], p![C!["image", "is-64x64"], img]],
        div![
            C!["media-content"],
            div![
                C!["content"],
                p![
                    strong![site_name],
                    br![],
                    small![
                        &url_text,
                        " ",
                        span![
                            i![C!["far", "fa-clone"]],
                            // Stop propagation so the copy click does not also
                            // toggle read state via outer handlers.
                            ev(Ev::Click, move |e| {
                                e.stop_propagation();
                                Msg::CopyToClipboard(url_text.to_string())
                            })
                        ]
                    ],
                    table![tr![td![
                        attrs! {At::ColSpan=>2},
                        span![C!["header"], human_age(post.timestamp)]
                    ]]],
                ],
            ],
        ],
        div![
            C!["media-right"],
            span![C!["read-status"], i![C!["far", envelope_icon]]],
            ev(Ev::Click, move |e| {
                e.stop_propagation();
                Msg::SetUnread(id, !is_unread)
            })
        ]
    ]
}
/// Renders the thin fixed progress bar showing how far through the current
/// thread/post the user has scrolled. `ratio` is in [0, 1]; the bar hides
/// itself below 1% so it does not flicker at the top of a page.
fn reading_progress(ratio: f64) -> Node<Msg> {
    let percent = 100. * ratio;
    let effectively_zero = percent < 1.;
    progress![
        C![
            "read-progress",
            "progress",
            "is-success",
            "is-small",
            IF!(effectively_zero => "is-invisible")
        ],
        attrs! {
            At::Value=>percent,
            At::Max=>"100"
        },
        // Fallback text for browsers that do not render <progress>.
        format!("{percent}%")
    ]
}
pub fn versions(versions: &crate::state::Version) -> Node<Msg> {
debug!("versions {versions:?}");
aside![
C!["tags-menu", "menu"],
p![C!["menu-label"], "Versions"],
ul![
C!["menu-list"],
li!["Client"],
li![span![C!["tag-indent"], &versions.client]]
],
versions.server.as_ref().map(|v| ul![
C!["menu-list"],
li!["Server"],
li![span![C!["tag-indent"], v]]
])
]
}
fn click_to_top() -> Node<Msg> {
button![
C!["button", "is-danger", "is-small"],
span!["Top"],
span![C!["icon"], i![C!["fas", "fa-arrow-turn-up"]]],
ev(Ev::Click, move |_| web_sys::window()
.unwrap()
.scroll_to_with_x_and_y(0., 0.))
]
}

View File

@@ -1,20 +1,24 @@
use seed::{prelude::*, *};
use crate::{
graphql::show_thread_query::*,
state::{Context, Model, Msg},
view::{self, view_header, view_search_results, view_tags},
view::{self, reading_progress, view_header, view_search_results},
};
pub(super) fn view(model: &Model) -> Node<Msg> {
log::info!("tablet::view");
let show_icon_text = false;
// Do two queries, one without `unread` so it loads fast, then a second with unread.
let content = match &model.context {
Context::None => div![h1!["Loading"]],
Context::ThreadResult {
thread,
thread: ShowThreadQueryThread::EmailThread(thread),
open_messages,
} => view::thread(thread, open_messages, show_icon_text),
} => view::thread(thread, open_messages, show_icon_text, &model.content_el),
Context::ThreadResult {
thread: ShowThreadQueryThread::NewsPost(post),
..
} => view::news_post(post, show_icon_text, &model.content_el),
Context::SearchResult {
query,
results,
@@ -33,10 +37,12 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
div![
C!["main-content"],
div![
reading_progress(model.read_completion_ratio),
view_header(&model.query, &model.refreshing_state),
content,
view_header(&model.query, &model.refreshing_state),
view_tags(model),
view::tags(model),
view::versions(&model.versions)
]
]
}

View File

@@ -0,0 +1,55 @@
/* Styling for rendered news-post bodies (remote HTML injected into .body).
 * Rules mostly neutralize site/framework styles that clash with the app theme. */

/* Force caption text to follow the app's text color, overriding inline styles. */
.body figcaption {
  color: var(--color-text) !important;
}
/* Restore plain italic emphasis; some feeds ship <em> with borders/spacing. */
.body.news-post em {
  border: 0 !important;
  font-style: italic;
  margin: inherit !important;
  padding: inherit !important;
}
/* Reset ".number" badge styling (e.g. from Bulma or the source site) back to
 * inherited defaults so numbers render as ordinary inline text. */
.body.news-post .number {
  align-items: inherit;
  background-color: inherit;
  border-radius: inherit;
  display: inherit;
  font-size: inherit;
  height: inherit;
  justify-content: inherit;
  margin-right: inherit;
  min-width: inherit;
  padding: inherit;
  text-align: inherit;
  vertical-align: inherit;
}
/* Per-site fix: center SMBC comic images in a vertical stack. */
.body.news-post.site-saturday-morning-breakfast-cereal {
  display: flex;
  align-items: center;
  justify-content: center;
  flex-direction: column;
}
/* Per-site fix: Slashdot wraps quotes in <i>; render them as block quotes
 * with a left rule instead of italics. */
.body.news-post.site-slashdot i {
  border-left: 2px solid #ddd;
  display: block;
  font-style: normal !important;
  margin-bottom: 1em;
  margin-top: 1em;
  padding-left: 1em;
}
/* Per-site fix: Redox news headings ship hard-coded colors; use theme text color. */
.body.news-post.site-news-on-redox-your-next-gen-os h1,
.body.news-post.site-news-on-redox-your-next-gen-os h2,
.body.news-post.site-news-on-redox-your-next-gen-os h3,
.body.news-post.site-news-on-redox-your-next-gen-os h4,
.body.news-post.site-news-on-redox-your-next-gen-os h5 {
  color: var(--color-text) !important;
}
/* Mail bodies: keep code/pre readable against the secondary background. */
.body.mail code,
.body.mail pre {
  color: var(--color-text);
  background-color: var(--color-bg-secondary);
}

View File

@@ -18,9 +18,11 @@
--justify-important: center;
--justify-normal: left;
--line-height: 1.5;
/*
--width-card: 285px;
--width-card-medium: 460px;
--width-card-wide: 800px;
*/
--width-content: 1080px;
}
@@ -341,3 +343,14 @@ display: none;
/* Spam action button: red text to signal a destructive/flagging action. */
.button.spam {
  color: #f00;
}
/* Reading-progress bar: pinned to the very top of the viewport, above other
 * chrome (z-index 999), square-cornered so it spans edge to edge. */
progress.read-progress {
  border-radius: 0;
  position: fixed;
  top: 0;
  z-index: 999;
}
/* Keep the bar thin (overrides Bulma's .is-small progress height). */
progress.read-progress.is-small {
  height: .25rem;
}