Compare commits


27 Commits

SHA1 Message Date
39bef1ea87 WIP 2023-07-15 16:58:15 -07:00
458bd356dd cargo update 2023-07-15 16:58:05 -07:00
8420e4b4d3 cargo update to get around compiler error. 2023-07-02 09:02:07 -07:00
8b04bd8059 Remove 'Pretty' variants. 2023-07-01 09:14:31 -07:00
fc83d56c0c Refactor, moving API and notmuch code into submodule.
This is on the path to make the UI more simple and move email parsing to
server side.
2023-07-01 09:03:35 -07:00
f8e86dc5cc Specific resolver in root workspace per cargo warning. 2023-07-01 09:03:06 -07:00
72622032ad web: add pagination to bottom of search results 2023-04-12 07:34:30 -07:00
ec1a12ca11 Change to containing directory before running. 2023-04-12 07:33:44 -07:00
f5f4d666d5 Address lint. 2023-03-19 20:48:39 -07:00
7bfef154d9 Add rule to preserve unread tag when run with --remove-all. 2023-03-19 20:42:35 -07:00
fbe7dade54 Quote notmuch rules to account for spaces. 2023-03-19 20:09:26 -07:00
321eca38e2 Fix test 2023-03-13 21:02:32 -07:00
1a86204561 Cargo update 2023-03-13 20:58:44 -07:00
fd721c53d8 Use <a> instead of event handlers for functioning history. 2023-03-13 20:49:35 -07:00
4390d24492 Add a bit more structure to searching. 2023-03-11 21:15:59 -08:00
cb8b00f8d1 Add glog dependency. 2023-03-11 21:15:35 -08:00
eba362a7f2 Add pagination to search results.
Move to shared definition of json requests between client/server.
2023-03-09 18:04:55 -08:00
f16860dd09 Update watch paths. 2023-03-09 18:04:36 -08:00
e7a0e5b662 Fix clicking on subjects. 2023-03-08 19:59:40 -08:00
371a8b98eb Default message body to B&W 2023-03-08 19:58:44 -08:00
57ded3c076 Move tags file to dotfiles repo. 2023-03-08 19:58:25 -08:00
332225b9d4 procmail2notmuch: remove unprocessed when rule applies. 2023-03-08 19:31:51 -08:00
0d0a9d88ae Add tool to convert procmailrc into notmuch tag file. 2023-03-06 09:15:37 -08:00
24cf7d12f1 Initial batch tag file. 2023-03-05 18:42:50 -08:00
6adb567cd6 Change up site CSS and inline messages' CSS 2023-03-05 18:42:14 -08:00
9e4b97e2e5 Comment on how to run ignored tests. 2023-03-05 18:41:59 -08:00
e17751a992 Release build frontend, or it won't work. 2023-03-05 18:40:51 -08:00
16 changed files with 1780 additions and 1076 deletions

Cargo.lock (generated, 1653 changed lines): diff suppressed because it is too large.

@@ -1,8 +1,11 @@
 [workspace]
+resolver = "2"
 members = [
     "web",
     "server",
     "notmuch",
+    "procmail2notmuch",
+    "shared"
 ]

 [profile.release]

dev.sh (5 changed lines)

@@ -1,6 +1,7 @@
+cd -- "$( dirname -- "${BASH_SOURCE[0]}" )"
 tmux new-session -d -s letterbox-dev
 tmux rename-window web
-tmux send-keys "cd web; trunk serve --address 0.0.0.0 --port 6758 --proxy-backend http://localhost:9345/ --proxy-rewrite=/api/" C-m
+tmux send-keys "cd web; trunk serve --release --address 0.0.0.0 --port 6758 --proxy-backend http://localhost:9345/ --proxy-rewrite=/api/ -w ../shared -w ../notmuch -w ./" C-m
 tmux new-window -n server
-tmux send-keys "cd server; cargo watch -x run" C-m
+tmux send-keys "cd server; cargo watch -x run -w ../shared -w ../notmuch -w ./" C-m
 tmux attach -d -t letterbox-dev


@@ -478,8 +478,19 @@ impl Notmuch {
         self.run_notmuch(std::iter::empty::<&str>())
     }

-    pub fn search(&self, query: &str) -> Result<SearchSummary, NotmuchError> {
-        let res = self.run_notmuch(["search", "--format=json", "--limit=20", query])?;
+    pub fn search(
+        &self,
+        query: &str,
+        offset: usize,
+        limit: usize,
+    ) -> Result<SearchSummary, NotmuchError> {
+        let res = self.run_notmuch([
+            "search",
+            "--format=json",
+            &format!("--offset={offset}"),
+            &format!("--limit={limit}"),
+            query,
+        ])?;
         Ok(serde_json::from_slice(&res)?)
     }

@@ -597,7 +608,7 @@ mod tests {
     fn search() -> Result<(), NotmuchError> {
         let nm = Notmuch::with_config("testdata/notmuch.config");
         nm.new()?;
-        let res = nm.search("goof")?;
+        let res = nm.search("goof", 0, 100)?;
         assert_eq!(res.0.len(), 1);
         Ok(())
     }
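A minimal caller-side sketch of the new signature (the query and page size are illustrative; with_config and the .0 accessor mirror the test above). The server later computes the offset the same way, as page * results_per_page:

use notmuch::{Notmuch, NotmuchError};

// Illustrative only: fetch the third page of 20 results for a query.
fn third_page() -> Result<(), NotmuchError> {
    let nm = Notmuch::with_config("testdata/notmuch.config");
    let (page, per_page) = (2, 20);
    // offset = page * per_page, limit = per_page.
    let summary = nm.search("tag:inbox", page * per_page, per_page)?;
    println!("{} thread summaries on this page", summary.0.len());
    Ok(())
}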


@@ -5,12 +5,11 @@ use std::{
 };

 use itertools::Itertools;
-use notmuch::{Notmuch, NotmuchError, SearchSummary, ThreadSet};
 use rayon::iter::{ParallelBridge, ParallelIterator};
+use notmuch::{Notmuch, NotmuchError, SearchSummary, ThreadSet};

 #[test]
-#[ignore] // it is too expensive
+#[ignore] // it is too expensive, run with `cargo test -- --ignored`
 fn parse_one() -> Result<(), Box<dyn Error>> {
     // take_hook() returns the default hook in case when a custom one is not set
     let orig_hook = std::panic::take_hook();


@@ -0,0 +1,9 @@
[package]
name = "procmail2notmuch"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.69"


@@ -0,0 +1,255 @@
use std::{convert::Infallible, io::Write, str::FromStr};
#[derive(Debug, Default)]
enum MatchType {
From,
Sender,
To,
Cc,
Subject,
List,
DeliveredTo,
XForwardedTo,
ReplyTo,
XOriginalTo,
XSpam,
Body,
#[default]
Unknown,
}
#[derive(Debug, Default)]
struct Match {
match_type: MatchType,
needle: String,
}
#[derive(Debug, Default)]
struct Rule {
matches: Vec<Match>,
tags: Vec<String>,
}
fn unescape(s: &str) -> String {
s.replace('\\', "")
}
fn cleanup_match(prefix: &str, s: &str) -> String {
unescape(&s[prefix.len()..]).replace(".*", "")
}
mod matches {
pub const TO: &'static str = "TO";
pub const CC: &'static str = "Cc";
pub const TOCC: &'static str = "(TO|Cc)";
pub const FROM: &'static str = "From";
pub const SENDER: &'static str = "Sender";
pub const SUBJECT: &'static str = "Subject";
pub const DELIVERED_TO: &'static str = "Delivered-To";
pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
pub const REPLY_TO: &'static str = "Reply-To";
pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
pub const LIST_ID: &'static str = "List-ID";
pub const X_SPAM: &'static str = "X-Spam";
pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
}
impl FromStr for Match {
type Err = Infallible;
fn from_str(s: &str) -> Result<Self, Self::Err> {
// Examples:
// "* 1^0 ^TOsonyrewards.com@xinu.tv"
// "* ^TOsonyrewards.com@xinu.tv"
let mut it = s.split_whitespace().skip(1);
let mut needle = it.next().unwrap();
if needle == "1^0" {
needle = it.next().unwrap();
}
let mut needle = vec![needle];
needle.extend(it);
let needle = needle.join(" ");
let first = needle.chars().nth(0).unwrap_or(' ');
use matches::*;
if first == '^' {
let needle = &needle[1..];
if needle.starts_with(TO) {
return Ok(Match {
match_type: MatchType::To,
needle: cleanup_match(TO, needle),
});
} else if needle.starts_with(FROM) {
return Ok(Match {
match_type: MatchType::From,
needle: cleanup_match(FROM, needle),
});
} else if needle.starts_with(CC) {
return Ok(Match {
match_type: MatchType::Cc,
needle: cleanup_match(CC, needle),
});
} else if needle.starts_with(TOCC) {
return Ok(Match {
match_type: MatchType::To,
needle: cleanup_match(TOCC, needle),
});
} else if needle.starts_with(SENDER) {
return Ok(Match {
match_type: MatchType::Sender,
needle: cleanup_match(SENDER, needle),
});
} else if needle.starts_with(SUBJECT) {
return Ok(Match {
match_type: MatchType::Subject,
needle: cleanup_match(SUBJECT, needle),
});
} else if needle.starts_with(X_ORIGINAL_TO) {
return Ok(Match {
match_type: MatchType::XOriginalTo,
needle: cleanup_match(X_ORIGINAL_TO, needle),
});
} else if needle.starts_with(LIST_ID) {
return Ok(Match {
match_type: MatchType::List,
needle: cleanup_match(LIST_ID, needle),
});
} else if needle.starts_with(REPLY_TO) {
return Ok(Match {
match_type: MatchType::ReplyTo,
needle: cleanup_match(REPLY_TO, needle),
});
} else if needle.starts_with(X_SPAM_FLAG) {
return Ok(Match {
match_type: MatchType::XSpam,
needle: '*'.to_string(),
});
} else if needle.starts_with(X_SPAM) {
return Ok(Match {
match_type: MatchType::XSpam,
needle: '*'.to_string(),
});
} else if needle.starts_with(DELIVERED_TO) {
return Ok(Match {
match_type: MatchType::DeliveredTo,
needle: cleanup_match(DELIVERED_TO, needle),
});
} else if needle.starts_with(X_FORWARDED_TO) {
return Ok(Match {
match_type: MatchType::XForwardedTo,
needle: cleanup_match(X_FORWARDED_TO, needle),
});
} else {
unreachable!("needle: '{needle}'")
}
} else {
return Ok(Match {
match_type: MatchType::Body,
needle: cleanup_match("", &needle),
});
}
}
}
fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()> {
// TODO(wathiede): if reindexing this many tags is too slow, see if combining rules per tag is
// faster.
let mut lines = Vec::new();
for r in rules {
for m in &r.matches {
for t in &r.tags {
if let MatchType::Unknown = m.match_type {
eprintln!("rule has unknown match {:?}", r);
continue;
}
let rule = match m.match_type {
MatchType::From => "from:",
// TODO(wathiede): something more specific?
MatchType::Sender => "from:",
MatchType::To => "to:",
MatchType::Cc => "to:",
MatchType::Subject => "subject:",
MatchType::List => "List-ID:",
MatchType::Body => "",
// TODO(wathiede): these will probably require adding fields to notmuch
// index. Handle them later.
MatchType::DeliveredTo
| MatchType::XForwardedTo
| MatchType::ReplyTo
| MatchType::XOriginalTo
| MatchType::XSpam => continue,
MatchType::Unknown => unreachable!(),
};
// Preserve unread status if run with --remove-all
lines.push(format!(
r#"-unprocessed +{} +unread -- is:unread tag:unprocessed {}"{}""#,
t, rule, m.needle
));
lines.push(format!(
// TODO(wathiede): this assumes `notmuch new` is configured to add
// `tag:unprocessed` to all new mail.
r#"-unprocessed +{} -- tag:unprocessed {}"{}""#,
t, rule, m.needle
));
}
}
}
lines.sort();
for l in lines {
writeln!(w, "{l}")?;
}
Ok(())
}
fn main() -> anyhow::Result<()> {
let input = "/home/wathiede/dotfiles/procmailrc";
let mut rules = Vec::new();
let mut cur_rule = Rule::default();
for l in std::fs::read_to_string(input)?.lines() {
let l = if let Some(idx) = l.find('#') {
&l[..idx]
} else {
l
}
.trim();
if l.is_empty() {
continue;
}
if l.find('=').is_some() {
// Probably a variable assignment, skip line
continue;
}
let first = l.chars().nth(0).unwrap_or(' ');
match first {
':' => {
// start of rule
}
'*' => {
// add to current rule
let m: Match = l.parse()?;
cur_rule.matches.push(m);
}
'.' => {
// delivery to folder
cur_rule.tags.push(cleanup_match(
"",
&l.replace('.', "/")
.replace(' ', "")
.trim_matches('/')
.to_string(),
));
rules.push(cur_rule);
cur_rule = Rule::default();
}
'|' => cur_rule = Rule::default(), // external command
'$' => {
// TODO(wathiede): tag messages with no other tag as 'inbox'
cur_rule.tags.push(cleanup_match("", "inbox"));
rules.push(cur_rule);
cur_rule = Rule::default();
} // variable, should only be $DEFAULT in my config
_ => panic!("Unhandled first character '{}' {}", first, l),
}
}
notmuch_from_rules(std::io::stdout(), &rules)?;
Ok(())
}
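To illustrate the generated format (the To address comes from the parser's own examples; the shopping folder name is hypothetical), a procmail recipe such as

:0:
* ^TOsonyrewards.com@xinu.tv
.shopping

becomes two lines in the emitted notmuch batch-tag file, one that preserves unread status for --remove-all runs and one that does not:

-unprocessed +shopping +unread -- is:unread tag:unprocessed to:"sonyrewards.com@xinu.tv"
-unprocessed +shopping -- tag:unprocessed to:"sonyrewards.com@xinu.tv"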


@@ -9,11 +9,13 @@ edition = "2021"
 rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
 rocket_cors = { git = "https://github.com/lawliet89/rocket_cors", branch = "master" }
 notmuch = { path = "../notmuch" }
+shared = { path = "../shared" }
 serde_json = "1.0.87"
 thiserror = "1.0.37"
 serde = { version = "1.0.147", features = ["derive"] }
 log = "0.4.17"
 tokio = "1.26.0"
+glog = "0.1.0"

 [dependencies.rocket_contrib]
 version = "0.4.11"


@@ -1,9 +1,12 @@
 #[macro_use]
 extern crate rocket;

+mod error;
+mod nm;
+
 use std::{error::Error, io::Cursor, str::FromStr};

-use notmuch::{Notmuch, NotmuchError, SearchSummary, ThreadSet};
+use glog::Flags;
+use notmuch::{Notmuch, NotmuchError};
 use rocket::{
     http::{ContentType, Header},
     request::Request,
@@ -13,6 +16,8 @@ use rocket::{
 };
 use rocket_cors::{AllowedHeaders, AllowedOrigins};

+use crate::error::ServerError;
+
 #[get("/")]
 fn hello() -> &'static str {
     "Hello, world!"
@@ -22,32 +27,40 @@ fn hello() -> &'static str {
 async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
     Ok(Json(String::from_utf8_lossy(&nm.new()?).to_string()))
 }

 #[get("/search")]
-async fn search_all(nm: &State<Notmuch>) -> Result<Json<SearchSummary>, Debug<NotmuchError>> {
-    search(nm, "*").await
+async fn search_all(
+    nm: &State<Notmuch>,
+) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
+    search(nm, "*", None, None).await
 }

-#[get("/search/<query>")]
+#[get("/search/<query>?<page>&<results_per_page>")]
 async fn search(
     nm: &State<Notmuch>,
     query: &str,
-) -> Result<Json<SearchSummary>, Debug<NotmuchError>> {
-    let res = nm.search(query)?;
-    Ok(Json(res))
-}
-
-#[get("/show/<query>/pretty")]
-async fn show_pretty(
-    nm: &State<Notmuch>,
-    query: &str,
-) -> Result<Json<ThreadSet>, Debug<NotmuchError>> {
-    let res = nm.show(query)?;
+    page: Option<usize>,
+    results_per_page: Option<usize>,
+) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
+    let page = page.unwrap_or(0);
+    let results_per_page = results_per_page.unwrap_or(10);
+    info!(" search '{query}'");
+    let res = shared::SearchResult {
+        summary: nm.search(query, page * results_per_page, results_per_page)?,
+        query: query.to_string(),
+        page,
+        results_per_page,
+        total: nm.count(query)?,
+    };
     Ok(Json(res))
 }

 #[get("/show/<query>")]
-async fn show(nm: &State<Notmuch>, query: &str) -> Result<Json<ThreadSet>, Debug<NotmuchError>> {
-    let res = nm.show(query)?;
+async fn show(
+    nm: &State<Notmuch>,
+    query: &str,
+) -> Result<Json<Vec<shared::Message>>, Debug<ServerError>> {
+    let res = nm::threadset_to_messages(nm.show(query).map_err(|e| -> ServerError { e.into() })?)?;
     Ok(Json(res))
 }
@@ -107,6 +120,14 @@ async fn original(
 #[rocket::main]
 async fn main() -> Result<(), Box<dyn Error>> {
+    glog::new()
+        .init(Flags {
+            colorlogtostderr: true,
+            //alsologtostderr: true, // use logtostderr to only write to stderr and not to files
+            logtostderr: true,
+            ..Default::default()
+        })
+        .unwrap();
     let allowed_origins = AllowedOrigins::all();
     let cors = rocket_cors::CorsOptions {
         allowed_origins,
@@ -130,7 +151,6 @@ async fn main() -> Result<(), Box<dyn Error>> {
                 refresh,
                 search_all,
                 search,
-                show_pretty,
                 show
             ],
         )
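For reference, a paginated request to the new search route looks like GET /search/tag%3Ainbox?page=2&results_per_page=10 (the query value is illustrative; through the web dev proxy the same call goes to /api/search/...). The handler maps page and results_per_page onto nm.search(query, page * results_per_page, results_per_page) and fills total from nm.count(query), so a client can tell when it has reached the last page.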

shared/Cargo.toml (new file, 10 lines)

@@ -0,0 +1,10 @@
[package]
name = "shared"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
notmuch = { path = "../notmuch" }
serde = { version = "1.0.147", features = ["derive"] }

shared/src/lib.rs (new file, 35 lines)

@@ -0,0 +1,35 @@
use notmuch::SearchSummary;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug)]
pub struct SearchResult {
pub summary: SearchSummary,
pub query: String,
pub page: usize,
pub results_per_page: usize,
pub total: usize,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ShowResult {
messages: Vec<Message>,
}
pub type AttachementId = String;
/// # Number of seconds since the Epoch
pub type UnixTime = isize;
#[derive(Serialize, Deserialize, Debug, Default)]
pub struct Message {
pub from: String,
pub to: Option<String>,
pub cc: Option<String>,
pub timestamp: UnixTime, // date header as unix time
pub date_relative: String, // user-friendly timestamp
pub tags: Vec<String>,
// HTML formatted body
pub body: String,
pub attachment: Vec<AttachementId>,
}
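A small illustrative helper, not part of this change, showing how a consumer might derive paging bounds from these fields; it assumes notmuch's SearchSummary stays the newtype over thread summaries that the web code indexes as .0:

use shared::SearchResult;

// Illustrative only: range of result indices on the current page and the
// zero-based index of the last page.
fn page_bounds(res: &SearchResult) -> (usize, usize, usize) {
    let first = res.page * res.results_per_page;
    let last = first + res.summary.0.len();
    let last_page = res.total.saturating_sub(1) / res.results_per_page.max(1);
    (first, last, last_page)
}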


@@ -22,6 +22,7 @@ seed = "0.9.2"
 console_log = {git = "http://git-private.h.xinu.tv/wathiede/console_log.git"}
 serde = { version = "1.0.147", features = ["derive"] }
 notmuch = {path = "../notmuch"}
+shared = {path = "../shared"}
 itertools = "0.10.5"
 serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
 wasm-timer = "0.2.5"


@@ -6,13 +6,15 @@
     <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
     <link rel="modulepreload" href="/pkg/package.js" as="script" type="text/javascript">
     <link rel="preload" href="/pkg/package_bg.wasm" as="fetch" type="application/wasm" crossorigin="anonymous">
-    <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.4/css/bulma.min.css">
+    <link rel="stylesheet", href="https://jenil.github.io/bulmaswatch/cyborg/bulmaswatch.min.css">
     <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.3.0/css/all.min.css" integrity="sha512-SzlrxWUlpfuzQ+pcUCosxcglQRNAq/DZjVsC0lE40xsADsfeQoEypE+enwcOiGjk/bSuGGKHEyjSoQ1zVisanQ==" crossorigin="anonymous" referrerpolicy="no-referrer" />
     <style>
     .message {
         padding-left: 0.5em;
     }
     .body {
+        background: white;
+        color: black;
         padding-bottom: 1em;
     }
     .error {
@@ -35,6 +37,7 @@ iframe {
     }
     .footer {
         background-color: #eee;
+        color: #222;
         position: fixed;
         bottom: 0;
         left: 0;
@@ -56,14 +59,25 @@ iframe {
     animation-iteration-count: infinite;
     animation-timing-function: linear;
 }

 @keyframes spin {
     from {
         transform:rotate(0deg);
     }
     to {
         transform:rotate(360deg);
     }
 }
+@media (max-width: 768px) {
+    .section {
+        padding: 1.5em;
+    }
+}
+input, .input {
+    color: #000;
+}
+input::placeholder, .input::placeholder{
+    color: #555;
+}
 </style>
 </head>

web/src/api.rs (new file, 16 lines)

@@ -0,0 +1,16 @@
use seed::Url;
const BASE_URL: &str = "/api";
pub fn refresh() -> String {
format!("{BASE_URL}/refresh")
}
pub fn search(query: &str, page: usize, results_per_page: usize) -> String {
let query = Url::encode_uri_component(query);
format!("{BASE_URL}/search/{query}?page={page}&results_per_page={results_per_page}")
}
pub fn show(tid: &str) -> String {
format!("{BASE_URL}/show/{tid}")
}
pub fn original(message_id: &str) -> String {
format!("{BASE_URL}/original/{message_id}")
}
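As an example with illustrative values, and assuming Seed's Url::encode_uri_component behaves like JavaScript's encodeURIComponent, api::search("tag:inbox", 2, 10) returns "/api/search/tag%3Ainbox?page=2&results_per_page=10", which lines up with the server's #[get("/search/<query>?<page>&<results_per_page>")] route.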


@@ -1,64 +1,99 @@
-// (Lines like the one below ignore selected Clippy rules
-// - it's useful when you want to check your code with `cargo make verify`
-// but some rules are too "annoying" or are not applicable for your case.)
-#![allow(clippy::wildcard_imports)]
+mod api;
+mod nm;

 use std::{
     collections::hash_map::DefaultHasher,
     hash::{Hash, Hasher},
 };

 use itertools::Itertools;
-use log::{debug, error, info, warn, Level};
-use notmuch::{Content, Part, SearchSummary, Thread, ThreadNode, ThreadSet};
+use log::{debug, error, info, Level};
+use notmuch::ThreadSet;
 use seed::{prelude::*, *};
-use serde::de::Deserialize;
+use serde::Deserialize;
 use wasm_timer::Instant;

+const SEARCH_RESULTS_PER_PAGE: usize = 20;
+
 // ------ ------
 //     Init
 // ------ ------

 // `init` describes what should happen when your app started.
 fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
-    warn!("init called");
-    log!(url);
-    let mut url = url.clone();
-    let mut query = "".to_string();
-    let hpp = url.next_hash_path_part();
-    log!(hpp);
-    match hpp {
-        Some("t") => {
-            let tid = url.next_hash_path_part().unwrap_or("").to_string();
-            orders.send_msg(Msg::ShowPrettyRequest(tid));
-        }
-        Some("s") => {
-            query = url.next_hash_path_part().unwrap_or("").to_string();
-            orders.send_msg(Msg::SearchRequest(query.clone()));
-        }
-        p => {
-            log!(p);
-            orders.send_msg(Msg::SearchRequest("".to_string()));
-        }
-    };
-    orders.subscribe(|uc: subs::UrlChanged| {
-        info!("uc {:#?}", uc);
-    });
-    info!("init query '{}'", query);
+    orders
+        .subscribe(on_url_changed)
+        .notify(subs::UrlChanged(url.clone()));
     Model {
         context: Context::None,
-        query,
+        query: "".to_string(),
         refreshing_state: RefreshingState::None,
     }
 }
+
+fn on_url_changed(uc: subs::UrlChanged) -> Msg {
+    let mut url = uc.0;
+    info!(
+        "url changed '{}', history {}",
+        url,
+        history().length().unwrap_or(0)
+    );
+    let hpp = url.remaining_hash_path_parts();
+    match hpp.as_slice() {
+        ["t", tid] => Msg::ShowRequest(tid.to_string()),
+        ["s", query] => {
+            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
+            Msg::SearchRequest {
+                query,
+                page: 0,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            }
+        }
+        ["s", query, page] => {
+            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
+            let page = page[1..].parse().unwrap_or(0);
+            Msg::SearchRequest {
+                query,
+                page,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            }
+        }
+        p => {
+            if !p.is_empty() {
+                info!("Unhandled path '{p:?}'");
+            }
+            Msg::SearchRequest {
+                query: "".to_string(),
+                page: 0,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            }
+        }
+    }
+}
+
+mod urls {
+    use seed::Url;
+
+    pub fn search(query: &str, page: usize) -> Url {
+        let query = Url::encode_uri_component(query);
+        if page > 0 {
+            Url::new().set_hash_path(["s", &query, &format!("p{page}")])
+        } else {
+            Url::new().set_hash_path(["s", &query])
+        }
+    }
+
+    pub fn thread(tid: &str) -> Url {
+        Url::new().set_hash_path(["t", tid])
+    }
+}

 // ------ ------
 //     Model
 // ------ ------

 enum Context {
     None,
-    Search(SearchSummary),
-    Thread(ThreadSet),
+    Search(shared::SearchResult),
+    Thread(Vec<shared::Message>),
 }

 // `Model` describes our app state.
@@ -81,16 +116,20 @@ enum RefreshingState {
 // (Remove the line below once any of your `Msg` variants doesn't implement `Copy`.)
 // `Msg` describes the different events you can modify state with.
-enum Msg {
+pub enum Msg {
     Noop,
     RefreshStart,
     RefreshDone(Option<FetchError>),
-    SearchRequest(String),
-    SearchResult(fetch::Result<SearchSummary>),
+    SearchRequest {
+        query: String,
+        page: usize,
+        results_per_page: usize,
+    },
+    SearchResult(fetch::Result<shared::SearchResult>),
     ShowRequest(String),
-    ShowResult(fetch::Result<ThreadSet>),
-    ShowPrettyRequest(String),
-    ShowPrettyResult(fetch::Result<ThreadSet>),
+    ShowResult(fetch::Result<Vec<shared::Message>>),
+    NextPage,
+    PreviousPage,
 }

 // `update` describes how to handle each `Msg`.
@@ -105,17 +144,30 @@ fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
             model.refreshing_state = if let Some(err) = err {
                 RefreshingState::Error(format!("{:?}", err))
             } else {
+                // If looking at search page, refresh the search to view update on the server side.
+                if let Context::Search(sr) = &model.context {
+                    let query = sr.query.clone();
+                    let page = sr.page;
+                    let results_per_page = sr.results_per_page;
+                    orders.perform_cmd(async move {
+                        Msg::SearchResult(search_request(&query, page, results_per_page).await)
+                    });
+                }
                 RefreshingState::None
             };
         }
-        Msg::SearchRequest(query) => {
+        Msg::SearchRequest {
+            query,
+            page,
+            results_per_page,
+        } => {
+            info!("searching for '{query}' pg {page} # / pg {results_per_page}");
             model.query = query.clone();
-            let url = Url::new().set_hash_path(["s", &query]);
-            orders.request_url(url);
-            orders
-                .skip()
-                .perform_cmd(async move { Msg::SearchResult(search_request(&query).await) });
+            orders.skip().perform_cmd(async move {
+                Msg::SearchResult(search_request(&query, page, results_per_page).await)
+            });
         }
         Msg::SearchResult(Ok(response_data)) => {
             debug!("fetch ok {:#?}", response_data);
@@ -126,8 +178,6 @@ fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
         }
         Msg::ShowRequest(tid) => {
-            let url = Url::new().set_hash_path(["t", &tid]);
-            orders.request_url(url);
             orders
                 .skip()
                 .perform_cmd(async move { Msg::ShowResult(show_request(&tid).await) });
@@ -139,26 +189,46 @@ fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
         Msg::ShowResult(Err(fetch_error)) => {
             error!("fetch failed {:?}", fetch_error);
         }
-        Msg::ShowPrettyRequest(tid) => {
-            let url = Url::new().set_hash_path(["t", &tid]);
-            orders.request_url(url);
-            orders
-                .skip()
-                .perform_cmd(async move { Msg::ShowPrettyResult(show_pretty_request(&tid).await) });
-        }
-        Msg::ShowPrettyResult(Ok(response_data)) => {
-            debug!("fetch ok {:#?}", response_data);
-            model.context = Context::Thread(response_data);
-        }
-        Msg::ShowPrettyResult(Err(fetch_error)) => {
-            error!("fetch failed {:?}", fetch_error);
+        Msg::NextPage => {
+            match &model.context {
+                Context::Search(sr) => {
+                    orders.request_url(urls::search(&sr.query, sr.page + 1));
+                }
+                Context::Thread(_) => (), // do nothing (yet?)
+                Context::None => (), // do nothing (yet?)
+            };
+        }
+        Msg::PreviousPage => {
+            match &model.context {
+                Context::Search(sr) => {
+                    orders.request_url(urls::search(&sr.query, sr.page.saturating_sub(1)));
+                }
+                Context::Thread(_) => (), // do nothing (yet?)
+                Context::None => (), // do nothing (yet?)
+            };
         }
     }
 }

-async fn search_request(query: &str) -> fetch::Result<SearchSummary> {
-    Request::new(api::search(query))
+pub async fn show_request(tid: &str) -> fetch::Result<Vec<shared::Message>> {
+    let b = Request::new(api::show(tid))
+        .method(Method::Get)
+        .fetch()
+        .await?
+        .check_status()?
+        .bytes()
+        .await?;
+    let mut deserializer = serde_json::Deserializer::from_slice(&b);
+    deserializer.disable_recursion_limit();
+    Ok(Vec::<shared::Message>::deserialize(&mut deserializer)
+        .map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
+}
+
+async fn search_request(
+    query: &str,
+    page: usize,
+    results_per_page: usize,
+) -> fetch::Result<shared::SearchResult> {
+    Request::new(api::search(query, page, results_per_page))
         .method(Method::Get)
         .fetch()
         .await?
@@ -167,25 +237,6 @@ async fn search_request(query: &str) -> fetch::Result<SearchSummary> {
         .await
 }

-mod api {
-    const BASE_URL: &str = "/api";
-    pub fn refresh() -> String {
-        format!("{BASE_URL}/refresh")
-    }
-    pub fn search(query: &str) -> String {
-        format!("{BASE_URL}/search/{query}")
-    }
-    pub fn show(tid: &str) -> String {
-        format!("{BASE_URL}/show/{tid}")
-    }
-    pub fn show_pretty(tid: &str) -> String {
-        format!("{BASE_URL}/show/{tid}/pretty")
-    }
-    pub fn original(message_id: &str) -> String {
-        format!("{BASE_URL}/original/{message_id}")
-    }
-}
-
 async fn refresh_request() -> fetch::Result<()> {
     let t = Request::new(api::refresh())
         .method(Method::Get)
@@ -198,170 +249,37 @@ async fn refresh_request() -> fetch::Result<()> {
     Ok(())
 }

-async fn show_request(tid: &str) -> fetch::Result<ThreadSet> {
-    let b = Request::new(api::show(tid))
-        .method(Method::Get)
-        .fetch()
-        .await?
-        .check_status()?
-        .bytes()
-        .await?;
-    let mut deserializer = serde_json::Deserializer::from_slice(&b);
-    deserializer.disable_recursion_limit();
-    Ok(ThreadSet::deserialize(&mut deserializer)
-        .map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
-}
-
-async fn show_pretty_request(tid: &str) -> fetch::Result<ThreadSet> {
-    Request::new(api::show_pretty(tid))
-        .method(Method::Get)
-        .fetch()
-        .await?
-        .check_status()?
-        .json()
-        .await
-}
-
 // ------ ------
 //     View
 // ------ ------

-// <subject>
-// <tags>
-//
-// <from1> <date>
-// <to1>
-// <content1>
-// <zippy>
-//   <children1>
-// </zippy>
-//
-// <from2> <date>
-// <to2>
-// <body2>
-fn view_message(thread: &ThreadNode) -> Node<Msg> {
-    let message = thread.0.as_ref().expect("ThreadNode missing Message");
-    let children = &thread.1;
-    div![
-        C!["message"],
-        /* TODO(wathiede): collect all the tags and show them here. */
-        /* TODO(wathiede): collect all the attachments from all the subparts */
-        div![C!["header"], "From: ", &message.headers.from],
-        div![C!["header"], "Date: ", &message.headers.date],
-        div![C!["header"], "To: ", &message.headers.to],
-        hr![],
-        div![
-            C!["body"],
-            match &message.body {
-                Some(body) => view_body(body.as_slice()),
-                None => div!["<no body>"],
-            },
-        ],
-        children.iter().map(view_message)
-    ]
-}
-
-fn view_body(body: &[Part]) -> Node<Msg> {
-    div![body.iter().map(view_part)]
-}
-
-fn view_text_plain(content: &Option<Content>) -> Node<Msg> {
-    match &content {
-        Some(Content::String(content)) => p![C!["view-part-text-plain"], content],
-        _ => div![
-            C!["error"],
-            format!("Unhandled content enum for text/plain"),
-        ],
-    }
-}
-
-fn view_part(part: &Part) -> Node<Msg> {
-    match part.content_type.as_str() {
-        "text/plain" => view_text_plain(&part.content),
-        "text/html" => {
-            if let Some(Content::String(html)) = &part.content {
-                let inlined = css_inline::inline(html).expect("failed to inline CSS");
-                return div![C!["view-part-text-html"], div!["TEST"], raw![&inlined]];
-            } else {
-                div![
-                    C!["error"],
-                    format!("Unhandled content enum for multipart/mixed"),
-                ]
-            }
-        }
-        // https://en.wikipedia.org/wiki/MIME#alternative
-        // RFC1341 states: In general, user agents that compose multipart/alternative entities
-        // should place the body parts in increasing order of preference, that is, with the
-        // preferred format last.
-        "multipart/alternative" => {
-            if let Some(Content::Multipart(parts)) = &part.content {
-                for part in parts.iter().rev() {
-                    if part.content_type == "text/html" {
-                        if let Some(Content::String(html)) = &part.content {
-                            let inliner = css_inline::CSSInliner::options()
-                                .load_remote_stylesheets(false)
-                                .remove_style_tags(true)
-                                .build();
-                            let inlined = inliner.inline(html).expect("failed to inline CSS");
-                            return div![Node::from_html(None, &inlined)];
-                        }
-                    }
-                    if part.content_type == "text/plain" {
-                        return view_text_plain(&part.content);
-                    }
-                }
-                div!["No known multipart/alternative parts"]
-            } else {
-                div![
-                    C!["error"],
-                    format!("multipart/alternative with non-multipart content"),
-                ]
-            }
-        }
-        "multipart/mixed" => match &part.content {
-            Some(Content::Multipart(parts)) => div![parts.iter().map(view_part)],
-            _ => div![
-                C!["error"],
-                format!("Unhandled content enum for multipart/mixed"),
-            ],
-        },
-        _ => div![
-            C!["error"],
-            format!("Unhandled content type: {}", part.content_type)
-        ],
-    }
-}
-
-fn first_subject(thread: &ThreadNode) -> Option<String> {
-    if let Some(msg) = &thread.0 {
-        return Some(msg.headers.subject.clone());
-    } else {
-        for tn in &thread.1 {
-            if let Some(s) = first_subject(&tn) {
-                return Some(s);
-            }
-        }
-    }
-    None
-}
-
 fn set_title(title: &str) {
     seed::document().set_title(&format!("lb: {}", title));
 }

-fn tags_chiclet(tags: &[String]) -> impl Iterator<Item = Node<Msg>> + '_ {
-    tags.iter().map(|tag| {
+fn tags_chiclet(tags: &[String], is_mobile: bool) -> impl Iterator<Item = Node<Msg>> + '_ {
+    tags.iter().map(move |tag| {
         let mut hasher = DefaultHasher::new();
         tag.hash(&mut hasher);
         let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
         let style = style! {St::BackgroundColor=>hex};
-        match tag.as_str() {
-            "attachment" => span![C!["tag"], style, "📎"],
-            "replied" => span![C!["tag"], style, i![C!["fa-solid", "fa-reply"]]],
-            _ => span![C!["tag"], style, tag],
-        }
+        let classes = C!["tag", IF!(is_mobile => "is-small")];
+        let tag = tag.clone();
+        a![
+            attrs! {
+                At::Href => urls::search(&format!("tag:{tag}"), 0)
+            },
+            match tag.as_str() {
+                "attachment" => span![classes, style, "📎"],
+                "replied" => span![classes, style, i![C!["fa-solid", "fa-reply"]]],
+                _ => span![classes, style, &tag],
+            },
+            ev(Ev::Click, move |_| Msg::SearchRequest {
+                query: format!("tag:{tag}"),
+                page: 0,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            })
+        ]
     })
 }
@@ -390,13 +308,14 @@ fn pretty_authors(authors: &str) -> impl Iterator<Item = Node<Msg>> + '_ {
     )
 }

-fn view_mobile_search_results(query: &str, search_results: &SearchSummary) -> Node<Msg> {
+fn view_mobile_search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
     if query.is_empty() {
         set_title("all mail");
     } else {
         set_title(query);
     }
-    let rows = search_results.0.iter().map(|r| {
+    let summaries = &search_results.summary.0;
+    let rows = summaries.iter().map(|r| {
         /*
         let tid = r.thread.clone();
         tr![
@@ -407,29 +326,41 @@ fn view_mobile_search_results(query: &str, search_results: &SearchSummary) -> No
             ],
             td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
             td![C!["date"], &r.date_relative],
-            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
+            ev(Ev::Click, move |_| Msg::ShowRequest(tid)),
         ]
         */
+        let tid = r.thread.clone();
         div![
-            p![C!["subject"], &r.subject],
+            div![
+                C!["subject"],
+                &r.subject,
+                ev(Ev::Click, move |_| Msg::ShowRequest(tid)),
+            ],
             div![
                 span![C!["from"], pretty_authors(&r.authors)],
-                span![C!["tags"], tags_chiclet(&r.tags)],
+                span![C!["tags"], tags_chiclet(&r.tags, true)],
             ],
             span![C!["date"], &r.date_relative],
            hr![],
         ]
     });
-    div![h1!["Search results"], rows]
+    let first = search_results.page * search_results.results_per_page;
+    div![
+        h1!["Search results"],
+        view_search_pager(first, summaries.len(), search_results.total),
+        rows,
+        view_search_pager(first, summaries.len(), search_results.total)
+    ]
 }

-fn view_search_results(query: &str, search_results: &SearchSummary) -> Node<Msg> {
+fn view_search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
     if query.is_empty() {
         set_title("all mail");
     } else {
         set_title(query);
     }
-    let rows = search_results.0.iter().map(|r| {
+    let summaries = &search_results.summary.0;
+    let rows = summaries.iter().map(|r| {
         let tid = r.thread.clone();
         tr![
             td![
@@ -437,12 +368,25 @@ fn view_search_results(query: &str, search_results: &SearchSummary) -> Node<Msg>
                 pretty_authors(&r.authors),
                 IF!(r.total>1 => small![" ", r.total.to_string()]),
             ],
-            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
-            td![C!["date"], &r.date_relative],
-            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
+            td![
+                C!["subject"],
+                tags_chiclet(&r.tags, false),
+                " ",
+                a![
+                    C!["has-text-light"],
+                    attrs! {
+                        At::Href => urls::thread(&tid)
+                    },
+                    &r.subject,
+                ]
+            ],
+            td![C!["date"], &r.date_relative]
         ]
     });
-    div![table![
+    let first = search_results.page * search_results.results_per_page;
+    div![
+        view_search_pager(first, summaries.len(), search_results.total),
+        table![
         C![
             "table",
             "index",
@@ -457,55 +401,36 @@ fn view_search_results(query: &str, search_results: &SearchSummary) -> Node<Msg>
             th![C!["date"], "Date"]
         ]],
         tbody![rows]
-    ]]
+        ],
+        view_search_pager(first, summaries.len(), search_results.total)
+    ]
 }

-fn view_thread(thread_set: &ThreadSet) -> Node<Msg> {
-    assert_eq!(thread_set.0.len(), 1);
-    let thread = &thread_set.0[0];
-    assert_eq!(thread.0.len(), 1);
-    let thread_node = &thread.0[0];
-    let subject = first_subject(&thread_node).unwrap_or("<No subject>".to_string());
-    set_title(&subject);
-    div![
-        h1![subject],
-        a![
-            attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
-            "Original"
-        ],
-        view_message(&thread_node),
-        div![
-            C!["debug"],
-            "Add zippy for debug dump",
-            view_debug_thread_set(thread_set)
-        ] /* pre![format!("Thread: {:#?}", thread_set).replace(" ", " ")] */
-    ]
-}
-
-fn view_debug_thread_set(thread_set: &ThreadSet) -> Node<Msg> {
-    ul![thread_set
-        .0
-        .iter()
-        .enumerate()
-        .map(|(i, t)| { li!["t", i, ": ", view_debug_thread(t),] })]
-}
-
-fn view_debug_thread(thread: &Thread) -> Node<Msg> {
-    ul![thread
-        .0
-        .iter()
-        .enumerate()
-        .map(|(i, tn)| { li!["tn", i, ": ", view_debug_thread_node(tn),] })]
-}
-
-fn view_debug_thread_node(thread_node: &ThreadNode) -> Node<Msg> {
-    ul![
-        IF!(thread_node.0.is_some()=>li!["tn id:", &thread_node.0.as_ref().unwrap().id]),
-        thread_node.1.iter().enumerate().map(|(i, tn)| li![
-            "tn",
-            i,
-            ": ",
-            view_debug_thread_node(tn)
-        ])
+fn view_search_pager(start: usize, count: usize, total: usize) -> Node<Msg> {
+    let is_first = start <= 0;
+    let is_last = (start + SEARCH_RESULTS_PER_PAGE) >= total;
+    nav![
+        C!["pagination"],
+        a![
+            C![
+                "pagination-previous",
+                "button",
+                IF!(is_first => "is-static"),
+                IF!(is_first => "is-info"),
+            ],
+            "<",
+            ev(Ev::Click, |_| Msg::PreviousPage)
+        ],
+        a![
+            C!["pagination-next", "button", IF!(is_last => "is-static")],
+            IF!(is_last => attrs!{ At::Disabled=>true }),
+            ">",
+            ev(Ev::Click, |_| Msg::NextPage)
+        ],
+        ul![
+            C!["pagination-list"],
+            li![format!("{} - {} of {}", start, start + count, total)],
+        ],
     ]
 }
@@ -517,7 +442,7 @@ fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
     } else {
         false
     };
-    let query = query.to_string();
+    let query = Url::decode_uri_component(query).unwrap_or("".to_string());
     nav![
         C!["navbar"],
         attrs! {At::Role=>"navigation"},
@@ -535,13 +460,17 @@ fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
         ],
         a![
             C!["navbar-item", "button"],
+            attrs! {
+                At::Href => urls::search("is:unread", 0)
+            },
             "Unread",
-            ev(Ev::Click, |_| Msg::SearchRequest("is:unread".to_string())),
         ],
         a![
             C!["navbar-item", "button"],
+            attrs! {
+                At::Href => urls::search("", 0)
+            },
             "All",
-            ev(Ev::Click, |_| Msg::SearchRequest("".to_string())),
         ],
         input![
             C!["navbar-item", "input"],
@@ -550,10 +479,18 @@ fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
                 At::AutoFocus => true.as_at_value();
                 At::Value => query,
             },
-            input_ev(Ev::Input, Msg::SearchRequest),
+            input_ev(Ev::Input, |q| Msg::SearchRequest {
+                query: Url::encode_uri_component(q),
+                page: 0,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            }),
             // Resend search on enter.
             keyboard_ev(Ev::KeyUp, move |e| if e.key_code() == 0x0d {
-                Msg::SearchRequest(query)
+                Msg::SearchRequest {
+                    query: Url::encode_uri_component(query),
+                    page: 0,
+                    results_per_page: SEARCH_RESULTS_PER_PAGE,
+                }
             } else {
                 Msg::Noop
             }),
@@ -572,6 +509,13 @@ fn view_footer(render_time_ms: u128) -> Node<Msg> {
     ]
 }

+fn view_thread(messages: &[shared::Message]) -> Node<Msg> {
+    div![
+        "MESSAGES GO HERE",
+        ol![messages.iter().map(|msg| li![format!("{:?}", msg)])]
+    ]
+}
+
 fn view_desktop(model: &Model) -> Node<Msg> {
     let content = match &model.context {
         Context::None => div![h1!["Loading"]],
@@ -592,7 +536,7 @@ fn view_mobile(model: &Model) -> Node<Msg> {
     };
     div![
         view_header(&model.query, &model.refreshing_state),
-        section![C!["section"], div![C!["container"], content],]
+        section![C!["section"], div![C!["content"], content],]
     ]
 }
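For orientation (thread id and query are illustrative): urls::search("tag:inbox", 2) produces a hash route of the form #/s/tag%3Ainbox/p2, urls::search("tag:inbox", 0) drops the page segment, and urls::thread("000000000000abcd") produces #/t/000000000000abcd. on_url_changed reverses this, stripping the leading p from the page segment (page[1..]) and turning the parts back into Msg::SearchRequest or Msg::ShowRequest, so plain <a> links drive navigation and browser history.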

web/src/nm.rs (new file, 193 lines)

@@ -0,0 +1,193 @@
use notmuch::{Content, Part, Thread, ThreadNode, ThreadSet};
use seed::{prelude::*, *};
use serde::de::Deserialize;
use crate::{api, set_title, Msg};
pub async fn show_request(tid: &str) -> fetch::Result<ThreadSet> {
let b = Request::new(api::show(tid))
.method(Method::Get)
.fetch()
.await?
.check_status()?
.bytes()
.await?;
let mut deserializer = serde_json::Deserializer::from_slice(&b);
deserializer.disable_recursion_limit();
Ok(ThreadSet::deserialize(&mut deserializer)
.map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
}
pub fn view_thread(thread_set: &ThreadSet) -> Node<Msg> {
assert_eq!(thread_set.0.len(), 1);
let thread = &thread_set.0[0];
assert_eq!(thread.0.len(), 1);
let thread_node = &thread.0[0];
let subject = first_subject(&thread_node).unwrap_or("<No subject>".to_string());
set_title(&subject);
div![
h1![subject],
a![
attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
"Original"
],
view_message(&thread_node),
div![
C!["debug"],
"Add zippy for debug dump",
view_debug_thread_set(thread_set)
] /* pre![format!("Thread: {:#?}", thread_set).replace(" ", " ")] */
]
}
// <subject>
// <tags>
//
// <from1> <date>
// <to1>
// <content1>
// <zippy>
// <children1>
// </zippy>
//
// <from2> <date>
// <to2>
// <body2>
fn view_message(thread: &ThreadNode) -> Node<Msg> {
let message = thread.0.as_ref().expect("ThreadNode missing Message");
let children = &thread.1;
div![
C!["message"],
/* TODO(wathiede): collect all the tags and show them here. */
/* TODO(wathiede): collect all the attachments from all the subparts */
div![C!["header"], "From: ", &message.headers.from],
div![C!["header"], "Date: ", &message.headers.date],
div![C!["header"], "To: ", &message.headers.to],
hr![],
div![
C!["body"],
match &message.body {
Some(body) => view_body(body.as_slice()),
None => div!["<no body>"],
},
],
children.iter().map(view_message)
]
}
fn view_body(body: &[Part]) -> Node<Msg> {
div![body.iter().map(view_part)]
}
fn view_text_plain(content: &Option<Content>) -> Node<Msg> {
match &content {
Some(Content::String(content)) => p![C!["view-part-text-plain"], content],
_ => div![
C!["error"],
format!("Unhandled content enum for text/plain"),
],
}
}
fn view_part(part: &Part) -> Node<Msg> {
match part.content_type.as_str() {
"text/plain" => view_text_plain(&part.content),
"text/html" => {
if let Some(Content::String(html)) = &part.content {
let inliner = css_inline::CSSInliner::options()
.load_remote_stylesheets(false)
.remove_style_tags(true)
.build();
let inlined = inliner.inline(html).expect("failed to inline CSS");
return div![C!["view-part-text-html"], div!["TEST"], raw![&inlined]];
} else {
div![
C!["error"],
format!("Unhandled content enum for multipart/mixed"),
]
}
}
// https://en.wikipedia.org/wiki/MIME#alternative
// RFC1341 states: In general, user agents that compose multipart/alternative entities
// should place the body parts in increasing order of preference, that is, with the
// preferred format last.
"multipart/alternative" => {
if let Some(Content::Multipart(parts)) = &part.content {
for part in parts.iter().rev() {
if part.content_type == "text/html" {
if let Some(Content::String(html)) = &part.content {
let inliner = css_inline::CSSInliner::options()
.load_remote_stylesheets(false)
.remove_style_tags(true)
.build();
let inlined = inliner.inline(html).expect("failed to inline CSS");
return div![Node::from_html(None, &inlined)];
}
}
if part.content_type == "text/plain" {
return view_text_plain(&part.content);
}
}
div!["No known multipart/alternative parts"]
} else {
div![
C!["error"],
format!("multipart/alternative with non-multipart content"),
]
}
}
"multipart/mixed" => match &part.content {
Some(Content::Multipart(parts)) => div![parts.iter().map(view_part)],
_ => div![
C!["error"],
format!("Unhandled content enum for multipart/mixed"),
],
},
_ => div![
C!["error"],
format!("Unhandled content type: {}", part.content_type)
],
}
}
fn first_subject(thread: &ThreadNode) -> Option<String> {
if let Some(msg) = &thread.0 {
return Some(msg.headers.subject.clone());
} else {
for tn in &thread.1 {
if let Some(s) = first_subject(&tn) {
return Some(s);
}
}
}
None
}
fn view_debug_thread_set(thread_set: &ThreadSet) -> Node<Msg> {
ul![thread_set
.0
.iter()
.enumerate()
.map(|(i, t)| { li!["t", i, ": ", view_debug_thread(t),] })]
}
fn view_debug_thread(thread: &Thread) -> Node<Msg> {
ul![thread
.0
.iter()
.enumerate()
.map(|(i, tn)| { li!["tn", i, ": ", view_debug_thread_node(tn),] })]
}
fn view_debug_thread_node(thread_node: &ThreadNode) -> Node<Msg> {
ul![
IF!(thread_node.0.is_some()=>li!["tn id:", &thread_node.0.as_ref().unwrap().id]),
thread_node.1.iter().enumerate().map(|(i, tn)| li![
"tn",
i,
": ",
view_debug_thread_node(tn)
])
]
}