Compare commits
e7a0e5b662...server-sid (18 commits)
| SHA1 |
|---|
| 39bef1ea87 |
| 458bd356dd |
| 8420e4b4d3 |
| 8b04bd8059 |
| fc83d56c0c |
| f8e86dc5cc |
| 72622032ad |
| ec1a12ca11 |
| f5f4d666d5 |
| 7bfef154d9 |
| fbe7dade54 |
| 321eca38e2 |
| 1a86204561 |
| fd721c53d8 |
| 4390d24492 |
| cb8b00f8d1 |
| eba362a7f2 |
| f16860dd09 |
Cargo.lock (generated, 1644 lines changed)
File diff suppressed because it is too large.
@@ -1,9 +1,11 @@
 [workspace]
 resolver = "2"
 members = [
     "web",
     "server",
     "notmuch",
     "procmail2notmuch",
+    "shared"
 ]

 [profile.release]
dev.sh (5 lines changed)
@@ -1,6 +1,7 @@
 cd -- "$( dirname -- "${BASH_SOURCE[0]}" )"
 tmux new-session -d -s letterbox-dev
 tmux rename-window web
-tmux send-keys "cd web; trunk serve --release --address 0.0.0.0 --port 6758 --proxy-backend http://localhost:9345/ --proxy-rewrite=/api/" C-m
+tmux send-keys "cd web; trunk serve --release --address 0.0.0.0 --port 6758 --proxy-backend http://localhost:9345/ --proxy-rewrite=/api/ -w ../shared -w ../notmuch -w ./" C-m
 tmux new-window -n server
-tmux send-keys "cd server; cargo watch -x run" C-m
+tmux send-keys "cd server; cargo watch -x run -w ../shared -w ../notmuch -w ./" C-m
 tmux attach -d -t letterbox-dev
@@ -478,8 +478,19 @@ impl Notmuch {
         self.run_notmuch(std::iter::empty::<&str>())
     }

-    pub fn search(&self, query: &str) -> Result<SearchSummary, NotmuchError> {
-        let res = self.run_notmuch(["search", "--format=json", "--limit=20", query])?;
+    pub fn search(
+        &self,
+        query: &str,
+        offset: usize,
+        limit: usize,
+    ) -> Result<SearchSummary, NotmuchError> {
+        let res = self.run_notmuch([
+            "search",
+            "--format=json",
+            &format!("--offset={offset}"),
+            &format!("--limit={limit}"),
+            query,
+        ])?;
         Ok(serde_json::from_slice(&res)?)
     }

@@ -597,7 +608,7 @@ mod tests {
     fn search() -> Result<(), NotmuchError> {
         let nm = Notmuch::with_config("testdata/notmuch.config");
         nm.new()?;
-        let res = nm.search("goof")?;
+        let res = nm.search("goof", 0, 100)?;
         assert_eq!(res.0.len(), 1);
         Ok(())
     }
@@ -5,6 +5,7 @@ enum MatchType {
     From,
     Sender,
     To,
+    Cc,
     Subject,
     List,
     DeliveredTo,
@@ -81,6 +82,11 @@ impl FromStr for Match {
                 match_type: MatchType::From,
                 needle: cleanup_match(FROM, needle),
             });
+        } else if needle.starts_with(CC) {
+            return Ok(Match {
+                match_type: MatchType::Cc,
+                needle: cleanup_match(CC, needle),
+            });
         } else if needle.starts_with(TOCC) {
             return Ok(Match {
                 match_type: MatchType::To,
@@ -88,7 +94,7 @@ impl FromStr for Match {
             });
         } else if needle.starts_with(SENDER) {
             return Ok(Match {
-                match_type: MatchType::From,
+                match_type: MatchType::Sender,
                 needle: cleanup_match(SENDER, needle),
             });
         } else if needle.starts_with(SUBJECT) {
@@ -140,7 +146,6 @@ impl FromStr for Match {
                 needle: cleanup_match("", &needle),
             });
         }
         Ok(Match::default())
     }
 }

@@ -155,29 +160,35 @@ fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()>
             eprintln!("rule has unknown match {:?}", r);
             continue;
         }

+        let rule = match m.match_type {
+            MatchType::From => "from:",
+            // TODO(wathiede): something more specific?
+            MatchType::Sender => "from:",
+            MatchType::To => "to:",
+            MatchType::Cc => "to:",
+            MatchType::Subject => "subject:",
+            MatchType::List => "List-ID:",
+            MatchType::Body => "",
+            // TODO(wathiede): these will probably require adding fields to notmuch
+            // index. Handle them later.
+            MatchType::DeliveredTo
+            | MatchType::XForwardedTo
+            | MatchType::ReplyTo
+            | MatchType::XOriginalTo
+            | MatchType::XSpam => continue,
+            MatchType::Unknown => unreachable!(),
+        };
+        // Preserve unread status if run with --remove-all
+        lines.push(format!(
+            r#"-unprocessed +{} +unread -- is:unread tag:unprocessed {}"{}""#,
+            t, rule, m.needle
+        ));
         lines.push(format!(
             // TODO(wathiede): this assumes `notmuch new` is configured to add
             // `tag:unprocessed` to all new mail.
-            "-unprocessed +{} -- tag:unprocessed {}{}",
-            t,
-            match m.match_type {
-                MatchType::From => "from:",
-                // TODO(wathiede): something more specific?
-                MatchType::Sender => "from:",
-                MatchType::To => "to:",
-                MatchType::Subject => "subject:",
-                MatchType::List => "List-ID:",
-                MatchType::Body => "",
-                // TODO(wathiede): these will probably require adding fields to notmuch
-                // index. Handle them later.
-                MatchType::DeliveredTo
-                | MatchType::XForwardedTo
-                | MatchType::ReplyTo
-                | MatchType::XOriginalTo
-                | MatchType::XSpam => continue,
-                MatchType::Unknown => unreachable!(),
-            },
-            m.needle
+            r#"-unprocessed +{} -- tag:unprocessed {}"{}""#,
+            t, rule, m.needle
         ));
     }
 }

@@ -9,11 +9,13 @@ edition = "2021"
 rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
 rocket_cors = { git = "https://github.com/lawliet89/rocket_cors", branch = "master" }
 notmuch = { path = "../notmuch" }
+shared = { path = "../shared" }
 serde_json = "1.0.87"
 thiserror = "1.0.37"
 serde = { version = "1.0.147", features = ["derive"] }
 log = "0.4.17"
 tokio = "1.26.0"
+glog = "0.1.0"

 [dependencies.rocket_contrib]
 version = "0.4.11"
@@ -1,9 +1,12 @@
 #[macro_use]
 extern crate rocket;
+mod error;
+mod nm;

 use std::{error::Error, io::Cursor, str::FromStr};

-use notmuch::{Notmuch, NotmuchError, SearchSummary, ThreadSet};
+use glog::Flags;
+use notmuch::{Notmuch, NotmuchError};
 use rocket::{
     http::{ContentType, Header},
     request::Request,
@@ -13,6 +16,8 @@ use rocket::{
 };
 use rocket_cors::{AllowedHeaders, AllowedOrigins};

+use crate::error::ServerError;
+
 #[get("/")]
 fn hello() -> &'static str {
     "Hello, world!"
@@ -22,32 +27,40 @@ fn hello() -> &'static str {
 async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
     Ok(Json(String::from_utf8_lossy(&nm.new()?).to_string()))
 }

 #[get("/search")]
-async fn search_all(nm: &State<Notmuch>) -> Result<Json<SearchSummary>, Debug<NotmuchError>> {
-    search(nm, "*").await
+async fn search_all(
+    nm: &State<Notmuch>,
+) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
+    search(nm, "*", None, None).await
 }

-#[get("/search/<query>")]
+#[get("/search/<query>?<page>&<results_per_page>")]
 async fn search(
     nm: &State<Notmuch>,
     query: &str,
-) -> Result<Json<SearchSummary>, Debug<NotmuchError>> {
-    let res = nm.search(query)?;
-    Ok(Json(res))
-}
-
-#[get("/show/<query>/pretty")]
-async fn show_pretty(
-    nm: &State<Notmuch>,
-    query: &str,
-) -> Result<Json<ThreadSet>, Debug<NotmuchError>> {
-    let res = nm.show(query)?;
+    page: Option<usize>,
+    results_per_page: Option<usize>,
+) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
+    let page = page.unwrap_or(0);
+    let results_per_page = results_per_page.unwrap_or(10);
+    info!(" search '{query}'");
+    let res = shared::SearchResult {
+        summary: nm.search(query, page * results_per_page, results_per_page)?,
+        query: query.to_string(),
+        page,
+        results_per_page,
+        total: nm.count(query)?,
+    };
     Ok(Json(res))
 }

 #[get("/show/<query>")]
-async fn show(nm: &State<Notmuch>, query: &str) -> Result<Json<ThreadSet>, Debug<NotmuchError>> {
-    let res = nm.show(query)?;
+async fn show(
+    nm: &State<Notmuch>,
+    query: &str,
+) -> Result<Json<Vec<shared::Message>>, Debug<ServerError>> {
+    let res = nm::threadset_to_messages(nm.show(query).map_err(|e| -> ServerError { e.into() })?)?;
     Ok(Json(res))
 }

@@ -107,6 +120,14 @@ async fn original(

 #[rocket::main]
 async fn main() -> Result<(), Box<dyn Error>> {
+    glog::new()
+        .init(Flags {
+            colorlogtostderr: true,
+            //alsologtostderr: true, // use logtostderr to only write to stderr and not to files
+            logtostderr: true,
+            ..Default::default()
+        })
+        .unwrap();
     let allowed_origins = AllowedOrigins::all();
     let cors = rocket_cors::CorsOptions {
         allowed_origins,
@@ -130,7 +151,6 @@ async fn main() -> Result<(), Box<dyn Error>> {
             refresh,
             search_all,
             search,
-            show_pretty,
             show
         ],
     )

shared/Cargo.toml (new file, 10 lines)
@@ -0,0 +1,10 @@
[package]
name = "shared"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
notmuch = { path = "../notmuch" }
serde = { version = "1.0.147", features = ["derive"] }
shared/src/lib.rs (new file, 35 lines)
@@ -0,0 +1,35 @@
use notmuch::SearchSummary;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
pub struct SearchResult {
    pub summary: SearchSummary,
    pub query: String,
    pub page: usize,
    pub results_per_page: usize,
    pub total: usize,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ShowResult {
    messages: Vec<Message>,
}

pub type AttachementId = String;

/// # Number of seconds since the Epoch
pub type UnixTime = isize;

#[derive(Serialize, Deserialize, Debug, Default)]
pub struct Message {
    pub from: String,
    pub to: Option<String>,
    pub cc: Option<String>,
    pub timestamp: UnixTime, // date header as unix time
    pub date_relative: String, // user-friendly timestamp
    pub tags: Vec<String>,

    // HTML formatted body
    pub body: String,
    pub attachment: Vec<AttachementId>,
}
@@ -22,6 +22,7 @@ seed = "0.9.2"
 console_log = {git = "http://git-private.h.xinu.tv/wathiede/console_log.git"}
 serde = { version = "1.0.147", features = ["derive"] }
 notmuch = {path = "../notmuch"}
+shared = {path = "../shared"}
 itertools = "0.10.5"
 serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
 wasm-timer = "0.2.5"
@@ -37,6 +37,7 @@ iframe {
 }
 .footer {
     background-color: #eee;
+    color: #222;
     position: fixed;
     bottom: 0;
     left: 0;
@@ -71,6 +72,12 @@ iframe {
         padding: 1.5em;
     }
 }
+input, .input {
+    color: #000;
+}
+input::placeholder, .input::placeholder{
+    color: #555;
+}
 </style>
 </head>
web/src/api.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
use seed::Url;

const BASE_URL: &str = "/api";
pub fn refresh() -> String {
    format!("{BASE_URL}/refresh")
}
pub fn search(query: &str, page: usize, results_per_page: usize) -> String {
    let query = Url::encode_uri_component(query);
    format!("{BASE_URL}/search/{query}?page={page}&results_per_page={results_per_page}")
}
pub fn show(tid: &str) -> String {
    format!("{BASE_URL}/show/{tid}")
}
pub fn original(message_id: &str) -> String {
    format!("{BASE_URL}/original/{message_id}")
}
web/src/lib.rs (536 lines changed)
@@ -1,64 +1,99 @@
 // (Lines like the one below ignore selected Clippy rules
 // - it's useful when you want to check your code with `cargo make verify`
 // but some rules are too "annoying" or are not applicable for your case.)
 #![allow(clippy::wildcard_imports)]
+mod api;
+mod nm;

 use std::{
     collections::hash_map::DefaultHasher,
     hash::{Hash, Hasher},
 };

 use itertools::Itertools;
-use log::{debug, error, info, warn, Level};
-use notmuch::{Content, Part, SearchSummary, Thread, ThreadNode, ThreadSet};
+use log::{debug, error, info, Level};
+use notmuch::ThreadSet;
 use seed::{prelude::*, *};
-use serde::de::Deserialize;
+use serde::Deserialize;
 use wasm_timer::Instant;

+const SEARCH_RESULTS_PER_PAGE: usize = 20;
+
 // ------ ------
 // Init
 // ------ ------

 // `init` describes what should happen when your app started.
 fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
-    warn!("init called");
-    log!(url);
-    let mut url = url.clone();
-    let mut query = "".to_string();
-    let hpp = url.next_hash_path_part();
-    log!(hpp);
-    match hpp {
-        Some("t") => {
-            let tid = url.next_hash_path_part().unwrap_or("").to_string();
-            orders.send_msg(Msg::ShowPrettyRequest(tid));
-        }
-        Some("s") => {
-            query = url.next_hash_path_part().unwrap_or("").to_string();
-            orders.send_msg(Msg::SearchRequest(query.clone()));
-        }
-        p => {
-            log!(p);
-            orders.send_msg(Msg::SearchRequest("".to_string()));
-        }
-    };
-    orders.subscribe(|uc: subs::UrlChanged| {
-        info!("uc {}", uc.0);
-    });
+    orders
+        .subscribe(on_url_changed)
+        .notify(subs::UrlChanged(url.clone()));

-    info!("init query '{}'", query);
     Model {
         context: Context::None,
-        query,
+        query: "".to_string(),
         refreshing_state: RefreshingState::None,
     }
 }

+fn on_url_changed(uc: subs::UrlChanged) -> Msg {
+    let mut url = uc.0;
+    info!(
+        "url changed '{}', history {}",
+        url,
+        history().length().unwrap_or(0)
+    );
+    let hpp = url.remaining_hash_path_parts();
+    match hpp.as_slice() {
+        ["t", tid] => Msg::ShowRequest(tid.to_string()),
+        ["s", query] => {
+            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
+            Msg::SearchRequest {
+                query,
+                page: 0,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            }
+        }
+        ["s", query, page] => {
+            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
+            let page = page[1..].parse().unwrap_or(0);
+            Msg::SearchRequest {
+                query,
+                page,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            }
+        }
+        p => {
+            if !p.is_empty() {
+                info!("Unhandled path '{p:?}'");
+            }
+            Msg::SearchRequest {
+                query: "".to_string(),
+                page: 0,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            }
+        }
+    }
+}
+
+mod urls {
+    use seed::Url;
+    pub fn search(query: &str, page: usize) -> Url {
+        let query = Url::encode_uri_component(query);
+        if page > 0 {
+            Url::new().set_hash_path(["s", &query, &format!("p{page}")])
+        } else {
+            Url::new().set_hash_path(["s", &query])
+        }
+    }
+    pub fn thread(tid: &str) -> Url {
+        Url::new().set_hash_path(["t", tid])
+    }
+}
+
 // ------ ------
 // Model
 // ------ ------
 enum Context {
     None,
-    Search(SearchSummary),
-    Thread(ThreadSet),
+    Search(shared::SearchResult),
+    Thread(Vec<shared::Message>),
 }

 // `Model` describes our app state.
@@ -81,16 +116,20 @@ enum RefreshingState {

 // (Remove the line below once any of your `Msg` variants doesn't implement `Copy`.)
 // `Msg` describes the different events you can modify state with.
-enum Msg {
+pub enum Msg {
     Noop,
     RefreshStart,
     RefreshDone(Option<FetchError>),
-    SearchRequest(String),
-    SearchResult(fetch::Result<SearchSummary>),
+    SearchRequest {
+        query: String,
+        page: usize,
+        results_per_page: usize,
+    },
+    SearchResult(fetch::Result<shared::SearchResult>),
     ShowRequest(String),
-    ShowResult(fetch::Result<ThreadSet>),
-    ShowPrettyRequest(String),
-    ShowPrettyResult(fetch::Result<ThreadSet>),
+    ShowResult(fetch::Result<Vec<shared::Message>>),
+    NextPage,
+    PreviousPage,
 }

 // `update` describes how to handle each `Msg`.
@@ -105,18 +144,30 @@ fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
             model.refreshing_state = if let Some(err) = err {
                 RefreshingState::Error(format!("{:?}", err))
             } else {
+                // If looking at search page, refresh the search to view update on the server side.
+                if let Context::Search(sr) = &model.context {
+                    let query = sr.query.clone();
+                    let page = sr.page;
+                    let results_per_page = sr.results_per_page;
+                    orders.perform_cmd(async move {
+                        Msg::SearchResult(search_request(&query, page, results_per_page).await)
+                    });
+                }
+
                 RefreshingState::None
             };
         }

-        Msg::SearchRequest(query) => {
-            info!("searching for '{query}'");
+        Msg::SearchRequest {
+            query,
+            page,
+            results_per_page,
+        } => {
+            info!("searching for '{query}' pg {page} # / pg {results_per_page}");
             model.query = query.clone();
-            let url = Url::new().set_hash_path(["s", &query]);
-            orders.request_url(url);
-            orders
-                .skip()
-                .perform_cmd(async move { Msg::SearchResult(search_request(&query).await) });
+            orders.skip().perform_cmd(async move {
+                Msg::SearchResult(search_request(&query, page, results_per_page).await)
+            });
         }
         Msg::SearchResult(Ok(response_data)) => {
             debug!("fetch ok {:#?}", response_data);
@@ -127,8 +178,6 @@ fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
         }

         Msg::ShowRequest(tid) => {
-            let url = Url::new().set_hash_path(["t", &tid]);
-            orders.request_url(url);
             orders
                 .skip()
                 .perform_cmd(async move { Msg::ShowResult(show_request(&tid).await) });
@@ -140,26 +189,46 @@ fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
         Msg::ShowResult(Err(fetch_error)) => {
             error!("fetch failed {:?}", fetch_error);
         }

-        Msg::ShowPrettyRequest(tid) => {
-            let url = Url::new().set_hash_path(["t", &tid]);
-            orders.request_url(url);
-            orders
-                .skip()
-                .perform_cmd(async move { Msg::ShowPrettyResult(show_pretty_request(&tid).await) });
+        Msg::NextPage => {
+            match &model.context {
+                Context::Search(sr) => {
+                    orders.request_url(urls::search(&sr.query, sr.page + 1));
+                }
+                Context::Thread(_) => (), // do nothing (yet?)
+                Context::None => (), // do nothing (yet?)
+            };
         }
-        Msg::ShowPrettyResult(Ok(response_data)) => {
-            debug!("fetch ok {:#?}", response_data);
-            model.context = Context::Thread(response_data);
-        }
-        Msg::ShowPrettyResult(Err(fetch_error)) => {
-            error!("fetch failed {:?}", fetch_error);
+        Msg::PreviousPage => {
+            match &model.context {
+                Context::Search(sr) => {
+                    orders.request_url(urls::search(&sr.query, sr.page.saturating_sub(1)));
+                }
+                Context::Thread(_) => (), // do nothing (yet?)
+                Context::None => (), // do nothing (yet?)
+            };
         }
     }
 }

-async fn search_request(query: &str) -> fetch::Result<SearchSummary> {
-    Request::new(api::search(query))
+pub async fn show_request(tid: &str) -> fetch::Result<Vec<shared::Message>> {
+    let b = Request::new(api::show(tid))
+        .method(Method::Get)
+        .fetch()
+        .await?
+        .check_status()?
+        .bytes()
+        .await?;
+    let mut deserializer = serde_json::Deserializer::from_slice(&b);
+    deserializer.disable_recursion_limit();
+    Ok(Vec::<shared::Message>::deserialize(&mut deserializer)
+        .map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
+}
+async fn search_request(
+    query: &str,
+    page: usize,
+    results_per_page: usize,
+) -> fetch::Result<shared::SearchResult> {
+    Request::new(api::search(query, page, results_per_page))
         .method(Method::Get)
         .fetch()
         .await?
@@ -168,25 +237,6 @@ async fn search_request(query: &str) -> fetch::Result<SearchSummary> {
         .await
 }

-mod api {
-    const BASE_URL: &str = "/api";
-    pub fn refresh() -> String {
-        format!("{BASE_URL}/refresh")
-    }
-    pub fn search(query: &str) -> String {
-        format!("{BASE_URL}/search/{query}")
-    }
-    pub fn show(tid: &str) -> String {
-        format!("{BASE_URL}/show/{tid}")
-    }
-    pub fn show_pretty(tid: &str) -> String {
-        format!("{BASE_URL}/show/{tid}/pretty")
-    }
-    pub fn original(message_id: &str) -> String {
-        format!("{BASE_URL}/original/{message_id}")
-    }
-}
-
 async fn refresh_request() -> fetch::Result<()> {
     let t = Request::new(api::refresh())
         .method(Method::Get)
@@ -199,159 +249,10 @@ async fn refresh_request() -> fetch::Result<()> {
     Ok(())
 }

-async fn show_request(tid: &str) -> fetch::Result<ThreadSet> {
-    let b = Request::new(api::show(tid))
-        .method(Method::Get)
-        .fetch()
-        .await?
-        .check_status()?
-        .bytes()
-        .await?;
-    let mut deserializer = serde_json::Deserializer::from_slice(&b);
-    deserializer.disable_recursion_limit();
-    Ok(ThreadSet::deserialize(&mut deserializer)
-        .map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
-}
-
-async fn show_pretty_request(tid: &str) -> fetch::Result<ThreadSet> {
-    Request::new(api::show_pretty(tid))
-        .method(Method::Get)
-        .fetch()
-        .await?
-        .check_status()?
-        .json()
-        .await
-}
-
 // ------ ------
 // View
 // ------ ------

-// <subject>
-// <tags>
-//
-// <from1> <date>
-// <to1>
-// <content1>
-// <zippy>
-// <children1>
-// </zippy>
-//
-// <from2> <date>
-// <to2>
-// <body2>
-fn view_message(thread: &ThreadNode) -> Node<Msg> {
-    let message = thread.0.as_ref().expect("ThreadNode missing Message");
-    let children = &thread.1;
-    div![
-        C!["message"],
-        /* TODO(wathiede): collect all the tags and show them here. */
-        /* TODO(wathiede): collect all the attachments from all the subparts */
-        div![C!["header"], "From: ", &message.headers.from],
-        div![C!["header"], "Date: ", &message.headers.date],
-        div![C!["header"], "To: ", &message.headers.to],
-        hr![],
-        div![
-            C!["body"],
-            match &message.body {
-                Some(body) => view_body(body.as_slice()),
-                None => div!["<no body>"],
-            },
-        ],
-        children.iter().map(view_message)
-    ]
-}
-
-fn view_body(body: &[Part]) -> Node<Msg> {
-    div![body.iter().map(view_part)]
-}
-
-fn view_text_plain(content: &Option<Content>) -> Node<Msg> {
-    match &content {
-        Some(Content::String(content)) => p![C!["view-part-text-plain"], content],
-        _ => div![
-            C!["error"],
-            format!("Unhandled content enum for text/plain"),
-        ],
-    }
-}
-
-fn view_part(part: &Part) -> Node<Msg> {
-    match part.content_type.as_str() {
-        "text/plain" => view_text_plain(&part.content),
-        "text/html" => {
-            if let Some(Content::String(html)) = &part.content {
-                let inliner = css_inline::CSSInliner::options()
-                    .load_remote_stylesheets(false)
-                    .remove_style_tags(true)
-                    .build();
-                let inlined = inliner.inline(html).expect("failed to inline CSS");
-
-                return div![C!["view-part-text-html"], div!["TEST"], raw![&inlined]];
-            } else {
-                div![
-                    C!["error"],
-                    format!("Unhandled content enum for multipart/mixed"),
-                ]
-            }
-        }
-
-        // https://en.wikipedia.org/wiki/MIME#alternative
-        // RFC1341 states: In general, user agents that compose multipart/alternative entities
-        // should place the body parts in increasing order of preference, that is, with the
-        // preferred format last.
-        "multipart/alternative" => {
-            if let Some(Content::Multipart(parts)) = &part.content {
-                for part in parts.iter().rev() {
-                    if part.content_type == "text/html" {
-                        if let Some(Content::String(html)) = &part.content {
-                            let inliner = css_inline::CSSInliner::options()
-                                .load_remote_stylesheets(false)
-                                .remove_style_tags(true)
-                                .build();
-                            let inlined = inliner.inline(html).expect("failed to inline CSS");
-                            return div![Node::from_html(None, &inlined)];
-                        }
-                    }
-                    if part.content_type == "text/plain" {
-                        return view_text_plain(&part.content);
-                    }
-                }
-                div!["No known multipart/alternative parts"]
-            } else {
-                div![
-                    C!["error"],
-                    format!("multipart/alternative with non-multipart content"),
-                ]
-            }
-        }
-        "multipart/mixed" => match &part.content {
-            Some(Content::Multipart(parts)) => div![parts.iter().map(view_part)],
-            _ => div![
-                C!["error"],
-                format!("Unhandled content enum for multipart/mixed"),
-            ],
-        },
-        _ => div![
-            C!["error"],
-            format!("Unhandled content type: {}", part.content_type)
-        ],
-    }
-}
-
-fn first_subject(thread: &ThreadNode) -> Option<String> {
-    if let Some(msg) = &thread.0 {
-        return Some(msg.headers.subject.clone());
-    } else {
-        for tn in &thread.1 {
-            if let Some(s) = first_subject(&tn) {
-                return Some(s);
-            }
-        }
-    }
-    None
-}
-
 fn set_title(title: &str) {
     seed::document().set_title(&format!("lb: {}", title));
 }
@@ -365,14 +266,19 @@ fn tags_chiclet(tags: &[String], is_mobile: bool) -> impl Iterator<Item = Node<M
         let classes = C!["tag", IF!(is_mobile => "is-small")];
         let tag = tag.clone();
         a![
+            attrs! {
+                At::Href => urls::search(&format!("tag:{tag}"), 0)
+            },
            match tag.as_str() {
                 "attachment" => span![classes, style, "📎"],
                 "replied" => span![classes, style, i![C!["fa-solid", "fa-reply"]]],
                 _ => span![classes, style, &tag],
             },
-            ev(Ev::Click, move |_| Msg::SearchRequest(
-                Url::encode_uri_component(format!("tag:{tag}"))
-            )),
+            ev(Ev::Click, move |_| Msg::SearchRequest {
+                query: format!("tag:{tag}"),
+                page: 0,
+                results_per_page: SEARCH_RESULTS_PER_PAGE,
+            })
         ]
     })
 }
@@ -402,13 +308,14 @@ fn pretty_authors(authors: &str) -> impl Iterator<Item = Node<Msg>> + '_ {
     )
 }

-fn view_mobile_search_results(query: &str, search_results: &SearchSummary) -> Node<Msg> {
+fn view_mobile_search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
     if query.is_empty() {
         set_title("all mail");
     } else {
         set_title(query);
     }
-    let rows = search_results.0.iter().map(|r| {
+    let summaries = &search_results.summary.0;
+    let rows = summaries.iter().map(|r| {
     /*
         let tid = r.thread.clone();
         tr![
@@ -419,7 +326,7 @@ fn view_mobile_search_results(query: &str, search_results: &SearchSummary) -> No
             ],
             td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
             td![C!["date"], &r.date_relative],
-            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
+            ev(Ev::Click, move |_| Msg::ShowRequest(tid)),
         ]
     */
         let tid = r.thread.clone();
@@ -427,7 +334,7 @@ fn view_mobile_search_results(query: &str, search_results: &SearchSummary) -> No
         div![
             C!["subject"],
             &r.subject,
-            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
+            ev(Ev::Click, move |_| Msg::ShowRequest(tid)),
         ],
         div![
             span![C!["from"], pretty_authors(&r.authors)],
@@ -437,16 +344,23 @@ fn view_mobile_search_results(query: &str, search_results: &SearchSummary) -> No
             hr![],
         ]
     });
-    div![h1!["Search results"], rows]
+    let first = search_results.page * search_results.results_per_page;
+    div![
+        h1!["Search results"],
+        view_search_pager(first, summaries.len(), search_results.total),
+        rows,
+        view_search_pager(first, summaries.len(), search_results.total)
+    ]
 }

-fn view_search_results(query: &str, search_results: &SearchSummary) -> Node<Msg> {
+fn view_search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
     if query.is_empty() {
         set_title("all mail");
     } else {
         set_title(query);
     }
-    let rows = search_results.0.iter().map(|r| {
+    let summaries = &search_results.summary.0;
+    let rows = summaries.iter().map(|r| {
         let tid = r.thread.clone();
         tr![
             td![
@@ -458,78 +372,65 @@ fn view_search_results(query: &str, search_results: &SearchSummary) -> Node<Msg>
                 C!["subject"],
                 tags_chiclet(&r.tags, false),
                 " ",
-                span![
+                a![
                     C!["has-text-light"],
+                    attrs! {
+                        At::Href => urls::thread(&tid)
+                    },
                     &r.subject,
-                    ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid))
                 ]
             ],
             td![C!["date"], &r.date_relative]
         ]
     });
-    div![table![
-        C![
-            "table",
-            "index",
-            "is-fullwidth",
-            "is-hoverable",
-            "is-narrow",
-            "is-striped",
-        ],
-        thead![tr![
-            th![C!["from"], "From"],
-            th![C!["subject"], "Subject"],
-            th![C!["date"], "Date"]
-        ]],
-        tbody![rows]
-    ]]
-}
-
-fn view_thread(thread_set: &ThreadSet) -> Node<Msg> {
-    assert_eq!(thread_set.0.len(), 1);
-    let thread = &thread_set.0[0];
-    assert_eq!(thread.0.len(), 1);
-    let thread_node = &thread.0[0];
-    let subject = first_subject(&thread_node).unwrap_or("<No subject>".to_string());
-    set_title(&subject);
+    let first = search_results.page * search_results.results_per_page;
     div![
-        h1![subject],
-        a![
-            attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
-            "Original"
+        view_search_pager(first, summaries.len(), search_results.total),
+        table![
+            C![
+                "table",
+                "index",
+                "is-fullwidth",
+                "is-hoverable",
+                "is-narrow",
+                "is-striped",
+            ],
+            thead![tr![
+                th![C!["from"], "From"],
+                th![C!["subject"], "Subject"],
+                th![C!["date"], "Date"]
+            ]],
+            tbody![rows]
         ],
-        view_message(&thread_node),
-        div![
-            C!["debug"],
-            "Add zippy for debug dump",
-            view_debug_thread_set(thread_set)
-        ] /* pre![format!("Thread: {:#?}", thread_set).replace(" ", " ")] */
+        view_search_pager(first, summaries.len(), search_results.total)
     ]
 }

-fn view_debug_thread_set(thread_set: &ThreadSet) -> Node<Msg> {
-    ul![thread_set
-        .0
-        .iter()
-        .enumerate()
-        .map(|(i, t)| { li!["t", i, ": ", view_debug_thread(t),] })]
-}
-fn view_debug_thread(thread: &Thread) -> Node<Msg> {
-    ul![thread
-        .0
-        .iter()
-        .enumerate()
-        .map(|(i, tn)| { li!["tn", i, ": ", view_debug_thread_node(tn),] })]
-}
-
-fn view_debug_thread_node(thread_node: &ThreadNode) -> Node<Msg> {
-    ul![
-        IF!(thread_node.0.is_some()=>li!["tn id:", &thread_node.0.as_ref().unwrap().id]),
-        thread_node.1.iter().enumerate().map(|(i, tn)| li![
-            "tn",
-            i,
-            ": ",
-            view_debug_thread_node(tn)
-        ])
+fn view_search_pager(start: usize, count: usize, total: usize) -> Node<Msg> {
+    let is_first = start <= 0;
+    let is_last = (start + SEARCH_RESULTS_PER_PAGE) >= total;
+    nav![
+        C!["pagination"],
+        a![
+            C![
+                "pagination-previous",
+                "button",
+                IF!(is_first => "is-static"),
+                IF!(is_first => "is-info"),
+            ],
+            "<",
+            ev(Ev::Click, |_| Msg::PreviousPage)
+        ],
+        a![
+            C!["pagination-next", "button", IF!(is_last => "is-static")],
+            IF!(is_last => attrs!{ At::Disabled=>true }),
+            ">",
+            ev(Ev::Click, |_| Msg::NextPage)
+        ],
+        ul![
+            C!["pagination-list"],
+            li![format!("{} - {} of {}", start, start + count, total)],
+        ],
     ]
 }
@@ -541,7 +442,7 @@ fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
     } else {
         false
     };
-    let query = query.to_string();
+    let query = Url::decode_uri_component(query).unwrap_or("".to_string());
     nav![
         C!["navbar"],
         attrs! {At::Role=>"navigation"},
@@ -559,13 +460,17 @@ fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
            ],
            a![
                C!["navbar-item", "button"],
+                attrs! {
+                    At::Href => urls::search("is:unread", 0)
+                },
                "Unread",
-                ev(Ev::Click, |_| Msg::SearchRequest("is:unread".to_string())),
            ],
            a![
                C!["navbar-item", "button"],
+                attrs! {
+                    At::Href => urls::search("", 0)
+                },
                "All",
-                ev(Ev::Click, |_| Msg::SearchRequest("".to_string())),
            ],
            input![
                C!["navbar-item", "input"],
@@ -574,10 +479,18 @@ fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
                    At::AutoFocus => true.as_at_value();
                    At::Value => query,
                },
-                input_ev(Ev::Input, Msg::SearchRequest),
+                input_ev(Ev::Input, |q| Msg::SearchRequest {
+                    query: Url::encode_uri_component(q),
+                    page: 0,
+                    results_per_page: SEARCH_RESULTS_PER_PAGE,
+                }),
                // Resend search on enter.
                keyboard_ev(Ev::KeyUp, move |e| if e.key_code() == 0x0d {
-                    Msg::SearchRequest(query)
+                    Msg::SearchRequest {
+                        query: Url::encode_uri_component(query),
+                        page: 0,
+                        results_per_page: SEARCH_RESULTS_PER_PAGE,
+                    }
                } else {
                    Msg::Noop
                }),
@@ -596,6 +509,13 @@ fn view_footer(render_time_ms: u128) -> Node<Msg> {
     ]
 }

+fn view_thread(messages: &[shared::Message]) -> Node<Msg> {
+    div![
+        "MESSAGES GO HERE",
+        ol![messages.iter().map(|msg| li![format!("{:?}", msg)])]
+    ]
+}
+
 fn view_desktop(model: &Model) -> Node<Msg> {
     let content = match &model.context {
         Context::None => div![h1!["Loading"]],

web/src/nm.rs (new file, 193 lines)
@@ -0,0 +1,193 @@
use notmuch::{Content, Part, Thread, ThreadNode, ThreadSet};
use seed::{prelude::*, *};
use serde::de::Deserialize;

use crate::{api, set_title, Msg};

pub async fn show_request(tid: &str) -> fetch::Result<ThreadSet> {
    let b = Request::new(api::show(tid))
        .method(Method::Get)
        .fetch()
        .await?
        .check_status()?
        .bytes()
        .await?;
    let mut deserializer = serde_json::Deserializer::from_slice(&b);
    deserializer.disable_recursion_limit();
    Ok(ThreadSet::deserialize(&mut deserializer)
        .map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
}

pub fn view_thread(thread_set: &ThreadSet) -> Node<Msg> {
    assert_eq!(thread_set.0.len(), 1);
    let thread = &thread_set.0[0];
    assert_eq!(thread.0.len(), 1);
    let thread_node = &thread.0[0];
    let subject = first_subject(&thread_node).unwrap_or("<No subject>".to_string());
    set_title(&subject);
    div![
        h1![subject],
        a![
            attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
            "Original"
        ],
        view_message(&thread_node),
        div![
            C!["debug"],
            "Add zippy for debug dump",
            view_debug_thread_set(thread_set)
        ] /* pre![format!("Thread: {:#?}", thread_set).replace(" ", " ")] */
    ]
}

// <subject>
// <tags>
//
// <from1> <date>
// <to1>
// <content1>
// <zippy>
// <children1>
// </zippy>
//
// <from2> <date>
// <to2>
// <body2>
fn view_message(thread: &ThreadNode) -> Node<Msg> {
    let message = thread.0.as_ref().expect("ThreadNode missing Message");
    let children = &thread.1;
    div![
        C!["message"],
        /* TODO(wathiede): collect all the tags and show them here. */
        /* TODO(wathiede): collect all the attachments from all the subparts */
        div![C!["header"], "From: ", &message.headers.from],
        div![C!["header"], "Date: ", &message.headers.date],
        div![C!["header"], "To: ", &message.headers.to],
        hr![],
        div![
            C!["body"],
            match &message.body {
                Some(body) => view_body(body.as_slice()),
                None => div!["<no body>"],
            },
        ],
        children.iter().map(view_message)
    ]
}

fn view_body(body: &[Part]) -> Node<Msg> {
    div![body.iter().map(view_part)]
}

fn view_text_plain(content: &Option<Content>) -> Node<Msg> {
    match &content {
        Some(Content::String(content)) => p![C!["view-part-text-plain"], content],
        _ => div![
            C!["error"],
            format!("Unhandled content enum for text/plain"),
        ],
    }
}

fn view_part(part: &Part) -> Node<Msg> {
    match part.content_type.as_str() {
        "text/plain" => view_text_plain(&part.content),
        "text/html" => {
            if let Some(Content::String(html)) = &part.content {
                let inliner = css_inline::CSSInliner::options()
                    .load_remote_stylesheets(false)
                    .remove_style_tags(true)
                    .build();
                let inlined = inliner.inline(html).expect("failed to inline CSS");

                return div![C!["view-part-text-html"], div!["TEST"], raw![&inlined]];
            } else {
                div![
                    C!["error"],
                    format!("Unhandled content enum for multipart/mixed"),
                ]
            }
        }

        // https://en.wikipedia.org/wiki/MIME#alternative
        // RFC1341 states: In general, user agents that compose multipart/alternative entities
        // should place the body parts in increasing order of preference, that is, with the
        // preferred format last.
        "multipart/alternative" => {
            if let Some(Content::Multipart(parts)) = &part.content {
                for part in parts.iter().rev() {
                    if part.content_type == "text/html" {
                        if let Some(Content::String(html)) = &part.content {
                            let inliner = css_inline::CSSInliner::options()
                                .load_remote_stylesheets(false)
                                .remove_style_tags(true)
                                .build();
                            let inlined = inliner.inline(html).expect("failed to inline CSS");
                            return div![Node::from_html(None, &inlined)];
                        }
                    }
                    if part.content_type == "text/plain" {
                        return view_text_plain(&part.content);
                    }
                }
                div!["No known multipart/alternative parts"]
            } else {
                div![
                    C!["error"],
                    format!("multipart/alternative with non-multipart content"),
                ]
            }
        }
        "multipart/mixed" => match &part.content {
            Some(Content::Multipart(parts)) => div![parts.iter().map(view_part)],
            _ => div![
                C!["error"],
                format!("Unhandled content enum for multipart/mixed"),
            ],
        },
        _ => div![
            C!["error"],
            format!("Unhandled content type: {}", part.content_type)
        ],
    }
}

fn first_subject(thread: &ThreadNode) -> Option<String> {
    if let Some(msg) = &thread.0 {
        return Some(msg.headers.subject.clone());
    } else {
        for tn in &thread.1 {
            if let Some(s) = first_subject(&tn) {
                return Some(s);
            }
        }
    }
    None
}

fn view_debug_thread_set(thread_set: &ThreadSet) -> Node<Msg> {
    ul![thread_set
        .0
        .iter()
        .enumerate()
        .map(|(i, t)| { li!["t", i, ": ", view_debug_thread(t),] })]
}
fn view_debug_thread(thread: &Thread) -> Node<Msg> {
    ul![thread
        .0
        .iter()
        .enumerate()
        .map(|(i, tn)| { li!["tn", i, ": ", view_debug_thread_node(tn),] })]
}

fn view_debug_thread_node(thread_node: &ThreadNode) -> Node<Msg> {
    ul![
        IF!(thread_node.0.is_some()=>li!["tn id:", &thread_node.0.as_ref().unwrap().id]),
        thread_node.1.iter().enumerate().map(|(i, tn)| li![
            "tn",
            i,
            ": ",
            view_debug_thread_node(tn)
        ])
    ]
}