// (Lines like the one below ignore selected Clippy rules
|
|
// - it's useful when you want to check your code with `cargo make verify`
|
|
// but some rules are too "annoying" or are not applicable for your case.)
|
|
#![allow(clippy::wildcard_imports)]
|
|
use std::{
|
|
collections::hash_map::DefaultHasher,
|
|
hash::{Hash, Hasher},
|
|
};
|
|
|
|
use chrono::{DateTime, Datelike, Duration, Local, Utc};
|
|
use graphql_client::GraphQLQuery;
|
|
use itertools::Itertools;
|
|
use log::{debug, error, info, Level};
|
|
use notmuch::{Content, Part, Thread, ThreadNode, ThreadSet};
|
|
use seed::{prelude::*, *};
|
|
use serde::de::Deserialize;
|
|
use thiserror::Error;
|
|
use wasm_timer::Instant;
|
|
|
|
use crate::graphql::{front_page_query::*, send_graphql, show_thread_query::*};
|
|
|
|
mod graphql;
|
|
|
|
// Number of search results shown per page (both legacy REST and GraphQL paging).
const SEARCH_RESULTS_PER_PAGE: usize = 20;
// Feature flag: route queries through the GraphQL backend instead of the
// legacy REST endpoints.
const USE_GRAPHQL: bool = true;
|
|
|
|
// Errors surfaced to the UI layer (stored in `Model::ui_error`).
#[derive(Error, Debug)]
enum UIError {
    // Sentinel "no error" value; the default state.
    #[error("No error, this should never be presented to user")]
    NoError,
    // A fetch to the named endpoint failed outright.
    #[error("failed to fetch {0}: {1:?}")]
    FetchError(&'static str, FetchError),
    // The fetch succeeded but the GraphQL payload failed to decode.
    #[error("{0} error decoding: {1:?}")]
    FetchDecodeError(&'static str, Vec<graphql_client::Error>),
    // A GraphQL response carried neither data nor errors.
    #[error("no data or errors for {0}")]
    NoData(&'static str),
}
|
|
|
|
// ------ ------
|
|
// Init
|
|
// ------ ------
|
|
|
|
// `init` describes what should happen when your app started.
|
|
fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
|
|
if url.hash().is_none() {
|
|
orders.request_url(urls::search("is:unread", 0));
|
|
} else {
|
|
orders.notify(subs::UrlRequested::new(url));
|
|
};
|
|
orders.subscribe(on_url_changed);
|
|
|
|
Model {
|
|
context: Context::None,
|
|
query: "".to_string(),
|
|
refreshing_state: RefreshingState::None,
|
|
ui_error: UIError::NoError,
|
|
tags: None,
|
|
}
|
|
}
|
|
|
|
fn on_url_changed(uc: subs::UrlChanged) -> Msg {
|
|
let mut url = uc.0;
|
|
info!(
|
|
"url changed '{}', history {}",
|
|
url,
|
|
history().length().unwrap_or(0)
|
|
);
|
|
let hpp = url.remaining_hash_path_parts();
|
|
match hpp.as_slice() {
|
|
["t", tid] => {
|
|
if USE_GRAPHQL {
|
|
Msg::ShowThreadRequest {
|
|
thread_id: tid.to_string(),
|
|
}
|
|
} else {
|
|
Msg::ShowPrettyRequest(tid.to_string())
|
|
}
|
|
}
|
|
["s", query] => {
|
|
let query = Url::decode_uri_component(query).unwrap_or("".to_string());
|
|
if USE_GRAPHQL {
|
|
Msg::FrontPageRequest {
|
|
query,
|
|
after: None,
|
|
before: None,
|
|
first: None,
|
|
last: None,
|
|
}
|
|
} else {
|
|
Msg::SearchRequest {
|
|
query,
|
|
page: 0,
|
|
results_per_page: SEARCH_RESULTS_PER_PAGE,
|
|
}
|
|
}
|
|
}
|
|
["s", query, page] => {
|
|
let query = Url::decode_uri_component(query).unwrap_or("".to_string());
|
|
let page = page[1..].parse().unwrap_or(0);
|
|
if USE_GRAPHQL {
|
|
Msg::FrontPageRequest {
|
|
query,
|
|
after: Some(page.to_string()),
|
|
before: None,
|
|
first: None,
|
|
last: None,
|
|
}
|
|
} else {
|
|
Msg::SearchRequest {
|
|
query,
|
|
page,
|
|
results_per_page: SEARCH_RESULTS_PER_PAGE,
|
|
}
|
|
}
|
|
}
|
|
p => {
|
|
if !p.is_empty() {
|
|
info!("Unhandled path '{p:?}'");
|
|
}
|
|
if USE_GRAPHQL {
|
|
Msg::FrontPageRequest {
|
|
query: "".to_string(),
|
|
after: None,
|
|
before: None,
|
|
first: None,
|
|
last: None,
|
|
}
|
|
} else {
|
|
Msg::SearchRequest {
|
|
query: "".to_string(),
|
|
page: 0,
|
|
results_per_page: SEARCH_RESULTS_PER_PAGE,
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
mod urls {
|
|
use seed::Url;
|
|
pub fn search(query: &str, page: usize) -> Url {
|
|
let query = Url::encode_uri_component(query);
|
|
if page > 0 {
|
|
Url::new().set_hash_path(["s", &query, &format!("p{page}")])
|
|
} else {
|
|
Url::new().set_hash_path(["s", &query])
|
|
}
|
|
}
|
|
pub fn thread(tid: &str) -> Url {
|
|
Url::new().set_hash_path(["t", tid])
|
|
}
|
|
}
|
|
|
|
// ------ ------
|
|
// Model
|
|
// ------ ------
|
|
// `Context` is the main content currently being displayed.
enum Context {
    // Nothing loaded yet (renders a "Loading" placeholder).
    None,
    // Legacy REST search results.
    Search(shared::SearchResult),
    // GraphQL search results.
    SearchResult {
        query: String,
        results: Vec<FrontPageQuerySearchNodes>,
        // Total result count reported by the server (shown in the pager).
        count: usize,
        // Cursor info used by Msg::NextPage / Msg::PreviousPage.
        pager: FrontPageQuerySearchPageInfo,
    },
    // Legacy REST thread view.
    Thread(ThreadSet),
    // GraphQL thread view.
    ThreadResult(ShowThreadQueryThread),
}
|
|
|
|
// `Model` describes our app state.
struct Model {
    // Current search query (mirrored into the header's search input).
    query: String,
    // What the main content area is showing.
    context: Context,
    // Progress of a server-side refresh, reflected in the header button.
    refreshing_state: RefreshingState,
    // Last UI error; `NoError` means none.
    ui_error: UIError,
    // Tag list for the sidebar; None until the first GraphQL response arrives.
    tags: Option<Vec<Tag>>,
}
|
|
|
|
// A mail tag plus the CSS colors used to render it in the tag sidebar,
// as supplied by the GraphQL backend.
struct Tag {
    name: String,
    // CSS background color value.
    bg_color: String,
    // CSS foreground (text) color value.
    fg_color: String,
}
|
|
|
|
// Progress of a server-side refresh triggered by `Msg::RefreshStart`.
#[derive(Debug, PartialEq)]
enum RefreshingState {
    // Idle: no refresh running (or the last one succeeded).
    None,
    // A refresh request is in flight.
    Loading,
    // The last refresh failed; the string is the formatted error.
    Error(String),
}
|
|
|
|
// ------ ------
|
|
// Update
|
|
// ------ ------
|
|
|
|
// `Msg` describes the different events you can modify state with.
//
// Request/Result variants come in pairs: the Request variant kicks off an
// async fetch; the Result variant carries its outcome back into `update`.
enum Msg {
    // No-op, used where an event handler must return something.
    Noop,
    // Tell the client to refresh its state
    Reload,
    // Tell the server to update state
    RefreshStart,
    // Server refresh finished; `Some` carries the failure, `None` is success.
    RefreshDone(Option<FetchError>),
    // Legacy REST search.
    SearchRequest {
        query: String,
        page: usize,
        results_per_page: usize,
    },
    SearchResult(fetch::Result<shared::SearchResult>),
    // Legacy REST raw-thread view.
    ShowRequest(String),
    ShowResult(fetch::Result<ThreadSet>),
    // Legacy REST server-prettified thread view.
    ShowPrettyRequest(String),
    ShowPrettyResult(fetch::Result<ThreadSet>),
    // Pagination; interpreted according to the current `Context`.
    NextPage,
    PreviousPage,

    // GraphQL front-page (search) query. `after`/`before` are opaque paging
    // cursors, `first`/`last` the forward/backward page sizes.
    FrontPageRequest {
        query: String,
        after: Option<String>,
        before: Option<String>,
        first: Option<i64>,
        last: Option<i64>,
    },
    FrontPageResult(
        fetch::Result<graphql_client::Response<graphql::front_page_query::ResponseData>>,
    ),
    // GraphQL single-thread query.
    ShowThreadRequest {
        thread_id: String,
    },
    ShowThreadResult(
        fetch::Result<graphql_client::Response<graphql::show_thread_query::ResponseData>>,
    ),
}
|
|
|
|
// `update` describes how to handle each `Msg`.
//
// Request arms start an async fetch via `orders.perform_cmd` (usually with
// `skip()` so no re-render happens until the result arrives); the matching
// Result arms store the payload in `model.context` or log the failure.
fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
    match msg {
        Msg::Noop => {}
        // Ask the server to refresh, tracking progress in the model.
        Msg::RefreshStart => {
            model.refreshing_state = RefreshingState::Loading;
            orders.perform_cmd(async move { Msg::RefreshDone(refresh_request().await.err()) });
        }
        // Record the refresh outcome, then reload the current view either way.
        Msg::RefreshDone(err) => {
            model.refreshing_state = if let Some(err) = err {
                RefreshingState::Error(format!("{:?}", err))
            } else {
                RefreshingState::None
            };
            orders.perform_cmd(async move { Msg::Reload });
        }
        // Re-dispatch the current URL, as if the user had just navigated to it.
        Msg::Reload => {
            orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
        }

        // Legacy REST search.
        Msg::SearchRequest {
            query,
            page,
            results_per_page,
        } => {
            info!("searching for '{query}' pg {page} # / pg {results_per_page}");
            model.query = query.clone();
            orders.skip().perform_cmd(async move {
                Msg::SearchResult(search_request(&query, page, results_per_page).await)
            });
        }
        Msg::SearchResult(Ok(response_data)) => {
            debug!("fetch ok {:#?}", response_data);
            model.context = Context::Search(response_data);
        }
        Msg::SearchResult(Err(fetch_error)) => {
            error!("fetch failed {:?}", fetch_error);
        }

        // Legacy REST raw-thread view.
        Msg::ShowRequest(tid) => {
            orders
                .skip()
                .perform_cmd(async move { Msg::ShowResult(show_request(&tid).await) });
        }
        Msg::ShowResult(Ok(response_data)) => {
            debug!("fetch ok {:#?}", response_data);
            model.context = Context::Thread(response_data);
        }
        Msg::ShowResult(Err(fetch_error)) => {
            error!("fetch failed {:?}", fetch_error);
        }

        // Legacy REST server-prettified thread view.
        Msg::ShowPrettyRequest(tid) => {
            orders
                .skip()
                .perform_cmd(async move { Msg::ShowPrettyResult(show_pretty_request(&tid).await) });
        }
        Msg::ShowPrettyResult(Ok(response_data)) => {
            debug!("fetch ok {:#?}", response_data);
            model.context = Context::Thread(response_data);
        }
        Msg::ShowPrettyResult(Err(fetch_error)) => {
            error!("fetch failed {:?}", fetch_error);
        }
        // Pagination. Legacy search pages by number (via URL change); GraphQL
        // search pages by opaque cursor taken from the current pager.
        Msg::NextPage => {
            match &model.context {
                Context::Search(sr) => {
                    orders.request_url(urls::search(&sr.query, sr.page + 1));
                }
                Context::SearchResult { query, pager, .. } => {
                    let query = query.to_string();
                    let after = pager.end_cursor.clone();
                    orders.perform_cmd(async move {
                        Msg::FrontPageRequest {
                            query,
                            after,
                            before: None,
                            first: Some(SEARCH_RESULTS_PER_PAGE as i64),
                            last: None,
                        }
                    });
                }
                Context::Thread(_) => (), // do nothing (yet?)
                Context::ThreadResult(_) => (), // do nothing (yet?)
                Context::None => (), // do nothing (yet?)
            };
        }
        Msg::PreviousPage => {
            match &model.context {
                Context::Search(sr) => {
                    // saturating_sub keeps page 0 from underflowing.
                    orders.request_url(urls::search(&sr.query, sr.page.saturating_sub(1)));
                }
                Context::SearchResult { query, pager, .. } => {
                    let query = query.to_string();
                    let before = pager.start_cursor.clone();
                    orders.perform_cmd(async move {
                        Msg::FrontPageRequest {
                            query,
                            after: None,
                            before,
                            first: None,
                            last: Some(SEARCH_RESULTS_PER_PAGE as i64),
                        }
                    });
                }

                Context::Thread(_) => (), // do nothing (yet?)
                Context::ThreadResult(_) => (), // do nothing (yet?)
                Context::None => (), // do nothing (yet?)
            };
        }

        // GraphQL front-page (search) query.
        Msg::FrontPageRequest {
            query,
            after,
            before,
            first,
            last,
        } => {
            info!("making FrontPageRequest: {query} after:{after:?} before:{before:?} first:{first:?} last:{last:?}");
            model.query = query.clone();
            orders.skip().perform_cmd(async move {
                Msg::FrontPageResult(
                    send_graphql(graphql::FrontPageQuery::build_query(
                        graphql::front_page_query::Variables {
                            query,
                            after,
                            before,
                            first,
                            last,
                        },
                    ))
                    .await,
                )
            });
        }
        Msg::FrontPageResult(Err(e)) => error!("error FrontPageResult: {e:?}"),
        // A GraphQL response with neither data nor errors violates the spec.
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: None,
            errors: None,
            ..
        })) => {
            error!("FrontPageResult no data or errors, should not happen");
        }
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: None,
            errors: Some(e),
            ..
        })) => {
            error!("FrontPageResult error: {e:?}");
        }
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            // Refresh the tag sidebar from the response, then swap the
            // context over to the new search results.
            model.tags = Some(
                data.tags
                    .into_iter()
                    .map(|t| Tag {
                        name: t.name,
                        bg_color: t.bg_color,
                        fg_color: t.fg_color,
                    })
                    .collect(),
            );
            model.context = Context::SearchResult {
                query: model.query.clone(),
                results: data.search.nodes,
                count: data.count as usize,
                pager: data.search.page_info,
            };
        }

        // GraphQL single-thread query.
        Msg::ShowThreadRequest { thread_id } => {
            orders.skip().perform_cmd(async move {
                Msg::ShowThreadResult(
                    send_graphql(graphql::ShowThreadQuery::build_query(
                        graphql::show_thread_query::Variables { thread_id },
                    ))
                    .await,
                )
            });
        }
        Msg::ShowThreadResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            // The thread query also returns the tag list; keep the sidebar fresh.
            model.tags = Some(
                data.tags
                    .into_iter()
                    .map(|t| Tag {
                        name: t.name,
                        bg_color: t.bg_color,
                        fg_color: t.fg_color,
                    })
                    .collect(),
            );
            model.context = Context::ThreadResult(data.thread);
        }
        // Any error or missing-data response is just logged.
        Msg::ShowThreadResult(bad) => {
            error!("show_thread_query error: {bad:?}");
        }
    }
}
|
|
|
|
async fn search_request(
|
|
query: &str,
|
|
page: usize,
|
|
results_per_page: usize,
|
|
) -> fetch::Result<shared::SearchResult> {
|
|
Request::new(api::search(query, page, results_per_page))
|
|
.method(Method::Get)
|
|
.fetch()
|
|
.await?
|
|
.check_status()?
|
|
.json()
|
|
.await
|
|
}
|
|
|
|
mod api {
|
|
use seed::Url;
|
|
|
|
const BASE_URL: &str = "/api";
|
|
pub fn refresh() -> String {
|
|
format!("{BASE_URL}/refresh")
|
|
}
|
|
pub fn search(query: &str, page: usize, results_per_page: usize) -> String {
|
|
let query = Url::encode_uri_component(query);
|
|
format!("{BASE_URL}/search/{query}?page={page}&results_per_page={results_per_page}")
|
|
}
|
|
pub fn show(tid: &str) -> String {
|
|
format!("{BASE_URL}/show/{tid}")
|
|
}
|
|
pub fn show_pretty(tid: &str) -> String {
|
|
format!("{BASE_URL}/show/{tid}/pretty")
|
|
}
|
|
pub fn original(message_id: &str) -> String {
|
|
format!("{BASE_URL}/original/{message_id}")
|
|
}
|
|
}
|
|
|
|
async fn refresh_request() -> fetch::Result<()> {
|
|
let t = Request::new(api::refresh())
|
|
.method(Method::Get)
|
|
.fetch()
|
|
.await?
|
|
.check_status()?
|
|
.text()
|
|
.await?;
|
|
info!("refresh {t}");
|
|
Ok(())
|
|
}
|
|
|
|
async fn show_request(tid: &str) -> fetch::Result<ThreadSet> {
|
|
let b = Request::new(api::show(tid))
|
|
.method(Method::Get)
|
|
.fetch()
|
|
.await?
|
|
.check_status()?
|
|
.bytes()
|
|
.await?;
|
|
let mut deserializer = serde_json::Deserializer::from_slice(&b);
|
|
deserializer.disable_recursion_limit();
|
|
Ok(ThreadSet::deserialize(&mut deserializer)
|
|
.map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
|
|
}
|
|
|
|
async fn show_pretty_request(tid: &str) -> fetch::Result<ThreadSet> {
|
|
Request::new(api::show_pretty(tid))
|
|
.method(Method::Get)
|
|
.fetch()
|
|
.await?
|
|
.check_status()?
|
|
.json()
|
|
.await
|
|
}
|
|
|
|
// ------ ------
|
|
// View
|
|
// ------ ------
|
|
|
|
// <subject>
|
|
// <tags>
|
|
//
|
|
// <from1> <date>
|
|
// <to1>
|
|
// <content1>
|
|
// <zippy>
|
|
// <children1>
|
|
// </zippy>
|
|
//
|
|
// <from2> <date>
|
|
// <to2>
|
|
// <body2>
|
|
// Recursively render one legacy message and its replies (see the layout
// sketch in the comments above).
fn view_message(thread: &ThreadNode) -> Node<Msg> {
    // A ThreadNode carries an optional message (.0) and child nodes (.1);
    // this view only handles nodes that actually have a message.
    let message = thread.0.as_ref().expect("ThreadNode missing Message");
    let children = &thread.1;
    div![
        C!["message"],
        /* TODO(wathiede): collect all the tags and show them here. */
        /* TODO(wathiede): collect all the attachments from all the subparts */
        div![C!["header"], "From: ", &message.headers.from],
        div![C!["header"], "Date: ", &message.headers.date],
        div![C!["header"], "To: ", &message.headers.to],
        div![
            C!["body"],
            match &message.body {
                Some(body) => view_body(body.as_slice()),
                None => div!["<no body>"],
            },
        ],
        // Replies render recursively, nested inside the parent's node.
        children.iter().map(view_message)
    ]
}
|
|
|
|
fn view_body(body: &[Part]) -> Node<Msg> {
|
|
div![body.iter().map(view_part)]
|
|
}
|
|
|
|
fn view_text_plain(content: &Option<Content>) -> Node<Msg> {
|
|
match &content {
|
|
Some(Content::String(content)) => p![C!["view-part-text-plain"], content],
|
|
_ => div![
|
|
C!["error"],
|
|
format!("Unhandled content enum for text/plain"),
|
|
],
|
|
}
|
|
}
|
|
|
|
fn view_part(part: &Part) -> Node<Msg> {
|
|
match part.content_type.as_str() {
|
|
"text/plain" => view_text_plain(&part.content),
|
|
"text/html" => {
|
|
if let Some(Content::String(html)) = &part.content {
|
|
let inliner = css_inline::CSSInliner::options()
|
|
.load_remote_stylesheets(false)
|
|
.remove_style_tags(true)
|
|
.build();
|
|
let inlined = inliner.inline(html).expect("failed to inline CSS");
|
|
|
|
return div![C!["view-part-text-html"], raw![&inlined]];
|
|
} else {
|
|
div![
|
|
C!["error"],
|
|
format!("Unhandled content enum for multipart/mixed"),
|
|
]
|
|
}
|
|
}
|
|
|
|
// https://en.wikipedia.org/wiki/MIME#alternative
|
|
// RFC1341 states: In general, user agents that compose multipart/alternative entities
|
|
// should place the body parts in increasing order of preference, that is, with the
|
|
// preferred format last.
|
|
"multipart/alternative" => {
|
|
if let Some(Content::Multipart(parts)) = &part.content {
|
|
for part in parts.iter().rev() {
|
|
if part.content_type == "text/html" {
|
|
if let Some(Content::String(html)) = &part.content {
|
|
let inliner = css_inline::CSSInliner::options()
|
|
.load_remote_stylesheets(false)
|
|
.remove_style_tags(true)
|
|
.build();
|
|
let inlined = inliner.inline(html).expect("failed to inline CSS");
|
|
return div![Node::from_html(None, &inlined)];
|
|
}
|
|
}
|
|
if part.content_type == "text/plain" {
|
|
return view_text_plain(&part.content);
|
|
}
|
|
}
|
|
div!["No known multipart/alternative parts"]
|
|
} else {
|
|
div![
|
|
C!["error"],
|
|
format!("multipart/alternative with non-multipart content"),
|
|
]
|
|
}
|
|
}
|
|
"multipart/mixed" => match &part.content {
|
|
Some(Content::Multipart(parts)) => div![parts.iter().map(view_part)],
|
|
_ => div![
|
|
C!["error"],
|
|
format!("Unhandled content enum for multipart/mixed"),
|
|
],
|
|
},
|
|
_ => div![
|
|
C!["error"],
|
|
format!("Unhandled content type: {}", part.content_type)
|
|
],
|
|
}
|
|
}
|
|
|
|
fn first_subject(thread: &ThreadNode) -> Option<String> {
|
|
if let Some(msg) = &thread.0 {
|
|
return Some(msg.headers.subject.clone());
|
|
} else {
|
|
for tn in &thread.1 {
|
|
if let Some(s) = first_subject(&tn) {
|
|
return Some(s);
|
|
}
|
|
}
|
|
}
|
|
None
|
|
}
|
|
|
|
fn set_title(title: &str) {
|
|
seed::document().set_title(&format!("lb: {}", title));
|
|
}
|
|
|
|
// Render each tag as a small colored "chiclet" linking to a search for that
// tag. `is_mobile` shrinks the chiclets on small screens.
fn tags_chiclet(tags: &[String], is_mobile: bool) -> impl Iterator<Item = Node<Msg>> + '_ {
    tags.iter().map(move |tag| {
        // Derive a stable background color from the tag name: hash it and
        // keep the low 24 bits as an RGB hex color.
        let mut hasher = DefaultHasher::new();
        tag.hash(&mut hasher);
        let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
        let style = style! {St::BackgroundColor=>hex};
        let classes = C!["tag", IF!(is_mobile => "is-small")];
        let tag = tag.clone();
        a![
            attrs! {
                At::Href => urls::search(&format!("tag:{tag}"), 0)
            },
            // A couple of well-known tags get icons instead of text.
            match tag.as_str() {
                "attachment" => span![classes, style, "📎"],
                "replied" => span![classes, style, i![C!["fa-solid", "fa-reply"]]],
                _ => span![classes, style, &tag],
            },
            // Clicking also fires an explicit search message, in addition to
            // the href navigation above.
            ev(Ev::Click, move |_| Msg::SearchRequest {
                query: format!("tag:{tag}"),
                page: 0,
                results_per_page: SEARCH_RESULTS_PER_PAGE,
            })
        ]
    })
}
|
|
|
|
fn pretty_authors(authors: &str) -> impl Iterator<Item = Node<Msg>> + '_ {
|
|
let one_person = authors.matches(',').count() == 0;
|
|
let authors = authors.split(',');
|
|
|
|
Itertools::intersperse(
|
|
authors.filter_map(move |author| {
|
|
if one_person {
|
|
return Some(span![
|
|
attrs! {
|
|
At::Title => author.trim()},
|
|
author
|
|
]);
|
|
}
|
|
author.split_whitespace().nth(0).map(|first| {
|
|
span![
|
|
attrs! {
|
|
At::Title => author.trim()},
|
|
first
|
|
]
|
|
})
|
|
}),
|
|
span![", "],
|
|
)
|
|
}
|
|
|
|
fn human_age(timestamp: i64) -> String {
|
|
let now = Local::now();
|
|
let yesterday = now - Duration::days(1);
|
|
let ts = DateTime::<Utc>::from_timestamp(timestamp, 0)
|
|
.unwrap()
|
|
.with_timezone(&Local);
|
|
let age = now - ts;
|
|
let datetime = if age < Duration::minutes(1) {
|
|
format!("{} min. ago", age.num_seconds())
|
|
} else if age < Duration::hours(1) {
|
|
format!("{} min. ago", age.num_minutes())
|
|
} else if ts.date_naive() == now.date_naive() {
|
|
ts.format("Today %H:%M").to_string()
|
|
} else if ts.date_naive() == yesterday.date_naive() {
|
|
ts.format("Yest. %H:%M").to_string()
|
|
} else if age < Duration::weeks(1) {
|
|
ts.format("%a %H:%M").to_string()
|
|
} else if ts.year() == now.year() {
|
|
ts.format("%b %d %H:%M").to_string()
|
|
} else {
|
|
ts.format("%b %d, %Y %H:%M").to_string()
|
|
};
|
|
datetime
|
|
}
|
|
|
|
// Mobile rendering of GraphQL search results: one stacked "row" per thread
// (instead of the desktop table), with pagers above and below.
fn view_mobile_search_results(
    query: &str,
    results: &[FrontPageQuerySearchNodes],
    count: usize,
    pager: &FrontPageQuerySearchPageInfo,
) -> Node<Msg> {
    // Reflect the current search in the tab title.
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let rows = results.iter().map(|r| {
        let tid = r.thread.clone();
        // NOTE(review): the `as i64` cast implies r.timestamp is a different
        // integer width in the generated GraphQL type — confirm it cannot
        // truncate.
        let datetime = human_age(r.timestamp as i64);
        // Each row is a full-row link to the thread view.
        a![
            C!["has-text-light"],
            attrs! {
                At::Href => urls::thread(&tid)
            },
            div![
                C!["row"],
                div![C!["subject"], &r.subject],
                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
                div![
                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
                    span![C!["is-size-7", "float-right", "date"], datetime]
                ]
            ]
        ]
    });
    div![
        C!["search-results"],
        view_search_pager(count, pager),
        rows,
        view_search_pager(count, pager),
    ]
}
|
|
|
|
// Mobile rendering of legacy REST search results; mirrors
// `view_mobile_search_results` but reads from `shared::SearchResult` and
// uses the numeric legacy pager.
fn view_mobile_search_results_legacy(
    query: &str,
    search_results: &shared::SearchResult,
) -> Node<Msg> {
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let summaries = &search_results.summary.0;
    let rows = summaries.iter().map(|r| {
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        // Each row is a full-row link to the thread view.
        a![
            C!["has-text-light"],
            attrs! {
                At::Href => urls::thread(&tid)
            },
            div![
                C!["row"],
                div![C!["subject"], &r.subject],
                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
                div![
                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
                    span![C!["is-size-7", "float-right", "date"], datetime]
                ]
            ]
        ]
    });
    // Legacy paging is numeric: index of the first result on this page.
    let first = search_results.page * search_results.results_per_page;
    div![
        C!["search-results"],
        view_search_pager_legacy(first, summaries.len(), search_results.total),
        rows,
        view_search_pager_legacy(first, summaries.len(), search_results.total)
    ]
}
|
|
|
|
// Desktop rendering of GraphQL search results as a From/Subject/Date table,
// with cursor-based pagers above and below.
fn view_search_results(
    query: &str,
    results: &[FrontPageQuerySearchNodes],
    count: usize,
    pager: &FrontPageQuerySearchPageInfo,
) -> Node<Msg> {
    info!("pager {pager:?}");
    // Reflect the current search in the tab title.
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let rows = results.iter().map(|r| {
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        tr![
            td![
                C!["from"],
                pretty_authors(&r.authors),
                // TODO(wathiede): visualize message count if more than one message is in the
                // thread
                //IF!(r.total>1 => small![" ", r.total.to_string()]),
            ],
            td![
                C!["subject"],
                tags_chiclet(&r.tags, false),
                " ",
                // Only the subject text links to the thread on desktop.
                a![
                    C!["has-text-light"],
                    attrs! {
                        At::Href => urls::thread(&tid)
                    },
                    &r.subject,
                ]
            ],
            td![C!["date"], datetime]
        ]
    });

    div![
        view_search_pager(count, pager),
        table![
            C![
                "table",
                "index",
                "is-fullwidth",
                "is-hoverable",
                "is-narrow",
                "is-striped",
            ],
            thead![tr![
                th![C!["from"], "From"],
                th![C!["subject"], "Subject"],
                th![C!["date"], "Date"]
            ]],
            tbody![rows]
        ],
        view_search_pager(count, pager)
    ]
}
|
|
|
|
// Desktop rendering of legacy REST search results; mirrors
// `view_search_results` but reads from `shared::SearchResult`, shows the
// per-thread message count, and uses the numeric legacy pager.
fn view_search_results_legacy(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let summaries = &search_results.summary.0;
    let rows = summaries.iter().map(|r| {
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        tr![
            td![
                C!["from"],
                pretty_authors(&r.authors),
                // Show the message count for threads with more than one message.
                IF!(r.total>1 => small![" ", r.total.to_string()]),
            ],
            td![
                C!["subject"],
                tags_chiclet(&r.tags, false),
                " ",
                a![
                    C!["has-text-light"],
                    attrs! {
                        At::Href => urls::thread(&tid)
                    },
                    &r.subject,
                ]
            ],
            td![C!["date"], datetime]
        ]
    });
    // Index of the first result on this page.
    let first = search_results.page * search_results.results_per_page;
    div![
        view_search_pager_legacy(first, summaries.len(), search_results.total),
        table![
            C![
                "table",
                "index",
                "is-fullwidth",
                "is-hoverable",
                "is-narrow",
                "is-striped",
            ],
            thead![tr![
                th![C!["from"], "From"],
                th![C!["subject"], "Subject"],
                th![C!["date"], "Date"]
            ]],
            tbody![rows]
        ],
        view_search_pager_legacy(first, summaries.len(), search_results.total)
    ]
}
|
|
|
|
// Pager for GraphQL search results: prev/next driven by the PageInfo
// has_previous_page/has_next_page flags, plus a "start - end of count" label.
fn view_search_pager(count: usize, pager: &FrontPageQuerySearchPageInfo) -> Node<Msg> {
    // NOTE(review): the start cursor is parsed as a numeric offset here;
    // this assumes the server's cursors are stringified indices — confirm.
    let start = pager
        .start_cursor
        .as_ref()
        .map(|i| i.parse().unwrap_or(0))
        .unwrap_or(0);
    nav![
        C!["pagination"],
        a![
            C![
                "pagination-previous",
                "button",
                //IF!(!pager.has_previous_page => "is-static"),
            ],
            IF!(!pager.has_previous_page => attrs!{ At::Disabled=>true }),
            "<",
            // Only attach the click handler when there is a previous page.
            IF!(pager.has_previous_page => ev(Ev::Click, |_| Msg::PreviousPage)),
        ],
        a![
            C![
                "pagination-next",
                "button",
                //IF!(!pager.has_next_page => "is-static")
            ],
            IF!(!pager.has_next_page => attrs!{ At::Disabled=>true }),
            ">",
            // Only attach the click handler when there is a next page.
            IF!(pager.has_next_page => ev(Ev::Click, |_| Msg::NextPage))
        ],
        ul![
            C!["pagination-list"],
            li![format!(
                "{} - {} of {}",
                start,
                count.min(start + SEARCH_RESULTS_PER_PAGE),
                count
            )],
        ],
    ]
}
|
|
|
|
fn view_search_pager_legacy(start: usize, count: usize, total: usize) -> Node<Msg> {
|
|
let is_first = start <= 0;
|
|
let is_last = (start + SEARCH_RESULTS_PER_PAGE) >= total;
|
|
nav![
|
|
C!["pagination"],
|
|
a![
|
|
C!["pagination-previous", "button",],
|
|
IF!(is_first => attrs!{ At::Disabled=>true }),
|
|
"<",
|
|
ev(Ev::Click, |_| Msg::PreviousPage)
|
|
],
|
|
a![
|
|
C!["pagination-next", "button", IF!(is_last => "is-static")],
|
|
IF!(is_last => attrs!{ At::Disabled=>true }),
|
|
">",
|
|
ev(Ev::Click, |_| Msg::NextPage)
|
|
],
|
|
ul![
|
|
C!["pagination-list"],
|
|
li![format!("{} - {} of {}", start, start + count, total)],
|
|
],
|
|
]
|
|
}
|
|
|
|
/// Minimal view of an email address: optional display name and optional bare
/// address. Implemented for the generated GraphQL address types via
/// `implement_email!` below.
trait Email {
    fn name(&self) -> Option<&str>;
    fn addr(&self) -> Option<&str>;
}

// Blanket impl so `&T` works wherever `Email` is expected (e.g. items
// produced by iterating a slice of addresses). Tail expressions replace the
// non-idiomatic trailing `return x;` statements.
impl<T: Email> Email for &'_ T {
    fn name(&self) -> Option<&str> {
        (*self).name()
    }
    fn addr(&self) -> Option<&str> {
        (*self).addr()
    }
}
|
|
|
|
// Implements `Email` for any generated GraphQL address type that has
// `name: Option<String>` and `addr: Option<String>` fields.
macro_rules! implement_email {
    ( $t:ty ) => {
        impl Email for $t {
            fn name(&self) -> Option<&str> {
                self.name.as_deref()
            }
            fn addr(&self) -> Option<&str> {
                self.addr.as_deref()
            }
        }
    };
}
implement_email!(ShowThreadQueryThreadMessagesTo);
implement_email!(ShowThreadQueryThreadMessagesCc);
implement_email!(ShowThreadQueryThreadMessagesFrom);
|
|
|
|
fn view_address(email: impl Email) -> Node<Msg> {
|
|
span![
|
|
C!["tag", "is-black"],
|
|
email.addr().as_ref().map(|a| attrs! {At::Title=>a}),
|
|
email
|
|
.name()
|
|
.as_ref()
|
|
.unwrap_or(&email.addr().unwrap_or("(UNKNOWN)"))
|
|
]
|
|
}
|
|
|
|
fn view_addresses(addrs: &[impl Email]) -> Vec<Node<Msg>> {
|
|
addrs.into_iter().map(view_address).collect::<Vec<_>>()
|
|
}
|
|
|
|
// Render a GraphQL thread: subject heading plus each message's headers and
// pre-rendered body.
fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
    // TODO(wathiede): show per-message subject if it changes significantly from top-level subject
    set_title(&thread.subject);
    let messages = thread.messages.iter().map(|msg| {
        div![
            C!["message"],
            /* TODO(wathiede): collect all the tags and show them here. */
            /* TODO(wathiede): collect all the attachments from all the subparts */
            // Header fields are optional; absent ones render nothing.
            msg.from
                .as_ref()
                .map(|from| div![C!["header"], "From: ", view_address(&from)]),
            msg.timestamp
                .map(|ts| div![C!["header"], "Date: ", human_age(ts)]),
            IF!(!msg.to.is_empty() => div![C!["header"], "To: ", view_addresses(&msg.to)]),
            IF!(!msg.cc.is_empty() => div![C!["header"], "CC: ", view_addresses(&msg.cc)]),
            div![
                C!["body"],
                // The body arrives as one of three GraphQL union variants.
                match &msg.body {
                    ShowThreadQueryThreadMessagesBody::UnhandledContentType(
                        ShowThreadQueryThreadMessagesBodyOnUnhandledContentType { contents },
                    ) => pre![C!["error"], contents],
                    ShowThreadQueryThreadMessagesBody::PlainText(
                        ShowThreadQueryThreadMessagesBodyOnPlainText {
                            contents,
                            content_tree,
                        },
                    ) => div![C!["view-part-text-plain"], contents, pre![content_tree]],
                    ShowThreadQueryThreadMessagesBody::Html(
                        ShowThreadQueryThreadMessagesBodyOnHtml {
                            contents,
                            content_tree,
                        },
                    ) => div![
                        C!["view-part-text-html"],
                        // NOTE(review): HTML is injected as-is via raw![] —
                        // confirm the server sanitizes it before serving.
                        raw![contents],
                        pre![content_tree]
                    ],
                }
            ],
        ]
    });
    div![
        C!["thread"],
        p![C!["is-size-4"], &thread.subject],
        messages,
        /* TODO(wathiede): plumb in orignal id
        a![
            attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
            "Original"
        ],
        */
    ]
}
|
|
|
|
fn view_thread_legacy(thread_set: &ThreadSet) -> Node<Msg> {
|
|
assert_eq!(thread_set.0.len(), 1);
|
|
let thread = &thread_set.0[0];
|
|
assert_eq!(thread.0.len(), 1);
|
|
let thread_node = &thread.0[0];
|
|
let subject = first_subject(&thread_node).unwrap_or("<No subject>".to_string());
|
|
set_title(&subject);
|
|
div![
|
|
C!["container"],
|
|
h1![C!["title"], subject],
|
|
view_message(&thread_node),
|
|
a![
|
|
attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
|
|
"Original"
|
|
],
|
|
/*
|
|
div![
|
|
C!["debug"],
|
|
"Add zippy for debug dump",
|
|
view_debug_thread_set(thread_set)
|
|
] /* pre![format!("Thread: {:#?}", thread_set).replace(" ", " ")] */
|
|
*/
|
|
]
|
|
}
|
|
|
|
fn view_debug_thread_set(thread_set: &ThreadSet) -> Node<Msg> {
|
|
ul![thread_set
|
|
.0
|
|
.iter()
|
|
.enumerate()
|
|
.map(|(i, t)| { li!["t", i, ": ", view_debug_thread(t),] })]
|
|
}
|
|
fn view_debug_thread(thread: &Thread) -> Node<Msg> {
|
|
ul![thread
|
|
.0
|
|
.iter()
|
|
.enumerate()
|
|
.map(|(i, tn)| { li!["tn", i, ": ", view_debug_thread_node(tn),] })]
|
|
}
|
|
|
|
fn view_debug_thread_node(thread_node: &ThreadNode) -> Node<Msg> {
|
|
ul![
|
|
IF!(thread_node.0.is_some()=>li!["tn id:", &thread_node.0.as_ref().unwrap().id]),
|
|
thread_node.1.iter().enumerate().map(|(i, tn)| li![
|
|
"tn",
|
|
i,
|
|
": ",
|
|
view_debug_thread_node(tn)
|
|
])
|
|
]
|
|
}
|
|
|
|
// Top/bottom navigation bar: refresh button, canned "Unread"/"All" searches,
// and the live search box.
fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
    let is_loading = refresh_request == &RefreshingState::Loading;
    // NOTE(review): this logs the refresh error as a side effect of
    // rendering, so it repeats on every view pass while the error persists.
    let is_error = if let RefreshingState::Error(err) = refresh_request {
        error!("Failed to refresh: {err:?}");
        true
    } else {
        false
    };
    let query = Url::decode_uri_component(query).unwrap_or("".to_string());
    nav![
        C!["navbar"],
        attrs! {At::Role=>"navigation"},
        div![
            C!["navbar-start"],
            // Refresh button: spins while loading, turns red on error.
            a![
                C!["navbar-item", "button", IF![is_error => "is-danger"]],
                span![i![C![
                    "fa-solid",
                    "fa-arrow-rotate-right",
                    "refresh",
                    IF![is_loading => "loading"],
                ]]],
                ev(Ev::Click, |_| Msg::RefreshStart),
            ],
            a![
                C!["navbar-item", "button"],
                attrs! {
                    At::Href => urls::search("is:unread", 0)
                },
                "Unread",
            ],
            a![
                C!["navbar-item", "button"],
                attrs! {
                    At::Href => urls::search("", 0)
                },
                "All",
            ],
            // Live search box: every keystroke issues a search; an emptied
            // box searches for "*" (match everything).
            input![
                C!["navbar-item", "input"],
                attrs! {
                    At::Placeholder => "Search";
                    At::AutoFocus => true.as_at_value();
                    At::Value => query,
                },
                input_ev(Ev::Input, |q| Msg::SearchRequest {
                    query: Url::encode_uri_component(if q.is_empty() {
                        "*".to_string()
                    } else {
                        q
                    }),
                    page: 0,
                    results_per_page: SEARCH_RESULTS_PER_PAGE,
                }),
                // Resend search on enter.
                keyboard_ev(Ev::KeyUp, move |e| if e.key_code() == 0x0d {
                    Msg::SearchRequest {
                        query: Url::encode_uri_component(query),
                        page: 0,
                        results_per_page: SEARCH_RESULTS_PER_PAGE,
                    }
                } else {
                    Msg::Noop
                }),
            ]
        ]
    ]
}
|
|
|
|
fn view_footer(render_time_ms: u128) -> Node<Msg> {
|
|
footer![
|
|
C!["footer"],
|
|
div![
|
|
C!["content", "has-text-right", "is-size-7"],
|
|
format!("Render time {} ms", render_time_ms)
|
|
]
|
|
]
|
|
}
|
|
|
|
// Desktop layout: tag sidebar on the left; header, content, header stacked
// on the right.
fn view_desktop(model: &Model) -> Node<Msg> {
    // Do two queries, one without `unread` so it loads fast, then a second with unread.
    let content = match &model.context {
        Context::None => div![h1!["Loading"]],
        Context::Thread(thread_set) => view_thread_legacy(thread_set),
        Context::ThreadResult(thread) => view_thread(thread),
        Context::Search(search_results) => view_search_results_legacy(&model.query, search_results),
        Context::SearchResult {
            query,
            results,
            count,
            pager,
        } => view_search_results(&query, results.as_slice(), *count, pager),
    };
    div![
        C!["desktop-main-content"],
        // Sidebar listing every tag, styled with the server-provided colors.
        aside![
            C!["tags-menu", "menu"],
            p![C!["menu-label"], "Tags"],
            ul![
                C!["menu-list"],
                model.tags.as_ref().map(|tags| tags.iter().map(|t| li![a![
                    attrs! {
                        At::Href => urls::search(&format!("tag:{}", t.name), 0)
                    },
                    style! {
                        St::BackgroundColor => t.bg_color,
                        St::Color => t.fg_color,
                    },
                    &t.name
                ]]))
            ]
        ],
        // Header is rendered above and below the content for easy access.
        div![
            view_header(&model.query, &model.refreshing_state),
            content,
            view_header(&model.query, &model.refreshing_state),
        ]
    ]
}
|
|
|
|
// Mobile layout: no tag sidebar; header, content, header stacked vertically,
// with the mobile-specific search-result renderers.
fn view_mobile(model: &Model) -> Node<Msg> {
    let content = match &model.context {
        Context::None => div![h1!["Loading"]],
        Context::Thread(thread_set) => view_thread_legacy(thread_set),
        Context::ThreadResult(thread) => view_thread(thread),
        Context::Search(search_results) => {
            view_mobile_search_results_legacy(&model.query, search_results)
        }
        Context::SearchResult {
            query,
            results,
            count,
            pager,
        } => view_mobile_search_results(&query, results.as_slice(), *count, pager),
    };
    div![
        view_header(&model.query, &model.refreshing_state),
        content,
        view_header(&model.query, &model.refreshing_state),
    ]
}
|
|
|
|
// `view` describes what to display.
//
// Chooses the mobile or desktop layout via a CSS media query and times how
// long building the vdom takes, for display in the footer.
fn view(model: &Model) -> Node<Msg> {
    info!("refreshing {:?}", model.refreshing_state);
    let is_mobile = seed::window()
        .match_media("(max-width: 768px)")
        .expect("failed media query")
        .map(|mql| mql.matches())
        .unwrap_or(false);

    let start = Instant::now();
    info!("view called");
    div![
        if is_mobile {
            C!["mobile"]
        } else {
            C!["desktop"]
        },
        if is_mobile {
            view_mobile(model)
        } else {
            view_desktop(model)
        },
        // Note: this measures vdom construction up to this point, not the
        // actual DOM patch/render time.
        view_footer(start.elapsed().as_millis())
    ]
}
|
|
|
|
// ------ ------
|
|
// Start
|
|
// ------ ------
|
|
|
|
// (This function is invoked by `init` function in `index.html`.)
|
|
#[wasm_bindgen(start)]
|
|
pub fn start() {
|
|
// This provides better error messages in debug mode.
|
|
// It's disabled in release mode so it doesn't bloat up the file size.
|
|
#[cfg(debug_assertions)]
|
|
console_error_panic_hook::set_once();
|
|
|
|
let lvl = Level::Info;
|
|
console_log::init_with_level(lvl).expect("failed to initialize console logging");
|
|
// Mount the `app` to the element with the `id` "app".
|
|
App::start("app", init, update, view);
|
|
}
|