web: refactor code into separate modules
This commit is contained in:
parent cfe1446668
commit 0a7cdefda3
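
This change splits the previously monolithic web crate into focused modules: api (REST URL builders and fetch wrappers), consts, model, state (Elm-style init/update plus the Model and Msg types), and view. The lib.rs diff is suppressed further down, so the glue code is not shown here; as a reading aid, a minimal hypothetical sketch of how a seed app typically wires such modules together (the module list and function names mirror this commit, but the App::start call is an assumption, not the actual lib.rs):

use seed::prelude::*;

mod api;
mod consts;
mod graphql; // referenced by state.rs and view.rs, not part of this diff
mod model;
mod state;
mod view;

// Hypothetical entry point: mount the app on the element with id "app" and
// hand seed the init/update/view functions exported by the new modules.
#[wasm_bindgen(start)]
pub fn start() {
    App::start("app", state::init, state::update, view::view);
}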
86 web/src/api.rs (Normal file)
@@ -0,0 +1,86 @@
use log::info;
use notmuch::ThreadSet;
use seed::{prelude::*, Url};
use serde::de::Deserialize;

const BASE_URL: &str = "/api";
pub fn refresh() -> String {
    format!("{BASE_URL}/refresh")
}
pub fn search(query: &str, page: usize, results_per_page: usize) -> String {
    let query = Url::encode_uri_component(query);
    format!("{BASE_URL}/search/{query}?page={page}&results_per_page={results_per_page}")
}
pub fn show(tid: &str) -> String {
    format!("{BASE_URL}/show/{tid}")
}
pub fn show_pretty(tid: &str) -> String {
    format!("{BASE_URL}/show/{tid}/pretty")
}
pub fn original(message_id: &str) -> String {
    format!("{BASE_URL}/original/{message_id}")
}
pub mod urls {
    use seed::Url;
    pub fn search(query: &str, page: usize) -> Url {
        let query = Url::encode_uri_component(query);
        if page > 0 {
            Url::new().set_hash_path(["s", &query, &format!("p{page}")])
        } else {
            Url::new().set_hash_path(["s", &query])
        }
    }
    pub fn thread(tid: &str) -> Url {
        Url::new().set_hash_path(["t", tid])
    }
}

pub async fn search_request(
    query: &str,
    page: usize,
    results_per_page: usize,
) -> fetch::Result<shared::SearchResult> {
    Request::new(search(query, page, results_per_page))
        .method(Method::Get)
        .fetch()
        .await?
        .check_status()?
        .json()
        .await
}

pub async fn refresh_request() -> fetch::Result<()> {
    let t = Request::new(refresh())
        .method(Method::Get)
        .fetch()
        .await?
        .check_status()?
        .text()
        .await?;
    info!("refresh {t}");
    Ok(())
}

pub async fn show_request(tid: &str) -> fetch::Result<ThreadSet> {
    let b = Request::new(show(tid))
        .method(Method::Get)
        .fetch()
        .await?
        .check_status()?
        .bytes()
        .await?;
    let mut deserializer = serde_json::Deserializer::from_slice(&b);
    deserializer.disable_recursion_limit();
    Ok(ThreadSet::deserialize(&mut deserializer)
        .map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
}

pub async fn show_pretty_request(tid: &str) -> fetch::Result<ThreadSet> {
    Request::new(show_pretty(tid))
        .method(Method::Get)
        .fetch()
        .await?
        .check_status()?
        .json()
        .await
}
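
The functions above are plain URL builders for the REST endpoints, while the *_request functions wrap them with seed's fetch API. As a reading aid, a hypothetical comment block showing the strings the builders return (values follow directly from the format! calls above):

// search("unread", 0, 20)        -> "/api/search/unread?page=0&results_per_page=20"
// show("t123")                   -> "/api/show/t123"
// show_pretty("t123")            -> "/api/show/t123/pretty"
// original("abc@example.com")    -> "/api/original/abc@example.com"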
2 web/src/consts.rs (Normal file)
@@ -0,0 +1,2 @@
pub const SEARCH_RESULTS_PER_PAGE: usize = 20;
pub const USE_GRAPHQL: bool = true;
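
USE_GRAPHQL acts as a migration switch: when it is true, state.rs (below) turns URL changes into the GraphQL-backed FrontPageRequest and ShowThreadRequest messages; when false, it falls back to the legacy JSON endpoints in api.rs via SearchRequest and ShowPrettyRequest. SEARCH_RESULTS_PER_PAGE is the page size used by both paths.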
1363 web/src/lib.rs
File diff suppressed because it is too large.
1 web/src/model.rs (Normal file)
@@ -0,0 +1 @@
406 web/src/state.rs (Normal file)
@@ -0,0 +1,406 @@
use graphql_client::GraphQLQuery;
use log::{debug, info};
use notmuch::ThreadSet;
use seed::{prelude::*, *};
use thiserror::Error;

use crate::{
    api,
    api::urls,
    consts::{SEARCH_RESULTS_PER_PAGE, USE_GRAPHQL},
    graphql,
    graphql::{front_page_query::*, send_graphql, show_thread_query::*},
};

// `init` describes what should happen when your app starts.
pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
    if url.hash().is_none() {
        orders.request_url(urls::search("is:unread", 0));
    } else {
        orders.notify(subs::UrlRequested::new(url));
    };
    orders.subscribe(on_url_changed);

    Model {
        context: Context::None,
        query: "".to_string(),
        refreshing_state: RefreshingState::None,
        ui_error: UIError::NoError,
        tags: None,
    }
}

fn on_url_changed(uc: subs::UrlChanged) -> Msg {
    let mut url = uc.0;
    info!(
        "url changed '{}', history {}",
        url,
        history().length().unwrap_or(0)
    );
    let hpp = url.remaining_hash_path_parts();
    match hpp.as_slice() {
        ["t", tid] => {
            if USE_GRAPHQL {
                Msg::ShowThreadRequest {
                    thread_id: tid.to_string(),
                }
            } else {
                Msg::ShowPrettyRequest(tid.to_string())
            }
        }
        ["s", query] => {
            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
            if USE_GRAPHQL {
                Msg::FrontPageRequest {
                    query,
                    after: None,
                    before: None,
                    first: None,
                    last: None,
                }
            } else {
                Msg::SearchRequest {
                    query,
                    page: 0,
                    results_per_page: SEARCH_RESULTS_PER_PAGE,
                }
            }
        }
        ["s", query, page] => {
            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
            let page = page[1..].parse().unwrap_or(0);
            if USE_GRAPHQL {
                Msg::FrontPageRequest {
                    query,
                    after: Some(page.to_string()),
                    before: None,
                    first: None,
                    last: None,
                }
            } else {
                Msg::SearchRequest {
                    query,
                    page,
                    results_per_page: SEARCH_RESULTS_PER_PAGE,
                }
            }
        }
        p => {
            if !p.is_empty() {
                info!("Unhandled path '{p:?}'");
            }
            if USE_GRAPHQL {
                Msg::FrontPageRequest {
                    query: "".to_string(),
                    after: None,
                    before: None,
                    first: None,
                    last: None,
                }
            } else {
                Msg::SearchRequest {
                    query: "".to_string(),
                    page: 0,
                    results_per_page: SEARCH_RESULTS_PER_PAGE,
                }
            }
        }
    }
}

// `update` describes how to handle each `Msg`.
pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
    match msg {
        Msg::Noop => {}
        Msg::RefreshStart => {
            model.refreshing_state = RefreshingState::Loading;
            orders.perform_cmd(async move { Msg::RefreshDone(api::refresh_request().await.err()) });
        }
        Msg::RefreshDone(err) => {
            model.refreshing_state = if let Some(err) = err {
                RefreshingState::Error(format!("{:?}", err))
            } else {
                RefreshingState::None
            };
            orders.perform_cmd(async move { Msg::Reload });
        }
        Msg::Reload => {
            orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
        }

        Msg::SearchRequest {
            query,
            page,
            results_per_page,
        } => {
            info!("searching for '{query}' pg {page} # / pg {results_per_page}");
            model.query = query.clone();
            orders.skip().perform_cmd(async move {
                Msg::SearchResult(api::search_request(&query, page, results_per_page).await)
            });
        }
        Msg::SearchResult(Ok(response_data)) => {
            debug!("fetch ok {:#?}", response_data);
            model.context = Context::Search(response_data);
        }
        Msg::SearchResult(Err(fetch_error)) => {
            error!("fetch failed {:?}", fetch_error);
        }

        Msg::ShowRequest(tid) => {
            orders
                .skip()
                .perform_cmd(async move { Msg::ShowResult(api::show_request(&tid).await) });
        }
        Msg::ShowResult(Ok(response_data)) => {
            debug!("fetch ok {:#?}", response_data);
            model.context = Context::Thread(response_data);
        }
        Msg::ShowResult(Err(fetch_error)) => {
            error!("fetch failed {:?}", fetch_error);
        }

        Msg::ShowPrettyRequest(tid) => {
            orders.skip().perform_cmd(async move {
                Msg::ShowPrettyResult(api::show_pretty_request(&tid).await)
            });
        }
        Msg::ShowPrettyResult(Ok(response_data)) => {
            debug!("fetch ok {:#?}", response_data);
            model.context = Context::Thread(response_data);
        }
        Msg::ShowPrettyResult(Err(fetch_error)) => {
            error!("fetch failed {:?}", fetch_error);
        }
        Msg::NextPage => {
            match &model.context {
                Context::Search(sr) => {
                    orders.request_url(urls::search(&sr.query, sr.page + 1));
                }
                Context::SearchResult { query, pager, .. } => {
                    let query = query.to_string();
                    let after = pager.end_cursor.clone();
                    orders.perform_cmd(async move {
                        Msg::FrontPageRequest {
                            query,
                            after,
                            before: None,
                            first: Some(SEARCH_RESULTS_PER_PAGE as i64),
                            last: None,
                        }
                    });
                }
                Context::Thread(_) => (),       // do nothing (yet?)
                Context::ThreadResult(_) => (), // do nothing (yet?)
                Context::None => (),            // do nothing (yet?)
            };
        }
        Msg::PreviousPage => {
            match &model.context {
                Context::Search(sr) => {
                    orders.request_url(urls::search(&sr.query, sr.page.saturating_sub(1)));
                }
                Context::SearchResult { query, pager, .. } => {
                    let query = query.to_string();
                    let before = pager.start_cursor.clone();
                    orders.perform_cmd(async move {
                        Msg::FrontPageRequest {
                            query,
                            after: None,
                            before,
                            first: None,
                            last: Some(SEARCH_RESULTS_PER_PAGE as i64),
                        }
                    });
                }

                Context::Thread(_) => (),       // do nothing (yet?)
                Context::ThreadResult(_) => (), // do nothing (yet?)
                Context::None => (),            // do nothing (yet?)
            };
        }

        Msg::UpdateQuery(query) => model.query = query,
        Msg::SearchQuery(query) => {
            orders.request_url(urls::search(&query, 0));
        }

        Msg::FrontPageRequest {
            query,
            after,
            before,
            first,
            last,
        } => {
            info!("making FrontPageRequest: {query} after:{after:?} before:{before:?} first:{first:?} last:{last:?}");
            model.query = query.clone();
            orders.skip().perform_cmd(async move {
                Msg::FrontPageResult(
                    send_graphql(graphql::FrontPageQuery::build_query(
                        graphql::front_page_query::Variables {
                            query,
                            after,
                            before,
                            first,
                            last,
                        },
                    ))
                    .await,
                )
            });
        }
        Msg::FrontPageResult(Err(e)) => error!("error FrontPageResult: {e:?}"),
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: None,
            errors: None,
            ..
        })) => {
            error!("FrontPageResult no data or errors, should not happen");
        }
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: None,
            errors: Some(e),
            ..
        })) => {
            error!("FrontPageResult error: {e:?}");
        }
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            model.tags = Some(
                data.tags
                    .into_iter()
                    .map(|t| Tag {
                        name: t.name,
                        bg_color: t.bg_color,
                        fg_color: t.fg_color,
                        unread: t.unread,
                    })
                    .collect(),
            );
            model.context = Context::SearchResult {
                query: model.query.clone(),
                results: data.search.nodes,
                count: data.count as usize,
                pager: data.search.page_info,
            };
        }

        Msg::ShowThreadRequest { thread_id } => {
            orders.skip().perform_cmd(async move {
                Msg::ShowThreadResult(
                    send_graphql(graphql::ShowThreadQuery::build_query(
                        graphql::show_thread_query::Variables { thread_id },
                    ))
                    .await,
                )
            });
        }
        Msg::ShowThreadResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            model.tags = Some(
                data.tags
                    .into_iter()
                    .map(|t| Tag {
                        name: t.name,
                        bg_color: t.bg_color,
                        fg_color: t.fg_color,
                        unread: t.unread,
                    })
                    .collect(),
            );
            model.context = Context::ThreadResult(data.thread);
        }
        Msg::ShowThreadResult(bad) => {
            error!("show_thread_query error: {bad:?}");
        }
    }
}
// `Model` describes our app state.
pub struct Model {
    pub query: String,
    pub context: Context,
    pub refreshing_state: RefreshingState,
    pub ui_error: UIError,
    pub tags: Option<Vec<Tag>>,
}

#[derive(Error, Debug)]
pub enum UIError {
    #[error("No error, this should never be presented to user")]
    NoError,
    #[error("failed to fetch {0}: {1:?}")]
    FetchError(&'static str, FetchError),
    #[error("{0} error decoding: {1:?}")]
    FetchDecodeError(&'static str, Vec<graphql_client::Error>),
    #[error("no data or errors for {0}")]
    NoData(&'static str),
}

pub enum Context {
    None,
    Search(shared::SearchResult),
    SearchResult {
        query: String,
        results: Vec<FrontPageQuerySearchNodes>,
        count: usize,
        pager: FrontPageQuerySearchPageInfo,
    },
    Thread(ThreadSet),
    ThreadResult(ShowThreadQueryThread),
}

pub struct Tag {
    pub name: String,
    pub bg_color: String,
    pub fg_color: String,
    pub unread: i64,
}

#[derive(Debug, PartialEq)]
pub enum RefreshingState {
    None,
    Loading,
    Error(String),
}
// `Msg` describes the different events you can modify state with.
pub enum Msg {
    Noop,
    // Tell the client to refresh its state
    Reload,
    // Tell the server to update state
    RefreshStart,
    RefreshDone(Option<FetchError>),
    SearchRequest {
        query: String,
        page: usize,
        results_per_page: usize,
    },
    SearchResult(fetch::Result<shared::SearchResult>),
    ShowRequest(String),
    ShowResult(fetch::Result<ThreadSet>),
    ShowPrettyRequest(String),
    ShowPrettyResult(fetch::Result<ThreadSet>),
    NextPage,
    PreviousPage,
    UpdateQuery(String),
    SearchQuery(String),

    FrontPageRequest {
        query: String,
        after: Option<String>,
        before: Option<String>,
        first: Option<i64>,
        last: Option<i64>,
    },
    FrontPageResult(
        fetch::Result<graphql_client::Response<graphql::front_page_query::ResponseData>>,
    ),
    ShowThreadRequest {
        thread_id: String,
    },
    ShowThreadResult(
        fetch::Result<graphql_client::Response<graphql::show_thread_query::ResponseData>>,
    ),
}
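
Two notes on the state module above. First, the hash routes are produced by api::urls and consumed by on_url_changed: urls::search("is:unread", 2), for example, yields the hash path ["s", "is%3Aunread", "p2"], which the ["s", query, page] arm decodes and whose page number it parses from page[1..]. Second, the graphql module (send_graphql and the generated query types) is not part of this diff; below is a minimal hypothetical sketch of what send_graphql presumably looks like, assuming a single POST endpoint at /api/graphql and seed's fetch API:

use seed::prelude::*;

// Hypothetical: POST a prepared graphql_client query body and decode the
// typed response; the real helper lives in the graphql module omitted here.
pub async fn send_graphql<V, R>(
    body: graphql_client::QueryBody<V>,
) -> fetch::Result<graphql_client::Response<R>>
where
    V: serde::Serialize,
    R: serde::de::DeserializeOwned + 'static,
{
    Request::new("/api/graphql") // assumed endpoint path
        .method(Method::Post)
        .json(&body)?
        .fetch()
        .await?
        .check_status()?
        .json()
        .await
}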
848 web/src/view.rs (Normal file)
@@ -0,0 +1,848 @@
use std::{
    collections::hash_map::DefaultHasher,
    hash::{Hash, Hasher},
};

use chrono::{DateTime, Datelike, Duration, Local, Utc};
use itertools::Itertools;
use log::info;
use notmuch::{Content, Part, ThreadNode, ThreadSet};
use seed::{prelude::*, *};
use seed_hooks::{state_access::CloneState, topo, use_state};
use wasm_timer::Instant;

use crate::{
    api,
    api::urls,
    consts::{SEARCH_RESULTS_PER_PAGE, USE_GRAPHQL},
    graphql::{front_page_query::*, show_thread_query::*},
    state::{Context, Model, Msg, RefreshingState, Tag},
};

fn view_message(thread: &ThreadNode) -> Node<Msg> {
    let message = thread.0.as_ref().expect("ThreadNode missing Message");
    let children = &thread.1;
    div![
        C!["message"],
        /* TODO(wathiede): collect all the tags and show them here. */
        /* TODO(wathiede): collect all the attachments from all the subparts */
        div![C!["header"], "From: ", &message.headers.from],
        div![C!["header"], "Date: ", &message.headers.date],
        div![C!["header"], "To: ", &message.headers.to],
        div![
            C!["body"],
            match &message.body {
                Some(body) => view_body(body.as_slice()),
                None => div!["<no body>"],
            },
        ],
        children.iter().map(view_message)
    ]
}

fn view_body(body: &[Part]) -> Node<Msg> {
    div![body.iter().map(view_part)]
}

fn view_text_plain(content: &Option<Content>) -> Node<Msg> {
    match &content {
        Some(Content::String(content)) => p![C!["view-part-text-plain"], content],
        _ => div![
            C!["error"],
            format!("Unhandled content enum for text/plain"),
        ],
    }
}

fn view_part(part: &Part) -> Node<Msg> {
    match part.content_type.as_str() {
        "text/plain" => view_text_plain(&part.content),
        "text/html" => {
            if let Some(Content::String(html)) = &part.content {
                let inliner = css_inline::CSSInliner::options()
                    .load_remote_stylesheets(false)
                    .remove_style_tags(true)
                    .build();
                let inlined = inliner.inline(html).expect("failed to inline CSS");

                return div![C!["view-part-text-html"], raw![&inlined]];
            } else {
                div![
                    C!["error"],
                    format!("Unhandled content enum for multipart/mixed"),
                ]
            }
        }

        // https://en.wikipedia.org/wiki/MIME#alternative
        // RFC1341 states: In general, user agents that compose multipart/alternative entities
        // should place the body parts in increasing order of preference, that is, with the
        // preferred format last.
        "multipart/alternative" => {
            if let Some(Content::Multipart(parts)) = &part.content {
                for part in parts.iter().rev() {
                    if part.content_type == "text/html" {
                        if let Some(Content::String(html)) = &part.content {
                            let inliner = css_inline::CSSInliner::options()
                                .load_remote_stylesheets(false)
                                .remove_style_tags(true)
                                .build();
                            let inlined = inliner.inline(html).expect("failed to inline CSS");
                            return div![Node::from_html(None, &inlined)];
                        }
                    }
                    if part.content_type == "text/plain" {
                        return view_text_plain(&part.content);
                    }
                }
                div!["No known multipart/alternative parts"]
            } else {
                div![
                    C!["error"],
                    format!("multipart/alternative with non-multipart content"),
                ]
            }
        }
        "multipart/mixed" => match &part.content {
            Some(Content::Multipart(parts)) => div![parts.iter().map(view_part)],
            _ => div![
                C!["error"],
                format!("Unhandled content enum for multipart/mixed"),
            ],
        },
        _ => div![
            C!["error"],
            format!("Unhandled content type: {}", part.content_type)
        ],
    }
}

fn first_subject(thread: &ThreadNode) -> Option<String> {
    if let Some(msg) = &thread.0 {
        return Some(msg.headers.subject.clone());
    } else {
        for tn in &thread.1 {
            if let Some(s) = first_subject(&tn) {
                return Some(s);
            }
        }
    }
    None
}
fn set_title(title: &str) {
    seed::document().set_title(&format!("lb: {}", title));
}

fn tags_chiclet(tags: &[String], is_mobile: bool) -> impl Iterator<Item = Node<Msg>> + '_ {
    tags.iter().map(move |tag| {
        let mut hasher = DefaultHasher::new();
        tag.hash(&mut hasher);
        let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
        let style = style! {St::BackgroundColor=>hex};
        let classes = C!["tag", IF!(is_mobile => "is-small")];
        let tag = tag.clone();
        a![
            attrs! {
                At::Href => urls::search(&format!("tag:{tag}"), 0)
            },
            match tag.as_str() {
                "attachment" => span![classes, style, "📎"],
                "replied" => span![classes, style, i![C!["fa-solid", "fa-reply"]]],
                _ => span![classes, style, &tag],
            },
            ev(Ev::Click, move |_| Msg::SearchRequest {
                query: format!("tag:{tag}"),
                page: 0,
                results_per_page: SEARCH_RESULTS_PER_PAGE,
            })
        ]
    })
}

fn pretty_authors(authors: &str) -> impl Iterator<Item = Node<Msg>> + '_ {
    let one_person = authors.matches(',').count() == 0;
    let authors = authors.split(',');

    Itertools::intersperse(
        authors.filter_map(move |author| {
            if one_person {
                return Some(span![
                    attrs! {
                        At::Title => author.trim()},
                    author
                ]);
            }
            author.split_whitespace().nth(0).map(|first| {
                span![
                    attrs! {
                        At::Title => author.trim()},
                    first
                ]
            })
        }),
        span![", "],
    )
}

fn human_age(timestamp: i64) -> String {
    let now = Local::now();
    let yesterday = now - Duration::days(1);
    let ts = DateTime::<Utc>::from_timestamp(timestamp, 0)
        .unwrap()
        .with_timezone(&Local);
    let age = now - ts;
    let datetime = if age < Duration::minutes(1) {
        format!("{} sec. ago", age.num_seconds())
    } else if age < Duration::hours(1) {
        format!("{} min. ago", age.num_minutes())
    } else if ts.date_naive() == now.date_naive() {
        ts.format("Today %H:%M").to_string()
    } else if ts.date_naive() == yesterday.date_naive() {
        ts.format("Yest. %H:%M").to_string()
    } else if age < Duration::weeks(1) {
        ts.format("%a %H:%M").to_string()
    } else if ts.year() == now.year() {
        ts.format("%b %d %H:%M").to_string()
    } else {
        ts.format("%b %d, %Y %H:%M").to_string()
    };
    datetime
}

fn view_mobile_search_results(
    query: &str,
    results: &[FrontPageQuerySearchNodes],
    count: usize,
    pager: &FrontPageQuerySearchPageInfo,
) -> Node<Msg> {
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let rows = results.iter().map(|r| {
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        a![
            C!["has-text-light"],
            attrs! {
                At::Href => urls::thread(&tid)
            },
            div![
                C!["row"],
                div![C!["subject"], &r.subject],
                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
                div![
                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
                    span![C!["is-size-7", "float-right", "date"], datetime]
                ]
            ]
        ]
    });
    div![
        C!["search-results"],
        view_search_pager(count, pager),
        rows,
        view_search_pager(count, pager),
    ]
}

fn view_mobile_search_results_legacy(
    query: &str,
    search_results: &shared::SearchResult,
) -> Node<Msg> {
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let summaries = &search_results.summary.0;
    let rows = summaries.iter().map(|r| {
        /*
        let tid = r.thread.clone();
        tr![
            td![
                C!["from"],
                pretty_authors(&r.authors),
                IF!(r.total>1 => small![" ", r.total.to_string()]),
            ],
            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
            td![C!["date"], &r.date_relative],
            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
        ]
        */
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        a![
            C!["has-text-light"],
            attrs! {
                At::Href => urls::thread(&tid)
            },
            div![
                C!["row"],
                div![C!["subject"], &r.subject],
                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
                div![
                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
                    span![C!["is-size-7", "float-right", "date"], datetime]
                ]
            ]
        ]
    });
    let first = search_results.page * search_results.results_per_page;
    div![
        C!["search-results"],
        view_search_pager_legacy(first, summaries.len(), search_results.total),
        rows,
        view_search_pager_legacy(first, summaries.len(), search_results.total)
    ]
}

fn view_search_results(
    query: &str,
    results: &[FrontPageQuerySearchNodes],
    count: usize,
    pager: &FrontPageQuerySearchPageInfo,
) -> Node<Msg> {
    info!("pager {pager:?}");
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let rows = results.iter().map(|r| {
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        tr![
            td![
                C!["from"],
                pretty_authors(&r.authors),
                // TODO(wathiede): visualize message count if more than one message is in the
                // thread
                //IF!(r.total>1 => small![" ", r.total.to_string()]),
            ],
            td![
                C!["subject"],
                tags_chiclet(&r.tags, false),
                " ",
                a![
                    C!["has-text-light"],
                    attrs! {
                        At::Href => urls::thread(&tid)
                    },
                    &r.subject,
                ]
            ],
            td![C!["date"], datetime]
        ]
    });

    div![
        view_search_pager(count, pager),
        table![
            C![
                "table",
                "index",
                "is-fullwidth",
                "is-hoverable",
                "is-narrow",
                "is-striped",
            ],
            thead![tr![
                th![C!["from"], "From"],
                th![C!["subject"], "Subject"],
                th![C!["date"], "Date"]
            ]],
            tbody![rows]
        ],
        view_search_pager(count, pager)
    ]
}

fn view_search_results_legacy(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let summaries = &search_results.summary.0;
    let rows = summaries.iter().map(|r| {
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        tr![
            td![
                C!["from"],
                pretty_authors(&r.authors),
                IF!(r.total>1 => small![" ", r.total.to_string()]),
            ],
            td![
                C!["subject"],
                tags_chiclet(&r.tags, false),
                " ",
                a![
                    C!["has-text-light"],
                    attrs! {
                        At::Href => urls::thread(&tid)
                    },
                    &r.subject,
                ]
            ],
            td![C!["date"], datetime]
        ]
    });
    let first = search_results.page * search_results.results_per_page;
    div![
        view_search_pager_legacy(first, summaries.len(), search_results.total),
        table![
            C![
                "table",
                "index",
                "is-fullwidth",
                "is-hoverable",
                "is-narrow",
                "is-striped",
            ],
            thead![tr![
                th![C!["from"], "From"],
                th![C!["subject"], "Subject"],
                th![C!["date"], "Date"]
            ]],
            tbody![rows]
        ],
        view_search_pager_legacy(first, summaries.len(), search_results.total)
    ]
}

fn view_search_pager(count: usize, pager: &FrontPageQuerySearchPageInfo) -> Node<Msg> {
    let start = pager
        .start_cursor
        .as_ref()
        .map(|i| i.parse().unwrap_or(0))
        .unwrap_or(0);
    nav![
        C!["pagination"],
        a![
            C![
                "pagination-previous",
                "button",
                //IF!(!pager.has_previous_page => "is-static"),
            ],
            IF!(!pager.has_previous_page => attrs!{ At::Disabled=>true }),
            "<",
            IF!(pager.has_previous_page => ev(Ev::Click, |_| Msg::PreviousPage)),
        ],
        a![
            C![
                "pagination-next",
                "button",
                //IF!(!pager.has_next_page => "is-static")
            ],
            IF!(!pager.has_next_page => attrs!{ At::Disabled=>true }),
            ">",
            IF!(pager.has_next_page => ev(Ev::Click, |_| Msg::NextPage))
        ],
        ul![
            C!["pagination-list"],
            li![format!(
                "{} - {} of {}",
                start,
                count.min(start + SEARCH_RESULTS_PER_PAGE),
                count
            )],
        ],
    ]
}

fn view_search_pager_legacy(start: usize, count: usize, total: usize) -> Node<Msg> {
    let is_first = start <= 0;
    let is_last = (start + SEARCH_RESULTS_PER_PAGE) >= total;
    nav![
        C!["pagination"],
        a![
            C!["pagination-previous", "button",],
            IF!(is_first => attrs!{ At::Disabled=>true }),
            "<",
            ev(Ev::Click, |_| Msg::PreviousPage)
        ],
        a![
            C!["pagination-next", "button", IF!(is_last => "is-static")],
            IF!(is_last => attrs!{ At::Disabled=>true }),
            ">",
            ev(Ev::Click, |_| Msg::NextPage)
        ],
        ul![
            C!["pagination-list"],
            li![format!("{} - {} of {}", start, start + count, total)],
        ],
    ]
}

trait Email {
    fn name(&self) -> Option<&str>;
    fn addr(&self) -> Option<&str>;
}

impl<T: Email> Email for &'_ T {
    fn name(&self) -> Option<&str> {
        return (*self).name();
    }
    fn addr(&self) -> Option<&str> {
        return (*self).addr();
    }
}

macro_rules! implement_email {
    ( $($t:ty),+ ) => {$(
        impl Email for $t {
            fn name(&self) -> Option<&str> {
                self.name.as_deref()
            }
            fn addr(&self) -> Option<&str> {
                self.addr.as_deref()
            }
        }
    )+};
}

implement_email!(
    ShowThreadQueryThreadMessagesTo,
    ShowThreadQueryThreadMessagesCc,
    ShowThreadQueryThreadMessagesFrom
);

fn view_address(email: impl Email) -> Node<Msg> {
    span![
        C!["tag", "is-black"],
        email.addr().as_ref().map(|a| attrs! {At::Title=>a}),
        email
            .name()
            .as_ref()
            .unwrap_or(&email.addr().unwrap_or("(UNKNOWN)"))
    ]
}

fn view_addresses(addrs: &[impl Email]) -> Vec<Node<Msg>> {
    addrs.into_iter().map(view_address).collect::<Vec<_>>()
}

fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
    // TODO(wathiede): show per-message subject if it changes significantly from top-level subject
    set_title(&thread.subject);
    let messages = thread.messages.iter().map(|msg| {
        div![
            C!["message"],
            /* TODO(wathiede): collect all the tags and show them here. */
            /* TODO(wathiede): collect all the attachments from all the subparts */
            msg.from
                .as_ref()
                .map(|from| div![C!["header"], "From: ", view_address(&from)]),
            msg.timestamp
                .map(|ts| div![C!["header"], "Date: ", human_age(ts)]),
            div![
                C!["header"],
                IF!(!msg.to.is_empty() => span!["To: ", view_addresses(&msg.to)]),
                IF!(!msg.cc.is_empty() => span!["CC: ", view_addresses(&msg.cc)])
            ],
            div![
                C!["body"],
                match &msg.body {
                    ShowThreadQueryThreadMessagesBody::UnhandledContentType(
                        ShowThreadQueryThreadMessagesBodyOnUnhandledContentType { contents },
                    ) => pre![C!["error"], contents],
                    ShowThreadQueryThreadMessagesBody::PlainText(
                        ShowThreadQueryThreadMessagesBodyOnPlainText {
                            contents,
                            content_tree,
                        },
                    ) => div![C!["view-part-text-plain"], contents, pre![content_tree]],
                    ShowThreadQueryThreadMessagesBody::Html(
                        ShowThreadQueryThreadMessagesBodyOnHtml {
                            contents,
                            content_tree,
                        },
                    ) => div![
                        C!["view-part-text-html"],
                        raw![contents],
                        pre![content_tree]
                    ],
                }
            ],
        ]
    });
    div![
        C!["thread"],
        p![C!["is-size-4"], &thread.subject],
        messages,
        /* TODO(wathiede): plumb in original id
        a![
            attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
            "Original"
        ],
        */
    ]
}

fn view_thread_legacy(thread_set: &ThreadSet) -> Node<Msg> {
    assert_eq!(thread_set.0.len(), 1);
    let thread = &thread_set.0[0];
    assert_eq!(thread.0.len(), 1);
    let thread_node = &thread.0[0];
    let subject = first_subject(&thread_node).unwrap_or("<No subject>".to_string());
    set_title(&subject);
    div![
        C!["container"],
        h1![C!["title"], subject],
        view_message(&thread_node),
        a![
            attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
            "Original"
        ],
    ]
}

fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
    let is_loading = refresh_request == &RefreshingState::Loading;
    let is_error = if let RefreshingState::Error(err) = refresh_request {
        error!("Failed to refresh: {err:?}");
        true
    } else {
        false
    };
    let query = Url::decode_uri_component(query).unwrap_or("".to_string());
    nav![
        C!["navbar"],
        attrs! {At::Role=>"navigation"},
        div![
            C!["navbar-start"],
            a![
                C!["navbar-item", "button", IF![is_error => "is-danger"]],
                span![i![C![
                    "fa-solid",
                    "fa-arrow-rotate-right",
                    "refresh",
                    IF![is_loading => "loading"],
                ]]],
                ev(Ev::Click, |_| Msg::RefreshStart),
            ],
            a![
                C!["navbar-item", "button"],
                attrs! {
                    At::Href => urls::search("is:unread", 0)
                },
                "Unread",
            ],
            a![
                C!["navbar-item", "button"],
                attrs! {
                    At::Href => urls::search("", 0)
                },
                "All",
            ],
            input![
                C!["navbar-item", "input"],
                attrs! {
                    At::Placeholder => "Search";
                    At::AutoFocus => true.as_at_value();
                    At::Value => query,
                },
                input_ev(Ev::Input, |q| if USE_GRAPHQL {
                    Msg::UpdateQuery(q)
                } else {
                    Msg::SearchRequest {
                        query: Url::encode_uri_component(if q.is_empty() {
                            "*".to_string()
                        } else {
                            q
                        }),
                        page: 0,
                        results_per_page: SEARCH_RESULTS_PER_PAGE,
                    }
                }),
                // Send search on enter.
                keyboard_ev(Ev::KeyUp, move |e| if e.key_code() == 0x0d {
                    if USE_GRAPHQL {
                        Msg::SearchQuery(query)
                    } else {
                        Msg::SearchRequest {
                            query: Url::encode_uri_component(query),
                            page: 0,
                            results_per_page: SEARCH_RESULTS_PER_PAGE,
                        }
                    }
                } else {
                    Msg::Noop
                }),
            ]
        ]
    ]
}

fn view_footer(render_time_ms: u128) -> Node<Msg> {
    footer![
        C!["footer"],
        div![
            C!["content", "has-text-right", "is-size-7"],
            format!("Render time {} ms", render_time_ms)
        ]
    ]
}

#[topo::nested]
fn view_desktop(model: &Model) -> Node<Msg> {
    // Do two queries, one without `unread` so it loads fast, then a second with unread.
    let content = match &model.context {
        Context::None => div![h1!["Loading"]],
        Context::Thread(thread_set) => view_thread_legacy(thread_set),
        Context::ThreadResult(thread) => view_thread(thread),
        Context::Search(search_results) => view_search_results_legacy(&model.query, search_results),
        Context::SearchResult {
            query,
            results,
            count,
            pager,
        } => view_search_results(&query, results.as_slice(), *count, pager),
    };
    fn view_tag_li(display_name: &str, indent: usize, t: &Tag) -> Node<Msg> {
        li![a![
            attrs! {
                At::Href => urls::search(&format!("tag:{}", t.name), 0)
            },
            (0..indent).map(|_| span![C!["tag-indent"], ""]),
            i![
                C!["tag-tag", "fa-solid", "fa-tag"],
                style! {
                    //"--fa-primary-color" => t.fg_color,
                    St::Color => t.bg_color,
                },
            ],
            display_name,
            IF!(t.unread>0 => format!(" ({})", t.unread))
        ]]
    }
    fn matches(a: &[&str], b: &[&str]) -> usize {
        std::iter::zip(a.iter(), b.iter())
            .take_while(|(a, b)| a == b)
            .count()
    }
    fn view_tag_list<'a>(tags: impl Iterator<Item = &'a Tag>) -> Vec<Node<Msg>> {
        let mut lis = Vec::new();
        let mut last = Vec::new();
        for t in tags {
            let parts: Vec<_> = t.name.split('/').collect();
            let mut n = matches(&last, &parts);
            if t.name.starts_with("ZZCrap/Free") {
                info!("n: {n}, parts: {parts:?} last: {last:?}");
            }
            if n <= parts.len() - 2 && parts.len() > 1 {
                // Synthesize fake tags for proper indenting.
                for i in n..parts.len() - 1 {
                    let display_name = parts[n];
                    lis.push(view_tag_li(
                        &display_name,
                        n,
                        &Tag {
                            name: parts[..i + 1].join("/"),
                            bg_color: "#fff".to_string(),
                            fg_color: "#000".to_string(),
                            unread: 0,
                        },
                    ));
                }
                last = parts[..parts.len() - 1].to_vec();
                n = parts.len() - 1;
            }
            let display_name = parts[n];
            lis.push(view_tag_li(&display_name, n, t));
            last = parts;
        }
        lis
    }
    let unread = model
        .tags
        .as_ref()
        .map(|tags| tags.iter().filter(|t| t.unread > 0).collect())
        .unwrap_or(Vec::new());
    let tags_open = use_state(|| false);
    let force_tags_open = unread.is_empty();
    div![
        C!["desktop-main-content"],
        aside![
            C!["tags-menu", "menu"],
            IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),
            IF!(!unread.is_empty() => ul![C!["menu-list"], view_tag_list(unread.into_iter())]),
            p![
                C!["menu-label"],
                IF!(!force_tags_open =>
                    i![C![
                        "fa-solid",
                        if tags_open.get() {
                            "fa-angle-up"
                        } else {
                            "fa-angle-down"
                        }
                    ]]),
                " Tags",
                ev(Ev::Click, move |_| {
                    tags_open.set(!tags_open.get());
                })
            ],
            ul![
                C!["menu-list"],
                IF!(force_tags_open||tags_open.get() => model.tags.as_ref().map(|tags| view_tag_list(tags.iter()))),
            ]
        ],
        div![
            view_header(&model.query, &model.refreshing_state),
            content,
            view_header(&model.query, &model.refreshing_state),
        ]
    ]
}

fn view_mobile(model: &Model) -> Node<Msg> {
    let content = match &model.context {
        Context::None => div![h1!["Loading"]],
        Context::Thread(thread_set) => view_thread_legacy(thread_set),
        Context::ThreadResult(thread) => view_thread(thread),
        Context::Search(search_results) => {
            view_mobile_search_results_legacy(&model.query, search_results)
        }
        Context::SearchResult {
            query,
            results,
            count,
            pager,
        } => view_mobile_search_results(&query, results.as_slice(), *count, pager),
    };
    div![
        view_header(&model.query, &model.refreshing_state),
        content,
        view_header(&model.query, &model.refreshing_state),
    ]
}

// `view` describes what to display.
pub fn view(model: &Model) -> Node<Msg> {
    info!("refreshing {:?}", model.refreshing_state);
    let is_mobile = seed::window()
        .match_media("(max-width: 768px)")
        .expect("failed media query")
        .map(|mql| mql.matches())
        .unwrap_or(false);

    let start = Instant::now();
    info!("view called");
    div![
        if is_mobile {
            C!["mobile"]
        } else {
            C!["desktop"]
        },
        if is_mobile {
            view_mobile(model)
        } else {
            view_desktop(model)
        },
        view_footer(start.elapsed().as_millis())
    ]
}