use std::collections::HashSet;

use graphql_client::GraphQLQuery;
use log::{debug, error, info, warn};
use seed::{prelude::*, *};
use thiserror::Error;
use web_sys::HtmlElement;

use crate::{
    api,
    api::urls,
    consts::SEARCH_RESULTS_PER_PAGE,
    graphql,
    graphql::{front_page_query::*, send_graphql, show_thread_query::*},
};

/// Returns the default search query; fakes the unread query while in development.
pub fn unread_query() -> &'static str {
    let host = seed::window()
        .location()
        .host()
        .expect("failed to get host");
    if host.starts_with("6758.") {
        // Development host: substitute a tag query for the real unread search.
        return "tag:letterbox";
    }
    "is:unread"
}

// `init` describes what should happen when your app starts.
pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
    // Generates `fn bi()` with the crate's build information (build-info crate).
    build_info::build_info!(fn bi);
    let version = shared::build_version(bi);
    info!("Build Info: {}", version);
    if url.hash().is_none() {
        orders.request_url(urls::search(unread_query(), 0));
    } else {
        orders.notify(subs::UrlRequested::new(url));
    };
    orders.stream(streams::window_event(Ev::Resize, |_| Msg::OnResize));
    // TODO(wathiede): only do this while viewing the index? Or maybe add a new message that
    // forces 'notmuch new' on the server periodically?
    orders.stream(streams::interval(30_000, || Msg::RefreshStart));
    orders.subscribe(on_url_changed);
    orders.stream(streams::window_event(Ev::Scroll, |_| Msg::WindowScrolled));

    Model {
        context: Context::None,
        query: "".to_string(),
        refreshing_state: RefreshingState::None,
        tags: None,
        read_completion_ratio: 0.,
        content_el: ElRef::<HtmlElement>::default(),
        versions: Version {
            client: version,
            server: None,
        },
    }
}

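/// Maps a changed URL onto the `Msg` that loads the matching view. The hash path is matched as:
/// `#/t/<thread_id>` opens a single thread, `#/s/<query>` runs a search with default paging, and
/// `#/s/<query>/<page>` runs a search where the page segment's first character is stripped before
/// parsing (it appears to carry a single-character prefix such as `p`, e.g. `p2`; that prefix is
/// an assumption read from the parsing code below). Anything else falls back to an empty
/// front-page query.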
fn on_url_changed(uc: subs::UrlChanged) -> Msg {
    let mut url = uc.0;
    info!(
        "url changed '{}', history {}",
        url,
        history().length().unwrap_or(0)
    );
    let hpp = url.remaining_hash_path_parts();
    match hpp.as_slice() {
        ["t", tid] => Msg::ShowThreadRequest {
            thread_id: tid.to_string(),
        },
        ["s", query] => {
            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
            Msg::FrontPageRequest {
                query,
                after: None,
                before: None,
                first: None,
                last: None,
            }
        }
        ["s", query, page] => {
            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
            let page = page[1..].parse().unwrap_or(0);
            Msg::FrontPageRequest {
                query,
                after: Some(page.to_string()),
                before: None,
                first: None,
                last: None,
            }
        }
        p => {
            if !p.is_empty() {
                info!("Unhandled path '{p:?}'");
            }
            Msg::FrontPageRequest {
                query: "".to_string(),
                after: None,
                before: None,
                first: None,
                last: None,
            }
        }
    }
}

// `update` describes how to handle each `Msg`.
pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
    match msg {
        Msg::Noop => {}
        Msg::RefreshStart => {
            model.refreshing_state = RefreshingState::Loading;
            orders.perform_cmd(async move { Msg::RefreshDone(api::refresh_request().await.err()) });
        }
        Msg::RefreshDone(err) => {
            model.refreshing_state = if let Some(err) = err {
                RefreshingState::Error(format!("{:?}", err))
            } else {
                RefreshingState::None
            };
            orders.perform_cmd(async move { Msg::Refresh });
        }
        Msg::Refresh => {
            orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
        }
        Msg::Reload => {
            window()
                .location()
                .reload()
                .expect("failed to reload window");
        }
        Msg::OnResize => (),

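        // Cursor-based paging: `NextPage` requests the `SEARCH_RESULTS_PER_PAGE` results after the
        // current page's `end_cursor`, `PreviousPage` the ones before its `start_cursor`. Both are
        // no-ops outside a search-result context.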
        Msg::NextPage => {
            match &model.context {
                Context::SearchResult { query, pager, .. } => {
                    let query = query.to_string();
                    let after = pager.end_cursor.clone();
                    orders.perform_cmd(async move {
                        Msg::FrontPageRequest {
                            query,
                            after,
                            before: None,
                            first: Some(SEARCH_RESULTS_PER_PAGE as i64),
                            last: None,
                        }
                    });
                }
                Context::ThreadResult { .. } => (), // do nothing (yet?)
                Context::None => (),                // do nothing (yet?)
            };
        }
        Msg::PreviousPage => {
            match &model.context {
                Context::SearchResult { query, pager, .. } => {
                    let query = query.to_string();
                    let before = pager.start_cursor.clone();
                    orders.perform_cmd(async move {
                        Msg::FrontPageRequest {
                            query,
                            after: None,
                            before,
                            first: None,
                            last: Some(SEARCH_RESULTS_PER_PAGE as i64),
                        }
                    });
                }

                Context::ThreadResult { .. } => (), // do nothing (yet?)
                Context::None => (),                // do nothing (yet?)
            };
        }

        Msg::UpdateQuery(query) => model.query = query,
        Msg::SearchQuery(query) => {
            orders.request_url(urls::search(&query, 0));
        }

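        // The tag/read mutations below share one pattern: build the GraphQL mutation, send it,
        // log any failure, then navigate back to the current search so the result list reflects
        // the change.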
        Msg::SetUnread(query, unread) => {
            let search_url = urls::search(&model.query, 0).to_string();
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::mark_read_mutation::ResponseData>,
                    gloo_net::Error,
                > = send_graphql(graphql::MarkReadMutation::build_query(
                    graphql::mark_read_mutation::Variables {
                        query: query.clone(),
                        unread,
                    },
                ))
                .await;
                if let Err(e) = res {
                    error!("Failed to set unread to {unread} for {query}: {e}");
                }
                seed::window()
                    .location()
                    .set_href(&search_url)
                    .expect("failed to change location");
                Msg::Noop
            });
        }
        Msg::AddTag(query, tag) => {
            let search_url = urls::search(&model.query, 0).to_string();
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::add_tag_mutation::ResponseData>,
                    gloo_net::Error,
                > = send_graphql(graphql::AddTagMutation::build_query(
                    graphql::add_tag_mutation::Variables {
                        query: query.clone(),
                        tag: tag.clone(),
                    },
                ))
                .await;
                if let Err(e) = res {
                    error!("Failed to add tag {tag} to {query}: {e}");
                }
                seed::window()
                    .location()
                    .set_href(&search_url)
                    .expect("failed to change location");
                Msg::Noop
            });
        }
        Msg::RemoveTag(query, tag) => {
            let search_url = urls::search(&model.query, 0).to_string();
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::remove_tag_mutation::ResponseData>,
                    gloo_net::Error,
                > = send_graphql(graphql::RemoveTagMutation::build_query(
                    graphql::remove_tag_mutation::Variables {
                        query: query.clone(),
                        tag: tag.clone(),
                    },
                ))
                .await;
                if let Err(e) = res {
                    error!("Failed to remove tag {tag} from {query}: {e}");
                }
                // TODO: reconsider this behavior
                seed::window()
                    .location()
                    .set_href(&search_url)
                    .expect("failed to change location");
                Msg::Noop
            });
        }

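        // FrontPageRequest: run a search. When no cursors or limits are supplied, default to the
        // first `SEARCH_RESULTS_PER_PAGE` results.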
        Msg::FrontPageRequest {
            query,
            after,
            before,
            first,
            last,
        } => {
            let (after, before, first, last) = match (after.as_ref(), before.as_ref(), first, last)
            {
                // If no pagination set, set reasonable defaults
                (None, None, None, None) => {
                    (None, None, Some(SEARCH_RESULTS_PER_PAGE as i64), None)
                }
                _ => (after, before, first, last),
            };
            model.query = query.clone();
            orders.skip().perform_cmd(async move {
                Msg::FrontPageResult(
                    send_graphql(graphql::FrontPageQuery::build_query(
                        graphql::front_page_query::Variables {
                            query,
                            after,
                            before,
                            first,
                            last,
                        },
                    ))
                    .await,
                )
            });
        }
        Msg::FrontPageResult(Err(e)) => error!("error FrontPageResult: {e:?}"),
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: None,
            errors: None,
            ..
        })) => {
            error!("FrontPageResult returned neither data nor errors; this should not happen");
        }
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: None,
            errors: Some(e),
            ..
        })) => {
            error!("FrontPageResult error: {e:?}");
        }
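        // A successful front-page response replaces the tag list and the search context. If the
        // new result set contains exactly the same threads as the old one, the existing selection
        // is carried over; otherwise it is cleared.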
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            model.tags = Some(
                data.tags
                    .into_iter()
                    .map(|t| Tag {
                        name: t.name,
                        bg_color: t.bg_color,
                        unread: t.unread,
                    })
                    .collect(),
            );
            info!("pager {:#?}", data.search.page_info);
            let selected_threads = 'context: {
                if let Context::SearchResult {
                    results,
                    selected_threads,
                    ..
                } = &model.context
                {
                    let old: HashSet<_> = results.iter().map(|n| &n.thread).collect();
                    let new: HashSet<_> = data.search.nodes.iter().map(|n| &n.thread).collect();

                    if old == new {
                        break 'context selected_threads.clone();
                    }
                }
                HashSet::new()
            };
            model.context = Context::SearchResult {
                query: model.query.clone(),
                results: data.search.nodes,
                count: data.count as usize,
                pager: data.search.page_info,
                selected_threads,
            };
            orders.send_msg(Msg::UpdateServerVersion(data.version));
            // Generate signal so progress bar is reset
            orders.send_msg(Msg::WindowScrolled);
        }

        Msg::ShowThreadRequest { thread_id } => {
            orders.skip().perform_cmd(async move {
                Msg::ShowThreadResult(
                    send_graphql(graphql::ShowThreadQuery::build_query(
                        graphql::show_thread_query::Variables { thread_id },
                    ))
                    .await,
                )
            });
        }
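        // A successful thread response switches to the thread view. For email threads, messages
        // still tagged "unread" start out expanded; if none are unread, every message is expanded.
        // News posts are shown with no messages expanded.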
        Msg::ShowThreadResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            model.tags = Some(
                data.tags
                    .into_iter()
                    .map(|t| Tag {
                        name: t.name,
                        bg_color: t.bg_color,
                        unread: t.unread,
                    })
                    .collect(),
            );
            match &data.thread {
                graphql::show_thread_query::ShowThreadQueryThread::EmailThread(
                    ShowThreadQueryThreadOnEmailThread { messages, .. },
                ) => {
                    let mut open_messages: HashSet<_> = messages
                        .iter()
                        .filter(|msg| msg.tags.iter().any(|t| t == "unread"))
                        .map(|msg| msg.id.clone())
                        .collect();
                    if open_messages.is_empty() {
                        open_messages = messages.iter().map(|msg| msg.id.clone()).collect();
                    }
                    model.context = Context::ThreadResult {
                        thread: data.thread,
                        open_messages,
                    };
                }
                graphql::show_thread_query::ShowThreadQueryThread::NewsPost(..) => {
                    model.context = Context::ThreadResult {
                        thread: data.thread,
                        open_messages: HashSet::new(),
                    };
                }
            }
            orders.send_msg(Msg::UpdateServerVersion(data.version));
            // Generate signal so progress bar is reset
            orders.send_msg(Msg::WindowScrolled);
        }
        Msg::ShowThreadResult(bad) => {
            error!("show_thread_query error: {bad:#?}");
        }
        Msg::SelectionSetNone => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                *selected_threads = HashSet::new();
            }
        }
        Msg::SelectionSetAll => {
            if let Context::SearchResult {
                results,
                selected_threads,
                ..
            } = &mut model.context
            {
                *selected_threads = results.iter().map(|node| node.thread.clone()).collect();
            }
        }
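        // Bulk actions on the current selection: the selected thread IDs are joined into a single
        // space-separated query string and re-dispatched through `AddTag`/`RemoveTag`/`SetUnread`.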
        Msg::SelectionAddTag(tag) => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                let threads = selected_threads
                    .iter()
                    .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
                    .skip()
                    .perform_cmd(async move { Msg::AddTag(threads, tag) });
            }
        }
        Msg::SelectionRemoveTag(tag) => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                let threads = selected_threads
                    .iter()
                    .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
                    .skip()
                    .perform_cmd(async move { Msg::RemoveTag(threads, tag) });
            }
        }
        Msg::SelectionMarkAsRead => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                let threads = selected_threads
                    .iter()
                    .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
                    .skip()
                    .perform_cmd(async move { Msg::SetUnread(threads, false) });
            }
        }
        Msg::SelectionMarkAsUnread => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                let threads = selected_threads
                    .iter()
                    .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
                    .skip()
                    .perform_cmd(async move { Msg::SetUnread(threads, true) });
            }
        }
        Msg::SelectionAddThread(tid) => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                selected_threads.insert(tid);
            }
        }
        Msg::SelectionRemoveThread(tid) => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                selected_threads.remove(&tid);
            }
        }
        Msg::MessageCollapse(id) => {
            if let Context::ThreadResult { open_messages, .. } = &mut model.context {
                open_messages.remove(&id);
            }
        }
        Msg::MessageExpand(id) => {
            if let Context::ThreadResult { open_messages, .. } = &mut model.context {
                open_messages.insert(id);
            }
        }
        Msg::MultiMsg(msgs) => msgs.into_iter().for_each(|msg| update(msg, model, orders)),
        Msg::CopyToClipboard(text) => {
            let clipboard = seed::window()
                .navigator()
                .clipboard()
                .expect("couldn't get clipboard");
            orders.perform_cmd(async move {
                wasm_bindgen_futures::JsFuture::from(clipboard.write_text(&text))
                    .await
                    .expect("failed to copy to clipboard");
            });
        }
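        // Read-progress tracking: when the window scrolls, compute how far the content element has
        // moved past the top of the viewport as a fraction of its off-screen height
        // (ratio = -y / (height - viewport_height)). For example, a 3000px-tall element in a
        // 1000px viewport has 2000px off-screen; scrolled down 1000px (y = -1000) the ratio is 0.5.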
        Msg::WindowScrolled => {
            if let Some(el) = model.content_el.get() {
                let ih = window()
                    .inner_height()
                    .expect("window height")
                    .unchecked_into::<js_sys::Number>()
                    .value_of();

                let r = el.get_bounding_client_rect();
                if r.height() < ih {
                    // The whole content fits in the window, no scrollbar
                    orders.send_msg(Msg::SetProgress(0.));
                    return;
                }
                let end: f64 = r.height() - ih;
                if end < 0. {
                    orders.send_msg(Msg::SetProgress(0.));
                    return;
                }
                // Flip Y: `r.y()` is 0 when the top of the content reaches the top of the
                // viewport and goes negative as the page scrolls down.
                let y = -r.y();
                let ratio: f64 = (y / end).max(0.);
                debug!(
                    "WindowScrolled ih {ih} end {end} ratio {ratio:.02} {}x{} @ {},{}",
                    r.width(),
                    r.height(),
                    r.x(),
                    r.y()
                );

                orders.send_msg(Msg::SetProgress(ratio));
            } else {
                orders.send_msg(Msg::SetProgress(0.));
            }
        }
        Msg::SetProgress(ratio) => {
            model.read_completion_ratio = ratio;
        }
        Msg::UpdateServerVersion(version) => {
            if version != model.versions.client {
                warn!(
                    "Server ({}) and client ({}) version mismatch, reloading",
                    version, model.versions.client
                );
                orders.send_msg(Msg::Reload);
            }
            model.versions.server = Some(version);
        }
    }
}

// `Model` describes our app state.
pub struct Model {
    pub query: String,
    pub context: Context,
    pub refreshing_state: RefreshingState,
    pub tags: Option<Vec<Tag>>,
    pub read_completion_ratio: f64,
    pub content_el: ElRef<HtmlElement>,
    pub versions: Version,
}

#[derive(Debug)]
pub struct Version {
    pub client: String,
    pub server: Option<String>,
}

#[derive(Error, Debug)]
#[allow(dead_code)] // Remove once the UI shows errors
pub enum UIError {
    #[error("No error; this should never be presented to the user")]
    NoError,
    #[error("failed to fetch {0}: {1:?}")]
    FetchError(&'static str, gloo_net::Error),
    #[error("{0} error decoding: {1:?}")]
    FetchDecodeError(&'static str, Vec<graphql_client::Error>),
    #[error("no data or errors for {0}")]
    NoData(&'static str),
}

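// `Context` is what the main pane is currently showing: nothing, a page of search results (with
// its pager state and the set of selected thread IDs), or a single thread (with the set of
// message IDs that are currently expanded).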
pub enum Context {
    None,
    SearchResult {
        query: String,
        results: Vec<FrontPageQuerySearchNodes>,
        count: usize,
        pager: FrontPageQuerySearchPageInfo,
        selected_threads: HashSet<String>,
    },
    ThreadResult {
        thread: ShowThreadQueryThread,
        open_messages: HashSet<String>,
    },
}

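// A tag with its display name, background color, and unread-message count.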
pub struct Tag {
    pub name: String,
    pub bg_color: String,
    pub unread: i64,
}

#[derive(Debug, PartialEq)]
pub enum RefreshingState {
    None,
    Loading,
    Error(String),
}

// `Msg` describes the different events you can modify state with.
pub enum Msg {
    Noop,
    // Tell the client to refresh its state
    Refresh,
    // Tell the client to reload the whole page from the server
    Reload,
    // Window has changed size
    OnResize,
    // Tell the server to update its state
    RefreshStart,
    RefreshDone(Option<gloo_net::Error>),
    NextPage,
    PreviousPage,
    UpdateQuery(String),
    SearchQuery(String),

    SetUnread(String, bool),
    AddTag(String, String),
    RemoveTag(String, String),

    FrontPageRequest {
        query: String,
        after: Option<String>,
        before: Option<String>,
        first: Option<i64>,
        last: Option<i64>,
    },
    FrontPageResult(
        Result<graphql_client::Response<graphql::front_page_query::ResponseData>, gloo_net::Error>,
    ),
    ShowThreadRequest {
        thread_id: String,
    },
    ShowThreadResult(
        Result<graphql_client::Response<graphql::show_thread_query::ResponseData>, gloo_net::Error>,
    ),

    SelectionSetNone,
    SelectionSetAll,
    SelectionAddTag(String),
    #[allow(dead_code)]
    // TODO
    SelectionRemoveTag(String),
    SelectionMarkAsRead,
    SelectionMarkAsUnread,
    SelectionAddThread(String),
    SelectionRemoveThread(String),

    MessageCollapse(String),
    MessageExpand(String),
    MultiMsg(Vec<Msg>),

    CopyToClipboard(String),

    WindowScrolled,
    SetProgress(f64),
    UpdateServerVersion(String),
}