Compare commits

...

18 Commits

Author SHA1 Message Date
56bc1cf7ed server: escape RSS feeds that are HTML escaped 2024-08-03 11:29:20 -07:00
e0863ac085 web: more robust avatar initial filtering 2024-07-29 17:29:15 -07:00
d5fa89b38c web: show tag list in all modalities. WIP 2024-07-29 08:48:44 -07:00
605af13a37 web: monospace font for plain text emails 2024-07-29 08:32:28 -07:00
3838cbd6e2 cargo fix 2024-07-24 11:08:47 -07:00
c76df0ef90 web: update copy icon in more places 2024-07-24 11:06:38 -07:00
cd77d302df web: small icon tweak for copying email addresses 2024-07-24 11:03:32 -07:00
71348d562d version bump 2024-07-24 11:03:26 -07:00
b6ae46db93 Move cargo config up a directory 2024-07-22 16:56:13 -07:00
6cb84054ed Only build server by default 2024-07-22 16:48:47 -07:00
7b511c1673 Fix cleanhtml build 2024-07-22 16:41:14 -07:00
bfd5e12bea Make URL joining more robust 2024-07-22 16:39:59 -07:00
ad8fb77857 Add copy to clipboard links to from/to/cc addresses 2024-07-22 16:04:25 -07:00
831466ddda Add mark read/unread support for news 2024-07-22 14:43:05 -07:00
4ee34444ae Move thread: and id: prefixing to server side.
This paves the way for better news: support
2024-07-22 14:26:48 -07:00
879ddb112e Remove some logging and fix a comment 2024-07-22 14:26:24 -07:00
331fb4f11b Fix build 2024-07-22 12:19:45 -07:00
4e5275ca0e cargo sqlx prepare 2024-07-22 12:19:38 -07:00
23 changed files with 554 additions and 208 deletions

4
.cargo/config.toml Normal file
View File

@@ -0,0 +1,4 @@
[build]
rustflags = [ "--cfg=web_sys_unstable_apis" ]

22
Cargo.lock generated
View File

@@ -1427,6 +1427,15 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "html-escape"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476"
dependencies = [
"utf8-width",
]
[[package]]
name = "html5ever"
version = "0.26.0"
@@ -3168,6 +3177,7 @@ dependencies = [
"async-graphql-rocket",
"css-inline",
"glog",
"html-escape",
"linkify",
"log",
"lol_html",
@@ -4140,10 +4150,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "uuid"
version = "1.9.1"
name = "utf8-width"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439"
checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3"
[[package]]
name = "uuid"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314"
dependencies = [
"getrandom 0.2.15",
]

View File

@@ -1,5 +1,8 @@
[workspace]
resolver = "2"
default-members = [
"server"
]
members = [
"web",
"server",

View File

@@ -0,0 +1,64 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n date,\n is_read,\n link,\n site,\n summary,\n title,\n name,\n homepage\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n uid = $1\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "date",
"type_info": "Timestamp"
},
{
"ordinal": 1,
"name": "is_read",
"type_info": "Bool"
},
{
"ordinal": 2,
"name": "link",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "site",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "summary",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 7,
"name": "homepage",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
true,
true,
true,
true,
true,
true,
true,
true
]
},
"hash": "113694cd5bf0d2582ff3a635776daa608fe88abe1185958c4215646c92335afb"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n site,\n name,\n count (\n NOT is_read\n OR NULL\n ) unread\nFROM\n post AS p\n JOIN feed AS f ON p.site = f.slug --\n -- TODO: figure this out to make the query faster when only looking for unread\n --WHERE\n -- (\n -- NOT $1\n -- OR NOT is_read\n -- )\nGROUP BY\n 1,\n 2\nORDER BY\n site\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "site",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "unread",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
true,
true,
null
]
},
"hash": "2dcbedef656e1b725c5ba4fb67d31ce7962d8714449b2fb630f49a7ed1acc270"
}

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n post\nSET\n is_read = $1\nWHERE\n uid = $2\n",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "b39147b9d06171cb742141eda4675688cb702fb284758b1224ed3aa2d7f3b3d9"
}

View File

@@ -0,0 +1,49 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n date,\n is_read,\n title,\n uid,\n name\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n site = $1\n AND (\n NOT $2\n OR NOT is_read\n )\nORDER BY\n date DESC,\n title OFFSET $3\nLIMIT\n $4\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "date",
"type_info": "Timestamp"
},
{
"ordinal": 1,
"name": "is_read",
"type_info": "Bool"
},
{
"ordinal": 2,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "uid",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "name",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text",
"Bool",
"Int8",
"Int8"
]
},
"nullable": [
true,
true,
true,
false,
true
]
},
"hash": "d9326384e689f361b24c2cadde57c5a06049c5055e2782f385275dea4540b20b"
}

View File

@@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n COUNT(*) count\nFROM\n post\nWHERE\n site = $1\n AND (\n NOT $2\n OR NOT is_read\n )\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Text",
"Bool"
]
},
"nullable": [
null
]
},
"hash": "f99699f8916bda34faaccf72fdd92b6e36e01600700ee4132e1de974b3aa79dc"
}

View File

@@ -30,4 +30,5 @@ maplit = "1.0.2"
linkify = "0.10.0"
sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio", "time"] }
url = "2.5.2"
html-escape = "0.2.13"

View File

@@ -0,0 +1,6 @@
UPDATE
post
SET
is_read = $1
WHERE
uid = $2

View File

@@ -9,7 +9,7 @@ fn main() -> anyhow::Result<()> {
println!("Sanitizing {src} into {dst}");
let bytes = fs::read(src)?;
let html = String::from_utf8_lossy(&bytes);
let html = sanitize_html(&html, "")?;
let html = sanitize_html(&html, "", &None)?;
fs::write(dst, html)?;
Ok(())

View File

@@ -3,7 +3,7 @@ use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error};
use mailparse::MailParseError;
use thiserror::Error;
use crate::SanitizeError;
use crate::TransformError;
#[derive(Error, Debug)]
pub enum ServerError {
@@ -19,8 +19,8 @@ pub enum ServerError {
PartNotFound,
#[error("sqlx error: {0}")]
SQLXError(#[from] sqlx::Error),
#[error("html sanitize error: {0}")]
SanitizeError(#[from] SanitizeError),
#[error("html transform error: {0}")]
TransformError(#[from] TransformError),
#[error("UTF8 error: {0}")]
Utf8Error(#[from] Utf8Error),
#[error("FromUTF8 error: {0}")]

View File

@@ -1,7 +1,6 @@
use async_graphql::{
connection::{Connection},
Context, EmptySubscription, Enum, Error, FieldResult, Object, Schema, SimpleObject, Union,
connection::Connection, Context, EmptySubscription, Enum, Error, FieldResult, Object, Schema,
SimpleObject, Union,
};
use log::info;
use notmuch::Notmuch;
@@ -273,11 +272,14 @@ impl Mutation {
unread: bool,
) -> Result<bool, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
info!("set_read_status({unread})");
if unread {
nm.tag_add("unread", &format!("{query}"))?;
} else {
nm.tag_remove("unread", &format!("{query}"))?;
let pool = ctx.data_unchecked::<PgPool>();
for q in query.split_whitespace() {
if newsreader::is_newsreader_thread(&q) {
newsreader::set_read_status(pool, &q, unread).await?;
} else {
nm::set_read_status(nm, q, unread).await?;
}
}
Ok(true)
}

View File

@@ -5,20 +5,55 @@ pub mod nm;
use css_inline::{CSSInliner, InlineError, InlineOptions};
use linkify::{LinkFinder, LinkKind};
use log::error;
use log::{error, info};
use lol_html::{element, errors::RewritingError, rewrite_str, RewriteStrSettings};
use maplit::{hashmap, hashset};
use thiserror::Error;
use url::Url;
// TODO: figure out how to use Cow
trait Transformer {
fn should_run(&self, input: &str) -> bool;
// TODO: should input be something like `html_escape` uses:
// <S: ?Sized + AsRef<str>>(text: &S) -> Cow<str>
fn transform(&self, input: &str) -> Result<String, TransformError>;
}
// TODO: how would we make this more generic to allow good implementations of Transformer outside
// of this module?
#[derive(Error, Debug)]
pub enum SanitizeError {
pub enum TransformError {
#[error("lol-html rewrite error")]
RewritingError(#[from] RewritingError),
#[error("css inline error")]
InlineError(#[from] InlineError),
}
struct SanitizeHtml<'a> {
cid_prefix: &'a str,
base_url: &'a Option<Url>,
}
impl<'a> Transformer for SanitizeHtml<'a> {
fn should_run(&self, _input: &str) -> bool {
true
}
fn transform(&self, input: &str) -> Result<String, TransformError> {
Ok(sanitize_html(input, self.cid_prefix, self.base_url)?)
}
}
struct EscapeHtml;
impl Transformer for EscapeHtml {
fn should_run(&self, input: &str) -> bool {
input.starts_with("&lt")
}
fn transform(&self, input: &str) -> Result<String, TransformError> {
Ok(html_escape::decode_html_entities(input).to_string())
}
}
pub fn linkify_html(text: &str) -> String {
let mut finder = LinkFinder::new();
let finder = finder.url_must_have_scheme(false).kinds(&[LinkKind::Url]);
@@ -50,31 +85,15 @@ pub fn linkify_html(text: &str) -> String {
pub fn sanitize_html(
html: &str,
cid_prefix: &str,
base_url: &Url,
) -> Result<String, SanitizeError> {
let element_content_handlers = vec![
base_url: &Option<Url>,
) -> Result<String, TransformError> {
let mut element_content_handlers = vec![
// Open links in new tab
element!("a[href]", |el| {
el.set_attribute("target", "_blank").unwrap();
Ok(())
}),
// Make links with relative URLs absolute
element!("a[href]", |el| {
if let Some(Ok(href)) = el.get_attribute("href").map(|href| base_url.join(&href)) {
el.set_attribute("href", &href.as_str()).unwrap();
}
Ok(())
}),
// Make images with relative srcs absolute
element!("img[src]", |el| {
if let Some(Ok(src)) = el.get_attribute("src").map(|src| base_url.join(&src)) {
el.set_attribute("src", &src.as_str()).unwrap();
}
Ok(())
}),
// Replace mixed part CID images with URL
element!("img[src]", |el| {
let src = el
@@ -98,6 +117,26 @@ pub fn sanitize_html(
Ok(())
}),
];
if let Some(base_url) = base_url {
element_content_handlers.extend(vec![
// Make links with relative URLs absolute
element!("a[href]", |el| {
if let Some(Ok(href)) = el.get_attribute("href").map(|href| base_url.join(&href)) {
el.set_attribute("href", &href.as_str()).unwrap();
}
Ok(())
}),
// Make images with relative srcs absolute
element!("img[src]", |el| {
if let Some(Ok(src)) = el.get_attribute("src").map(|src| base_url.join(&src)) {
el.set_attribute("src", &src.as_str()).unwrap();
}
Ok(())
}),
]);
}
let inline_opts = InlineOptions {
inline_style_tags: true,

View File

@@ -5,7 +5,6 @@ use std::{
};
use async_graphql::connection::{self, Connection, Edge};
use log::info;
use sqlx::postgres::PgPool;
use url::Url;
@@ -15,7 +14,7 @@ const THREAD_PREFIX: &'static str = "news:";
use crate::{
error::ServerError,
graphql::{Body, Email, Html, Message, Tag, Thread, ThreadSummary},
sanitize_html,
EscapeHtml, SanitizeHtml, Transformer,
};
pub fn is_newsreader_search(query: &str) -> bool {
@@ -44,7 +43,6 @@ pub async fn search(
query: String,
) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> {
let query: Query = query.parse()?;
info!("news search query {query:?}");
let site = query.site.expect("search has no site");
connection::query(
after,
@@ -52,7 +50,6 @@ pub async fn search(
first,
last,
|after: Option<usize>, before: Option<usize>, first, last| async move {
info!("search page info {after:#?}, {before:#?}, {first:#?}, {last:#?}");
let default_page_size = 100;
let (offset, limit) = match (after, before, first, last) {
// Reasonable defaults
@@ -86,7 +83,6 @@ pub async fn search(
// The +1 is to see if there are more pages of data available.
let limit = limit + 1;
info!("search page offset {offset} limit {limit}");
let rows = sqlx::query_file!(
"sql/threads.sql",
site,
@@ -188,18 +184,18 @@ pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerEr
})
.unwrap_or(default_homepage.to_string()),
)?;
let link = Url::parse(
&r.link
.as_ref()
.map(|h| {
if h.is_empty() {
default_homepage.to_string()
} else {
h.to_string()
}
})
.unwrap_or(default_homepage.to_string()),
)?;
let link = &r
.link
.as_ref()
.map(|h| {
if h.is_empty() {
default_homepage.to_string()
} else {
h.to_string()
}
})
.map(|h| Url::parse(&h).ok())
.flatten();
let addr = r.link.as_ref().map(|link| {
if link.contains('@') {
link.clone()
@@ -211,13 +207,24 @@ pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerEr
}
}
});
let html = r.summary.unwrap_or("NO SUMMARY".to_string());
let mut html = r.summary.unwrap_or("NO SUMMARY".to_string());
// TODO: add site specific cleanups. For example:
// * Grafana does <div class="image-wrapp"><img class="lazyload>"<img src="/media/...>"</img></div>
// * Some sites appear to be HTML encoded, unencode them, i.e. imperialviolet
let html = sanitize_html(&html, "", &link)?;
// * Some sites appear to be HTML encoded, unencode them, i.e. imperialviolet
let tranformers: Vec<Box<dyn Transformer>> = vec![
Box::new(EscapeHtml),
Box::new(SanitizeHtml {
cid_prefix: "",
base_url: &link,
}),
];
for t in tranformers.iter() {
if t.should_run(&html) {
html = t.transform(&html)?;
}
}
let body = Body::Html(Html {
html,
html: html.to_string(),
content_tree: "".to_string(),
});
let title = r.title.unwrap_or("NO TITLE".to_string());
@@ -253,6 +260,7 @@ pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerEr
struct Query {
unread_only: bool,
site: Option<String>,
uid: Option<String>,
remainder: Vec<String>,
}
@@ -261,6 +269,7 @@ impl FromStr for Query {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut unread_only = false;
let mut site = None;
let mut uid = None;
let mut remainder = Vec::new();
let site_prefix = format!("tag:{TAG_PREFIX}");
for word in s.split_whitespace() {
@@ -268,6 +277,8 @@ impl FromStr for Query {
unread_only = true
} else if word.starts_with(&site_prefix) {
site = Some(word[site_prefix.len()..].to_string())
} else if word.starts_with(THREAD_PREFIX) {
uid = Some(word[THREAD_PREFIX.len()..].to_string())
} else {
remainder.push(word.to_string());
}
@@ -275,7 +286,20 @@ impl FromStr for Query {
Ok(Query {
unread_only,
site,
uid,
remainder,
})
}
}
pub async fn set_read_status<'ctx>(
pool: &PgPool,
query: &str,
unread: bool,
) -> Result<bool, ServerError> {
let query: Query = query.parse()?;
sqlx::query_file!("sql/set_unread.sql", !unread, query.uid)
.execute(pool)
.await?;
Ok(true)
}

View File

@@ -10,7 +10,6 @@ use log::{error, info, warn};
use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
use memmap::MmapOptions;
use notmuch::Notmuch;
use url::Url;
use crate::{
error::ServerError,
@@ -80,7 +79,7 @@ pub async fn search(
.0
.into_iter()
.map(|ts| ThreadSummary {
thread: ts.thread,
thread: format!("thread:{}", ts.thread),
timestamp: ts.timestamp,
date_relative: ts.date_relative,
matched: ts.matched,
@@ -179,7 +178,7 @@ pub async fn thread(
.get_first_value("date")
.and_then(|d| mailparse::dateparse(&d).ok());
let cid_prefix = shared::urls::cid_prefix(None, &id);
let base_url = Url::parse("https://there-should-be-no-relative-urls-in-email").unwrap();
let base_url = None;
let body = match extract_body(&m, &id)? {
Body::PlainText(PlainText { text, content_tree }) => {
let text = if text.len() > MAX_RAW_MESSAGE_SIZE {
@@ -248,7 +247,7 @@ pub async fn thread(
// TODO(wathiede): parse message and fill out attachments
let attachments = extract_attachments(&m, &id)?;
messages.push(Message {
id,
id: format!("id:{id}"),
from,
to,
cc,
@@ -752,3 +751,16 @@ fn render_content_type_tree(m: &ParsedMail) -> String {
SKIP_HEADERS.join("\n ")
)
}
pub async fn set_read_status<'ctx>(
nm: &Notmuch,
query: &str,
unread: bool,
) -> Result<bool, ServerError> {
if unread {
nm.tag_add("unread", &format!("{query}"))?;
} else {
nm.tag_remove("unread", &format!("{query}"))?;
}
Ok(true)
}

View File

@@ -36,6 +36,8 @@ wasm-opt = ['-Os']
[dependencies.web-sys]
version = "0.3.58"
features = [
"Clipboard",
"MediaQueryList",
"Navigator",
"Window"
]

View File

@@ -372,7 +372,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
{
let threads = selected_threads
.iter()
.map(|tid| format!("thread:{tid}"))
.map(|tid| tid.to_string())
.collect::<Vec<_>>()
.join(" ");
orders
@@ -387,7 +387,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
{
let threads = selected_threads
.iter()
.map(|tid| format!("thread:{tid}"))
.map(|tid| tid.to_string())
.collect::<Vec<_>>()
.join(" ");
orders
@@ -402,7 +402,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
{
let threads = selected_threads
.iter()
.map(|tid| format!("thread:{tid}"))
.map(|tid| tid.to_string())
.collect::<Vec<_>>()
.join(" ");
orders
@@ -417,7 +417,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
{
let threads = selected_threads
.iter()
.map(|tid| format!("thread:{tid}"))
.map(|tid| tid.to_string())
.collect::<Vec<_>>()
.join(" ");
orders
@@ -452,6 +452,17 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
}
}
Msg::MultiMsg(msgs) => msgs.into_iter().for_each(|msg| update(msg, model, orders)),
Msg::CopyToClipboard(text) => {
let clipboard = seed::window()
.navigator()
.clipboard()
.expect("couldn't get clipboard");
orders.perform_cmd(async move {
wasm_bindgen_futures::JsFuture::from(clipboard.write_text(&text))
.await
.expect("failed to copy to clipboard");
});
}
}
}
// `Model` describes our app state.
@@ -551,4 +562,6 @@ pub enum Msg {
MessageCollapse(String),
MessageExpand(String),
MultiMsg(Vec<Msg>),
CopyToClipboard(String),
}

View File

@@ -3,8 +3,8 @@ use seed_hooks::{state_access::CloneState, topo, use_state};
use crate::{
api::urls,
state::{Context, Model, Msg, Tag},
view::{self, view_header, view_search_results},
state::{Context, Model, Msg},
view::{self, view_header, view_search_results, view_tags},
};
#[topo::nested]
@@ -33,100 +33,9 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
show_icon_text,
),
};
fn view_tag_li(display_name: &str, indent: usize, t: &Tag, search_unread: bool) -> Node<Msg> {
let href = if search_unread {
urls::search(&format!("is:unread tag:{}", t.name), 0)
} else {
urls::search(&format!("tag:{}", t.name), 0)
};
li![a![
attrs! {
At::Href => href
},
(0..indent).map(|_| span![C!["tag-indent"], ""]),
i![
C!["tag-tag", "fa-solid", "fa-tag"],
style! {
//"--fa-primary-color" => t.fg_color,
St::Color => t.bg_color,
},
],
display_name,
IF!(t.unread>0 => format!(" ({})", t.unread))
]]
}
fn matches(a: &[&str], b: &[&str]) -> usize {
std::iter::zip(a.iter(), b.iter())
.take_while(|(a, b)| a == b)
.count()
}
fn view_tag_list<'a>(
tags: impl Iterator<Item = &'a Tag>,
search_unread: bool,
) -> Vec<Node<Msg>> {
let mut lis = Vec::new();
let mut last = Vec::new();
for t in tags {
let parts: Vec<_> = t.name.split('/').collect();
let mut n = matches(&last, &parts);
if n <= parts.len() - 2 && parts.len() > 1 {
// Synthesize fake tags for proper indenting.
for i in n..parts.len() - 1 {
let display_name = parts[n];
lis.push(view_tag_li(
&display_name,
n,
&Tag {
name: parts[..i + 1].join("/"),
bg_color: "#fff".to_string(),
fg_color: "#000".to_string(),
unread: 0,
},
search_unread,
));
}
n = parts.len() - 1;
}
let display_name = parts[n];
lis.push(view_tag_li(&display_name, n, t, search_unread));
last = parts;
}
lis
}
let unread = model
.tags
.as_ref()
.map(|tags| tags.iter().filter(|t| t.unread > 0).collect())
.unwrap_or(Vec::new());
let tags_open = use_state(|| false);
let force_tags_open = unread.is_empty();
div![
C!["main-content"],
aside![
C!["tags-menu", "menu"],
IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),
IF!(!unread.is_empty() => ul![C!["menu-list"], view_tag_list(unread.into_iter(),true)]),
p![
C!["menu-label"],
IF!(!force_tags_open =>
i![C![
"fa-solid",
if tags_open.get() {
"fa-angle-up"
} else {
"fa-angle-down"
}
]]),
" Tags",
ev(Ev::Click, move |_| {
tags_open.set(!tags_open.get());
})
],
ul![
C!["menu-list"],
IF!(force_tags_open||tags_open.get() => model.tags.as_ref().map(|tags| view_tag_list(tags.iter(),false))),
]
],
view_tags(model),
div![
view_header(&model.query, &model.refreshing_state),
content,

View File

@@ -6,7 +6,10 @@ use crate::{
api::urls,
graphql::front_page_query::*,
state::{Context, Model, Msg},
view::{self, human_age, pretty_authors, search_toolbar, set_title, tags_chiclet, view_header},
view::{
self, human_age, pretty_authors, search_toolbar, set_title, tags_chiclet, view_header,
view_tags,
},
};
pub(super) fn view(model: &Model) -> Node<Msg> {
@@ -37,6 +40,7 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
view_header(&model.query, &model.refreshing_state),
content,
view_header(&model.query, &model.refreshing_state),
view_tags(model),
]
}

View File

@@ -6,14 +6,14 @@ use std::{
use chrono::{DateTime, Datelike, Duration, Local, Utc};
use human_format::{Formatter, Scales};
use itertools::Itertools;
use log::error;
use log::{error, info};
use seed::{prelude::*, *};
use seed_hooks::{state_access::CloneState, topo, use_state};
use crate::{
api::urls,
graphql::{front_page_query::*, show_thread_query::*},
state::{unread_query, Model, Msg, RefreshingState},
state::{unread_query, Model, Msg, RefreshingState, Tag},
};
mod desktop;
@@ -73,6 +73,7 @@ fn removable_tags_chiclet<'a>(
"is-grouped-multiline"
],
tags.iter().map(move |tag| {
let thread_id = thread_id.to_string();
let hex = compute_color(tag);
let style = style! {St::BackgroundColor=>hex};
let classes = C!["tag", IF!(is_mobile => "is-small")];
@@ -81,7 +82,6 @@ fn removable_tags_chiclet<'a>(
};
let tag = tag.clone();
let rm_tag = tag.clone();
let thread_id = format!("thread:{thread_id}");
div![
C!["control"],
div![
@@ -122,14 +122,16 @@ fn pretty_authors(authors: &str) -> impl Iterator<Item = Node<Msg>> + '_ {
if one_person {
return Some(span![
attrs! {
At::Title => author.trim()},
At::Title => author.trim()
},
author
]);
}
author.split_whitespace().nth(0).map(|first| {
span![
attrs! {
At::Title => author.trim()},
At::Title => author.trim()
},
first
]
})
@@ -442,8 +444,10 @@ fn has_unread(tags: &[String]) -> bool {
fn render_avatar(avatar: Option<String>, from: &str) -> Node<Msg> {
let initials: String = from
.to_lowercase()
.trim()
.split(" ")
.map(|word| word.chars().next().unwrap())
.filter(|c| c.is_alphanumeric())
// Limit to 2 characters because more characters don't fit in the box
.take(2)
.collect();
@@ -516,7 +520,17 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
p![
strong![from],
br![],
small![from_detail],
small![
&from_detail,
" ",
from_detail.map(|detail| span![
i![C!["far", "fa-clone"]],
ev(Ev::Click, move |e| {
e.stop_propagation();
Msg::CopyToClipboard(detail.to_string())
})
])
],
table![
IF!(!msg.to.is_empty() =>
tr![
@@ -526,19 +540,31 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
msg.to.iter().enumerate().map(|(i, to)|
small![
if i>0 { ", " }else { "" },
match to {
ShowThreadQueryThreadMessagesTo {
name: Some(name),
addr:Some(addr),
} => format!("{name} <{addr}>"),
ShowThreadQueryThreadMessagesTo {
name: Some(name),
addr:None
} => format!("{name}"),
ShowThreadQueryThreadMessagesTo {
addr: Some(addr), ..
} => format!("{addr}"),
_ => String::from("UNKNOWN"),
{
let to = match to {
ShowThreadQueryThreadMessagesTo {
name: Some(name),
addr:Some(addr),
} => format!("{name} <{addr}>"),
ShowThreadQueryThreadMessagesTo {
name: Some(name),
addr:None
} => format!("{name}"),
ShowThreadQueryThreadMessagesTo {
addr: Some(addr), ..
} => format!("{addr}"),
_ => String::from("UNKNOWN"),
};
span![
&to, " ",
span![
i![C!["far", "fa-clone"]],
ev(Ev::Click, move |e| {
e.stop_propagation();
Msg::CopyToClipboard(to)
})
]
]
}
])
@@ -551,21 +577,32 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
msg.cc.iter().enumerate().map(|(i, cc)|
small![
if i>0 { ", " }else { "" },
match cc {
ShowThreadQueryThreadMessagesCc {
name: Some(name),
addr:Some(addr),
} => format!("{name} <{addr}>"),
ShowThreadQueryThreadMessagesCc {
name: Some(name),
addr:None
} => format!("{name}"),
ShowThreadQueryThreadMessagesCc {
addr: Some(addr), ..
} => format!("<{addr}>"),
_ => String::from("UNKNOWN"),
{
let cc = match cc {
ShowThreadQueryThreadMessagesCc {
name: Some(name),
addr:Some(addr),
} => format!("{name} <{addr}>"),
ShowThreadQueryThreadMessagesCc {
name: Some(name),
addr:None
} => format!("{name}"),
ShowThreadQueryThreadMessagesCc {
addr: Some(addr), ..
} => format!("<{addr}>"),
_ => String::from("UNKNOWN"),
};
span![
&cc, " ",
span![
i![C!["far", "fa-clone"]],
ev(Ev::Click, move |e| {
e.stop_propagation();
Msg::CopyToClipboard(cc)
})
]
]
}
])
]
]),
@@ -592,7 +629,7 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
],
ev(Ev::Click, move |e| {
e.stop_propagation();
Msg::SetUnread(format!("id:{id}"), !is_unread)
Msg::SetUnread(id, !is_unread)
})
]
]
@@ -664,7 +701,7 @@ fn render_closed_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
],
ev(Ev::Click, move |e| {
e.stop_propagation();
Msg::SetUnread(format!("id:{id}"), !is_unread)
Msg::SetUnread(id, !is_unread)
})
]
]
@@ -808,7 +845,8 @@ fn thread(
});
let read_thread_id = thread.thread_id.clone();
let unread_thread_id = thread.thread_id.clone();
let spam_thread_id = thread.thread_id.clone();
let spam_add_thread_id = thread.thread_id.clone();
let spam_unread_thread_id = thread.thread_id.clone();
div![
C!["thread"],
h3![C!["is-size-5"], subject],
@@ -827,20 +865,14 @@ fn thread(
attrs! {At::Title => "Mark as read"},
span![C!["icon", "is-small"], i![C!["far", "fa-envelope-open"]]],
IF!(show_icon_text=>span!["Read"]),
ev(Ev::Click, move |_| Msg::SetUnread(
format!("thread:{read_thread_id}"),
false
)),
ev(Ev::Click, move |_| Msg::SetUnread(read_thread_id, false)),
],
button![
C!["button", "mark-unread"],
attrs! {At::Title => "Mark as unread"},
span![C!["icon", "is-small"], i![C!["far", "fa-envelope"]]],
IF!(show_icon_text=>span!["Unread"]),
ev(Ev::Click, move |_| Msg::SetUnread(
format!("thread:{unread_thread_id}"),
true
)),
ev(Ev::Click, move |_| Msg::SetUnread(unread_thread_id, true)),
],
],
],
@@ -854,8 +886,8 @@ fn thread(
span![C!["icon", "is-small"], i![C!["far", "fa-hand"]]],
IF!(show_icon_text=>span!["Spam"]),
ev(Ev::Click, move |_| Msg::MultiMsg(vec![
Msg::AddTag(format!("thread:{spam_thread_id}"), "Spam".to_string()),
Msg::SetUnread(format!("thread:{spam_thread_id}"), false)
Msg::AddTag(spam_add_thread_id, "Spam".to_string()),
Msg::SetUnread(spam_unread_thread_id, false)
])),
],
],
@@ -970,3 +1002,97 @@ pub fn view(model: &Model) -> Node<Msg> {
_ => div![C!["desktop"], desktop::view(model)],
},]
}
pub fn view_tags(model: &Model) -> Node<Msg> {
fn view_tag_li(display_name: &str, indent: usize, t: &Tag, search_unread: bool) -> Node<Msg> {
let href = if search_unread {
urls::search(&format!("is:unread tag:{}", t.name), 0)
} else {
urls::search(&format!("tag:{}", t.name), 0)
};
li![a![
attrs! {
At::Href => href
},
(0..indent).map(|_| span![C!["tag-indent"], ""]),
i![
C!["tag-tag", "fa-solid", "fa-tag"],
style! {
//"--fa-primary-color" => t.fg_color,
St::Color => t.bg_color,
},
],
display_name,
IF!(t.unread>0 => format!(" ({})", t.unread))
]]
}
fn matches(a: &[&str], b: &[&str]) -> usize {
std::iter::zip(a.iter(), b.iter())
.take_while(|(a, b)| a == b)
.count()
}
fn view_tag_list<'a>(
tags: impl Iterator<Item = &'a Tag>,
search_unread: bool,
) -> Vec<Node<Msg>> {
let mut lis = Vec::new();
let mut last = Vec::new();
for t in tags {
let parts: Vec<_> = t.name.split('/').collect();
let mut n = matches(&last, &parts);
if n <= parts.len() - 2 && parts.len() > 1 {
// Synthesize fake tags for proper indenting.
for i in n..parts.len() - 1 {
let display_name = parts[n];
lis.push(view_tag_li(
&display_name,
n,
&Tag {
name: parts[..i + 1].join("/"),
bg_color: "#fff".to_string(),
fg_color: "#000".to_string(),
unread: 0,
},
search_unread,
));
}
n = parts.len() - 1;
}
let display_name = parts[n];
lis.push(view_tag_li(&display_name, n, t, search_unread));
last = parts;
}
lis
}
let unread = model
.tags
.as_ref()
.map(|tags| tags.iter().filter(|t| t.unread > 0).collect())
.unwrap_or(Vec::new());
let tags_open = use_state(|| false);
let force_tags_open = unread.is_empty();
aside![
C!["tags-menu", "menu"],
IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),
IF!(!unread.is_empty() => ul![C!["menu-list"], view_tag_list(unread.into_iter(),true)]),
p![
C!["menu-label"],
IF!(!force_tags_open =>
i![C![
"fa-solid",
if tags_open.get() {
"fa-angle-up"
} else {
"fa-angle-down"
}
]]),
" Tags",
ev(Ev::Click, move |_| {
tags_open.set(!tags_open.get());
})
],
ul![
C!["menu-list"],
IF!(force_tags_open||tags_open.get() => model.tags.as_ref().map(|tags| view_tag_list(tags.iter(),false))),
]
]
}

View File

@@ -2,7 +2,7 @@ use seed::{prelude::*, *};
use crate::{
state::{Context, Model, Msg},
view::{self, view_header, view_search_results},
view::{self, view_header, view_search_results, view_tags},
};
pub(super) fn view(model: &Model) -> Node<Msg> {
@@ -36,6 +36,7 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
view_header(&model.query, &model.refreshing_state),
content,
view_header(&model.query, &model.refreshing_state),
view_tags(model),
]
]
}

View File

@@ -65,8 +65,9 @@
}
.view-part-text-plain {
padding: 0.5em;
font-family: monospace;
overflow-wrap: break-word;
padding: 0.5em;
white-space: pre-wrap;
word-break: break-word;
word-wrap: break-word;