Compare commits
82 Commits
letterbox-
...
letterbox-
| Author | SHA1 | Date | |
|---|---|---|---|
| c81a8c1cd3 | |||
| 7c3cfec3d1 | |||
| a2920fde3b | |||
| 8bc449ae6e | |||
| 0febd0535a | |||
| a9e00a54e4 | |||
| 6811c689ff | |||
| 8ba6b3d0b0 | |||
| a7c5585e80 | |||
| 4ef4d49113 | |||
| f8af303110 | |||
| fa5aac34ba | |||
| b58556254e | |||
| e365ced7dd | |||
| 93d569fb14 | |||
| f86a5f464d | |||
| 956c20b156 | |||
| 1eb498712b | |||
| f12979c0be | |||
| 4665f34e54 | |||
| bbdc35061c | |||
| f11f0b4d23 | |||
| c7c47e4a73 | |||
| c3835522b2 | |||
| dfa80f9046 | |||
| b8dfdabf8d | |||
| bbcf52b006 | |||
| f92c05cd28 | |||
| 885bbe0a8c | |||
| 8b1d111837 | |||
| 08abf31fa9 | |||
| fa99959508 | |||
| 0f6af0f475 | |||
| 4c486e9168 | |||
| 109d380ea7 | |||
| 4244fa0d82 | |||
| 4b15e71893 | |||
| 1bbebad01b | |||
| 27edffd090 | |||
| 08212a9f78 | |||
| 877ec6c4b0 | |||
| 3ce92d6bdf | |||
| 1a28bb2021 | |||
| b86f72f75c | |||
| 1a8b98d420 | |||
| 383a7d800f | |||
| 453561140a | |||
| f6d5d3755b | |||
| 5226fe090e | |||
| c10ad00ca7 | |||
| 64fc92c3d6 | |||
| b9c116d5b6 | |||
| 007200b37b | |||
| 9824ad1e18 | |||
| a8819c7551 | |||
| 8cdfbdd08f | |||
| b2d1dc9276 | |||
| 1f79b43a85 | |||
| 904619bccd | |||
| 14104f6469 | |||
| dccfb6f71f | |||
| 547266a705 | |||
| 273562b58c | |||
| dc39eed1a7 | |||
| 9178badfd0 | |||
| 38e75ec251 | |||
| c1496bf87b | |||
| 4da888b240 | |||
| c703be2ca5 | |||
| 5cec8add5e | |||
| 0225dbde3a | |||
| f84b8fa6c2 | |||
| 979cbcd23e | |||
| b3070e1919 | |||
| e5fdde8f30 | |||
| 7de36bbc3d | |||
| 1c4f27902e | |||
| 7ee86f0d2f | |||
| a0b06fd5ef | |||
| 630bb20b35 | |||
| 17ea2a35cb | |||
| 7d9376d607 |
@@ -26,7 +26,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
toolchain: nightly
|
||||
target: wasm32-unknown-unknown
|
||||
- run: cargo install trunk
|
||||
- run: cd web; trunk build
|
||||
|
||||
692
Cargo.lock
generated
692
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -8,7 +8,7 @@ authors = ["Bill Thiede <git@xinu.tv>"]
|
||||
edition = "2021"
|
||||
license = "UNLICENSED"
|
||||
publish = ["xinu"]
|
||||
version = "0.15.7"
|
||||
version = "0.17.24"
|
||||
repository = "https://git.z.xinu.tv/wathiede/letterbox"
|
||||
|
||||
[profile.dev]
|
||||
|
||||
@@ -11,14 +11,14 @@ version.workspace = true
|
||||
|
||||
|
||||
[dependencies]
|
||||
log = "0.4.14"
|
||||
mailparse = "0.16.0"
|
||||
log = "0.4.27"
|
||||
mailparse = "0.16.1"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = { version = "1.0", features = ["unbounded_depth"] }
|
||||
thiserror = "2.0.0"
|
||||
thiserror = "2.0.12"
|
||||
tracing = "0.1.41"
|
||||
|
||||
[dev-dependencies]
|
||||
itertools = "0.14.0"
|
||||
pretty_assertions = "1"
|
||||
rayon = "1.5"
|
||||
rayon = "1.10"
|
||||
|
||||
@@ -214,9 +214,8 @@ use std::{
|
||||
process::Command,
|
||||
};
|
||||
|
||||
use log::{error, info};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::instrument;
|
||||
use tracing::{error, info, instrument, warn};
|
||||
|
||||
/// # Number of seconds since the Epoch
|
||||
pub type UnixTime = isize;
|
||||
@@ -503,15 +502,28 @@ impl Notmuch {
|
||||
self.tags_for_query("*")
|
||||
}
|
||||
|
||||
#[instrument(skip_all, fields(tag=tag,search_term=search_term))]
|
||||
pub fn tag_add(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
|
||||
self.run_notmuch(["tag", &format!("+{tag}"), search_term])?;
|
||||
self.tags_add(tag, &[search_term])
|
||||
}
|
||||
|
||||
#[instrument(skip_all, fields(tag=tag,search_term=?search_term))]
|
||||
pub fn tags_add(&self, tag: &str, search_term: &[&str]) -> Result<(), NotmuchError> {
|
||||
let tag = format!("+{tag}");
|
||||
let mut args = vec!["tag", &tag];
|
||||
args.extend(search_term);
|
||||
self.run_notmuch(&args)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[instrument(skip_all, fields(tag=tag,search_term=search_term))]
|
||||
pub fn tag_remove(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
|
||||
self.run_notmuch(["tag", &format!("-{tag}"), search_term])?;
|
||||
self.tags_remove(tag, &[search_term])
|
||||
}
|
||||
#[instrument(skip_all, fields(tag=tag,search_term=?search_term))]
|
||||
pub fn tags_remove(&self, tag: &str, search_term: &[&str]) -> Result<(), NotmuchError> {
|
||||
let tag = format!("-{tag}");
|
||||
let mut args = vec!["tag", &tag];
|
||||
args.extend(search_term);
|
||||
self.run_notmuch(&args)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -598,6 +610,11 @@ impl Notmuch {
|
||||
|
||||
#[instrument(skip_all, fields(id=id,part=part))]
|
||||
pub fn show_original_part(&self, id: &MessageId, part: usize) -> Result<Vec<u8>, NotmuchError> {
|
||||
let id = if id.starts_with("id:") {
|
||||
id
|
||||
} else {
|
||||
&format!("id:{id}")
|
||||
};
|
||||
let res = self.run_notmuch(["show", "--part", &part.to_string(), id])?;
|
||||
Ok(res)
|
||||
}
|
||||
@@ -700,6 +717,13 @@ impl Notmuch {
|
||||
cmd.args(args);
|
||||
info!("{:?}", &cmd);
|
||||
let out = cmd.output()?;
|
||||
if !out.stderr.is_empty() {
|
||||
warn!(
|
||||
"{:?}: STDERR:\n{}",
|
||||
&cmd,
|
||||
String::from_utf8_lossy(&out.stderr)
|
||||
);
|
||||
}
|
||||
Ok(out.stdout)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,4 +11,10 @@ version.workspace = true
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0.69"
|
||||
anyhow = "1.0.98"
|
||||
clap = { version = "4.5.37", features = ["derive", "env"] }
|
||||
letterbox-notmuch = { version = "0.17.9", registry = "xinu" }
|
||||
letterbox-shared = { version = "0.17.9", registry = "xinu" }
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio"] }
|
||||
tokio = { version = "1.44.2", features = ["rt", "macros", "rt-multi-thread"] }
|
||||
|
||||
@@ -1,210 +1,36 @@
|
||||
use std::{convert::Infallible, io::Write, str::FromStr};
|
||||
use std::{collections::HashMap, io::Write};
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
enum MatchType {
|
||||
From,
|
||||
Sender,
|
||||
To,
|
||||
Cc,
|
||||
Subject,
|
||||
List,
|
||||
DeliveredTo,
|
||||
XForwardedTo,
|
||||
ReplyTo,
|
||||
XOriginalTo,
|
||||
XSpam,
|
||||
Body,
|
||||
#[default]
|
||||
Unknown,
|
||||
}
|
||||
#[derive(Debug, Default)]
|
||||
struct Match {
|
||||
match_type: MatchType,
|
||||
needle: String,
|
||||
use clap::{Parser, Subcommand};
|
||||
use letterbox_shared::{cleanup_match, Match, MatchType, Rule};
|
||||
use sqlx::{types::Json, PgPool};
|
||||
|
||||
#[derive(Debug, Subcommand)]
|
||||
enum Mode {
|
||||
Debug,
|
||||
Notmuchrc,
|
||||
LoadSql {
|
||||
#[arg(short, long)]
|
||||
dsn: String,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct Rule {
|
||||
matches: Vec<Match>,
|
||||
tags: Vec<String>,
|
||||
/// Simple program to greet a person
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(version, about, long_about = None)]
|
||||
struct Args {
|
||||
#[arg(short, long, default_value = "/home/wathiede/dotfiles/procmailrc")]
|
||||
input: String,
|
||||
|
||||
#[command(subcommand)]
|
||||
mode: Mode,
|
||||
}
|
||||
|
||||
fn unescape(s: &str) -> String {
|
||||
s.replace('\\', "")
|
||||
}
|
||||
|
||||
fn cleanup_match(prefix: &str, s: &str) -> String {
|
||||
unescape(&s[prefix.len()..]).replace(".*", "")
|
||||
}
|
||||
|
||||
mod matches {
|
||||
pub const TO: &'static str = "TO";
|
||||
pub const CC: &'static str = "Cc";
|
||||
pub const TOCC: &'static str = "(TO|Cc)";
|
||||
pub const FROM: &'static str = "From";
|
||||
pub const SENDER: &'static str = "Sender";
|
||||
pub const SUBJECT: &'static str = "Subject";
|
||||
pub const DELIVERED_TO: &'static str = "Delivered-To";
|
||||
pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
|
||||
pub const REPLY_TO: &'static str = "Reply-To";
|
||||
pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
|
||||
pub const LIST_ID: &'static str = "List-ID";
|
||||
pub const X_SPAM: &'static str = "X-Spam";
|
||||
pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
|
||||
}
|
||||
|
||||
impl FromStr for Match {
|
||||
type Err = Infallible;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
// Examples:
|
||||
// "* 1^0 ^TOsonyrewards.com@xinu.tv"
|
||||
// "* ^TOsonyrewards.com@xinu.tv"
|
||||
let mut it = s.split_whitespace().skip(1);
|
||||
let mut needle = it.next().unwrap();
|
||||
if needle == "1^0" {
|
||||
needle = it.next().unwrap();
|
||||
}
|
||||
let mut needle = vec![needle];
|
||||
needle.extend(it);
|
||||
let needle = needle.join(" ");
|
||||
let first = needle.chars().nth(0).unwrap_or(' ');
|
||||
use matches::*;
|
||||
if first == '^' {
|
||||
let needle = &needle[1..];
|
||||
if needle.starts_with(TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::To,
|
||||
needle: cleanup_match(TO, needle),
|
||||
});
|
||||
} else if needle.starts_with(FROM) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::From,
|
||||
needle: cleanup_match(FROM, needle),
|
||||
});
|
||||
} else if needle.starts_with(CC) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::Cc,
|
||||
needle: cleanup_match(CC, needle),
|
||||
});
|
||||
} else if needle.starts_with(TOCC) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::To,
|
||||
needle: cleanup_match(TOCC, needle),
|
||||
});
|
||||
} else if needle.starts_with(SENDER) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::Sender,
|
||||
needle: cleanup_match(SENDER, needle),
|
||||
});
|
||||
} else if needle.starts_with(SUBJECT) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::Subject,
|
||||
needle: cleanup_match(SUBJECT, needle),
|
||||
});
|
||||
} else if needle.starts_with(X_ORIGINAL_TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::XOriginalTo,
|
||||
needle: cleanup_match(X_ORIGINAL_TO, needle),
|
||||
});
|
||||
} else if needle.starts_with(LIST_ID) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::List,
|
||||
needle: cleanup_match(LIST_ID, needle),
|
||||
});
|
||||
} else if needle.starts_with(REPLY_TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::ReplyTo,
|
||||
needle: cleanup_match(REPLY_TO, needle),
|
||||
});
|
||||
} else if needle.starts_with(X_SPAM_FLAG) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::XSpam,
|
||||
needle: '*'.to_string(),
|
||||
});
|
||||
} else if needle.starts_with(X_SPAM) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::XSpam,
|
||||
needle: '*'.to_string(),
|
||||
});
|
||||
} else if needle.starts_with(DELIVERED_TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::DeliveredTo,
|
||||
needle: cleanup_match(DELIVERED_TO, needle),
|
||||
});
|
||||
} else if needle.starts_with(X_FORWARDED_TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::XForwardedTo,
|
||||
needle: cleanup_match(X_FORWARDED_TO, needle),
|
||||
});
|
||||
} else {
|
||||
unreachable!("needle: '{needle}'")
|
||||
}
|
||||
} else {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::Body,
|
||||
needle: cleanup_match("", &needle),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()> {
|
||||
// TODO(wathiede): if reindexing this many tags is too slow, see if combining rules per tag is
|
||||
// faster.
|
||||
let mut lines = Vec::new();
|
||||
for r in rules {
|
||||
for m in &r.matches {
|
||||
for t in &r.tags {
|
||||
if let MatchType::Unknown = m.match_type {
|
||||
eprintln!("rule has unknown match {:?}", r);
|
||||
continue;
|
||||
}
|
||||
|
||||
let rule = match m.match_type {
|
||||
MatchType::From => "from:",
|
||||
// TODO(wathiede): something more specific?
|
||||
MatchType::Sender => "from:",
|
||||
MatchType::To => "to:",
|
||||
MatchType::Cc => "to:",
|
||||
MatchType::Subject => "subject:",
|
||||
MatchType::List => "List-ID:",
|
||||
MatchType::Body => "",
|
||||
// TODO(wathiede): these will probably require adding fields to notmuch
|
||||
// index. Handle them later.
|
||||
MatchType::DeliveredTo
|
||||
| MatchType::XForwardedTo
|
||||
| MatchType::ReplyTo
|
||||
| MatchType::XOriginalTo
|
||||
| MatchType::XSpam => continue,
|
||||
MatchType::Unknown => unreachable!(),
|
||||
};
|
||||
// Preserve unread status if run with --remove-all
|
||||
lines.push(format!(
|
||||
r#"-unprocessed +{} +unread -- is:unread tag:unprocessed {}"{}""#,
|
||||
t, rule, m.needle
|
||||
));
|
||||
lines.push(format!(
|
||||
// TODO(wathiede): this assumes `notmuch new` is configured to add
|
||||
// `tag:unprocessed` to all new mail.
|
||||
r#"-unprocessed +{} -- tag:unprocessed {}"{}""#,
|
||||
t, rule, m.needle
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
lines.sort();
|
||||
for l in lines {
|
||||
writeln!(w, "{l}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
let input = "/home/wathiede/dotfiles/procmailrc";
|
||||
#[tokio::main]
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
let args = Args::parse();
|
||||
let mut rules = Vec::new();
|
||||
let mut cur_rule = Rule::default();
|
||||
for l in std::fs::read_to_string(input)?.lines() {
|
||||
for l in std::fs::read_to_string(args.input)?.lines() {
|
||||
let l = if let Some(idx) = l.find('#') {
|
||||
&l[..idx]
|
||||
} else {
|
||||
@@ -222,6 +48,9 @@ fn main() -> anyhow::Result<()> {
|
||||
match first {
|
||||
':' => {
|
||||
// start of rule
|
||||
|
||||
// If carbon-copy flag present, don't stop on match
|
||||
cur_rule.stop_on_match = !l.contains('c');
|
||||
}
|
||||
'*' => {
|
||||
// add to current rule
|
||||
@@ -230,26 +59,119 @@ fn main() -> anyhow::Result<()> {
|
||||
}
|
||||
'.' => {
|
||||
// delivery to folder
|
||||
cur_rule.tags.push(cleanup_match(
|
||||
cur_rule.tag = cleanup_match(
|
||||
"",
|
||||
&l.replace('.', "/")
|
||||
.replace(' ', "")
|
||||
.trim_matches('/')
|
||||
.to_string(),
|
||||
));
|
||||
);
|
||||
rules.push(cur_rule);
|
||||
cur_rule = Rule::default();
|
||||
}
|
||||
'/' => cur_rule = Rule::default(), // Ex. /dev/null
|
||||
'|' => cur_rule = Rule::default(), // external command
|
||||
'$' => {
|
||||
// TODO(wathiede): tag messages with no other tag as 'inbox'
|
||||
cur_rule.tags.push(cleanup_match("", "inbox"));
|
||||
cur_rule.tag = cleanup_match("", "inbox");
|
||||
rules.push(cur_rule);
|
||||
cur_rule = Rule::default();
|
||||
} // variable, should only be $DEFAULT in my config
|
||||
_ => panic!("Unhandled first character '{}' {}", first, l),
|
||||
_ => panic!("Unhandled first character '{}'\nLine: {}", first, l),
|
||||
}
|
||||
}
|
||||
notmuch_from_rules(std::io::stdout(), &rules)?;
|
||||
match args.mode {
|
||||
Mode::Debug => print_rules(&rules),
|
||||
Mode::Notmuchrc => notmuch_from_rules(std::io::stdout(), &rules)?,
|
||||
Mode::LoadSql { dsn } => load_sql(&dsn, &rules).await?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn print_rules(rules: &[Rule]) {
|
||||
let mut tally = HashMap::new();
|
||||
for r in rules {
|
||||
for m in &r.matches {
|
||||
*tally.entry(m.match_type).or_insert(0) += 1;
|
||||
}
|
||||
}
|
||||
let mut sorted: Vec<_> = tally.iter().map(|(k, v)| (v, k)).collect();
|
||||
sorted.sort();
|
||||
sorted.reverse();
|
||||
for (v, k) in sorted {
|
||||
println!("{k:?}: {v}");
|
||||
}
|
||||
}
|
||||
|
||||
fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()> {
|
||||
// TODO(wathiede): if reindexing this many tags is too slow, see if combining rules per tag is
|
||||
// faster.
|
||||
let mut lines = Vec::new();
|
||||
for r in rules {
|
||||
for m in &r.matches {
|
||||
let t = &r.tag;
|
||||
if let MatchType::Unknown = m.match_type {
|
||||
eprintln!("rule has unknown match {:?}", r);
|
||||
continue;
|
||||
}
|
||||
|
||||
let rule = match m.match_type {
|
||||
MatchType::From => "from:",
|
||||
// TODO(wathiede): something more specific?
|
||||
MatchType::Sender => "from:",
|
||||
MatchType::To => "to:",
|
||||
MatchType::Cc => "to:",
|
||||
MatchType::Subject => "subject:",
|
||||
MatchType::ListId => "List-ID:",
|
||||
MatchType::Body => "",
|
||||
// TODO(wathiede): these will probably require adding fields to notmuch
|
||||
// index. Handle them later.
|
||||
MatchType::DeliveredTo
|
||||
| MatchType::XForwardedTo
|
||||
| MatchType::ReplyTo
|
||||
| MatchType::XOriginalTo
|
||||
| MatchType::XSpam => continue,
|
||||
MatchType::Unknown => unreachable!(),
|
||||
};
|
||||
// Preserve unread status if run with --remove-all
|
||||
lines.push(format!(
|
||||
r#"-unprocessed +{} +unread -- is:unread tag:unprocessed {}"{}""#,
|
||||
t, rule, m.needle
|
||||
));
|
||||
lines.push(format!(
|
||||
// TODO(wathiede): this assumes `notmuch new` is configured to add
|
||||
// `tag:unprocessed` to all new mail.
|
||||
r#"-unprocessed +{} -- tag:unprocessed {}"{}""#,
|
||||
t, rule, m.needle
|
||||
));
|
||||
}
|
||||
}
|
||||
lines.sort();
|
||||
for l in lines {
|
||||
writeln!(w, "{l}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn load_sql(dsn: &str, rules: &[Rule]) -> anyhow::Result<()> {
|
||||
let pool = PgPool::connect(dsn).await?;
|
||||
println!("clearing email_rule table");
|
||||
sqlx::query!("DELETE FROM email_rule")
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
|
||||
for (order, rule) in rules.iter().enumerate() {
|
||||
println!("inserting {order}: {rule:?}");
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO email_rule (sort_order, rule)
|
||||
VALUES ($1, $2)
|
||||
"#,
|
||||
order as i32,
|
||||
Json(rule) as _
|
||||
)
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
20
server/.sqlx/query-6c5b0a96f45f78795732ea428cc01b4eab28b7150aa37387e7439a6b0b58e88c.json
generated
Normal file
20
server/.sqlx/query-6c5b0a96f45f78795732ea428cc01b4eab28b7150aa37387e7439a6b0b58e88c.json
generated
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT rule as \"rule: Json<Rule>\"\n FROM email_rule\n ORDER BY sort_order\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "rule: Json<Rule>",
|
||||
"type_info": "Jsonb"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": []
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "6c5b0a96f45f78795732ea428cc01b4eab28b7150aa37387e7439a6b0b58e88c"
|
||||
}
|
||||
@@ -12,45 +12,44 @@ version.workspace = true
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
ammonia = "4.0.0"
|
||||
anyhow = "1.0.79"
|
||||
ammonia = "4.1.0"
|
||||
anyhow = "1.0.98"
|
||||
async-graphql = { version = "7", features = ["log"] }
|
||||
async-graphql-axum = "7.0.15"
|
||||
async-trait = "0.1.81"
|
||||
async-graphql-axum = "7.0.16"
|
||||
async-trait = "0.1.88"
|
||||
axum = { version = "0.8.3", features = ["ws"] }
|
||||
axum-macros = "0.5.0"
|
||||
build-info = "0.0.40"
|
||||
cacher = { version = "0.2.0", registry = "xinu" }
|
||||
chrono = "0.4.39"
|
||||
clap = { version = "4.5.36", features = ["derive"] }
|
||||
css-inline = "0.14.0"
|
||||
chrono = "0.4.40"
|
||||
clap = { version = "4.5.37", features = ["derive"] }
|
||||
css-inline = "0.14.4"
|
||||
futures = "0.3.31"
|
||||
headers = "0.4.0"
|
||||
html-escape = "0.2.13"
|
||||
letterbox-notmuch = { version = "0.15.7", path = "../notmuch", registry = "xinu" }
|
||||
letterbox-shared = { version = "0.15.7", path = "../shared", registry = "xinu" }
|
||||
letterbox-notmuch = { path = "../notmuch", version = "0.17.24", registry = "xinu" }
|
||||
letterbox-shared = { path = "../shared", version = "0.17.24", registry = "xinu" }
|
||||
linkify = "0.10.0"
|
||||
log = "0.4.17"
|
||||
lol_html = "2.0.0"
|
||||
mailparse = "0.16.0"
|
||||
lol_html = "2.3.0"
|
||||
mailparse = "0.16.1"
|
||||
maplit = "1.0.2"
|
||||
memmap = "0.7.0"
|
||||
regex = "1.11.1"
|
||||
reqwest = { version = "0.12.7", features = ["blocking"] }
|
||||
scraper = "0.23.0"
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_json = "1.0.87"
|
||||
sqlx = { version = "0.8.2", features = ["postgres", "runtime-tokio", "time"] }
|
||||
tantivy = { version = "0.24.0", optional = true }
|
||||
thiserror = "2.0.0"
|
||||
tokio = "1.26.0"
|
||||
reqwest = { version = "0.12.15", features = ["blocking"] }
|
||||
scraper = "0.23.1"
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
serde_json = "1.0.140"
|
||||
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "time"] }
|
||||
tantivy = { version = "0.24.1", optional = true }
|
||||
thiserror = "2.0.12"
|
||||
tokio = "1.44.2"
|
||||
tower-http = { version = "0.6.2", features = ["trace"] }
|
||||
tracing = "0.1.41"
|
||||
url = "2.5.2"
|
||||
url = "2.5.4"
|
||||
urlencoding = "2.1.3"
|
||||
#xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
|
||||
#xtracing = { path = "../../xtracing" }
|
||||
xtracing = { version = "0.3.0", registry = "xinu" }
|
||||
xtracing = { version = "0.3.2", registry = "xinu" }
|
||||
|
||||
[build-dependencies]
|
||||
build-info-build = "0.0.40"
|
||||
|
||||
3
server/migrations/20250419202131_email-rules.down.sql
Normal file
3
server/migrations/20250419202131_email-rules.down.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
-- Down migration: undo 20250419202131_email-rules.up.sql by removing the
-- email_rule table. `IF EXISTS` makes the migration idempotent — safe to run
-- even if the up migration never created the table.
-- (The original read `DROP TABLE IF NOT EXISTS`, which is a syntax error:
-- `IF NOT EXISTS` is only valid on CREATE statements, so this down migration
-- would fail when executed against PostgreSQL.)
DROP TABLE IF EXISTS email_rule;
|
||||
|
||||
-- Add down migration script here
|
||||
5
server/migrations/20250419202131_email-rules.up.sql
Normal file
5
server/migrations/20250419202131_email-rules.up.sql
Normal file
@@ -0,0 +1,5 @@
|
||||
CREATE TABLE IF NOT EXISTS email_rule (
|
||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
||||
sort_order integer NOT NULL,
|
||||
rule jsonb NOT NULL
|
||||
);
|
||||
@@ -21,7 +21,7 @@ use letterbox_notmuch::Notmuch;
|
||||
use letterbox_server::tantivy::TantivyConnection;
|
||||
use letterbox_server::{
|
||||
graphql::{compute_catchup_ids, Attachment, MutationRoot, QueryRoot, SubscriptionRoot},
|
||||
nm::{attachment_bytes, cid_attachment_bytes},
|
||||
nm::{attachment_bytes, cid_attachment_bytes, label_unprocessed},
|
||||
ws::ConnectionTracker,
|
||||
};
|
||||
use letterbox_shared::WebsocketMessage;
|
||||
@@ -29,9 +29,9 @@ use serde::Deserialize;
|
||||
use sqlx::postgres::PgPool;
|
||||
use tokio::{net::TcpListener, sync::Mutex};
|
||||
use tower_http::trace::{DefaultMakeSpan, TraceLayer};
|
||||
use tracing::info;
|
||||
use tracing::{error, info};
|
||||
|
||||
// Make our own error that wraps `anyhow::Error`.
|
||||
// Make our own error that wraps `ServerError`.
|
||||
struct AppError(letterbox_server::ServerError);
|
||||
|
||||
// Tell axum how to convert `AppError` into a response.
|
||||
@@ -142,6 +142,17 @@ async fn view_cid(
|
||||
Ok(inline_attachment_response(attachment))
|
||||
}
|
||||
|
||||
// TODO make this work with gitea message ids like `wathiede/letterbox/pulls/91@git.z.xinu.tv`
|
||||
async fn view_original(
|
||||
State(AppState { nm, .. }): State<AppState>,
|
||||
extract::Path(id): extract::Path<String>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
info!("view_original {id}");
|
||||
let bytes = nm.show_original(&id)?;
|
||||
let s = String::from_utf8_lossy(&bytes).to_string();
|
||||
Ok(s.into_response())
|
||||
}
|
||||
|
||||
async fn graphiql() -> impl IntoResponse {
|
||||
response::Html(
|
||||
GraphiQLSource::build()
|
||||
@@ -165,11 +176,15 @@ async fn start_ws(
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct NotificationParams {
|
||||
delay_ms: Option<u64>,
|
||||
num_unprocessed: Option<usize>,
|
||||
}
|
||||
|
||||
async fn send_refresh_websocket_handler(
|
||||
State(AppState {
|
||||
connection_tracker, ..
|
||||
nm,
|
||||
pool,
|
||||
connection_tracker,
|
||||
..
|
||||
}): State<AppState>,
|
||||
params: Query<NotificationParams>,
|
||||
) -> impl IntoResponse {
|
||||
@@ -179,12 +194,27 @@ async fn send_refresh_websocket_handler(
|
||||
info!("sleeping {delay:?}");
|
||||
tokio::time::sleep(delay).await;
|
||||
}
|
||||
let limit = match params.num_unprocessed {
|
||||
Some(0) => None,
|
||||
Some(limit) => Some(limit),
|
||||
None => Some(10),
|
||||
};
|
||||
|
||||
let mut ids = None;
|
||||
match label_unprocessed(&nm, &pool, false, limit, "tag:unprocessed").await {
|
||||
Ok(i) => ids = Some(i),
|
||||
Err(err) => error!("Failed to label_unprocessed: {err:?}"),
|
||||
};
|
||||
connection_tracker
|
||||
.lock()
|
||||
.await
|
||||
.send_message_all(WebsocketMessage::RefreshMessages)
|
||||
.await;
|
||||
"refresh triggered"
|
||||
if let Some(ids) = ids {
|
||||
format!("{ids:?}")
|
||||
} else {
|
||||
"refresh triggered".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
async fn watch_new(
|
||||
@@ -193,18 +223,33 @@ async fn watch_new(
|
||||
conn_tracker: Arc<Mutex<ConnectionTracker>>,
|
||||
poll_time: Duration,
|
||||
) -> Result<(), async_graphql::Error> {
|
||||
let mut old_ids = Vec::new();
|
||||
loop {
|
||||
async fn watch_new_iteration(
|
||||
nm: &Notmuch,
|
||||
pool: &PgPool,
|
||||
conn_tracker: Arc<Mutex<ConnectionTracker>>,
|
||||
old_ids: &[String],
|
||||
) -> Result<Vec<String>, async_graphql::Error> {
|
||||
let ids = compute_catchup_ids(&nm, &pool, "is:unread").await?;
|
||||
info!("old_ids: {} ids: {}", old_ids.len(), ids.len());
|
||||
if old_ids != ids {
|
||||
info!("old_ids: {old_ids:?}\n ids: {ids:?}");
|
||||
label_unprocessed(&nm, &pool, false, Some(100), "tag:unprocessed").await?;
|
||||
conn_tracker
|
||||
.lock()
|
||||
.await
|
||||
.send_message_all(WebsocketMessage::RefreshMessages)
|
||||
.await
|
||||
}
|
||||
old_ids = ids;
|
||||
Ok(ids)
|
||||
}
|
||||
let mut old_ids = Vec::new();
|
||||
loop {
|
||||
old_ids = match watch_new_iteration(&nm, &pool, conn_tracker.clone(), &old_ids).await {
|
||||
Ok(old_ids) => old_ids,
|
||||
Err(err) => {
|
||||
error!("watch_new_iteration failed: {err:?}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
tokio::time::sleep(poll_time).await;
|
||||
}
|
||||
}
|
||||
@@ -212,6 +257,7 @@ async fn watch_new(
|
||||
#[derive(Clone)]
|
||||
struct AppState {
|
||||
nm: Notmuch,
|
||||
pool: PgPool,
|
||||
connection_tracker: Arc<Mutex<ConnectionTracker>>,
|
||||
}
|
||||
|
||||
@@ -252,7 +298,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
|
||||
let connection_tracker = Arc::new(Mutex::new(ConnectionTracker::default()));
|
||||
let ct = Arc::clone(&connection_tracker);
|
||||
let poll_time = Duration::from_secs(60);
|
||||
let _h = tokio::spawn(watch_new(nm.clone(), pool, ct, poll_time));
|
||||
let _h = tokio::spawn(watch_new(nm.clone(), pool.clone(), ct, poll_time));
|
||||
|
||||
let api_routes = Router::new()
|
||||
.route(
|
||||
@@ -260,6 +306,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
|
||||
get(download_attachment),
|
||||
)
|
||||
.route("/view/attachment/{id}/{idx}/{*rest}", get(view_attachment))
|
||||
.route("/original/{id}", get(view_original))
|
||||
.route("/cid/{id}/{cid}", get(view_cid))
|
||||
.route("/ws", any(start_ws))
|
||||
.route_service("/graphql/ws", GraphQLSubscription::new(schema.clone()))
|
||||
@@ -276,6 +323,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
|
||||
.nest("/notification", notification_routes)
|
||||
.with_state(AppState {
|
||||
nm,
|
||||
pool,
|
||||
connection_tracker,
|
||||
})
|
||||
.layer(
|
||||
|
||||
39
server/src/bin/test-labeling.rs
Normal file
39
server/src/bin/test-labeling.rs
Normal file
@@ -0,0 +1,39 @@
|
||||
use std::error::Error;
|
||||
|
||||
use clap::Parser;
|
||||
use letterbox_notmuch::Notmuch;
|
||||
use letterbox_server::nm::label_unprocessed;
|
||||
use sqlx::postgres::PgPool;
|
||||
use tracing::info;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(version, about, long_about = None)]
|
||||
struct Cli {
|
||||
#[arg(short, long)]
|
||||
newsreader_database_url: String,
|
||||
#[arg(short, long, default_value = "10")]
|
||||
/// Set to 0 to process all matches
|
||||
messages_to_process: usize,
|
||||
#[arg(short, long, default_value = "false")]
|
||||
execute: bool,
|
||||
/// Process messages matching this notmuch query
|
||||
#[arg(short, long, default_value = "tag:unprocessed")]
|
||||
query: String,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn Error>> {
|
||||
let cli = Cli::parse();
|
||||
let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
|
||||
build_info::build_info!(fn bi);
|
||||
info!("Build Info: {}", letterbox_shared::build_version(bi));
|
||||
let pool = PgPool::connect(&cli.newsreader_database_url).await?;
|
||||
let nm = Notmuch::default();
|
||||
let limit = if cli.messages_to_process > 0 {
|
||||
Some(cli.messages_to_process)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
label_unprocessed(&nm, &pool, !cli.execute, limit, &cli.query).await?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -9,15 +9,14 @@ use async_graphql::{
|
||||
use cacher::FilesystemCacher;
|
||||
use futures::stream;
|
||||
use letterbox_notmuch::Notmuch;
|
||||
use log::info;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::postgres::PgPool;
|
||||
use tokio::join;
|
||||
use tracing::instrument;
|
||||
use tracing::{info, instrument};
|
||||
|
||||
#[cfg(feature = "tantivy")]
|
||||
use crate::tantivy::TantivyConnection;
|
||||
use crate::{newsreader, nm, Query};
|
||||
use crate::{newsreader, nm, nm::label_unprocessed, Query};
|
||||
|
||||
/// # Number of seconds since the Epoch
|
||||
pub type UnixTime = isize;
|
||||
@@ -629,6 +628,10 @@ impl MutationRoot {
|
||||
let pool = ctx.data_unchecked::<PgPool>();
|
||||
info!("{}", String::from_utf8_lossy(&nm.new()?));
|
||||
newsreader::refresh(pool, cacher).await?;
|
||||
|
||||
// Process email labels
|
||||
label_unprocessed(&nm, &pool, false, Some(10), "tag:unprocessed").await?;
|
||||
|
||||
#[cfg(feature = "tantivy")]
|
||||
{
|
||||
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
||||
|
||||
@@ -21,7 +21,6 @@ use cacher::{Cacher, FilesystemCacher};
|
||||
use css_inline::{CSSInliner, InlineError, InlineOptions};
|
||||
pub use error::ServerError;
|
||||
use linkify::{LinkFinder, LinkKind};
|
||||
use log::{debug, error, info, warn};
|
||||
use lol_html::{
|
||||
element, errors::RewritingError, html_content::ContentType, rewrite_str, text,
|
||||
RewriteStrSettings,
|
||||
@@ -32,6 +31,7 @@ use reqwest::StatusCode;
|
||||
use scraper::{Html, Selector};
|
||||
use sqlx::types::time::PrimitiveDateTime;
|
||||
use thiserror::Error;
|
||||
use tracing::{debug, error, info, warn};
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
|
||||
@@ -3,11 +3,10 @@ use std::collections::HashMap;
|
||||
use cacher::FilesystemCacher;
|
||||
use futures::{stream::FuturesUnordered, StreamExt};
|
||||
use letterbox_shared::compute_color;
|
||||
use log::{error, info};
|
||||
use maplit::hashmap;
|
||||
use scraper::Selector;
|
||||
use sqlx::postgres::PgPool;
|
||||
use tracing::instrument;
|
||||
use tracing::{error, info, instrument};
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
|
||||
188
server/src/nm.rs
188
server/src/nm.rs
@@ -1,12 +1,14 @@
|
||||
use std::{collections::HashMap, fs::File};
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fs::File,
|
||||
};
|
||||
|
||||
use letterbox_notmuch::Notmuch;
|
||||
use letterbox_shared::compute_color;
|
||||
use log::{error, info, warn};
|
||||
use letterbox_shared::{compute_color, Rule};
|
||||
use mailparse::{parse_content_type, parse_mail, MailHeader, MailHeaderMap, ParsedMail};
|
||||
use memmap::MmapOptions;
|
||||
use sqlx::PgPool;
|
||||
use tracing::instrument;
|
||||
use sqlx::{types::Json, PgPool};
|
||||
use tracing::{error, info, info_span, instrument, warn};
|
||||
|
||||
use crate::{
|
||||
compute_offset_limit,
|
||||
@@ -925,3 +927,179 @@ WHERE
|
||||
.await?;
|
||||
Ok(row.map(|r| r.url))
|
||||
}
|
||||
|
||||
/*
|
||||
* grab email_rules table from sql
|
||||
* For each message with `unprocessed` label
|
||||
* parse the message
|
||||
* pass headers for each message through a matcher using email rules
|
||||
* for each match, add label to message
|
||||
* if any matches were found, remove unprocessed
|
||||
* TODO: how to handle inbox label
|
||||
*/
|
||||
|
||||
#[instrument(name="nm::label_unprocessed", skip_all, fields(dryrun=dryrun, limit=?limit, query=%query))]
|
||||
pub async fn label_unprocessed(
|
||||
nm: &Notmuch,
|
||||
pool: &PgPool,
|
||||
dryrun: bool,
|
||||
limit: Option<usize>,
|
||||
query: &str,
|
||||
) -> Result<Box<[String]>, ServerError> {
|
||||
use futures::StreamExt;
|
||||
let ids = nm.message_ids(query)?;
|
||||
info!(
|
||||
"Processing {limit:?} of {} messages with '{query}'",
|
||||
ids.len()
|
||||
);
|
||||
let rules: Vec<_> = sqlx::query!(
|
||||
r#"
|
||||
SELECT rule as "rule: Json<Rule>"
|
||||
FROM email_rule
|
||||
ORDER BY sort_order
|
||||
"#,
|
||||
)
|
||||
.fetch(pool)
|
||||
.map(|r| r.unwrap().rule.0)
|
||||
.collect()
|
||||
.await;
|
||||
/*
|
||||
use letterbox_shared::{Match, MatchType};
|
||||
let rules = vec![Rule {
|
||||
stop_on_match: false,
|
||||
matches: vec![Match {
|
||||
match_type: MatchType::From,
|
||||
needle: "eftours".to_string(),
|
||||
}],
|
||||
tag: "EFTours".to_string(),
|
||||
}];
|
||||
*/
|
||||
info!("Loaded {} rules", rules.len());
|
||||
|
||||
let limit = limit.unwrap_or(ids.len());
|
||||
let limit = limit.min(ids.len());
|
||||
let ids = &ids[..limit];
|
||||
|
||||
let mut add_mutations = HashMap::new();
|
||||
let mut rm_mutations = HashMap::new();
|
||||
for id in ids {
|
||||
let id = format!("id:{id}");
|
||||
let files = nm.files(&id)?;
|
||||
// Only process the first file path is multiple files have the same id
|
||||
let Some(path) = files.iter().next() else {
|
||||
error!("No files for message-ID {id}");
|
||||
let t = "Letterbox/Bad";
|
||||
nm.tag_add(t, &id)?;
|
||||
let t = "unprocessed";
|
||||
nm.tag_remove(t, &id)?;
|
||||
continue;
|
||||
};
|
||||
let file = File::open(&path)?;
|
||||
info!("parsing {path}");
|
||||
let mmap = unsafe { MmapOptions::new().map(&file)? };
|
||||
let m = match info_span!("parse_mail", path = path).in_scope(|| parse_mail(&mmap)) {
|
||||
Ok(m) => m,
|
||||
Err(err) => {
|
||||
error!("Failed to parse {path}: {err}");
|
||||
let t = "Letterbox/Bad";
|
||||
nm.tag_add(t, &id)?;
|
||||
let t = "unprocessed";
|
||||
nm.tag_remove(t, &id)?;
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let (matched_rule, add_tags) = find_tags(&rules, &m.headers);
|
||||
if matched_rule {
|
||||
if dryrun {
|
||||
info!(
|
||||
"\nAdd tags: {add_tags:?}\nTo: {} From: {} Subject: {}\n",
|
||||
m.headers.get_first_value("to").expect("no from header"),
|
||||
m.headers.get_first_value("from").expect("no from header"),
|
||||
m.headers
|
||||
.get_first_value("subject")
|
||||
.expect("no subject header")
|
||||
);
|
||||
}
|
||||
for t in &add_tags {
|
||||
//nm.tag_add(t, &id)?;
|
||||
add_mutations
|
||||
.entry(t.to_string())
|
||||
.or_insert_with(|| Vec::new())
|
||||
.push(id.clone());
|
||||
}
|
||||
if add_tags.contains("spam") || add_tags.contains("Spam") {
|
||||
//nm.tag_remove("unread", &id)?;
|
||||
let t = "unread".to_string();
|
||||
rm_mutations
|
||||
.entry(t)
|
||||
.or_insert_with(|| Vec::new())
|
||||
.push(id.clone());
|
||||
}
|
||||
if !add_tags.contains("inbox") {
|
||||
//nm.tag_remove("inbox", &id)?;
|
||||
let t = "inbox".to_string();
|
||||
rm_mutations
|
||||
.entry(t)
|
||||
.or_insert_with(|| Vec::new())
|
||||
.push(id.clone());
|
||||
}
|
||||
//nm.tag_remove("unprocessed", &id)?;
|
||||
} else {
|
||||
if add_tags.is_empty() {
|
||||
let t = "Grey".to_string();
|
||||
add_mutations
|
||||
.entry(t)
|
||||
.or_insert_with(|| Vec::new())
|
||||
.push(id.clone());
|
||||
}
|
||||
//nm.tag_remove("inbox", &id)?;
|
||||
let t = "inbox".to_string();
|
||||
rm_mutations
|
||||
.entry(t)
|
||||
.or_insert_with(|| Vec::new())
|
||||
.push(id.clone());
|
||||
}
|
||||
let t = "unprocessed".to_string();
|
||||
rm_mutations
|
||||
.entry(t)
|
||||
.or_insert_with(|| Vec::new())
|
||||
.push(id.clone());
|
||||
}
|
||||
info!("Adding {} distinct labels", add_mutations.len());
|
||||
for (tag, ids) in add_mutations.iter() {
|
||||
info!(" {tag}: {}", ids.len());
|
||||
if !dryrun {
|
||||
let ids: Vec<_> = ids.iter().map(|s| s.as_str()).collect();
|
||||
info_span!("tags_add", tag = tag, count = ids.len())
|
||||
.in_scope(|| nm.tags_add(tag, &ids))?;
|
||||
}
|
||||
}
|
||||
info!("Removing {} distinct labels", rm_mutations.len());
|
||||
for (tag, ids) in rm_mutations.iter() {
|
||||
info!(" {tag}: {}", ids.len());
|
||||
if !dryrun {
|
||||
let ids: Vec<_> = ids.iter().map(|s| s.as_str()).collect();
|
||||
info_span!("tags_remove", tag = tag, count = ids.len())
|
||||
.in_scope(|| nm.tags_remove(tag, &ids))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ids.into())
|
||||
}
|
||||
fn find_tags<'a, 'b>(rules: &'a [Rule], headers: &'b [MailHeader]) -> (bool, HashSet<&'a str>) {
|
||||
let mut matched_rule = false;
|
||||
let mut add_tags = HashSet::new();
|
||||
for rule in rules {
|
||||
for hdr in headers {
|
||||
if rule.is_match(&hdr.get_key(), &hdr.get_value()) {
|
||||
//info!("Matched {rule:?}");
|
||||
matched_rule = true;
|
||||
add_tags.insert(rule.tag.as_str());
|
||||
if rule.stop_on_match {
|
||||
return (true, add_tags);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return (matched_rule, add_tags);
|
||||
}
|
||||
|
||||
@@ -12,6 +12,9 @@ version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
build-info = "0.0.40"
|
||||
letterbox-notmuch = { version = "0.15.7", path = "../notmuch", registry = "xinu" }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
letterbox-notmuch = { path = "../notmuch", version = "0.17.24", registry = "xinu" }
|
||||
regex = "1.11.1"
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
sqlx = "0.8.5"
|
||||
strum_macros = "0.27.1"
|
||||
tracing = "0.1.41"
|
||||
|
||||
@@ -1,8 +1,14 @@
|
||||
use std::hash::{DefaultHasher, Hash, Hasher};
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
use build_info::{BuildInfo, VersionControl};
|
||||
use letterbox_notmuch::SearchSummary;
|
||||
use regex::{RegexBuilder, RegexSetBuilder};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::debug;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct SearchResult {
|
||||
@@ -20,6 +26,13 @@ pub enum WebsocketMessage {
|
||||
|
||||
pub mod urls {
|
||||
pub const MOUNT_POINT: &'static str = "/api";
|
||||
pub fn view_original(host: Option<&str>, id: &str) -> String {
|
||||
if let Some(host) = host {
|
||||
format!("//{host}/api/original/{id}")
|
||||
} else {
|
||||
format!("/api/original/{id}")
|
||||
}
|
||||
}
|
||||
pub fn cid_prefix(host: Option<&str>, cid: &str) -> String {
|
||||
if let Some(host) = host {
|
||||
format!("//{host}/api/cid/{cid}/")
|
||||
@@ -58,3 +71,198 @@ pub fn compute_color(data: &str) -> String {
|
||||
data.hash(&mut hasher);
|
||||
format!("#{:06x}", hasher.finish() % (1 << 24))
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Copy, Clone, Debug, Default, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize,
|
||||
)]
|
||||
pub enum MatchType {
|
||||
From,
|
||||
Sender,
|
||||
To,
|
||||
Cc,
|
||||
Subject,
|
||||
ListId,
|
||||
DeliveredTo,
|
||||
XForwardedTo,
|
||||
ReplyTo,
|
||||
XOriginalTo,
|
||||
XSpam,
|
||||
Body,
|
||||
#[default]
|
||||
Unknown,
|
||||
}
|
||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||
pub struct Match {
|
||||
pub match_type: MatchType,
|
||||
pub needle: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||
pub struct Rule {
|
||||
pub stop_on_match: bool,
|
||||
pub matches: Vec<Match>,
|
||||
pub tag: String,
|
||||
}
|
||||
impl Rule {
|
||||
pub fn is_match(&self, header_key: &str, header_value: &str) -> bool {
|
||||
let pats: Vec<_> = self
|
||||
.matches
|
||||
.iter()
|
||||
.filter_map(|m| match m.match_type {
|
||||
MatchType::To => Some("^(to|cc|bcc|x-original-to)$"),
|
||||
MatchType::From => Some("^from$"),
|
||||
MatchType::Sender => Some("^sender$"),
|
||||
MatchType::Subject => Some("^subject$"),
|
||||
MatchType::ListId => Some("^list-id$"),
|
||||
MatchType::XOriginalTo => Some("^x-original-to$"),
|
||||
MatchType::ReplyTo => Some("^reply-to$"),
|
||||
MatchType::XSpam => Some("^x-spam$"),
|
||||
MatchType::Body => None,
|
||||
c => panic!("TODO handle '{c:?}' match type"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
let set = RegexSetBuilder::new(&pats)
|
||||
.case_insensitive(true)
|
||||
.build()
|
||||
.expect("failed to compile regex for matches");
|
||||
let matches: Vec<_> = set.matches(header_key).into_iter().collect();
|
||||
if !matches.is_empty() {
|
||||
//info!("matched key '{header_key}' '{header_value}'");
|
||||
for m_idx in matches {
|
||||
let needle = regex::escape(&self.matches[m_idx].needle);
|
||||
let pat = RegexBuilder::new(&needle)
|
||||
.case_insensitive(true)
|
||||
.build()
|
||||
.expect("failed to compile regex for needle");
|
||||
if pat.is_match(header_value) {
|
||||
debug!("{header_key} matched {header_value} against {needle}");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
mod matches {
|
||||
// From https://linux.die.net/man/5/procmailrc
|
||||
// If the regular expression contains '^TO_' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^-a-zA-Z0-9_.])?)'
|
||||
// If the regular expression contains '^TO' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^a-zA-Z])?)', which should catch all destination specifications containing a specific word.
|
||||
|
||||
pub const TO: &'static str = "TO";
|
||||
pub const CC: &'static str = "Cc";
|
||||
pub const TOCC: &'static str = "(TO|Cc)";
|
||||
pub const FROM: &'static str = "From";
|
||||
pub const SENDER: &'static str = "Sender";
|
||||
pub const SUBJECT: &'static str = "Subject";
|
||||
pub const DELIVERED_TO: &'static str = "Delivered-To";
|
||||
pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
|
||||
pub const REPLY_TO: &'static str = "Reply-To";
|
||||
pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
|
||||
pub const LIST_ID: &'static str = "List-ID";
|
||||
pub const X_SPAM: &'static str = "X-Spam";
|
||||
pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
|
||||
}
|
||||
|
||||
impl FromStr for Match {
|
||||
type Err = Infallible;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
// Examples:
|
||||
// "* 1^0 ^TOsonyrewards.com@xinu.tv"
|
||||
// "* ^TOsonyrewards.com@xinu.tv"
|
||||
let mut it = s.split_whitespace().skip(1);
|
||||
let mut needle = it.next().unwrap();
|
||||
if needle == "1^0" {
|
||||
needle = it.next().unwrap();
|
||||
}
|
||||
let mut needle = vec![needle];
|
||||
needle.extend(it);
|
||||
let needle = needle.join(" ");
|
||||
let first = needle.chars().nth(0).unwrap_or(' ');
|
||||
use matches::*;
|
||||
if first == '^' {
|
||||
let needle = &needle[1..];
|
||||
if needle.starts_with(TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::To,
|
||||
needle: cleanup_match(TO, needle),
|
||||
});
|
||||
} else if needle.starts_with(FROM) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::From,
|
||||
needle: cleanup_match(FROM, needle),
|
||||
});
|
||||
} else if needle.starts_with(CC) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::Cc,
|
||||
needle: cleanup_match(CC, needle),
|
||||
});
|
||||
} else if needle.starts_with(TOCC) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::To,
|
||||
needle: cleanup_match(TOCC, needle),
|
||||
});
|
||||
} else if needle.starts_with(SENDER) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::Sender,
|
||||
needle: cleanup_match(SENDER, needle),
|
||||
});
|
||||
} else if needle.starts_with(SUBJECT) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::Subject,
|
||||
needle: cleanup_match(SUBJECT, needle),
|
||||
});
|
||||
} else if needle.starts_with(X_ORIGINAL_TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::XOriginalTo,
|
||||
needle: cleanup_match(X_ORIGINAL_TO, needle),
|
||||
});
|
||||
} else if needle.starts_with(LIST_ID) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::ListId,
|
||||
needle: cleanup_match(LIST_ID, needle),
|
||||
});
|
||||
} else if needle.starts_with(REPLY_TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::ReplyTo,
|
||||
needle: cleanup_match(REPLY_TO, needle),
|
||||
});
|
||||
} else if needle.starts_with(X_SPAM_FLAG) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::XSpam,
|
||||
needle: '*'.to_string(),
|
||||
});
|
||||
} else if needle.starts_with(X_SPAM) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::XSpam,
|
||||
needle: '*'.to_string(),
|
||||
});
|
||||
} else if needle.starts_with(DELIVERED_TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::DeliveredTo,
|
||||
needle: cleanup_match(DELIVERED_TO, needle),
|
||||
});
|
||||
} else if needle.starts_with(X_FORWARDED_TO) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::XForwardedTo,
|
||||
needle: cleanup_match(X_FORWARDED_TO, needle),
|
||||
});
|
||||
} else {
|
||||
unreachable!("needle: '{needle}'")
|
||||
}
|
||||
} else {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::Body,
|
||||
needle: cleanup_match("", &needle),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
fn unescape(s: &str) -> String {
|
||||
s.replace('\\', "")
|
||||
}
|
||||
pub fn cleanup_match(prefix: &str, s: &str) -> String {
|
||||
unescape(&s[prefix.len()..]).replace(".*", "")
|
||||
}
|
||||
|
||||
@@ -12,30 +12,29 @@ version.workspace = true
|
||||
build-info-build = "0.0.40"
|
||||
|
||||
[dev-dependencies]
|
||||
wasm-bindgen-test = "0.3.33"
|
||||
wasm-bindgen-test = "0.3.50"
|
||||
|
||||
[dependencies]
|
||||
console_error_panic_hook = "0.1.7"
|
||||
log = "0.4.17"
|
||||
log = "0.4.27"
|
||||
seed = { version = "0.10.0", features = ["routing"] }
|
||||
#seed = "0.9.2"
|
||||
console_log = { version = "0.1.0", registry = "xinu" }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
console_log = { version = "0.1.4", registry = "xinu" }
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
itertools = "0.14.0"
|
||||
serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
|
||||
chrono = "0.4.31"
|
||||
serde_json = { version = "1.0.140", features = ["unbounded_depth"] }
|
||||
chrono = "0.4.40"
|
||||
graphql_client = "0.14.0"
|
||||
thiserror = "2.0.0"
|
||||
thiserror = "2.0.12"
|
||||
gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
|
||||
human_format = "1.1.0"
|
||||
build-info = "0.0.40"
|
||||
wasm-bindgen = "=0.2.100"
|
||||
uuid = { version = "1.13.1", features = [
|
||||
uuid = { version = "1.16.0", features = [
|
||||
"js",
|
||||
] } # direct dep to set js feature, prevents Rng issues
|
||||
letterbox-shared = { version = "0.15.7", path = "../shared", registry = "xinu" }
|
||||
letterbox-notmuch = { version = "0.15.7", path = "../notmuch", registry = "xinu" }
|
||||
seed_hooks = { version = "0.4.0", registry = "xinu" }
|
||||
letterbox-shared = { version = "0.17.9", registry = "xinu" }
|
||||
seed_hooks = { version = "0.4.1", registry = "xinu" }
|
||||
strum_macros = "0.27.1"
|
||||
gloo-console = "0.3.0"
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||
@@ -45,14 +44,15 @@ wasm-sockets = "1.0.0"
|
||||
wasm-opt = ['-Os']
|
||||
|
||||
[dependencies.web-sys]
|
||||
version = "0.3.58"
|
||||
version = "0.3.77"
|
||||
features = [
|
||||
"Clipboard",
|
||||
"DomRect",
|
||||
"Element",
|
||||
"History",
|
||||
"MediaQueryList",
|
||||
"Navigator",
|
||||
"Window",
|
||||
"History",
|
||||
"Performance",
|
||||
"ScrollRestoration",
|
||||
"Window",
|
||||
]
|
||||
|
||||
@@ -72,10 +72,6 @@ fn on_url_changed(old: &Url, mut new: Url) -> Msg {
|
||||
if did_change {
|
||||
messages.push(Msg::ScrollToTop)
|
||||
}
|
||||
info!(
|
||||
"url changed\nold '{old}'\nnew '{new}', history {}",
|
||||
history().length().unwrap_or(0)
|
||||
);
|
||||
let hpp = new.remaining_hash_path_parts();
|
||||
let msg = match hpp.as_slice() {
|
||||
["t", tid] => Msg::ShowThreadRequest {
|
||||
@@ -553,7 +549,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
});
|
||||
}
|
||||
Msg::ScrollToTop => {
|
||||
info!("scrolling to the top");
|
||||
web_sys::window().unwrap().scroll_to_with_x_and_y(0., 0.);
|
||||
}
|
||||
Msg::WindowScrolled => {
|
||||
@@ -619,6 +614,36 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
orders.send_msg(Msg::CatchupRequest { query });
|
||||
}
|
||||
Msg::CatchupKeepUnread => {
|
||||
if let Some(thread_id) = current_thread_id(&model.context) {
|
||||
if let Context::ThreadResult {
|
||||
thread:
|
||||
ShowThreadQueryThread::EmailThread(ShowThreadQueryThreadOnEmailThread {
|
||||
messages,
|
||||
..
|
||||
}),
|
||||
..
|
||||
} = &model.context
|
||||
{
|
||||
//orders.send_msg(Msg::SetUnread(thread_id, false));
|
||||
let unread_messages: Vec<_> = messages
|
||||
.iter()
|
||||
.filter(|msg| msg.tags.iter().any(|t| t == "unread"))
|
||||
.map(|msg| &msg.id)
|
||||
.collect();
|
||||
if unread_messages.is_empty() {
|
||||
// All messages are read, so mark them all unread
|
||||
orders.send_msg(Msg::SetUnread(thread_id, true));
|
||||
} else {
|
||||
// Do nothing if there are some messages unread
|
||||
}
|
||||
} else {
|
||||
// News post, not email, just mark unread
|
||||
orders.send_msg(Msg::SetUnread(thread_id, true));
|
||||
};
|
||||
} else {
|
||||
// This shouldn't happen
|
||||
warn!("no current thread_id");
|
||||
}
|
||||
orders.send_msg(Msg::CatchupNext);
|
||||
}
|
||||
Msg::CatchupMarkAsRead => {
|
||||
|
||||
@@ -263,81 +263,108 @@ fn search_results(
|
||||
} else {
|
||||
set_title(query);
|
||||
}
|
||||
let rows = results.iter().map(|r| {
|
||||
let tid = r.thread.clone();
|
||||
let check_tid = r.thread.clone();
|
||||
let datetime = human_age(r.timestamp as i64);
|
||||
let unread_idx = r.tags.iter().position(|e| e == &"unread");
|
||||
let mut tags = r.tags.clone();
|
||||
if let Some(idx) = unread_idx {
|
||||
tags.remove(idx);
|
||||
};
|
||||
let is_unread = unread_idx.is_some();
|
||||
let mut title_break = None;
|
||||
const TITLE_LENGTH_WRAP_LIMIT: usize = 40;
|
||||
for w in r.subject.split_whitespace() {
|
||||
if w.len() > TITLE_LENGTH_WRAP_LIMIT {
|
||||
title_break = Some(C!["break-all", "text-pretty"]);
|
||||
let rows: Vec<_> = results
|
||||
.iter()
|
||||
.map(|r| {
|
||||
let tid = r.thread.clone();
|
||||
let check_tid = r.thread.clone();
|
||||
let datetime = human_age(r.timestamp as i64);
|
||||
let unread_idx = r.tags.iter().position(|e| e == &"unread");
|
||||
let mut tags = r.tags.clone();
|
||||
if let Some(idx) = unread_idx {
|
||||
tags.remove(idx);
|
||||
};
|
||||
let is_unread = unread_idx.is_some();
|
||||
let mut title_break = None;
|
||||
const TITLE_LENGTH_WRAP_LIMIT: usize = 40;
|
||||
for w in r.subject.split_whitespace() {
|
||||
if w.len() > TITLE_LENGTH_WRAP_LIMIT {
|
||||
title_break = Some(C!["break-all", "text-pretty"]);
|
||||
}
|
||||
}
|
||||
}
|
||||
div![
|
||||
C![
|
||||
"flex",
|
||||
"flex-nowrap",
|
||||
"w-auto",
|
||||
"flex-auto",
|
||||
"py-4",
|
||||
"border-b",
|
||||
"border-neutral-800"
|
||||
],
|
||||
div![
|
||||
C!["flex", "items-center", "mr-4"],
|
||||
input![
|
||||
C![&tw_classes::CHECKBOX],
|
||||
attrs! {
|
||||
At::Type=>"checkbox",
|
||||
At::Checked=>selected_threads.contains(&tid).as_at_value(),
|
||||
}
|
||||
C![
|
||||
"flex",
|
||||
"flex-nowrap",
|
||||
"w-auto",
|
||||
"flex-auto",
|
||||
"py-4",
|
||||
"border-b",
|
||||
"border-neutral-800"
|
||||
],
|
||||
ev(Ev::Input, move |e| {
|
||||
if let Some(input) = e
|
||||
.target()
|
||||
.as_ref()
|
||||
.expect("failed to get reference to target")
|
||||
.dyn_ref::<web_sys::HtmlInputElement>()
|
||||
{
|
||||
if input.checked() {
|
||||
Msg::SelectionAddThread(check_tid)
|
||||
} else {
|
||||
Msg::SelectionRemoveThread(check_tid)
|
||||
}
|
||||
} else {
|
||||
Msg::Noop
|
||||
}
|
||||
}),
|
||||
],
|
||||
a![
|
||||
C!["flex-grow"],
|
||||
IF!(is_unread => C!["font-bold"]),
|
||||
attrs! {
|
||||
At::Href => urls::thread(&tid)
|
||||
},
|
||||
div![title_break, &r.subject],
|
||||
span![C!["text-xs"], pretty_authors(&r.authors)],
|
||||
div![
|
||||
C!["flex", "flex-wrap", "justify-between"],
|
||||
span![tags_chiclet(&tags)],
|
||||
span![C!["text-sm"], datetime]
|
||||
C!["flex", "items-center", "mr-4"],
|
||||
input![
|
||||
C![&tw_classes::CHECKBOX],
|
||||
attrs! {
|
||||
At::Type=>"checkbox",
|
||||
At::Checked=>selected_threads.contains(&tid).as_at_value(),
|
||||
}
|
||||
],
|
||||
ev(Ev::Input, move |e| {
|
||||
if let Some(input) = e
|
||||
.target()
|
||||
.as_ref()
|
||||
.expect("failed to get reference to target")
|
||||
.dyn_ref::<web_sys::HtmlInputElement>()
|
||||
{
|
||||
if input.checked() {
|
||||
Msg::SelectionAddThread(check_tid)
|
||||
} else {
|
||||
Msg::SelectionRemoveThread(check_tid)
|
||||
}
|
||||
} else {
|
||||
Msg::Noop
|
||||
}
|
||||
}),
|
||||
],
|
||||
a![
|
||||
C!["flex-grow"],
|
||||
IF!(is_unread => C!["font-bold"]),
|
||||
attrs! {
|
||||
At::Href => urls::thread(&tid)
|
||||
},
|
||||
div![title_break, &r.subject],
|
||||
span![C!["text-xs"], pretty_authors(&r.authors)],
|
||||
div![
|
||||
C!["flex", "flex-wrap", "justify-between"],
|
||||
span![tags_chiclet(&tags)],
|
||||
span![C!["text-sm"], datetime]
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
});
|
||||
})
|
||||
.collect();
|
||||
let show_bulk_edit = !selected_threads.is_empty();
|
||||
let all_selected = selected_threads.len() == results.len();
|
||||
let all_selected = (selected_threads.len() == results.len()) && !rows.is_empty();
|
||||
let content = if rows.is_empty() {
|
||||
let caught_up = query.contains("is:unread");
|
||||
let read_emoji = ["👻", "👽", "👾", "🤖", "💀"];
|
||||
let no_results_emoji = ["🙈", "👀", "🤦", "🤷", "🙅", "🛟", "🍩", "🌑", "💿", "🔍"];
|
||||
// Randomly choose emoji based on what 10-second window we're currently in
|
||||
let now = seed::window()
|
||||
.performance()
|
||||
.map(|p| p.now() as usize / 10_000)
|
||||
.unwrap_or(0);
|
||||
let (emoji, text) = if caught_up {
|
||||
let idx = now % read_emoji.len();
|
||||
(read_emoji[idx], "All caught up!")
|
||||
} else {
|
||||
let idx = now % no_results_emoji.len();
|
||||
(no_results_emoji[idx], "No results")
|
||||
};
|
||||
div![
|
||||
C!["text-center"],
|
||||
h1![C!["text-9xl"], emoji],
|
||||
p![C!["mt-8", "text-3xl", "font-semibold"], text]
|
||||
]
|
||||
} else {
|
||||
div![rows]
|
||||
};
|
||||
div![
|
||||
C!["flex", "flex-col", "flex-auto", "p-4"],
|
||||
search_toolbar(count, pager, show_bulk_edit, all_selected),
|
||||
div![rows],
|
||||
content,
|
||||
search_toolbar(count, pager, show_bulk_edit, all_selected),
|
||||
]
|
||||
}
|
||||
@@ -694,6 +721,8 @@ fn render_open_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<
|
||||
.collect();
|
||||
let show_x_original_to = !*to_xinu.borrow() && msg.x_original_to.is_some();
|
||||
let show_delivered_to = !*to_xinu.borrow() && !show_x_original_to && msg.delivered_to.is_some();
|
||||
let host = seed::window().location().host().expect("couldn't get host");
|
||||
let href = letterbox_shared::urls::view_original(Some(&host), &msg.id);
|
||||
div![
|
||||
C!["flex", "p-4", "bg-neutral-800"],
|
||||
div![avatar],
|
||||
@@ -775,20 +804,36 @@ fn render_open_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<
|
||||
C!["text-right"],
|
||||
msg.timestamp
|
||||
.map(|ts| div![C!["text-xs", "text-nowrap"], human_age(ts)]),
|
||||
i![C![
|
||||
"mx-4",
|
||||
"read-status",
|
||||
"far",
|
||||
if is_unread {
|
||||
"fa-envelope"
|
||||
} else {
|
||||
"fa-envelope-open"
|
||||
},
|
||||
]],
|
||||
ev(Ev::Click, move |e| {
|
||||
e.stop_propagation();
|
||||
Msg::SetUnread(id, !is_unread)
|
||||
}),
|
||||
div![
|
||||
C!["p-2"],
|
||||
i![C![
|
||||
"mx-4",
|
||||
"read-status",
|
||||
"far",
|
||||
if is_unread {
|
||||
"fa-envelope"
|
||||
} else {
|
||||
"fa-envelope-open"
|
||||
},
|
||||
]],
|
||||
ev(Ev::Click, move |e| {
|
||||
e.stop_propagation();
|
||||
Msg::SetUnread(id, !is_unread)
|
||||
}),
|
||||
],
|
||||
div![
|
||||
C!["text-xs"],
|
||||
span![a![
|
||||
attrs! {
|
||||
At::Href=>href,
|
||||
At::Target=>"_blank",
|
||||
},
|
||||
"View original",
|
||||
ev(Ev::Click, move |e| {
|
||||
e.stop_propagation();
|
||||
})
|
||||
]]
|
||||
]
|
||||
]
|
||||
]
|
||||
}
|
||||
@@ -931,20 +976,23 @@ fn render_closed_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Nod
|
||||
C!["text-right"],
|
||||
msg.timestamp
|
||||
.map(|ts| div![C!["text-xs", "text-nowrap"], human_age(ts)]),
|
||||
i![C![
|
||||
"mx-4",
|
||||
"read-status",
|
||||
"far",
|
||||
if is_unread {
|
||||
"fa-envelope"
|
||||
} else {
|
||||
"fa-envelope-open"
|
||||
},
|
||||
]],
|
||||
ev(Ev::Click, move |e| {
|
||||
e.stop_propagation();
|
||||
Msg::SetUnread(id, !is_unread)
|
||||
}),
|
||||
div![
|
||||
C!["p-2"],
|
||||
i![C![
|
||||
"mx-4",
|
||||
"read-status",
|
||||
"far",
|
||||
if is_unread {
|
||||
"fa-envelope"
|
||||
} else {
|
||||
"fa-envelope-open"
|
||||
},
|
||||
]],
|
||||
ev(Ev::Click, move |e| {
|
||||
e.stop_propagation();
|
||||
Msg::SetUnread(id, !is_unread)
|
||||
})
|
||||
],
|
||||
]
|
||||
]
|
||||
}
|
||||
@@ -977,7 +1025,7 @@ fn message_render(msg: &ShowThreadQueryThreadOnEmailThreadMessages, open: bool)
|
||||
],
|
||||
IF!(open =>
|
||||
div![
|
||||
C!["bg-white", "text-black", "p-4", "min-w-full", "w-0","overflow-x-auto", from],
|
||||
C!["content", "bg-white", "text-black", "p-4", "min-w-full", "w-0","overflow-x-auto", from],
|
||||
match &msg.body {
|
||||
ShowThreadQueryThreadOnEmailThreadMessagesBody::UnhandledContentType(
|
||||
ShowThreadQueryThreadOnEmailThreadMessagesBodyOnUnhandledContentType { contents ,content_tree},
|
||||
@@ -1081,7 +1129,6 @@ fn render_attachements(
|
||||
]
|
||||
}
|
||||
|
||||
// TODO: add cathup_mode:bool and hide elements when true
|
||||
#[topo::nested]
|
||||
fn thread(
|
||||
thread: &ShowThreadQueryThreadOnEmailThread,
|
||||
@@ -1172,13 +1219,7 @@ fn thread(
|
||||
el_ref(content_el),
|
||||
messages,
|
||||
IF!(!catchup_mode => click_to_top())
|
||||
],
|
||||
/* TODO(wathiede): plumb in orignal id
|
||||
a![
|
||||
attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
|
||||
"Original"
|
||||
],
|
||||
*/
|
||||
]
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::{collections::VecDeque, rc::Rc};
|
||||
|
||||
use letterbox_shared::WebsocketMessage;
|
||||
use log::{error, info};
|
||||
use log::{debug, error};
|
||||
use seed::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -122,13 +122,13 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
match msg {
|
||||
Msg::WebSocketOpened => {
|
||||
model.web_socket_reconnector = None;
|
||||
info!("WebSocket connection is open now");
|
||||
debug!("WebSocket connection is open now");
|
||||
}
|
||||
Msg::TextMessageReceived(msg) => {
|
||||
model.updates.push_back(msg);
|
||||
}
|
||||
Msg::WebSocketClosed(close_event) => {
|
||||
info!(
|
||||
debug!(
|
||||
r#"==================
|
||||
WebSocket connection was closed:
|
||||
Clean: {0}
|
||||
@@ -148,7 +148,7 @@ Reason: {2}
|
||||
}
|
||||
}
|
||||
Msg::WebSocketFailed => {
|
||||
info!("WebSocket failed");
|
||||
debug!("WebSocket failed");
|
||||
if model.web_socket_reconnector.is_none() {
|
||||
model.web_socket_reconnector = Some(
|
||||
orders.stream_with_handle(streams::backoff(None, Msg::ReconnectWebSocket)),
|
||||
@@ -156,7 +156,7 @@ Reason: {2}
|
||||
}
|
||||
}
|
||||
Msg::ReconnectWebSocket(retries) => {
|
||||
info!("Reconnect attempt: {}", retries);
|
||||
debug!("Reconnect attempt: {}", retries);
|
||||
model.web_socket = create_websocket(&model.ws_url, orders).unwrap();
|
||||
}
|
||||
Msg::SendMessage(msg) => {
|
||||
@@ -177,16 +177,16 @@ fn create_websocket(url: &str, orders: &impl Orders<Msg>) -> Result<EventClient,
|
||||
|
||||
let send = msg_sender.clone();
|
||||
client.set_on_connection(Some(Box::new(move |client: &EventClient| {
|
||||
info!("{:#?}", client.status);
|
||||
debug!("{:#?}", client.status);
|
||||
let msg = match *client.status.borrow() {
|
||||
ConnectionStatus::Connecting => {
|
||||
info!("Connecting...");
|
||||
debug!("Connecting...");
|
||||
None
|
||||
}
|
||||
ConnectionStatus::Connected => Some(Msg::WebSocketOpened),
|
||||
ConnectionStatus::Error => Some(Msg::WebSocketFailed),
|
||||
ConnectionStatus::Disconnected => {
|
||||
info!("Disconnected");
|
||||
debug!("Disconnected");
|
||||
None
|
||||
}
|
||||
};
|
||||
@@ -195,7 +195,7 @@ fn create_websocket(url: &str, orders: &impl Orders<Msg>) -> Result<EventClient,
|
||||
|
||||
let send = msg_sender.clone();
|
||||
client.set_on_close(Some(Box::new(move |ev| {
|
||||
info!("WS: Connection closed");
|
||||
debug!("WS: Connection closed");
|
||||
send(Some(Msg::WebSocketClosed(ev)));
|
||||
})));
|
||||
|
||||
|
||||
@@ -2,23 +2,23 @@ html {
|
||||
background-color: black;
|
||||
}
|
||||
|
||||
.mail-thread a,
|
||||
.mail-thread .content a,
|
||||
.news-post a {
|
||||
color: var(--color-link) !important;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.mail-thread br,
|
||||
.mail-thread .content br,
|
||||
.news-post br {
|
||||
display: block;
|
||||
margin-top: 1em;
|
||||
content: " ";
|
||||
}
|
||||
|
||||
.mail-thread h1,
|
||||
.mail-thread h2,
|
||||
.mail-thread h3,
|
||||
.mail-thread h4,
|
||||
.mail-thread .content h1,
|
||||
.mail-thread .content h2,
|
||||
.mail-thread .content h3,
|
||||
.mail-thread .content h4,
|
||||
.news-post h1,
|
||||
.news-post h2,
|
||||
.news-post h3,
|
||||
@@ -27,12 +27,12 @@ html {
|
||||
margin-bottom: 1em !important;
|
||||
}
|
||||
|
||||
.mail-thread p,
|
||||
.mail-thread .content p,
|
||||
.news-post p {
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.mail-thread pre,
|
||||
.mail-thread .content pre,
|
||||
.news-post pre {
|
||||
font-family: monospace;
|
||||
background-color: #eee !important;
|
||||
@@ -40,28 +40,28 @@ html {
|
||||
white-space: break-spaces;
|
||||
}
|
||||
|
||||
.mail-thread code,
|
||||
.mail-thread .content code,
|
||||
.news-post code {
|
||||
font-family: monospace;
|
||||
white-space: break-spaces;
|
||||
background-color: #eee !important;
|
||||
}
|
||||
|
||||
.mail-thread blockquote {
|
||||
.mail-thread .content blockquote {
|
||||
padding-left: 1em;
|
||||
border-left: 2px solid #ddd;
|
||||
}
|
||||
|
||||
.mail-thread ol,
|
||||
.mail-thread ul {
|
||||
.mail-thread .content ol,
|
||||
.mail-thread .content ul {
|
||||
margin-left: 2em;
|
||||
}
|
||||
|
||||
.mail-thread .noreply-news-bloomberg-com a {
|
||||
.mail-thread .content .noreply-news-bloomberg-com a {
|
||||
background-color: initial !important;
|
||||
}
|
||||
|
||||
.mail-thread .noreply-news-bloomberg-com h2 {
|
||||
.mail-thread .content .noreply-news-bloomberg-com h2 {
|
||||
margin: 0 !important;
|
||||
padding: 0 !important;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user