Compare commits
No commits in common. "dccfb6f71f5ffb71d2dd92953e188ffa09e8ce01" and "5cec8add5ef45d9607ff05778b868ebcf3e1085e" have entirely different histories.
dccfb6f71f
...
5cec8add5e
54
Cargo.lock
generated
54
Cargo.lock
generated
@ -2995,21 +2995,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "letterbox-notmuch"
|
name = "letterbox-notmuch"
|
||||||
version = "0.16.0"
|
version = "0.15.11"
|
||||||
source = "sparse+https://git.z.xinu.tv/api/packages/wathiede/cargo/"
|
|
||||||
checksum = "f7cdd2798042f4cc63342d798f450e7152231f4f592b3142cd63a0a9f4b879d8"
|
|
||||||
dependencies = [
|
|
||||||
"log",
|
|
||||||
"mailparse",
|
|
||||||
"serde",
|
|
||||||
"serde_json",
|
|
||||||
"thiserror 2.0.12",
|
|
||||||
"tracing",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "letterbox-notmuch"
|
|
||||||
version = "0.17.0"
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"itertools",
|
"itertools",
|
||||||
"log",
|
"log",
|
||||||
@ -3024,12 +3010,10 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "letterbox-procmail2notmuch"
|
name = "letterbox-procmail2notmuch"
|
||||||
version = "0.17.0"
|
version = "0.15.11"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"clap",
|
"clap",
|
||||||
"letterbox-notmuch 0.16.0",
|
|
||||||
"letterbox-shared 0.16.0",
|
|
||||||
"serde",
|
"serde",
|
||||||
"sqlx",
|
"sqlx",
|
||||||
"tokio 1.44.2",
|
"tokio 1.44.2",
|
||||||
@ -3037,7 +3021,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "letterbox-server"
|
name = "letterbox-server"
|
||||||
version = "0.17.0"
|
version = "0.15.11"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ammonia",
|
"ammonia",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
@ -3055,8 +3039,8 @@ dependencies = [
|
|||||||
"futures 0.3.31",
|
"futures 0.3.31",
|
||||||
"headers",
|
"headers",
|
||||||
"html-escape",
|
"html-escape",
|
||||||
"letterbox-notmuch 0.16.0",
|
"letterbox-notmuch",
|
||||||
"letterbox-shared 0.16.0",
|
"letterbox-shared",
|
||||||
"linkify",
|
"linkify",
|
||||||
"log",
|
"log",
|
||||||
"lol_html",
|
"lol_html",
|
||||||
@ -3081,35 +3065,17 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "letterbox-shared"
|
name = "letterbox-shared"
|
||||||
version = "0.16.0"
|
version = "0.15.11"
|
||||||
source = "sparse+https://git.z.xinu.tv/api/packages/wathiede/cargo/"
|
|
||||||
checksum = "18fcc018014a200754ea7524f41fc4e5e14f5edc4cb0ca5d7afbaa476cb0d297"
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"build-info",
|
"build-info",
|
||||||
"letterbox-notmuch 0.16.0",
|
"letterbox-notmuch",
|
||||||
"regex",
|
|
||||||
"serde",
|
"serde",
|
||||||
"sqlx",
|
|
||||||
"strum_macros 0.27.1",
|
"strum_macros 0.27.1",
|
||||||
"tracing",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "letterbox-shared"
|
|
||||||
version = "0.17.0"
|
|
||||||
dependencies = [
|
|
||||||
"build-info",
|
|
||||||
"letterbox-notmuch 0.16.0",
|
|
||||||
"regex",
|
|
||||||
"serde",
|
|
||||||
"sqlx",
|
|
||||||
"strum_macros 0.27.1",
|
|
||||||
"tracing",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "letterbox-web"
|
name = "letterbox-web"
|
||||||
version = "0.17.0"
|
version = "0.15.11"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"build-info",
|
"build-info",
|
||||||
"build-info-build",
|
"build-info-build",
|
||||||
@ -3121,8 +3087,8 @@ dependencies = [
|
|||||||
"graphql_client",
|
"graphql_client",
|
||||||
"human_format",
|
"human_format",
|
||||||
"itertools",
|
"itertools",
|
||||||
"letterbox-notmuch 0.16.0",
|
"letterbox-notmuch",
|
||||||
"letterbox-shared 0.16.0",
|
"letterbox-shared",
|
||||||
"log",
|
"log",
|
||||||
"seed",
|
"seed",
|
||||||
"seed_hooks",
|
"seed_hooks",
|
||||||
|
|||||||
@ -8,7 +8,7 @@ authors = ["Bill Thiede <git@xinu.tv>"]
|
|||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "UNLICENSED"
|
license = "UNLICENSED"
|
||||||
publish = ["xinu"]
|
publish = ["xinu"]
|
||||||
version = "0.17.0"
|
version = "0.15.11"
|
||||||
repository = "https://git.z.xinu.tv/wathiede/letterbox"
|
repository = "https://git.z.xinu.tv/wathiede/letterbox"
|
||||||
|
|
||||||
[profile.dev]
|
[profile.dev]
|
||||||
|
|||||||
@ -598,11 +598,6 @@ impl Notmuch {
|
|||||||
|
|
||||||
#[instrument(skip_all, fields(id=id,part=part))]
|
#[instrument(skip_all, fields(id=id,part=part))]
|
||||||
pub fn show_original_part(&self, id: &MessageId, part: usize) -> Result<Vec<u8>, NotmuchError> {
|
pub fn show_original_part(&self, id: &MessageId, part: usize) -> Result<Vec<u8>, NotmuchError> {
|
||||||
let id = if id.starts_with("id:") {
|
|
||||||
id
|
|
||||||
} else {
|
|
||||||
&format!("id:{id}")
|
|
||||||
};
|
|
||||||
let res = self.run_notmuch(["show", "--part", &part.to_string(), id])?;
|
let res = self.run_notmuch(["show", "--part", &part.to_string(), id])?;
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
|||||||
@ -13,8 +13,6 @@ version.workspace = true
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = "1.0.69"
|
anyhow = "1.0.69"
|
||||||
clap = { version = "4.5.37", features = ["derive", "env"] }
|
clap = { version = "4.5.37", features = ["derive", "env"] }
|
||||||
letterbox-notmuch = { version = "0.16.0", registry = "xinu" }
|
|
||||||
letterbox-shared = { version = "0.16.0", registry = "xinu" }
|
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio"] }
|
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio"] }
|
||||||
tokio = { version = "1.44.2", features = ["rt", "macros", "rt-multi-thread"] }
|
tokio = { version = "1.44.2", features = ["rt", "macros", "rt-multi-thread"] }
|
||||||
|
|||||||
@ -1,9 +1,165 @@
|
|||||||
use std::{collections::HashMap, io::Write};
|
use std::{collections::HashMap, convert::Infallible, io::Write, str::FromStr};
|
||||||
|
|
||||||
use clap::{Parser, Subcommand};
|
use clap::{Parser, Subcommand};
|
||||||
use letterbox_shared::{cleanup_match, Match, MatchType, Rule};
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::{types::Json, PgPool};
|
use sqlx::{types::Json, PgPool};
|
||||||
|
|
||||||
|
#[derive(
|
||||||
|
Copy, Clone, Debug, Default, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize,
|
||||||
|
)]
|
||||||
|
enum MatchType {
|
||||||
|
From,
|
||||||
|
Sender,
|
||||||
|
To,
|
||||||
|
Cc,
|
||||||
|
Subject,
|
||||||
|
ListId,
|
||||||
|
DeliveredTo,
|
||||||
|
XForwardedTo,
|
||||||
|
ReplyTo,
|
||||||
|
XOriginalTo,
|
||||||
|
XSpam,
|
||||||
|
Body,
|
||||||
|
#[default]
|
||||||
|
Unknown,
|
||||||
|
}
|
||||||
|
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||||
|
struct Match {
|
||||||
|
match_type: MatchType,
|
||||||
|
needle: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||||
|
struct Rule {
|
||||||
|
stop_on_match: bool,
|
||||||
|
matches: Vec<Match>,
|
||||||
|
tag: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unescape(s: &str) -> String {
|
||||||
|
s.replace('\\', "")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn cleanup_match(prefix: &str, s: &str) -> String {
|
||||||
|
unescape(&s[prefix.len()..]).replace(".*", "")
|
||||||
|
}
|
||||||
|
|
||||||
|
mod matches {
|
||||||
|
// From https://linux.die.net/man/5/procmailrc
|
||||||
|
// If the regular expression contains '^TO_' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^-a-zA-Z0-9_.])?)'
|
||||||
|
// If the regular expression contains '^TO' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^a-zA-Z])?)', which should catch all destination specifications containing a specific word.
|
||||||
|
|
||||||
|
pub const TO: &'static str = "TO";
|
||||||
|
pub const CC: &'static str = "Cc";
|
||||||
|
pub const TOCC: &'static str = "(TO|Cc)";
|
||||||
|
pub const FROM: &'static str = "From";
|
||||||
|
pub const SENDER: &'static str = "Sender";
|
||||||
|
pub const SUBJECT: &'static str = "Subject";
|
||||||
|
pub const DELIVERED_TO: &'static str = "Delivered-To";
|
||||||
|
pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
|
||||||
|
pub const REPLY_TO: &'static str = "Reply-To";
|
||||||
|
pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
|
||||||
|
pub const LIST_ID: &'static str = "List-ID";
|
||||||
|
pub const X_SPAM: &'static str = "X-Spam";
|
||||||
|
pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for Match {
|
||||||
|
type Err = Infallible;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
// Examples:
|
||||||
|
// "* 1^0 ^TOsonyrewards.com@xinu.tv"
|
||||||
|
// "* ^TOsonyrewards.com@xinu.tv"
|
||||||
|
let mut it = s.split_whitespace().skip(1);
|
||||||
|
let mut needle = it.next().unwrap();
|
||||||
|
if needle == "1^0" {
|
||||||
|
needle = it.next().unwrap();
|
||||||
|
}
|
||||||
|
let mut needle = vec![needle];
|
||||||
|
needle.extend(it);
|
||||||
|
let needle = needle.join(" ");
|
||||||
|
let first = needle.chars().nth(0).unwrap_or(' ');
|
||||||
|
use matches::*;
|
||||||
|
if first == '^' {
|
||||||
|
let needle = &needle[1..];
|
||||||
|
if needle.starts_with(TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::To,
|
||||||
|
needle: cleanup_match(TO, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(FROM) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::From,
|
||||||
|
needle: cleanup_match(FROM, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(CC) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::Cc,
|
||||||
|
needle: cleanup_match(CC, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(TOCC) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::To,
|
||||||
|
needle: cleanup_match(TOCC, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(SENDER) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::Sender,
|
||||||
|
needle: cleanup_match(SENDER, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(SUBJECT) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::Subject,
|
||||||
|
needle: cleanup_match(SUBJECT, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(X_ORIGINAL_TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::XOriginalTo,
|
||||||
|
needle: cleanup_match(X_ORIGINAL_TO, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(LIST_ID) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::ListId,
|
||||||
|
needle: cleanup_match(LIST_ID, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(REPLY_TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::ReplyTo,
|
||||||
|
needle: cleanup_match(REPLY_TO, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(X_SPAM_FLAG) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::XSpam,
|
||||||
|
needle: '*'.to_string(),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(X_SPAM) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::XSpam,
|
||||||
|
needle: '*'.to_string(),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(DELIVERED_TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::DeliveredTo,
|
||||||
|
needle: cleanup_match(DELIVERED_TO, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(X_FORWARDED_TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::XForwardedTo,
|
||||||
|
needle: cleanup_match(X_FORWARDED_TO, needle),
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
unreachable!("needle: '{needle}'")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::Body,
|
||||||
|
needle: cleanup_match("", &needle),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Subcommand)]
|
#[derive(Debug, Subcommand)]
|
||||||
enum Mode {
|
enum Mode {
|
||||||
Debug,
|
Debug,
|
||||||
@ -48,9 +204,6 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
match first {
|
match first {
|
||||||
':' => {
|
':' => {
|
||||||
// start of rule
|
// start of rule
|
||||||
|
|
||||||
// If carbon-copy flag present, don't stop on match
|
|
||||||
cur_rule.stop_on_match = !l.contains('c');
|
|
||||||
}
|
}
|
||||||
'*' => {
|
'*' => {
|
||||||
// add to current rule
|
// add to current rule
|
||||||
@ -59,13 +212,13 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
}
|
}
|
||||||
'.' => {
|
'.' => {
|
||||||
// delivery to folder
|
// delivery to folder
|
||||||
cur_rule.tag = cleanup_match(
|
cur_rule.tag = Some(cleanup_match(
|
||||||
"",
|
"",
|
||||||
&l.replace('.', "/")
|
&l.replace('.', "/")
|
||||||
.replace(' ', "")
|
.replace(' ', "")
|
||||||
.trim_matches('/')
|
.trim_matches('/')
|
||||||
.to_string(),
|
.to_string(),
|
||||||
);
|
));
|
||||||
rules.push(cur_rule);
|
rules.push(cur_rule);
|
||||||
cur_rule = Rule::default();
|
cur_rule = Rule::default();
|
||||||
}
|
}
|
||||||
@ -73,7 +226,7 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
'|' => cur_rule = Rule::default(), // external command
|
'|' => cur_rule = Rule::default(), // external command
|
||||||
'$' => {
|
'$' => {
|
||||||
// TODO(wathiede): tag messages with no other tag as 'inbox'
|
// TODO(wathiede): tag messages with no other tag as 'inbox'
|
||||||
cur_rule.tag = cleanup_match("", "inbox");
|
cur_rule.tag = Some(cleanup_match("", "inbox"));
|
||||||
rules.push(cur_rule);
|
rules.push(cur_rule);
|
||||||
cur_rule = Rule::default();
|
cur_rule = Rule::default();
|
||||||
} // variable, should only be $DEFAULT in my config
|
} // variable, should only be $DEFAULT in my config
|
||||||
@ -109,7 +262,7 @@ fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()>
|
|||||||
let mut lines = Vec::new();
|
let mut lines = Vec::new();
|
||||||
for r in rules {
|
for r in rules {
|
||||||
for m in &r.matches {
|
for m in &r.matches {
|
||||||
let t = &r.tag;
|
if let Some(t) = &r.tag {
|
||||||
if let MatchType::Unknown = m.match_type {
|
if let MatchType::Unknown = m.match_type {
|
||||||
eprintln!("rule has unknown match {:?}", r);
|
eprintln!("rule has unknown match {:?}", r);
|
||||||
continue;
|
continue;
|
||||||
@ -146,6 +299,7 @@ fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()>
|
|||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
lines.sort();
|
lines.sort();
|
||||||
for l in lines {
|
for l in lines {
|
||||||
writeln!(w, "{l}")?;
|
writeln!(w, "{l}")?;
|
||||||
|
|||||||
@ -1,20 +0,0 @@
|
|||||||
{
|
|
||||||
"db_name": "PostgreSQL",
|
|
||||||
"query": "\n SELECT rule as \"rule: Json<Rule>\"\n FROM email_rule\n ORDER BY sort_order\n ",
|
|
||||||
"describe": {
|
|
||||||
"columns": [
|
|
||||||
{
|
|
||||||
"ordinal": 0,
|
|
||||||
"name": "rule: Json<Rule>",
|
|
||||||
"type_info": "Jsonb"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"parameters": {
|
|
||||||
"Left": []
|
|
||||||
},
|
|
||||||
"nullable": [
|
|
||||||
false
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"hash": "6c5b0a96f45f78795732ea428cc01b4eab28b7150aa37387e7439a6b0b58e88c"
|
|
||||||
}
|
|
||||||
@ -27,8 +27,8 @@ css-inline = "0.14.0"
|
|||||||
futures = "0.3.31"
|
futures = "0.3.31"
|
||||||
headers = "0.4.0"
|
headers = "0.4.0"
|
||||||
html-escape = "0.2.13"
|
html-escape = "0.2.13"
|
||||||
letterbox-notmuch = { version = "0.16.0", registry = "xinu" }
|
letterbox-notmuch = { version = "0.15.11", path = "../notmuch", registry = "xinu" }
|
||||||
letterbox-shared = { version = "0.16.0", registry = "xinu" }
|
letterbox-shared = { version = "0.15.11", path = "../shared", registry = "xinu" }
|
||||||
linkify = "0.10.0"
|
linkify = "0.10.0"
|
||||||
log = "0.4.17"
|
log = "0.4.17"
|
||||||
lol_html = "2.0.0"
|
lol_html = "2.0.0"
|
||||||
|
|||||||
@ -29,9 +29,9 @@ use serde::Deserialize;
|
|||||||
use sqlx::postgres::PgPool;
|
use sqlx::postgres::PgPool;
|
||||||
use tokio::{net::TcpListener, sync::Mutex};
|
use tokio::{net::TcpListener, sync::Mutex};
|
||||||
use tower_http::trace::{DefaultMakeSpan, TraceLayer};
|
use tower_http::trace::{DefaultMakeSpan, TraceLayer};
|
||||||
use tracing::info;
|
use tracing::{info, warn};
|
||||||
|
|
||||||
// Make our own error that wraps `ServerError`.
|
// Make our own error that wraps `anyhow::Error`.
|
||||||
struct AppError(letterbox_server::ServerError);
|
struct AppError(letterbox_server::ServerError);
|
||||||
|
|
||||||
// Tell axum how to convert `AppError` into a response.
|
// Tell axum how to convert `AppError` into a response.
|
||||||
@ -148,9 +148,18 @@ async fn view_original(
|
|||||||
extract::Path(id): extract::Path<String>,
|
extract::Path(id): extract::Path<String>,
|
||||||
) -> Result<impl IntoResponse, AppError> {
|
) -> Result<impl IntoResponse, AppError> {
|
||||||
info!("view_original {id}");
|
info!("view_original {id}");
|
||||||
let bytes = nm.show_original(&id)?;
|
let mid = if id.starts_with("id:") {
|
||||||
let s = String::from_utf8_lossy(&bytes).to_string();
|
id.to_string()
|
||||||
Ok(s.into_response())
|
} else {
|
||||||
|
format!("id:{}", id)
|
||||||
|
};
|
||||||
|
let files = nm.files(&mid)?;
|
||||||
|
let Some(path) = files.first() else {
|
||||||
|
warn!("failed to find files for message {mid}");
|
||||||
|
return Ok((StatusCode::NOT_FOUND, mid).into_response());
|
||||||
|
};
|
||||||
|
let str = std::fs::read_to_string(&path)?;
|
||||||
|
Ok(str.into_response())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn graphiql() -> impl IntoResponse {
|
async fn graphiql() -> impl IntoResponse {
|
||||||
|
|||||||
@ -1,39 +0,0 @@
|
|||||||
use std::error::Error;
|
|
||||||
|
|
||||||
use clap::Parser;
|
|
||||||
use letterbox_notmuch::Notmuch;
|
|
||||||
use letterbox_server::nm::label_unprocessed;
|
|
||||||
use sqlx::postgres::PgPool;
|
|
||||||
use tracing::info;
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(version, about, long_about = None)]
|
|
||||||
struct Cli {
|
|
||||||
#[arg(short, long, default_value = env!("DATABASE_URL"))]
|
|
||||||
newsreader_database_url: String,
|
|
||||||
#[arg(short, long, default_value = "10")]
|
|
||||||
/// Set to 0 to process all matches
|
|
||||||
messages_to_process: usize,
|
|
||||||
#[arg(short, long, default_value = "false")]
|
|
||||||
execute: bool,
|
|
||||||
/// Process messages matching this notmuch query
|
|
||||||
#[arg(short, long, default_value = "tag:unprocessed")]
|
|
||||||
query: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() -> Result<(), Box<dyn Error>> {
|
|
||||||
let cli = Cli::parse();
|
|
||||||
let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
|
|
||||||
build_info::build_info!(fn bi);
|
|
||||||
info!("Build Info: {}", letterbox_shared::build_version(bi));
|
|
||||||
let pool = PgPool::connect(&cli.newsreader_database_url).await?;
|
|
||||||
let nm = Notmuch::default();
|
|
||||||
let limit = if cli.messages_to_process > 0 {
|
|
||||||
Some(cli.messages_to_process)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
label_unprocessed(&nm, &pool, !cli.execute, limit, &cli.query).await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@ -17,7 +17,7 @@ use tracing::instrument;
|
|||||||
|
|
||||||
#[cfg(feature = "tantivy")]
|
#[cfg(feature = "tantivy")]
|
||||||
use crate::tantivy::TantivyConnection;
|
use crate::tantivy::TantivyConnection;
|
||||||
use crate::{newsreader, nm, nm::label_unprocessed, Query};
|
use crate::{newsreader, nm, Query};
|
||||||
|
|
||||||
/// # Number of seconds since the Epoch
|
/// # Number of seconds since the Epoch
|
||||||
pub type UnixTime = isize;
|
pub type UnixTime = isize;
|
||||||
@ -629,10 +629,6 @@ impl MutationRoot {
|
|||||||
let pool = ctx.data_unchecked::<PgPool>();
|
let pool = ctx.data_unchecked::<PgPool>();
|
||||||
info!("{}", String::from_utf8_lossy(&nm.new()?));
|
info!("{}", String::from_utf8_lossy(&nm.new()?));
|
||||||
newsreader::refresh(pool, cacher).await?;
|
newsreader::refresh(pool, cacher).await?;
|
||||||
|
|
||||||
// Process email labels
|
|
||||||
label_unprocessed(&nm, &pool, false, Some(10), "tag:unprocessed").await?;
|
|
||||||
|
|
||||||
#[cfg(feature = "tantivy")]
|
#[cfg(feature = "tantivy")]
|
||||||
{
|
{
|
||||||
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
||||||
|
|||||||
112
server/src/nm.rs
112
server/src/nm.rs
@ -1,14 +1,11 @@
|
|||||||
use std::{
|
use std::{collections::HashMap, fs::File};
|
||||||
collections::{HashMap, HashSet},
|
|
||||||
fs::File,
|
|
||||||
};
|
|
||||||
|
|
||||||
use letterbox_notmuch::Notmuch;
|
use letterbox_notmuch::Notmuch;
|
||||||
use letterbox_shared::{compute_color, Rule};
|
use letterbox_shared::compute_color;
|
||||||
use log::{error, info, warn};
|
use log::{error, info, warn};
|
||||||
use mailparse::{parse_content_type, parse_mail, MailHeader, MailHeaderMap, ParsedMail};
|
use mailparse::{parse_content_type, parse_mail, MailHeader, MailHeaderMap, ParsedMail};
|
||||||
use memmap::MmapOptions;
|
use memmap::MmapOptions;
|
||||||
use sqlx::{types::Json, PgPool};
|
use sqlx::PgPool;
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
@ -928,106 +925,3 @@ WHERE
|
|||||||
.await?;
|
.await?;
|
||||||
Ok(row.map(|r| r.url))
|
Ok(row.map(|r| r.url))
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
* grab email_rules table from sql
|
|
||||||
* For each message with `unprocessed` label
|
|
||||||
* parse the message
|
|
||||||
* pass headers for each message through a matcher using email rules
|
|
||||||
* for each match, add label to message
|
|
||||||
* if any matches were found, remove unprocessed
|
|
||||||
* TODO: how to handle inbox label
|
|
||||||
*/
|
|
||||||
|
|
||||||
#[instrument(name="nm::label_unprocessed", skip_all, fields(dryrun=dryrun, limit=?limit, query=%query))]
|
|
||||||
pub async fn label_unprocessed(
|
|
||||||
nm: &Notmuch,
|
|
||||||
pool: &PgPool,
|
|
||||||
dryrun: bool,
|
|
||||||
limit: Option<usize>,
|
|
||||||
query: &str,
|
|
||||||
) -> Result<(), ServerError> {
|
|
||||||
use futures::StreamExt;
|
|
||||||
let ids = nm.message_ids(query)?;
|
|
||||||
info!(
|
|
||||||
"Processing {limit:?} of {} messages with '{query}'",
|
|
||||||
ids.len()
|
|
||||||
);
|
|
||||||
let rules: Vec<_> = sqlx::query!(
|
|
||||||
r#"
|
|
||||||
SELECT rule as "rule: Json<Rule>"
|
|
||||||
FROM email_rule
|
|
||||||
ORDER BY sort_order
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.fetch(pool)
|
|
||||||
.map(|r| r.unwrap().rule.0)
|
|
||||||
.collect()
|
|
||||||
.await;
|
|
||||||
/*
|
|
||||||
use letterbox_shared::{Match, MatchType};
|
|
||||||
let rules = vec![Rule {
|
|
||||||
stop_on_match: false,
|
|
||||||
matches: vec![Match {
|
|
||||||
match_type: MatchType::From,
|
|
||||||
needle: "eftours".to_string(),
|
|
||||||
}],
|
|
||||||
tag: "EFTours".to_string(),
|
|
||||||
}];
|
|
||||||
*/
|
|
||||||
info!("Loaded {} rules", rules.len());
|
|
||||||
|
|
||||||
let ids = if let Some(limit) = limit {
|
|
||||||
&ids[..limit]
|
|
||||||
} else {
|
|
||||||
&ids[..]
|
|
||||||
};
|
|
||||||
for id in ids {
|
|
||||||
let id = format!("id:{id}");
|
|
||||||
let files = nm.files(&id)?;
|
|
||||||
// Only process the first file path is multiple files have the same id
|
|
||||||
let path = files.iter().next().unwrap();
|
|
||||||
let file = File::open(&path)?;
|
|
||||||
let mmap = unsafe { MmapOptions::new().map(&file)? };
|
|
||||||
let m = parse_mail(&mmap)?;
|
|
||||||
let (matched_rule, add_tags) = find_tags(&rules, &m.headers);
|
|
||||||
if matched_rule {
|
|
||||||
if dryrun {
|
|
||||||
info!(
|
|
||||||
"\nAdd tags: {add_tags:?}\nTo: {} From: {} Subject: {}\n",
|
|
||||||
m.headers.get_first_value("to").expect("no from header"),
|
|
||||||
m.headers.get_first_value("from").expect("no from header"),
|
|
||||||
m.headers
|
|
||||||
.get_first_value("subject")
|
|
||||||
.expect("no subject header")
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
for t in &add_tags {
|
|
||||||
nm.tag_add(t, &id)?;
|
|
||||||
}
|
|
||||||
if !add_tags.contains("inbox") {
|
|
||||||
nm.tag_remove("inbox", &id)?;
|
|
||||||
}
|
|
||||||
nm.tag_remove("unprocessed", &id)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
fn find_tags<'a, 'b>(rules: &'a [Rule], headers: &'b [MailHeader]) -> (bool, HashSet<&'a str>) {
|
|
||||||
let mut matched_rule = false;
|
|
||||||
let mut add_tags = HashSet::new();
|
|
||||||
for rule in rules {
|
|
||||||
for hdr in headers {
|
|
||||||
if rule.is_match(&hdr.get_key(), &hdr.get_value()) {
|
|
||||||
//info!("Matched {rule:?}");
|
|
||||||
matched_rule = true;
|
|
||||||
add_tags.insert(rule.tag.as_str());
|
|
||||||
if rule.stop_on_match {
|
|
||||||
return (true, add_tags);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return (matched_rule, add_tags);
|
|
||||||
}
|
|
||||||
|
|||||||
@ -12,9 +12,6 @@ version.workspace = true
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
build-info = "0.0.40"
|
build-info = "0.0.40"
|
||||||
letterbox-notmuch = { version = "0.16.0", registry = "xinu" }
|
letterbox-notmuch = { version = "0.15.11", path = "../notmuch", registry = "xinu" }
|
||||||
regex = "1.11.1"
|
|
||||||
serde = { version = "1.0.147", features = ["derive"] }
|
serde = { version = "1.0.147", features = ["derive"] }
|
||||||
sqlx = "0.8.5"
|
|
||||||
strum_macros = "0.27.1"
|
strum_macros = "0.27.1"
|
||||||
tracing = "0.1.41"
|
|
||||||
|
|||||||
@ -1,14 +1,8 @@
|
|||||||
use std::{
|
use std::hash::{DefaultHasher, Hash, Hasher};
|
||||||
convert::Infallible,
|
|
||||||
hash::{DefaultHasher, Hash, Hasher},
|
|
||||||
str::FromStr,
|
|
||||||
};
|
|
||||||
|
|
||||||
use build_info::{BuildInfo, VersionControl};
|
use build_info::{BuildInfo, VersionControl};
|
||||||
use letterbox_notmuch::SearchSummary;
|
use letterbox_notmuch::SearchSummary;
|
||||||
use regex::{RegexBuilder, RegexSetBuilder};
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use tracing::debug;
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
#[derive(Serialize, Deserialize, Debug)]
|
||||||
pub struct SearchResult {
|
pub struct SearchResult {
|
||||||
@ -71,198 +65,3 @@ pub fn compute_color(data: &str) -> String {
|
|||||||
data.hash(&mut hasher);
|
data.hash(&mut hasher);
|
||||||
format!("#{:06x}", hasher.finish() % (1 << 24))
|
format!("#{:06x}", hasher.finish() % (1 << 24))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Copy, Clone, Debug, Default, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize,
|
|
||||||
)]
|
|
||||||
pub enum MatchType {
|
|
||||||
From,
|
|
||||||
Sender,
|
|
||||||
To,
|
|
||||||
Cc,
|
|
||||||
Subject,
|
|
||||||
ListId,
|
|
||||||
DeliveredTo,
|
|
||||||
XForwardedTo,
|
|
||||||
ReplyTo,
|
|
||||||
XOriginalTo,
|
|
||||||
XSpam,
|
|
||||||
Body,
|
|
||||||
#[default]
|
|
||||||
Unknown,
|
|
||||||
}
|
|
||||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
|
||||||
pub struct Match {
|
|
||||||
pub match_type: MatchType,
|
|
||||||
pub needle: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
|
||||||
pub struct Rule {
|
|
||||||
pub stop_on_match: bool,
|
|
||||||
pub matches: Vec<Match>,
|
|
||||||
pub tag: String,
|
|
||||||
}
|
|
||||||
impl Rule {
|
|
||||||
pub fn is_match(&self, header_key: &str, header_value: &str) -> bool {
|
|
||||||
let pats: Vec<_> = self
|
|
||||||
.matches
|
|
||||||
.iter()
|
|
||||||
.filter_map(|m| match m.match_type {
|
|
||||||
MatchType::To => Some("^(to|cc|bcc|x-original-to)$"),
|
|
||||||
MatchType::From => Some("^from$"),
|
|
||||||
MatchType::Sender => Some("^sender$"),
|
|
||||||
MatchType::Subject => Some("^subject$"),
|
|
||||||
MatchType::ListId => Some("^list-id$"),
|
|
||||||
MatchType::XOriginalTo => Some("^x-original-to$"),
|
|
||||||
MatchType::ReplyTo => Some("^reply-to$"),
|
|
||||||
MatchType::XSpam => Some("^x-spam$"),
|
|
||||||
MatchType::Body => None,
|
|
||||||
c => panic!("TODO handle '{c:?}' match type"),
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let set = RegexSetBuilder::new(&pats)
|
|
||||||
.case_insensitive(true)
|
|
||||||
.build()
|
|
||||||
.expect("failed to compile regex for matches");
|
|
||||||
let matches: Vec<_> = set.matches(header_key).into_iter().collect();
|
|
||||||
if !matches.is_empty() {
|
|
||||||
//info!("matched key '{header_key}' '{header_value}'");
|
|
||||||
for m_idx in matches {
|
|
||||||
let needle = regex::escape(&self.matches[m_idx].needle);
|
|
||||||
let pat = RegexBuilder::new(&needle)
|
|
||||||
.case_insensitive(true)
|
|
||||||
.build()
|
|
||||||
.expect("failed to compile regex for needle");
|
|
||||||
if pat.is_match(header_value) {
|
|
||||||
debug!("{header_key} matched {header_value} against {needle}");
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod matches {
|
|
||||||
// From https://linux.die.net/man/5/procmailrc
|
|
||||||
// If the regular expression contains '^TO_' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^-a-zA-Z0-9_.])?)'
|
|
||||||
// If the regular expression contains '^TO' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^a-zA-Z])?)', which should catch all destination specifications containing a specific word.
|
|
||||||
|
|
||||||
pub const TO: &'static str = "TO";
|
|
||||||
pub const CC: &'static str = "Cc";
|
|
||||||
pub const TOCC: &'static str = "(TO|Cc)";
|
|
||||||
pub const FROM: &'static str = "From";
|
|
||||||
pub const SENDER: &'static str = "Sender";
|
|
||||||
pub const SUBJECT: &'static str = "Subject";
|
|
||||||
pub const DELIVERED_TO: &'static str = "Delivered-To";
|
|
||||||
pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
|
|
||||||
pub const REPLY_TO: &'static str = "Reply-To";
|
|
||||||
pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
|
|
||||||
pub const LIST_ID: &'static str = "List-ID";
|
|
||||||
pub const X_SPAM: &'static str = "X-Spam";
|
|
||||||
pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for Match {
|
|
||||||
type Err = Infallible;
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
// Examples:
|
|
||||||
// "* 1^0 ^TOsonyrewards.com@xinu.tv"
|
|
||||||
// "* ^TOsonyrewards.com@xinu.tv"
|
|
||||||
let mut it = s.split_whitespace().skip(1);
|
|
||||||
let mut needle = it.next().unwrap();
|
|
||||||
if needle == "1^0" {
|
|
||||||
needle = it.next().unwrap();
|
|
||||||
}
|
|
||||||
let mut needle = vec![needle];
|
|
||||||
needle.extend(it);
|
|
||||||
let needle = needle.join(" ");
|
|
||||||
let first = needle.chars().nth(0).unwrap_or(' ');
|
|
||||||
use matches::*;
|
|
||||||
if first == '^' {
|
|
||||||
let needle = &needle[1..];
|
|
||||||
if needle.starts_with(TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::To,
|
|
||||||
needle: cleanup_match(TO, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(FROM) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::From,
|
|
||||||
needle: cleanup_match(FROM, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(CC) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::Cc,
|
|
||||||
needle: cleanup_match(CC, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(TOCC) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::To,
|
|
||||||
needle: cleanup_match(TOCC, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(SENDER) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::Sender,
|
|
||||||
needle: cleanup_match(SENDER, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(SUBJECT) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::Subject,
|
|
||||||
needle: cleanup_match(SUBJECT, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(X_ORIGINAL_TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::XOriginalTo,
|
|
||||||
needle: cleanup_match(X_ORIGINAL_TO, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(LIST_ID) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::ListId,
|
|
||||||
needle: cleanup_match(LIST_ID, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(REPLY_TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::ReplyTo,
|
|
||||||
needle: cleanup_match(REPLY_TO, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(X_SPAM_FLAG) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::XSpam,
|
|
||||||
needle: '*'.to_string(),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(X_SPAM) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::XSpam,
|
|
||||||
needle: '*'.to_string(),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(DELIVERED_TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::DeliveredTo,
|
|
||||||
needle: cleanup_match(DELIVERED_TO, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(X_FORWARDED_TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::XForwardedTo,
|
|
||||||
needle: cleanup_match(X_FORWARDED_TO, needle),
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
unreachable!("needle: '{needle}'")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::Body,
|
|
||||||
needle: cleanup_match("", &needle),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// Removes every backslash from `s`, undoing procmail regex escaping.
fn unescape(s: &str) -> String {
    // A backslash in the pattern only escapes the character after it, so
    // dropping the backslashes themselves is all the unescaping needed.
    s.chars().filter(|&c| c != '\\').collect()
}
|
|
||||||
pub fn cleanup_match(prefix: &str, s: &str) -> String {
|
|
||||||
unescape(&s[prefix.len()..]).replace(".*", "")
|
|
||||||
}
|
|
||||||
|
|||||||
@ -33,8 +33,8 @@ wasm-bindgen = "=0.2.100"
|
|||||||
uuid = { version = "1.13.1", features = [
|
uuid = { version = "1.13.1", features = [
|
||||||
"js",
|
"js",
|
||||||
] } # direct dep to set js feature, prevents Rng issues
|
] } # direct dep to set js feature, prevents Rng issues
|
||||||
letterbox-shared = { version = "0.16.0", registry = "xinu" }
|
letterbox-shared = { version = "0.15.11", path = "../shared", registry = "xinu" }
|
||||||
letterbox-notmuch = { version = "0.16.0", registry = "xinu" }
|
letterbox-notmuch = { version = "0.15.11", path = "../notmuch", registry = "xinu" }
|
||||||
seed_hooks = { version = "0.4.0", registry = "xinu" }
|
seed_hooks = { version = "0.4.0", registry = "xinu" }
|
||||||
strum_macros = "0.27.1"
|
strum_macros = "0.27.1"
|
||||||
gloo-console = "0.3.0"
|
gloo-console = "0.3.0"
|
||||||
|
|||||||
@ -341,10 +341,9 @@ fn search_results(
|
|||||||
let caught_up = query.contains("is:unread");
|
let caught_up = query.contains("is:unread");
|
||||||
let read_emoji = ["👻", "👽", "👾", "🤖", "💀"];
|
let read_emoji = ["👻", "👽", "👾", "🤖", "💀"];
|
||||||
let no_results_emoji = ["🙈", "👀", "🤦", "🤷", "🙅", "🛟", "🍩", "🌑", "💿", "🔍"];
|
let no_results_emoji = ["🙈", "👀", "🤦", "🤷", "🙅", "🛟", "🍩", "🌑", "💿", "🔍"];
|
||||||
// Randomly choose emoji based on what 10-second window we're currently in
|
|
||||||
let now = seed::window()
|
let now = seed::window()
|
||||||
.performance()
|
.performance()
|
||||||
.map(|p| p.now() as usize / 10_000)
|
.map(|p| p.now() as usize)
|
||||||
.unwrap_or(0);
|
.unwrap_or(0);
|
||||||
let (emoji, text) = if caught_up {
|
let (emoji, text) = if caught_up {
|
||||||
let idx = now % read_emoji.len();
|
let idx = now % read_emoji.len();
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user