Compare commits
220 Commits
letterbox-...letterbox-
| SHA1 | Author | Date | |
|---|---|---|---|
| 5cec8add5e | |||
| 0225dbde3a | |||
| f84b8fa6c2 | |||
| 979cbcd23e | |||
| b3070e1919 | |||
| e5fdde8f30 | |||
| 7de36bbc3d | |||
| 1c4f27902e | |||
| 7ee86f0d2f | |||
| a0b06fd5ef | |||
| 630bb20b35 | |||
| 17ea2a35cb | |||
| 7d9376d607 | |||
| 122e949072 | |||
| 9a69b4c51e | |||
| 251151244b | |||
| 9d232b666b | |||
| 1832d77e78 | |||
| aca6bce1ff | |||
| 7bb2f405da | |||
| 60e2824167 | |||
| cffc228b3a | |||
| 318c366d82 | |||
| 90d7f79ca0 | |||
| 3f87038776 | |||
| 92b880f03b | |||
| 94f1e84857 | |||
| 221b4f10df | |||
| 225615f4ea | |||
| b8ef753f85 | |||
| 33edd22f8f | |||
| 75e9232095 | |||
| 6daddf11de | |||
| 36d9eda303 | |||
| 4eb2d4c689 | |||
| edc7119fbf | |||
| aa1736a285 | |||
| 6f93aa4f34 | |||
| 0662e6230e | |||
| 30f3f14040 | |||
| f2042f284e | |||
| b2c73ffa15 | |||
| d7217d1b3c | |||
| 638d55a36c | |||
| b11f6b5149 | |||
| d0b5ecf4f2 | |||
| 7a67c30a2c | |||
| 5ea4694eb8 | |||
| e01dabe6ed | |||
| ecaf0dd0fc | |||
| 3d4dcc9e6b | |||
| 28a5d9f219 | |||
| 81876d37ea | |||
| 4a6b159ddb | |||
| d84957cc8c | |||
| d53db5b49a | |||
| 0448368011 | |||
| 36754136fd | |||
| 489acccf77 | |||
| 8ef4db63ad | |||
| 9f63205ff3 | |||
| 5a0378948d | |||
| 2b4c45be74 | |||
| 147896dc80 | |||
| 1ff6ec7653 | |||
| acd590111e | |||
| b5f24ba1f2 | |||
| 79ed24135f | |||
| a4949a25b5 | |||
| f16edef124 | |||
| 2fd6479cb9 | |||
| 85a6b3a9a4 | |||
| 9ac5216d6e | |||
| 82987dbd20 | |||
| 29de7c0727 | |||
| 5f6580fa2f | |||
| 5d4732d75d | |||
| a13bac813a | |||
| 85dcc9f7bd | |||
| b696629ad9 | |||
| b9e3128718 | |||
| 88fac4c2bc | |||
| 1fad5ec536 | |||
| 8e7214d531 | |||
| 333c4a3ebb | |||
| b9ba5a3bea | |||
| 2a0989e74d | |||
| e9319dc491 | |||
| 57481a77cd | |||
| 44915cce54 | |||
| 1225483b57 | |||
| daeb8c88a1 | |||
| 8a6b3ff501 | |||
| a6fffeafdc | |||
| d791b4ce49 | |||
| 8a0e4eb441 | |||
| fc84562419 | |||
| 37ebe1ebb3 | |||
| 2d06f070ea | |||
| 527a62069a | |||
| 40afafe1a8 | |||
| e3acf9ae6d | |||
| a68d067a68 | |||
| 5547c65af0 | |||
| b622bb7d7d | |||
| 43efdf18a0 | |||
| c71ab8e9e8 | |||
| 408d6ed8ba | |||
| 1411961e36 | |||
| dfd7ef466c | |||
| 2aa3dfbd0f | |||
| fba10e27cf | |||
| 5417c74f9c | |||
| eb0b0dbe81 | |||
| 561f522658 | |||
| 32d2ffeb3d | |||
| d41946e0a5 | |||
| 61402858f4 | |||
| 17de318645 | |||
| 3aa0144e8d | |||
| f9eafff4c7 | |||
| 4c6d67901d | |||
| e9aa97a089 | |||
| a82b047f75 | |||
| 9a8b44a8df | |||
| a96693004c | |||
| ed9fe11fbf | |||
| 09fb14a796 | |||
| 58a7936bba | |||
| cd0ee361f5 | |||
| 77bd5abe0d | |||
| 450c5496b3 | |||
| 4411e45a3c | |||
| e7d20896d5 | |||
| 32a1115abd | |||
| 4982057500 | |||
| 8977f8bab5 | |||
| 0962a6b3cf | |||
| 3c72929a4f | |||
| e4eb495a70 | |||
| 00e8b0342e | |||
| b1f9867c06 | |||
| 77943b3570 | |||
| 45e4edb1dd | |||
| 9bf53afebf | |||
| e1a502ac4b | |||
| 9346c46e62 | |||
| 1452746305 | |||
| 2e526dace1 | |||
| 76be5b7cac | |||
| 3f0b2caedf | |||
| ec6dc35ca8 | |||
| 01e1ca927e | |||
| 1cc52d6c96 | |||
| e6b3a5b5a9 | |||
| bc4b15a5aa | |||
| 00f61cf6be | |||
| 52e24437bd | |||
| 393ffc8506 | |||
| 2b6cb6ec6e | |||
| 0cba3a624c | |||
| 73433711ca | |||
| 965afa6871 | |||
| e70dbaf917 | |||
| 6b4ce11743 | |||
| d1980a55a7 | |||
| 8b78b39d4c | |||
| ae17651eb5 | |||
| 22fd8409f6 | |||
| d0a4ba417f | |||
| 7b09b098a4 | |||
| bd4c10a8fb | |||
| ed3c5f152e | |||
| 63232d1e92 | |||
| 4a3eba80d5 | |||
| 71d3745342 | |||
| 5fdc98633d | |||
| 57877f268d | |||
| 871a93d58f | |||
| 4b7cbd4f9b | |||
| aa2a9815df | |||
| 2e5b18a008 | |||
| d0a38114cc | |||
| ccc1d516c7 | |||
| 246b710fdd | |||
| 1a21c9fa8e | |||
| 9fd912b1d4 | |||
| 9ded32f97b | |||
| 10aac046bc | |||
| f4527baf89 | |||
| 11ec5bf747 | |||
| 6a53679755 | |||
| 7bedec0692 | |||
| 78feb95811 | |||
| 3aad2bb80e | |||
| 0df8de3661 | |||
| 83ecc73fbd | |||
| c10313cd12 | |||
| 4c98bcd9cb | |||
| 004de235a8 | |||
| 90dbeb6f20 | |||
| 9aa298febe | |||
| 5a13a497dc | |||
| 37711e14dd | |||
| e89fd28707 | |||
| 7a91ee2f49 | |||
| 4b76ea5392 | |||
| d2a81b7bd9 | |||
| 9dd39509b5 | |||
| d605bcfe7a | |||
| 73abdb535a | |||
| ab9506c4f6 | |||
| 994a629401 | |||
| 00c55160a7 | |||
| e3c6edb894 | |||
| 4574c016cd | |||
| ca6c19f4c8 | |||
| 0f51f6e71f | |||
| 4bd672bf94 | |||
| 136fd77f3b |
@@ -1,4 +1,4 @@
on: [push, pull_request]
on: [push]

name: Continuous integration

@@ -49,3 +49,19 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
      - run: cargo build

  udeps:
    name: Disallow unused dependencies
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly

      - name: Run cargo-udeps
        uses: aig787/cargo-udeps-action@v1
        with:
          version: 'latest'
          args: '--all-targets'
7405  Cargo.lock  generated
File diff suppressed because it is too large
13  Cargo.toml
@@ -1,8 +1,15 @@
[workspace]
resolver = "2"
#default-members = ["server"]
members = ["notmuch", "procmail2notmuch", "shared"]
#members = ["web", "server", "notmuch", "procmail2notmuch", "shared"]
default-members = ["server"]
members = ["web", "server", "notmuch", "procmail2notmuch", "shared"]

[workspace.package]
authors = ["Bill Thiede <git@xinu.tv>"]
edition = "2021"
license = "UNLICENSED"
publish = ["xinu"]
version = "0.15.11"
repository = "https://git.z.xinu.tv/wathiede/letterbox"

[profile.dev]
opt-level = 1
19  Justfile  Normal file
@@ -0,0 +1,19 @@
export CARGO_INCREMENTAL := "0"
export RUSTFLAGS := "-D warnings"

default:
    @echo "Run: just patch|minor|major"

major: (_release "major")
minor: (_release "minor")
patch: (_release "patch")

sqlx-prepare:
    cd server; cargo sqlx prepare && git add .sqlx; git commit -m "cargo sqlx prepare" .sqlx || true

pull:
    git pull


_release level: pull sqlx-prepare
    cargo-release release -x {{ level }} --workspace --no-confirm --registry=xinu
2  dev.sh
@@ -3,5 +3,5 @@ tmux new-session -d -s letterbox-dev
tmux rename-window web
tmux send-keys "cd web; trunk serve -w ../.git -w ../shared -w ../notmuch -w ./" C-m
tmux new-window -n server
tmux send-keys "cd server; cargo watch -c -x run -w ../.git -w ../shared -w ../notmuch -w ./" C-m
tmux send-keys "cd server; cargo watch -c -w ../.git -w ../shared -w ../notmuch -w ./ -x 'run postgres://newsreader@nixos-07.h.xinu.tv/newsreader ../target/database/newsreader /tmp/letterbox/slurp'" C-m
tmux attach -d -t letterbox-dev
@@ -1,16 +1,18 @@
[package]
name = "letterbox-notmuch"
version = "0.1.0"
edition = "2021"
exclude = ["/testdata"]
description = "Wrapper for calling notmuch cli"
license = "UNLICENSED"
repository = "https://git.z.xinu.tv/wathiede/letterbox"
publish = ["xinu"]
authors.workspace = true
edition.workspace = true
license.workspace = true
publish.workspace = true
repository.workspace = true
version.workspace = true


[dependencies]
log = "0.4.14"
mailparse = "0.16.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["unbounded_depth"] }
thiserror = "2.0.0"
@@ -207,6 +207,7 @@
|
||||
//! ```
|
||||
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
ffi::OsStr,
|
||||
io::{self},
|
||||
path::{Path, PathBuf},
|
||||
@@ -270,6 +271,12 @@ pub struct Headers {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub bcc: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(alias = "Delivered-To")]
|
||||
pub delivered_to: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(alias = "X-Original-To")]
|
||||
pub x_original_to: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub reply_to: Option<String>,
|
||||
pub date: String,
|
||||
}
|
||||
@@ -459,9 +466,11 @@ pub enum NotmuchError {
|
||||
StringUtf8Error(#[from] std::string::FromUtf8Error),
|
||||
#[error("failed to parse str as int")]
|
||||
ParseIntError(#[from] std::num::ParseIntError),
|
||||
#[error("failed to parse mail: {0}")]
|
||||
MailParseError(#[from] mailparse::MailParseError),
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
#[derive(Clone, Default)]
|
||||
pub struct Notmuch {
|
||||
config_path: Option<PathBuf>,
|
||||
}
|
||||
@@ -605,6 +614,80 @@ impl Notmuch {
|
||||
Ok(serde_json::from_slice(&res)?)
|
||||
}
|
||||
|
||||
#[instrument(skip_all)]
|
||||
pub fn unread_recipients(&self) -> Result<HashMap<String, usize>, NotmuchError> {
|
||||
let slice = self.run_notmuch([
|
||||
"show",
|
||||
"--include-html=false",
|
||||
"--entire-thread=false",
|
||||
"--body=false",
|
||||
"--format=json",
|
||||
// Arbitrary limit to prevent too much work
|
||||
"--limit=1000",
|
||||
"is:unread",
|
||||
])?;
|
||||
// Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
|
||||
// use that for parsing in rust.
|
||||
let s = String::from_utf8_lossy(&slice);
|
||||
let mut deserializer = serde_json::Deserializer::from_str(&s);
|
||||
deserializer.disable_recursion_limit();
|
||||
let ts: ThreadSet = serde::de::Deserialize::deserialize(&mut deserializer)?;
|
||||
deserializer.end()?;
|
||||
let mut r = HashMap::new();
|
||||
fn collect_from_thread_node(
|
||||
r: &mut HashMap<String, usize>,
|
||||
tn: &ThreadNode,
|
||||
) -> Result<(), NotmuchError> {
|
||||
let Some(msg) = &tn.0 else {
|
||||
return Ok(());
|
||||
};
|
||||
let mut addrs = vec![];
|
||||
let hdr = &msg.headers.to;
|
||||
if let Some(to) = hdr {
|
||||
addrs.push(to);
|
||||
} else {
|
||||
let hdr = &msg.headers.x_original_to;
|
||||
if let Some(to) = hdr {
|
||||
addrs.push(to);
|
||||
} else {
|
||||
let hdr = &msg.headers.delivered_to;
|
||||
if let Some(to) = hdr {
|
||||
addrs.push(to);
|
||||
};
|
||||
};
|
||||
};
|
||||
let hdr = &msg.headers.cc;
|
||||
if let Some(cc) = hdr {
|
||||
addrs.push(cc);
|
||||
};
|
||||
for recipient in addrs {
|
||||
mailparse::addrparse(&recipient)?
|
||||
.into_inner()
|
||||
.iter()
|
||||
.for_each(|a| {
|
||||
let mailparse::MailAddr::Single(si) = a else {
|
||||
return;
|
||||
};
|
||||
let addr = &si.addr;
|
||||
|
||||
if addr == "couchmoney@gmail.com" || addr.ends_with("@xinu.tv") {
|
||||
*r.entry(addr.to_lowercase()).or_default() += 1;
|
||||
}
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
for t in ts.0 {
|
||||
for tn in t.0 {
|
||||
collect_from_thread_node(&mut r, &tn)?;
|
||||
for sub_tn in tn.1 {
|
||||
collect_from_thread_node(&mut r, &sub_tn)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn run_notmuch<I, S>(&self, args: I) -> Result<Vec<u8>, NotmuchError>
|
||||
where
|
||||
I: IntoIterator<Item = S>,
|
||||
|
||||
@@ -1,13 +1,18 @@
[package]
name = "letterbox-procmail2notmuch"
version = "0.1.0"
edition = "2021"
description = "Tool for generating notmuch rules from procmail"
license = "UNLICENSED"
repository = "https://git.z.xinu.tv/wathiede/letterbox"
publish = ["xinu"]
authors.workspace = true
edition.workspace = true
license.workspace = true
publish.workspace = true
repository.workspace = true
version.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
anyhow = "1.0.69"
clap = { version = "4.5.37", features = ["derive", "env"] }
serde = { version = "1.0.219", features = ["derive"] }
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio"] }
tokio = { version = "1.44.2", features = ["rt", "macros", "rt-multi-thread"] }
@@ -1,13 +1,19 @@
|
||||
use std::{convert::Infallible, io::Write, str::FromStr};
|
||||
use std::{collections::HashMap, convert::Infallible, io::Write, str::FromStr};
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
use clap::{Parser, Subcommand};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::{types::Json, PgPool};
|
||||
|
||||
#[derive(
|
||||
Copy, Clone, Debug, Default, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize,
|
||||
)]
|
||||
enum MatchType {
|
||||
From,
|
||||
Sender,
|
||||
To,
|
||||
Cc,
|
||||
Subject,
|
||||
List,
|
||||
ListId,
|
||||
DeliveredTo,
|
||||
XForwardedTo,
|
||||
ReplyTo,
|
||||
@@ -17,16 +23,17 @@ enum MatchType {
|
||||
#[default]
|
||||
Unknown,
|
||||
}
|
||||
#[derive(Debug, Default)]
|
||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||
struct Match {
|
||||
match_type: MatchType,
|
||||
needle: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||
struct Rule {
|
||||
stop_on_match: bool,
|
||||
matches: Vec<Match>,
|
||||
tags: Vec<String>,
|
||||
tag: Option<String>,
|
||||
}
|
||||
|
||||
fn unescape(s: &str) -> String {
|
||||
@@ -38,6 +45,10 @@ fn cleanup_match(prefix: &str, s: &str) -> String {
|
||||
}
|
||||
|
||||
mod matches {
|
||||
// From https://linux.die.net/man/5/procmailrc
|
||||
// If the regular expression contains '^TO_' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^-a-zA-Z0-9_.])?)'
|
||||
// If the regular expression contains '^TO' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^a-zA-Z])?)', which should catch all destination specifications containing a specific word.
|
||||
|
||||
pub const TO: &'static str = "TO";
|
||||
pub const CC: &'static str = "Cc";
|
||||
pub const TOCC: &'static str = "(TO|Cc)";
|
||||
@@ -109,7 +120,7 @@ impl FromStr for Match {
|
||||
});
|
||||
} else if needle.starts_with(LIST_ID) {
|
||||
return Ok(Match {
|
||||
match_type: MatchType::List,
|
||||
match_type: MatchType::ListId,
|
||||
needle: cleanup_match(LIST_ID, needle),
|
||||
});
|
||||
} else if needle.starts_with(REPLY_TO) {
|
||||
@@ -149,13 +160,109 @@ impl FromStr for Match {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Subcommand)]
|
||||
enum Mode {
|
||||
Debug,
|
||||
Notmuchrc,
|
||||
LoadSql {
|
||||
#[arg(short, long, default_value = env!("DATABASE_URL"))]
|
||||
dsn: String,
|
||||
},
|
||||
}
|
||||
|
||||
/// Simple program to greet a person
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(version, about, long_about = None)]
|
||||
struct Args {
|
||||
#[arg(short, long, default_value = "/home/wathiede/dotfiles/procmailrc")]
|
||||
input: String,
|
||||
|
||||
#[command(subcommand)]
|
||||
mode: Mode,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
let args = Args::parse();
|
||||
let mut rules = Vec::new();
|
||||
let mut cur_rule = Rule::default();
|
||||
for l in std::fs::read_to_string(args.input)?.lines() {
|
||||
let l = if let Some(idx) = l.find('#') {
|
||||
&l[..idx]
|
||||
} else {
|
||||
l
|
||||
}
|
||||
.trim();
|
||||
if l.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if l.find('=').is_some() {
|
||||
// Probably a variable assignment, skip line
|
||||
continue;
|
||||
}
|
||||
let first = l.chars().nth(0).unwrap_or(' ');
|
||||
match first {
|
||||
':' => {
|
||||
// start of rule
|
||||
}
|
||||
'*' => {
|
||||
// add to current rule
|
||||
let m: Match = l.parse()?;
|
||||
cur_rule.matches.push(m);
|
||||
}
|
||||
'.' => {
|
||||
// delivery to folder
|
||||
cur_rule.tag = Some(cleanup_match(
|
||||
"",
|
||||
&l.replace('.', "/")
|
||||
.replace(' ', "")
|
||||
.trim_matches('/')
|
||||
.to_string(),
|
||||
));
|
||||
rules.push(cur_rule);
|
||||
cur_rule = Rule::default();
|
||||
}
|
||||
'/' => cur_rule = Rule::default(), // Ex. /dev/null
|
||||
'|' => cur_rule = Rule::default(), // external command
|
||||
'$' => {
|
||||
// TODO(wathiede): tag messages with no other tag as 'inbox'
|
||||
cur_rule.tag = Some(cleanup_match("", "inbox"));
|
||||
rules.push(cur_rule);
|
||||
cur_rule = Rule::default();
|
||||
} // variable, should only be $DEFAULT in my config
|
||||
_ => panic!("Unhandled first character '{}'\nLine: {}", first, l),
|
||||
}
|
||||
}
|
||||
match args.mode {
|
||||
Mode::Debug => print_rules(&rules),
|
||||
Mode::Notmuchrc => notmuch_from_rules(std::io::stdout(), &rules)?,
|
||||
Mode::LoadSql { dsn } => load_sql(&dsn, &rules).await?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn print_rules(rules: &[Rule]) {
|
||||
let mut tally = HashMap::new();
|
||||
for r in rules {
|
||||
for m in &r.matches {
|
||||
*tally.entry(m.match_type).or_insert(0) += 1;
|
||||
}
|
||||
}
|
||||
let mut sorted: Vec<_> = tally.iter().map(|(k, v)| (v, k)).collect();
|
||||
sorted.sort();
|
||||
sorted.reverse();
|
||||
for (v, k) in sorted {
|
||||
println!("{k:?}: {v}");
|
||||
}
|
||||
}
|
||||
|
||||
fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()> {
|
||||
// TODO(wathiede): if reindexing this many tags is too slow, see if combining rules per tag is
|
||||
// faster.
|
||||
let mut lines = Vec::new();
|
||||
for r in rules {
|
||||
for m in &r.matches {
|
||||
for t in &r.tags {
|
||||
if let Some(t) = &r.tag {
|
||||
if let MatchType::Unknown = m.match_type {
|
||||
eprintln!("rule has unknown match {:?}", r);
|
||||
continue;
|
||||
@@ -168,7 +275,7 @@ fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()>
|
||||
MatchType::To => "to:",
|
||||
MatchType::Cc => "to:",
|
||||
MatchType::Subject => "subject:",
|
||||
MatchType::List => "List-ID:",
|
||||
MatchType::ListId => "List-ID:",
|
||||
MatchType::Body => "",
|
||||
// TODO(wathiede): these will probably require adding fields to notmuch
|
||||
// index. Handle them later.
|
||||
@@ -200,56 +307,25 @@ fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()>
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
let input = "/home/wathiede/dotfiles/procmailrc";
|
||||
let mut rules = Vec::new();
|
||||
let mut cur_rule = Rule::default();
|
||||
for l in std::fs::read_to_string(input)?.lines() {
|
||||
let l = if let Some(idx) = l.find('#') {
|
||||
&l[..idx]
|
||||
} else {
|
||||
l
|
||||
}
|
||||
.trim();
|
||||
if l.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if l.find('=').is_some() {
|
||||
// Probably a variable assignment, skip line
|
||||
continue;
|
||||
}
|
||||
let first = l.chars().nth(0).unwrap_or(' ');
|
||||
match first {
|
||||
':' => {
|
||||
// start of rule
|
||||
}
|
||||
'*' => {
|
||||
// add to current rule
|
||||
let m: Match = l.parse()?;
|
||||
cur_rule.matches.push(m);
|
||||
}
|
||||
'.' => {
|
||||
// delivery to folder
|
||||
cur_rule.tags.push(cleanup_match(
|
||||
"",
|
||||
&l.replace('.', "/")
|
||||
.replace(' ', "")
|
||||
.trim_matches('/')
|
||||
.to_string(),
|
||||
));
|
||||
rules.push(cur_rule);
|
||||
cur_rule = Rule::default();
|
||||
}
|
||||
'|' => cur_rule = Rule::default(), // external command
|
||||
'$' => {
|
||||
// TODO(wathiede): tag messages with no other tag as 'inbox'
|
||||
cur_rule.tags.push(cleanup_match("", "inbox"));
|
||||
rules.push(cur_rule);
|
||||
cur_rule = Rule::default();
|
||||
} // variable, should only be $DEFAULT in my config
|
||||
_ => panic!("Unhandled first character '{}' {}", first, l),
|
||||
}
|
||||
async fn load_sql(dsn: &str, rules: &[Rule]) -> anyhow::Result<()> {
|
||||
let pool = PgPool::connect(dsn).await?;
|
||||
println!("clearing email_rule table");
|
||||
sqlx::query!("DELETE FROM email_rule")
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
|
||||
for (order, rule) in rules.iter().enumerate() {
|
||||
println!("inserting {order}: {rule:?}");
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO email_rule (sort_order, rule)
|
||||
VALUES ($1, $2)
|
||||
"#,
|
||||
order as i32,
|
||||
Json(rule) as _
|
||||
)
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
}
|
||||
notmuch_from_rules(std::io::stdout(), &rules)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\nSELECT id\nFROM feed\nWHERE slug = $1\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Int4"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "dabd12987369cb273c0191d46645c376439d246d5a697340574c6afdac93d2cc"
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\nINSERT INTO feed ( name, slug, url, homepage, selector )\nVALUES ( $1, $2, $3, '', '' )\nRETURNING id\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Int4"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text",
|
||||
"Text",
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "e2a448aaf4fe92fc1deda10bf844f6b9225d35758cba7c9f337c1a730aee41bd"
|
||||
}
|
||||
@@ -1,12 +1,13 @@
[package]
name = "letterbox-server"
version = "0.1.0"
edition = "2021"
default-run = "letterbox-server"
description = "Backend for letterbox"
license = "UNLICENSED"
repository = "https://git.z.xinu.tv/wathiede/letterbox"
publish = ["xinu"]
authors.workspace = true
edition.workspace = true
license.workspace = true
publish.workspace = true
repository.workspace = true
version.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -14,44 +15,45 @@ publish = ["xinu"]
ammonia = "4.0.0"
anyhow = "1.0.79"
async-graphql = { version = "7", features = ["log"] }
async-graphql-rocket = "7"
async-graphql-axum = "7.0.15"
async-trait = "0.1.81"
build-info = "0.0.39"
cacher = { version = "0.1.0", registry = "xinu" }
axum = { version = "0.8.3", features = ["ws"] }
axum-macros = "0.5.0"
build-info = "0.0.40"
cacher = { version = "0.2.0", registry = "xinu" }
chrono = "0.4.39"
clap = { version = "4.5.23", features = ["derive"] }
clap = { version = "4.5.36", features = ["derive"] }
css-inline = "0.14.0"
futures = "0.3.31"
headers = "0.4.0"
html-escape = "0.2.13"
letterbox-notmuch = { version = "0.15.11", path = "../notmuch", registry = "xinu" }
letterbox-shared = { version = "0.15.11", path = "../shared", registry = "xinu" }
linkify = "0.10.0"
log = "0.4.17"
lol_html = "2.0.0"
mailparse = "0.16.0"
maplit = "1.0.2"
memmap = "0.7.0"
opentelemetry = "0.28.0"
regex = "1.11.1"
reqwest = { version = "0.12.7", features = ["blocking"] }
rocket = { version = "0.5.0-rc.2", features = ["json"] }
rocket_cors = "0.6.0"
scraper = "0.22.0"
scraper = "0.23.0"
serde = { version = "1.0.147", features = ["derive"] }
serde_json = "1.0.87"
sqlx = { version = "0.8.2", features = ["postgres", "runtime-tokio", "time"] }
tantivy = { version = "0.22.0", optional = true }
tantivy = { version = "0.24.0", optional = true }
thiserror = "2.0.0"
tokio = "1.26.0"
tower-http = { version = "0.6.2", features = ["trace"] }
tracing = "0.1.41"
url = "2.5.2"
urlencoding = "2.1.3"
#xtracing = { path = "../../xtracing" }
#xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
xtracing = { version = "0.2.0", registry = "xinu" }
letterbox-notmuch = { version = "0.1.0", registry = "xinu" }
letterbox-shared = { version = "0.1.0", registry = "xinu" }
#xtracing = { path = "../../xtracing" }
xtracing = { version = "0.3.0", registry = "xinu" }

[build-dependencies]
build-info-build = "0.0.39"
build-info-build = "0.0.40"

[features]
#default = [ "tantivy" ]

@@ -5,7 +5,6 @@ newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
newsreader_tantivy_db_path = "../target/database/newsreader"

[debug]
address = "0.0.0.0"
port = 9345
# Uncomment to make it production like.
#log_level = "critical"
20  server/migrations/20250330175930_update-nzfinder-link.sql  Normal file
@@ -0,0 +1,20 @@
-- Bad examples:
-- https://nzbfinder.ws/getnzb/d2c3e5a08abadd985dccc6a574122892030b6a9a.nzb&i=95972&r=b55082d289937c050dedc203c9653850
-- https://nzbfinder.ws/getnzb?id=45add174-7da4-4445-bf2b-a67dbbfc07fe.nzb&r=b55082d289937c050dedc203c9653850
-- https://nzbfinder.ws/api/v1/getnzb?id=82486020-c192-4fa0-a7e7-798d7d72e973.nzb&r=b55082d289937c050dedc203c9653850
UPDATE nzb_posts
SET link =
    regexp_replace(
        regexp_replace(
            regexp_replace(
                link,
                'https://nzbfinder.ws/getnzb/',
                'https://nzbfinder.ws/api/v1/getnzb?id='
            ),
            'https://nzbfinder.ws/getnzb',
            'https://nzbfinder.ws/api/v1/getnzb'
        ),
        '&r=',
        '&apikey='
    )
;
3  server/migrations/20250419202131_email-rules.down.sql  Normal file
@@ -0,0 +1,3 @@
DROP TABLE IF EXISTS email_rule;

-- Add down migration script here
5  server/migrations/20250419202131_email-rules.up.sql  Normal file
@@ -0,0 +1,5 @@
CREATE TABLE IF NOT EXISTS email_rule (
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    sort_order integer NOT NULL,
    rule jsonb NOT NULL
);
@@ -1,22 +0,0 @@
use clap::Parser;
use letterbox_server::mail::read_mail_to_db;
use sqlx::postgres::PgPool;

/// Add certain emails as posts in newsfeed app.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
    /// DB URL, something like postgres://newsreader@nixos-07.h.xinu.tv/newsreader
    #[arg(short, long)]
    db_url: String,
    /// path to parse
    path: String,
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
    let args = Args::parse();
    let pool = PgPool::connect(&args.db_url).await?;
    read_mail_to_db(&pool, &args.path).await?;
    Ok(())
}
@@ -1,114 +1,101 @@
|
||||
// Rocket generates a lot of warnings for handlers
|
||||
// TODO: figure out why
|
||||
#![allow(unreachable_patterns)]
|
||||
#[macro_use]
|
||||
extern crate rocket;
|
||||
use std::{error::Error, io::Cursor, str::FromStr};
|
||||
use std::{error::Error, net::SocketAddr, sync::Arc, time::Duration};
|
||||
|
||||
use async_graphql::{extensions, http::GraphiQLSource, EmptySubscription, Schema};
|
||||
use async_graphql_rocket::{GraphQLQuery, GraphQLRequest, GraphQLResponse};
|
||||
use async_graphql::{extensions, http::GraphiQLSource, Schema};
|
||||
use async_graphql_axum::{GraphQL, GraphQLSubscription};
|
||||
//allows to extract the IP of connecting user
|
||||
use axum::extract::connect_info::ConnectInfo;
|
||||
use axum::{
|
||||
extract::{self, ws::WebSocketUpgrade, Query, State},
|
||||
http::{header, StatusCode},
|
||||
response::{self, IntoResponse, Response},
|
||||
routing::{any, get, post},
|
||||
Router,
|
||||
};
|
||||
use cacher::FilesystemCacher;
|
||||
use letterbox_notmuch::{Notmuch, NotmuchError, ThreadSet};
|
||||
use clap::Parser;
|
||||
use letterbox_notmuch::Notmuch;
|
||||
#[cfg(feature = "tantivy")]
|
||||
use letterbox_server::tantivy::TantivyConnection;
|
||||
use letterbox_server::{
|
||||
config::Config,
|
||||
error::ServerError,
|
||||
graphql::{Attachment, GraphqlSchema, Mutation, QueryRoot},
|
||||
graphql::{compute_catchup_ids, Attachment, MutationRoot, QueryRoot, SubscriptionRoot},
|
||||
nm::{attachment_bytes, cid_attachment_bytes},
|
||||
ws::ConnectionTracker,
|
||||
};
|
||||
use rocket::{
|
||||
fairing::AdHoc,
|
||||
http::{ContentType, Header},
|
||||
request::Request,
|
||||
response::{content, Debug, Responder},
|
||||
serde::json::Json,
|
||||
Response, State,
|
||||
};
|
||||
use rocket_cors::{AllowedHeaders, AllowedOrigins};
|
||||
use letterbox_shared::WebsocketMessage;
|
||||
use serde::Deserialize;
|
||||
use sqlx::postgres::PgPool;
|
||||
use tokio::{net::TcpListener, sync::Mutex};
|
||||
use tower_http::trace::{DefaultMakeSpan, TraceLayer};
|
||||
use tracing::{info, warn};
|
||||
|
||||
#[get("/show/<query>/pretty")]
|
||||
async fn show_pretty(
|
||||
nm: &State<Notmuch>,
|
||||
query: &str,
|
||||
) -> Result<Json<ThreadSet>, Debug<ServerError>> {
|
||||
let query = urlencoding::decode(query).map_err(|e| ServerError::from(NotmuchError::from(e)))?;
|
||||
let res = nm.show(&query).map_err(ServerError::from)?;
|
||||
Ok(Json(res))
|
||||
// Make our own error that wraps `anyhow::Error`.
|
||||
struct AppError(letterbox_server::ServerError);
|
||||
|
||||
// Tell axum how to convert `AppError` into a response.
|
||||
impl IntoResponse for AppError {
|
||||
fn into_response(self) -> Response {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
format!("Something went wrong: {}", self.0),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/show/<query>")]
|
||||
async fn show(nm: &State<Notmuch>, query: &str) -> Result<Json<ThreadSet>, Debug<NotmuchError>> {
|
||||
let query = urlencoding::decode(query).map_err(NotmuchError::from)?;
|
||||
let res = nm.show(&query)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
struct InlineAttachmentResponder(Attachment);
|
||||
|
||||
impl<'r, 'o: 'r> Responder<'r, 'o> for InlineAttachmentResponder {
|
||||
fn respond_to(self, _: &'r Request<'_>) -> rocket::response::Result<'o> {
|
||||
let mut resp = Response::build();
|
||||
if let Some(filename) = self.0.filename {
|
||||
resp.header(Header::new(
|
||||
"Content-Disposition",
|
||||
format!(r#"inline; filename="{}""#, filename),
|
||||
));
|
||||
}
|
||||
if let Some(content_type) = self.0.content_type {
|
||||
if let Some(ct) = ContentType::parse_flexible(&content_type) {
|
||||
resp.header(ct);
|
||||
}
|
||||
}
|
||||
resp.sized_body(self.0.bytes.len(), Cursor::new(self.0.bytes))
|
||||
.ok()
|
||||
// This enables using `?` on functions that return `Result<_, letterbox_server::Error>` to turn them into
|
||||
// `Result<_, AppError>`. That way you don't need to do that manually.
|
||||
impl<E> From<E> for AppError
|
||||
where
|
||||
E: Into<letterbox_server::ServerError>,
|
||||
{
|
||||
fn from(err: E) -> Self {
|
||||
Self(err.into())
|
||||
}
|
||||
}
|
||||
|
||||
struct DownloadAttachmentResponder(Attachment);
|
||||
|
||||
impl<'r, 'o: 'r> Responder<'r, 'o> for DownloadAttachmentResponder {
|
||||
fn respond_to(self, _: &'r Request<'_>) -> rocket::response::Result<'o> {
|
||||
let mut resp = Response::build();
|
||||
if let Some(filename) = self.0.filename {
|
||||
resp.header(Header::new(
|
||||
"Content-Disposition",
|
||||
format!(r#"attachment; filename="{}""#, filename),
|
||||
));
|
||||
}
|
||||
if let Some(content_type) = self.0.content_type {
|
||||
if let Some(ct) = ContentType::parse_flexible(&content_type) {
|
||||
resp.header(ct);
|
||||
}
|
||||
}
|
||||
resp.sized_body(self.0.bytes.len(), Cursor::new(self.0.bytes))
|
||||
.ok()
|
||||
fn inline_attachment_response(attachment: Attachment) -> impl IntoResponse {
|
||||
info!("attachment filename {:?}", attachment.filename);
|
||||
let mut hdr_map = headers::HeaderMap::new();
|
||||
if let Some(filename) = attachment.filename {
|
||||
hdr_map.insert(
|
||||
header::CONTENT_DISPOSITION,
|
||||
format!(r#"inline; filename="{}""#, filename)
|
||||
.parse()
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
if let Some(ct) = attachment.content_type {
|
||||
hdr_map.insert(header::CONTENT_TYPE, ct.parse().unwrap());
|
||||
}
|
||||
info!("hdr_map {hdr_map:?}");
|
||||
(hdr_map, attachment.bytes).into_response()
|
||||
}
|
||||
|
||||
#[get("/cid/<id>/<cid>")]
|
||||
async fn view_cid(
|
||||
nm: &State<Notmuch>,
|
||||
id: &str,
|
||||
cid: &str,
|
||||
) -> Result<InlineAttachmentResponder, Debug<ServerError>> {
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
format!("id:{}", id)
|
||||
};
|
||||
info!("view cid attachment {mid} {cid}");
|
||||
let attachment = cid_attachment_bytes(nm, &mid, &cid)?;
|
||||
Ok(InlineAttachmentResponder(attachment))
|
||||
fn download_attachment_response(attachment: Attachment) -> impl IntoResponse {
|
||||
info!("attachment filename {:?}", attachment.filename);
|
||||
let mut hdr_map = headers::HeaderMap::new();
|
||||
if let Some(filename) = attachment.filename {
|
||||
hdr_map.insert(
|
||||
header::CONTENT_DISPOSITION,
|
||||
format!(r#"attachment; filename="{}""#, filename)
|
||||
.parse()
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
if let Some(ct) = attachment.content_type {
|
||||
hdr_map.insert(header::CONTENT_TYPE, ct.parse().unwrap());
|
||||
}
|
||||
info!("hdr_map {hdr_map:?}");
|
||||
(hdr_map, attachment.bytes).into_response()
|
||||
}
|
||||
|
||||
#[get("/view/attachment/<id>/<idx>/<_>")]
|
||||
#[axum_macros::debug_handler]
|
||||
async fn view_attachment(
|
||||
nm: &State<Notmuch>,
|
||||
id: &str,
|
||||
idx: &str,
|
||||
) -> Result<InlineAttachmentResponder, Debug<ServerError>> {
|
||||
State(AppState { nm, .. }): State<AppState>,
|
||||
extract::Path((id, idx, _)): extract::Path<(String, String, String)>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
@@ -119,16 +106,14 @@ async fn view_attachment(
|
||||
.split('.')
|
||||
.map(|s| s.parse().expect("not a usize"))
|
||||
.collect();
|
||||
let attachment = attachment_bytes(nm, &mid, &idx)?;
|
||||
Ok(InlineAttachmentResponder(attachment))
|
||||
let attachment = attachment_bytes(&nm, &mid, &idx)?;
|
||||
Ok(inline_attachment_response(attachment))
|
||||
}
|
||||
|
||||
#[get("/download/attachment/<id>/<idx>/<_>")]
|
||||
async fn download_attachment(
|
||||
nm: &State<Notmuch>,
|
||||
id: &str,
|
||||
idx: &str,
|
||||
) -> Result<DownloadAttachmentResponder, Debug<ServerError>> {
|
||||
State(AppState { nm, .. }): State<AppState>,
|
||||
extract::Path((id, idx, _)): extract::Path<(String, String, String)>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
@@ -139,102 +124,193 @@ async fn download_attachment(
|
||||
.split('.')
|
||||
.map(|s| s.parse().expect("not a usize"))
|
||||
.collect();
|
||||
let attachment = attachment_bytes(nm, &mid, &idx)?;
|
||||
Ok(DownloadAttachmentResponder(attachment))
|
||||
let attachment = attachment_bytes(&nm, &mid, &idx)?;
|
||||
Ok(download_attachment_response(attachment))
|
||||
}
|
||||
|
||||
#[get("/original/<id>")]
|
||||
async fn original(
|
||||
nm: &State<Notmuch>,
|
||||
id: &str,
|
||||
) -> Result<(ContentType, Vec<u8>), Debug<NotmuchError>> {
|
||||
async fn view_cid(
|
||||
State(AppState { nm, .. }): State<AppState>,
|
||||
extract::Path((id, cid)): extract::Path<(String, String)>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
format!("id:{}", id)
|
||||
};
|
||||
let res = nm.show_original(&mid)?;
|
||||
Ok((ContentType::Plain, res))
|
||||
info!("view cid attachment {mid} {cid}");
|
||||
let attachment = cid_attachment_bytes(&nm, &mid, &cid)?;
|
||||
Ok(inline_attachment_response(attachment))
|
||||
}
|
||||
|
||||
#[rocket::get("/")]
|
||||
fn graphiql() -> content::RawHtml<String> {
|
||||
content::RawHtml(GraphiQLSource::build().endpoint("/api/graphql").finish())
|
||||
// TODO make this work with gitea message ids like `wathiede/letterbox/pulls/91@git.z.xinu.tv`
|
||||
async fn view_original(
|
||||
State(AppState { nm, .. }): State<AppState>,
|
||||
extract::Path(id): extract::Path<String>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
info!("view_original {id}");
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
format!("id:{}", id)
|
||||
};
|
||||
let files = nm.files(&mid)?;
|
||||
let Some(path) = files.first() else {
|
||||
warn!("failed to find files for message {mid}");
|
||||
return Ok((StatusCode::NOT_FOUND, mid).into_response());
|
||||
};
|
||||
let str = std::fs::read_to_string(&path)?;
|
||||
Ok(str.into_response())
|
||||
}
|
||||
|
||||
#[rocket::get("/graphql?<query..>")]
|
||||
async fn graphql_query(schema: &State<GraphqlSchema>, query: GraphQLQuery) -> GraphQLResponse {
|
||||
query.execute(schema.inner()).await
|
||||
async fn graphiql() -> impl IntoResponse {
|
||||
response::Html(
|
||||
GraphiQLSource::build()
|
||||
.endpoint("/api/graphql/")
|
||||
.subscription_endpoint("/api/graphql/ws")
|
||||
.finish(),
|
||||
)
|
||||
}
|
||||
|
||||
#[rocket::post("/graphql", data = "<request>", format = "application/json")]
|
||||
async fn graphql_request(
|
||||
schema: &State<GraphqlSchema>,
|
||||
request: GraphQLRequest,
|
||||
) -> GraphQLResponse {
|
||||
request.execute(schema.inner()).await
|
||||
async fn start_ws(
|
||||
ws: WebSocketUpgrade,
|
||||
ConnectInfo(addr): ConnectInfo<SocketAddr>,
|
||||
State(AppState {
|
||||
connection_tracker, ..
|
||||
}): State<AppState>,
|
||||
) -> impl IntoResponse {
|
||||
info!("intiating websocket connection for {addr}");
|
||||
ws.on_upgrade(async move |socket| connection_tracker.lock().await.add_peer(socket, addr).await)
|
||||
}
|
||||
|
||||
#[rocket::main]
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct NotificationParams {
|
||||
delay_ms: Option<u64>,
|
||||
}
|
||||
|
||||
async fn send_refresh_websocket_handler(
|
||||
State(AppState {
|
||||
connection_tracker, ..
|
||||
}): State<AppState>,
|
||||
params: Query<NotificationParams>,
|
||||
) -> impl IntoResponse {
|
||||
info!("send_refresh_websocket_handler params {params:?}");
|
||||
if let Some(delay_ms) = params.delay_ms {
|
||||
let delay = Duration::from_millis(delay_ms);
|
||||
info!("sleeping {delay:?}");
|
||||
tokio::time::sleep(delay).await;
|
||||
}
|
||||
connection_tracker
|
||||
.lock()
|
||||
.await
|
||||
.send_message_all(WebsocketMessage::RefreshMessages)
|
||||
.await;
|
||||
"refresh triggered"
|
||||
}
|
||||
|
||||
async fn watch_new(
|
||||
nm: Notmuch,
|
||||
pool: PgPool,
|
||||
conn_tracker: Arc<Mutex<ConnectionTracker>>,
|
||||
poll_time: Duration,
|
||||
) -> Result<(), async_graphql::Error> {
|
||||
let mut old_ids = Vec::new();
|
||||
loop {
|
||||
let ids = compute_catchup_ids(&nm, &pool, "is:unread").await?;
|
||||
if old_ids != ids {
|
||||
info!("old_ids: {old_ids:?}\n ids: {ids:?}");
|
||||
conn_tracker
|
||||
.lock()
|
||||
.await
|
||||
.send_message_all(WebsocketMessage::RefreshMessages)
|
||||
.await
|
||||
}
|
||||
old_ids = ids;
|
||||
tokio::time::sleep(poll_time).await;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AppState {
|
||||
nm: Notmuch,
|
||||
connection_tracker: Arc<Mutex<ConnectionTracker>>,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(version, about, long_about = None)]
|
||||
struct Cli {
|
||||
#[arg(short, long, default_value = "0.0.0.0:9345")]
|
||||
addr: SocketAddr,
|
||||
newsreader_database_url: String,
|
||||
newsreader_tantivy_db_path: String,
|
||||
slurp_cache_path: String,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn Error>> {
|
||||
let cli = Cli::parse();
|
||||
let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
|
||||
build_info::build_info!(fn bi);
|
||||
info!("Build Info: {}", letterbox_shared::build_version(bi));
|
||||
let allowed_origins = AllowedOrigins::all();
|
||||
let cors = rocket_cors::CorsOptions {
|
||||
allowed_origins,
|
||||
allowed_methods: vec!["Get"]
|
||||
.into_iter()
|
||||
.map(|s| FromStr::from_str(s).unwrap())
|
||||
.collect(),
|
||||
allowed_headers: AllowedHeaders::some(&["Authorization", "Accept"]),
|
||||
allow_credentials: true,
|
||||
..Default::default()
|
||||
if !std::fs::exists(&cli.slurp_cache_path)? {
|
||||
info!("Creating slurp cache @ '{}'", &cli.slurp_cache_path);
|
||||
std::fs::create_dir_all(&cli.slurp_cache_path)?;
|
||||
}
|
||||
.to_cors()?;
|
||||
|
||||
let rkt = rocket::build()
|
||||
.mount(
|
||||
letterbox_shared::urls::MOUNT_POINT,
|
||||
routes![
|
||||
original,
|
||||
show_pretty,
|
||||
show,
|
||||
graphql_query,
|
||||
graphql_request,
|
||||
graphiql,
|
||||
view_cid,
|
||||
view_attachment,
|
||||
download_attachment,
|
||||
],
|
||||
)
|
||||
.attach(cors)
|
||||
.attach(AdHoc::config::<Config>());
|
||||
|
||||
let config: Config = rkt.figment().extract()?;
|
||||
if !std::fs::exists(&config.slurp_cache_path)? {
|
||||
info!("Creating slurp cache @ '{}'", &config.slurp_cache_path);
|
||||
std::fs::create_dir_all(&config.slurp_cache_path)?;
|
||||
}
|
||||
let pool = PgPool::connect(&config.newsreader_database_url).await?;
|
||||
let pool = PgPool::connect(&cli.newsreader_database_url).await?;
|
||||
let nm = Notmuch::default();
|
||||
sqlx::migrate!("./migrations").run(&pool).await?;
|
||||
#[cfg(feature = "tantivy")]
|
||||
let tantivy_conn = TantivyConnection::new(&config.newsreader_tantivy_db_path)?;
|
||||
let tantivy_conn = TantivyConnection::new(&cli.newsreader_tantivy_db_path)?;
|
||||
|
||||
let cacher = FilesystemCacher::new(&config.slurp_cache_path)?;
|
||||
let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
|
||||
.data(Notmuch::default())
|
||||
let cacher = FilesystemCacher::new(&cli.slurp_cache_path)?;
|
||||
let schema = Schema::build(QueryRoot, MutationRoot, SubscriptionRoot)
|
||||
.data(nm.clone())
|
||||
.data(cacher)
|
||||
.data(pool.clone());
|
||||
|
||||
#[cfg(feature = "tantivy")]
|
||||
let schema = schema.data(tantivy_conn);
|
||||
|
||||
let schema = schema.extension(extensions::Logger).finish();
|
||||
|
||||
let rkt = rkt.manage(schema).manage(pool).manage(Notmuch::default());
|
||||
//.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))
|
||||
let connection_tracker = Arc::new(Mutex::new(ConnectionTracker::default()));
|
||||
let ct = Arc::clone(&connection_tracker);
|
||||
let poll_time = Duration::from_secs(60);
|
||||
let _h = tokio::spawn(watch_new(nm.clone(), pool, ct, poll_time));
|
||||
|
||||
rkt.launch().await?;
|
||||
let api_routes = Router::new()
|
||||
.route(
|
||||
"/download/attachment/{id}/{idx}/{*rest}",
|
||||
get(download_attachment),
|
||||
)
|
||||
.route("/view/attachment/{id}/{idx}/{*rest}", get(view_attachment))
|
||||
.route("/original/{id}", get(view_original))
|
||||
.route("/cid/{id}/{cid}", get(view_cid))
|
||||
.route("/ws", any(start_ws))
|
||||
.route_service("/graphql/ws", GraphQLSubscription::new(schema.clone()))
|
||||
.route(
|
||||
"/graphql/",
|
||||
get(graphiql).post_service(GraphQL::new(schema.clone())),
|
||||
);
|
||||
|
||||
let notification_routes = Router::new()
|
||||
.route("/mail", post(send_refresh_websocket_handler))
|
||||
.route("/news", post(send_refresh_websocket_handler));
|
||||
let app = Router::new()
|
||||
.nest("/api", api_routes)
|
||||
.nest("/notification", notification_routes)
|
||||
.with_state(AppState {
|
||||
nm,
|
||||
connection_tracker,
|
||||
})
|
||||
.layer(
|
||||
TraceLayer::new_for_http()
|
||||
.make_span_with(DefaultMakeSpan::default().include_headers(true)),
|
||||
);
|
||||
|
||||
let listener = TcpListener::bind(cli.addr).await.unwrap();
|
||||
tracing::info!("listening on {}", listener.local_addr().unwrap());
|
||||
axum::serve(
|
||||
listener,
|
||||
app.into_make_service_with_connect_info::<SocketAddr>(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -2,10 +2,12 @@ use std::{fmt, str::FromStr};
|
||||
|
||||
use async_graphql::{
|
||||
connection::{self, Connection, Edge, OpaqueCursor},
|
||||
Context, EmptySubscription, Enum, Error, FieldResult, InputObject, Object, Schema,
|
||||
SimpleObject, Union,
|
||||
futures_util::Stream,
|
||||
Context, Enum, Error, FieldResult, InputObject, Object, Schema, SimpleObject, Subscription,
|
||||
Union,
|
||||
};
|
||||
use cacher::FilesystemCacher;
|
||||
use futures::stream;
|
||||
use letterbox_notmuch::Notmuch;
|
||||
use log::info;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -95,6 +97,10 @@ pub struct Message {
|
||||
pub to: Vec<Email>,
|
||||
// All CC headers found in email
|
||||
pub cc: Vec<Email>,
|
||||
// X-Original-To header found in email
|
||||
pub x_original_to: Option<Email>,
|
||||
// Delivered-To header found in email
|
||||
pub delivered_to: Option<Email>,
|
||||
// First Subject header found in email
|
||||
pub subject: Option<String>,
|
||||
// Parsed Date header, if found and valid
|
||||
@@ -285,8 +291,7 @@ impl QueryRoot {
|
||||
build_info::build_info!(fn bi);
|
||||
Ok(letterbox_shared::build_version(bi))
|
||||
}
|
||||
#[instrument(skip_all, fields(query=query))]
|
||||
#[instrument(skip_all, fields(query=query, request_id=request_id()))]
|
||||
#[instrument(skip_all, fields(query=query, rid=request_id()))]
|
||||
async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
|
||||
let nm = ctx.data_unchecked::<Notmuch>();
|
||||
let pool = ctx.data_unchecked::<PgPool>();
|
||||
@@ -306,10 +311,20 @@ impl QueryRoot {
|
||||
info!("count {newsreader_query:?} newsreader count {newsreader_count} notmuch count {notmuch_count} tantivy count {tantivy_count} total {total}");
|
||||
Ok(total)
|
||||
}
|
||||
#[instrument(skip_all, fields(query=query, rid=request_id()))]
|
||||
async fn catchup<'ctx>(
|
||||
&self,
|
||||
ctx: &Context<'ctx>,
|
||||
query: String,
|
||||
) -> Result<Vec<String>, Error> {
|
||||
let nm = ctx.data_unchecked::<Notmuch>();
|
||||
let pool = ctx.data_unchecked::<PgPool>();
|
||||
compute_catchup_ids(nm, pool, &query).await
|
||||
}
|
||||
|
||||
// TODO: this function doesn't get parallelism, possibly because notmuch is sync and blocks,
|
||||
// rewrite that with tokio::process:Command
|
||||
#[instrument(skip_all, fields(query=query, request_id=request_id()))]
|
||||
#[instrument(skip_all, fields(query=query, rid=request_id()))]
|
||||
async fn search<'ctx>(
|
||||
&self,
|
||||
ctx: &Context<'ctx>,
|
||||
@@ -467,7 +482,7 @@ impl QueryRoot {
|
||||
.await?)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, fields(request_id=request_id()))]
|
||||
#[instrument(skip_all, fields(rid=request_id()))]
|
||||
async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
|
||||
let nm = ctx.data_unchecked::<Notmuch>();
|
||||
let pool = ctx.data_unchecked::<PgPool>();
|
||||
@@ -476,7 +491,7 @@ impl QueryRoot {
|
||||
tags.append(&mut nm::tags(nm, needs_unread)?);
|
||||
Ok(tags)
|
||||
}
|
||||
#[instrument(skip_all, fields(thread_id=thread_id, request_id=request_id()))]
|
||||
#[instrument(skip_all, fields(thread_id=thread_id, rid=request_id()))]
|
||||
async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
|
||||
let nm = ctx.data_unchecked::<Notmuch>();
|
||||
let cacher = ctx.data_unchecked::<FilesystemCacher>();
|
||||
@@ -550,10 +565,10 @@ async fn tantivy_search(
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub struct Mutation;
|
||||
pub struct MutationRoot;
|
||||
#[Object]
|
||||
impl Mutation {
|
||||
#[instrument(skip_all, fields(query=query, unread=unread, request_id=request_id()))]
|
||||
impl MutationRoot {
|
||||
#[instrument(skip_all, fields(query=query, unread=unread, rid=request_id()))]
|
||||
async fn set_read_status<'ctx>(
|
||||
&self,
|
||||
ctx: &Context<'ctx>,
|
||||
@@ -572,7 +587,7 @@ impl Mutation {
|
||||
nm::set_read_status(nm, &query, unread).await?;
|
||||
Ok(true)
|
||||
}
|
||||
#[instrument(skip_all, fields(query=query, tag=tag, request_id=request_id()))]
|
||||
#[instrument(skip_all, fields(query=query, tag=tag, rid=request_id()))]
|
||||
async fn tag_add<'ctx>(
|
||||
&self,
|
||||
ctx: &Context<'ctx>,
|
||||
@@ -584,7 +599,7 @@ impl Mutation {
|
||||
nm.tag_add(&tag, &query)?;
|
||||
Ok(true)
|
||||
}
|
||||
#[instrument(skip_all, fields(query=query, tag=tag, request_id=request_id()))]
|
||||
#[instrument(skip_all, fields(query=query, tag=tag, rid=request_id()))]
|
||||
async fn tag_remove<'ctx>(
|
||||
&self,
|
||||
ctx: &Context<'ctx>,
|
||||
@@ -607,7 +622,7 @@ impl Mutation {
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
#[instrument(skip_all, fields(request_id=request_id()))]
|
||||
#[instrument(skip_all, fields(rid=request_id()))]
|
||||
async fn refresh<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
|
||||
let nm = ctx.data_unchecked::<Notmuch>();
|
||||
let cacher = ctx.data_unchecked::<FilesystemCacher>();
|
||||
@@ -624,4 +639,51 @@ impl Mutation {
|
||||
}
|
||||
}
|
||||
|
||||
pub type GraphqlSchema = Schema<QueryRoot, Mutation, EmptySubscription>;
|
||||
pub struct SubscriptionRoot;
|
||||
#[Subscription]
|
||||
impl SubscriptionRoot {
|
||||
async fn values(&self, _ctx: &Context<'_>) -> Result<impl Stream<Item = usize>, Error> {
|
||||
Ok(stream::iter(0..10))
|
||||
}
|
||||
}
|
||||
|
||||
pub type GraphqlSchema = Schema<QueryRoot, MutationRoot, SubscriptionRoot>;
|
||||
|
||||
#[instrument(skip_all, fields(query=query))]
|
||||
pub async fn compute_catchup_ids(
|
||||
nm: &Notmuch,
|
||||
pool: &PgPool,
|
||||
query: &str,
|
||||
) -> Result<Vec<String>, Error> {
|
||||
let query: Query = query.parse()?;
|
||||
// TODO: implement optimized versions of fetching just IDs
|
||||
let newsreader_fut = newsreader_search(pool, None, None, None, None, &query);
|
||||
let notmuch_fut = notmuch_search(nm, None, None, None, None, &query);
|
||||
let (newsreader_results, notmuch_results) = join!(newsreader_fut, notmuch_fut);
|
||||
|
||||
let newsreader_results = newsreader_results?;
|
||||
let notmuch_results = notmuch_results?;
|
||||
info!(
|
||||
"newsreader_results ({}) notmuch_results ({})",
|
||||
newsreader_results.len(),
|
||||
notmuch_results.len(),
|
||||
);
|
||||
|
||||
let mut results: Vec<_> = newsreader_results
|
||||
.into_iter()
|
||||
.chain(notmuch_results)
|
||||
.collect();
|
||||
// The leading '-' is to reverse sort
|
||||
results.sort_by_key(|item| match item {
|
||||
ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
|
||||
ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
|
||||
});
|
||||
let ids = results
|
||||
.into_iter()
|
||||
.map(|r| match r {
|
||||
ThreadSummaryCursor::Newsreader(_, ts) => ts.thread,
|
||||
ThreadSummaryCursor::Notmuch(_, ts) => ts.thread,
|
||||
})
|
||||
.collect();
|
||||
Ok(ids)
|
||||
}
|
||||
|
||||
@@ -1,9 +1,10 @@
pub mod config;
pub mod error;
pub mod graphql;
pub mod mail;
pub mod newsreader;
pub mod nm;
pub mod ws;

#[cfg(feature = "tantivy")]
pub mod tantivy;
@@ -18,6 +19,7 @@ use std::{
|
||||
use async_trait::async_trait;
|
||||
use cacher::{Cacher, FilesystemCacher};
|
||||
use css_inline::{CSSInliner, InlineError, InlineOptions};
|
||||
pub use error::ServerError;
|
||||
use linkify::{LinkFinder, LinkKind};
|
||||
use log::{debug, error, info, warn};
|
||||
use lol_html::{
|
||||
@@ -33,7 +35,6 @@ use thiserror::Error;
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
error::ServerError,
|
||||
graphql::{Corpus, ThreadSummary},
|
||||
newsreader::is_newsreader_thread,
|
||||
nm::is_notmuch_thread_or_id,
|
||||
@@ -318,13 +319,16 @@ impl<'c> Transformer for SlurpContents<'c> {
|
||||
} else {
|
||||
let resp = reqwest::get(link.as_str()).await?;
|
||||
let status = resp.status();
|
||||
if status.is_server_error() || retryable_status.contains(&status) {
|
||||
return Err(TransformError::RetryableHttpStatusError(
|
||||
status,
|
||||
link.to_string(),
|
||||
));
|
||||
if status.is_server_error() {
|
||||
error!("status error for {link}: {status}");
|
||||
return Ok(html.to_string());
|
||||
}
|
||||
if retryable_status.contains(&status) {
|
||||
error!("retryable error for {link}: {status}");
|
||||
return Ok(html.to_string());
|
||||
}
|
||||
if !status.is_success() {
|
||||
error!("unsuccessful for {link}: {status}");
|
||||
return Ok(html.to_string());
|
||||
}
|
||||
let body = resp.text().await?;
|
||||
@@ -341,7 +345,7 @@ impl<'c> Transformer for SlurpContents<'c> {
|
||||
"/* chrome-default.css */\n",
|
||||
include_str!("chrome-default.css"),
|
||||
"\n/* vars.css */\n",
|
||||
include_str!("../../web/static/vars.css"),
|
||||
include_str!("../static/vars.css"),
|
||||
//"\n/* Xinu Specific overrides */\n",
|
||||
//include_str!("custom.css"),
|
||||
);
|
||||
@@ -438,6 +442,34 @@ pub fn sanitize_html(
|
||||
}
|
||||
};
|
||||
let mut element_content_handlers = vec![
|
||||
// Remove width and height attributes on elements
|
||||
element!("[width],[height]", |el| {
|
||||
el.remove_attribute("width");
|
||||
el.remove_attribute("height");
|
||||
Ok(())
|
||||
}),
|
||||
// Remove width and height values from inline styles
|
||||
element!("[style]", |el| {
|
||||
let style = el.get_attribute("style").unwrap();
|
||||
let style = style
|
||||
.split(";")
|
||||
.filter(|s| {
|
||||
let Some((k, _)) = s.split_once(':') else {
|
||||
return true;
|
||||
};
|
||||
match k {
|
||||
"width" | "max-width" | "min-width" | "height" | "max-height"
|
||||
| "min-height" => false,
|
||||
_ => true,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join(";");
|
||||
if let Err(e) = el.set_attribute("style", &style) {
|
||||
error!("Failed to set style attribute: {e}");
|
||||
}
|
||||
Ok(())
|
||||
}),
|
||||
// Open links in new tab
|
||||
element!("a[href]", |el| {
|
||||
el.set_attribute("target", "_blank").unwrap();
|
||||
@@ -682,7 +714,7 @@ fn compute_offset_limit(
|
||||
first: Option<i32>,
|
||||
last: Option<i32>,
|
||||
) -> (i32, i32) {
|
||||
let default_page_size = 100;
|
||||
let default_page_size = 10000;
|
||||
match (after, before, first, last) {
|
||||
// Reasonable defaults
|
||||
(None, None, None, None) => (0, default_page_size),
|
||||
@@ -773,7 +805,19 @@ impl Query {
|
||||
for uid in &self.uids {
|
||||
parts.push(uid.clone());
|
||||
}
|
||||
parts.extend(self.remainder.clone());
|
||||
for r in &self.remainder {
|
||||
// Rewrite "to:" to include ExtraTo:. ExtraTo: is configured in
|
||||
// notmuch-config to index Delivered-To and X-Original-To headers.
|
||||
if r.starts_with("to:") {
|
||||
parts.push("(".to_string());
|
||||
parts.push(r.to_string());
|
||||
parts.push("OR".to_string());
|
||||
parts.push(r.replace("to:", "ExtraTo:"));
|
||||
parts.push(")".to_string());
|
||||
} else {
|
||||
parts.push(r.to_string());
|
||||
}
|
||||
}
|
||||
parts.join(" ")
|
||||
}
|
||||
}
|
||||
@@ -793,7 +837,17 @@ impl FromStr for Query {
|
||||
if word == "is:unread" {
|
||||
unread_only = true
|
||||
} else if word.starts_with("tag:") {
|
||||
tags.push(word["tag:".len()..].to_string());
|
||||
let t = &word["tag:".len()..];
|
||||
// Per-address emails are faked as `tag:@<domain>/<username>`, rewrite to `to:` form
|
||||
if t.starts_with('@') && t.contains('.') {
|
||||
let t = match t.split_once('/') {
|
||||
None => format!("to:{t}"),
|
||||
Some((domain, user)) => format!("to:{user}{domain}"),
|
||||
};
|
||||
remainder.push(t);
|
||||
} else {
|
||||
tags.push(t.to_string());
|
||||
};
|
||||
|
||||
/*
|
||||
} else if word.starts_with("tag:") {
|
||||
@@ -888,3 +942,21 @@ async fn clean_title(title: &str) -> Result<String, ServerError> {
}
Ok(title)
}

#[cfg(test)]
mod tests {
use super::{SanitizeHtml, Transformer};

#[tokio::test]
async fn strip_sizes() -> Result<(), Box<dyn std::error::Error>> {
let ss = SanitizeHtml {
cid_prefix: "",
base_url: &None,
};
let input = r#"<p width=16 height=16 style="color:blue;width:16px;height:16px;">This el has width and height attributes and inline styles</p>"#;
let want = r#"<p style="color:blue;">This el has width and height attributes and inline styles</p>"#;
let got = ss.transform(&None, input).await?;
assert_eq!(got, want);
Ok(())
}
}

@@ -1,113 +0,0 @@
use std::{fs::File, io::Read};

use mailparse::{
addrparse_header, dateparse, parse_mail, MailHeaderMap, MailParseError, ParsedMail,
};
use sqlx::postgres::PgPool;
use thiserror::Error;
use tracing::info;

#[derive(Error, Debug)]
pub enum MailError {
#[error("missing from header")]
MissingFrom,
#[error("missing from header display name")]
MissingFromDisplayName,
#[error("missing subject header")]
MissingSubject,
#[error("missing html part")]
MissingHtmlPart,
#[error("missing message ID")]
MissingMessageId,
#[error("missing date")]
MissingDate,
#[error("DB error {0}")]
SqlxError(#[from] sqlx::Error),
#[error("IO error {0}")]
IOError(#[from] std::io::Error),
#[error("mail parse error {0}")]
MailParseError(#[from] MailParseError),
}

pub async fn read_mail_to_db(pool: &PgPool, path: &str) -> Result<(), MailError> {
let mut file = File::open(path)?;
let mut buffer = Vec::new();
file.read_to_end(&mut buffer)?;
let m = parse_mail(&buffer)?;

let subject = m
.headers
.get_first_value("subject")
.ok_or(MailError::MissingSubject)?;

let from = addrparse_header(
m.headers
.get_first_header("from")
.ok_or(MailError::MissingFrom)?,
)?;
let from = from.extract_single_info().ok_or(MailError::MissingFrom)?;
let name = from.display_name.ok_or(MailError::MissingFromDisplayName)?;
let slug = name.to_lowercase().replace(' ', "-");
let url = from.addr;
let message_id = m
.headers
.get_first_value("Message-ID")
.ok_or(MailError::MissingMessageId)?;
let uid = &message_id;
let feed_id = find_feed(&pool, &name, &slug, &url).await?;
let date = dateparse(
&m.headers
.get_first_value("Date")
.ok_or(MailError::MissingDate)?,
)?;

println!("Feed: {feed_id} Subject: {}", subject);

if let Some(_m) = first_html(&m) {
info!("add email {slug} {subject} {message_id} {date} {uid} {url}");
} else {
return Err(MailError::MissingHtmlPart.into());
}

Ok(())
}
fn first_html<'m>(m: &'m ParsedMail<'m>) -> Option<&'m ParsedMail<'m>> {
for ele in m.parts() {
if ele.ctype.mimetype == "text/html" {
return Some(ele);
}
}
None
}
async fn find_feed(pool: &PgPool, name: &str, slug: &str, url: &str) -> Result<i32, MailError> {
match sqlx::query!(
r#"
SELECT id
FROM feed
WHERE slug = $1
"#,
slug
)
.fetch_one(pool)
.await
{
Err(sqlx::Error::RowNotFound) => {
let rec = sqlx::query!(
r#"
INSERT INTO feed ( name, slug, url, homepage, selector )
VALUES ( $1, $2, $3, '', '' )
RETURNING id
"#,
name,
slug,
url
)
.fetch_one(pool)
.await?;

return Ok(rec.id);
}
Ok(rec) => return Ok(rec.id),
Err(e) => return Err(e.into()),
};
}
@@ -1,11 +1,7 @@
- use std::{
- collections::HashMap,
- fs::File,
- hash::{DefaultHasher, Hash, Hasher},
- time::Instant,
- };
+ use std::{collections::HashMap, fs::File};

use letterbox_notmuch::Notmuch;
use letterbox_shared::compute_color;
use log::{error, info, warn};
use mailparse::{parse_content_type, parse_mail, MailHeader, MailHeaderMap, ParsedMail};
use memmap::MmapOptions;
@@ -107,7 +103,6 @@ pub async fn search(

#[instrument(name="nm::tags", skip_all, fields(needs_unread=needs_unread))]
pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
let now = Instant::now();
let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
// 10000 is an arbitrary number, if there's more than 10k unread messages, we'll
// get an inaccurate count.
@@ -123,13 +118,11 @@ pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
} else {
HashMap::new()
};
- let tags = nm
+ let tags: Vec<_> = nm
.tags()?
.into_iter()
.map(|tag| {
- let mut hasher = DefaultHasher::new();
- tag.hash(&mut hasher);
- let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
+ let hex = compute_color(&tag);
let unread = if needs_unread {
*unread_msg_cnt.get(&tag).unwrap_or(&0)
} else {
@@ -142,8 +135,24 @@ pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
unread,
}
})
.chain(
nm.unread_recipients()?
.into_iter()
.filter_map(|(name, unread)| {
let Some(idx) = name.find('@') else {
return None;
};
let name = format!("{}/{}", &name[idx..], &name[..idx]);
let bg_color = compute_color(&name);
Some(Tag {
name,
fg_color: "white".to_string(),
bg_color,
unread,
})
}),
)
.collect();
info!("Fetching tags took {} seconds", now.elapsed().as_secs_f32());
Ok(tags)
}

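The unread recipient addresses chained in above are rewritten into the same pseudo-tag shape the query parser understands; a hedged sketch of that one-line transformation (the address is illustrative):

// "alice@example.com" -> "@example.com/alice"; names without '@' are skipped.
fn address_to_pseudo_tag(name: &str) -> Option<String> {
    let idx = name.find('@')?;
    Some(format!("{}/{}", &name[idx..], &name[..idx]))
}
// address_to_pseudo_tag("alice@example.com") == Some("@example.com/alice".to_string())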
@@ -187,6 +196,8 @@ pub async fn thread(

let to = email_addresses(&path, &m, "to")?;
let cc = email_addresses(&path, &m, "cc")?;
let delivered_to = email_addresses(&path, &m, "delivered-to")?.pop();
let x_original_to = email_addresses(&path, &m, "x-original-to")?.pop();
let subject = m.headers.get_first_value("subject");
let timestamp = m
.headers
@@ -306,6 +317,8 @@ pub async fn thread(
body,
path,
attachments,
delivered_to,
x_original_to,
});
}
messages.reverse();
@@ -325,7 +338,7 @@ pub async fn thread(
}

fn email_addresses(
- path: &str,
+ _path: &str,
m: &ParsedMail,
header_name: &str,
) -> Result<Vec<Email>, ServerError> {
@@ -336,9 +349,7 @@ fn email_addresses(
for ma in mal.into_inner() {
match ma {
mailparse::MailAddr::Group(gi) => {
- if !gi.group_name.contains("ndisclosed") {
- println!("[{path}][{header_name}] Group: {gi}");
- }
+ if !gi.group_name.contains("ndisclosed") {}
}
mailparse::MailAddr::Single(s) => addrs.push(Email {
name: s.display_name,
35 server/src/ws.rs Normal file
@@ -0,0 +1,35 @@
use std::{collections::HashMap, net::SocketAddr};

use axum::extract::ws::{Message, WebSocket};
use letterbox_shared::WebsocketMessage;
use tracing::{info, warn};

#[derive(Default)]
pub struct ConnectionTracker {
peers: HashMap<SocketAddr, WebSocket>,
}

impl ConnectionTracker {
pub async fn add_peer(&mut self, socket: WebSocket, who: SocketAddr) {
warn!("adding {who:?} to connection tracker");
self.peers.insert(who, socket);
self.send_message_all(WebsocketMessage::RefreshMessages)
.await;
}
pub async fn send_message_all(&mut self, msg: WebsocketMessage) {
info!("send_message_all {msg}");
let m = serde_json::to_string(&msg).expect("failed to json encode WebsocketMessage");
let mut bad_peers = Vec::new();
for (who, socket) in &mut self.peers.iter_mut() {
if let Err(e) = socket.send(Message::Text(m.clone().into())).await {
warn!("{:?} is bad, scheduling for removal: {e}", who);
bad_peers.push(who.clone());
}
}

for b in bad_peers {
info!("removing bad peer {b:?}");
self.peers.remove(&b);
}
}
}
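ConnectionTracker follows a broadcast-and-prune pattern: send to every peer, remember which sends failed, then drop those peers. A hedged, axum-free sketch of the same idea (the callback-based peer type here is purely illustrative):

use std::collections::HashMap;

// Each "peer" is modelled as a fallible send callback instead of a WebSocket.
type Peer = Box<dyn FnMut(&str) -> Result<(), String>>;

fn send_message_all(peers: &mut HashMap<u32, Peer>, msg: &str) {
    let mut bad_peers = Vec::new();
    for (who, send) in peers.iter_mut() {
        if let Err(e) = send(msg) {
            eprintln!("{who} is bad, scheduling for removal: {e}");
            bad_peers.push(*who);
        }
    }
    for b in bad_peers {
        peers.remove(&b);
    }
}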
42 server/static/vars.css Normal file
@@ -0,0 +1,42 @@
:root {
--active-brightness: 0.85;
--border-radius: 5px;
--box-shadow: 2px 2px 10px;
--color-accent: #118bee15;
--color-bg: #fff;
--color-bg-secondary: #e9e9e9;
--color-link: #118bee;
--color-secondary: #920de9;
--color-secondary-accent: #920de90b;
--color-shadow: #f4f4f4;
--color-table: #118bee;
--color-text: #000;
--color-text-secondary: #999;
--color-scrollbar: #cacae8;
--font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
--hover-brightness: 1.2;
--justify-important: center;
--justify-normal: left;
--line-height: 1.5;
/*
--width-card: 285px;
--width-card-medium: 460px;
--width-card-wide: 800px;
*/
--width-content: 1080px;
}

@media (prefers-color-scheme: dark) {
:root[color-mode="user"] {
--color-accent: #0097fc4f;
--color-bg: #333;
--color-bg-secondary: #555;
--color-link: #0097fc;
--color-secondary: #e20de9;
--color-secondary-accent: #e20de94f;
--color-shadow: #bbbbbb20;
--color-table: #0097fc;
--color-text: #f7f7f7;
--color-text-secondary: #aaa;
}
}
@@ -1,15 +1,17 @@
[package]
name = "letterbox-shared"
- version = "0.1.0"
- edition = "2021"
description = "Shared module for letterbox"
- license = "UNLICENSED"
- repository = "https://git.z.xinu.tv/wathiede/letterbox"
- publish = ["xinu"]
+ authors.workspace = true
+ edition.workspace = true
+ license.workspace = true
+ publish.workspace = true
+ repository.workspace = true
+ version.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
- build-info = "0.0.39"
- letterbox-notmuch = { version = "0.1.0", registry = "xinu" }
+ build-info = "0.0.40"
+ letterbox-notmuch = { version = "0.15.11", path = "../notmuch", registry = "xinu" }
serde = { version = "1.0.147", features = ["derive"] }
strum_macros = "0.27.1"
@@ -13,11 +13,20 @@ pub struct SearchResult {
pub total: usize,
}

- #[derive(Serialize, Deserialize, Debug)]
- pub struct Message {}
+ #[derive(Serialize, Deserialize, Debug, strum_macros::Display)]
+ pub enum WebsocketMessage {
+ RefreshMessages,
+ }

pub mod urls {
pub const MOUNT_POINT: &'static str = "/api";
pub fn view_original(host: Option<&str>, id: &str) -> String {
if let Some(host) = host {
format!("//{host}/api/original/{id}")
} else {
format!("/api/original/{id}")
}
}
pub fn cid_prefix(host: Option<&str>, cid: &str) -> String {
if let Some(host) = host {
format!("//{host}/api/cid/{cid}/")
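A hedged restatement of the view_original helper above, with an illustrative host, to make the protocol-relative form explicit:

// Sketch mirroring urls::view_original, not the crate itself.
fn view_original(host: Option<&str>, id: &str) -> String {
    match host {
        Some(host) => format!("//{host}/api/original/{id}"),
        None => format!("/api/original/{id}"),
    }
}
// view_original(Some("mail.example.com"), "abc123") == "//mail.example.com/api/original/abc123"
// view_original(None, "abc123") == "/api/original/abc123"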
@@ -1,15 +1,15 @@
[package]
- version = "0.1.0"
name = "letterbox-web"
- authors = ["Bill Thiede <git@xinu.tv>"]
- edition = "2021"
description = "Web frontend for letterbox"
- license = "UNLICENSED"
- repository = "https://git.z.xinu.tv/wathiede/letterbox"
- publish = ["xinu"]
+ authors.workspace = true
+ edition.workspace = true
+ license.workspace = true
+ publish.workspace = true
+ repository.workspace = true
+ version.workspace = true

[build-dependencies]
- build-info-build = "0.0.39"
+ build-info-build = "0.0.40"

[dev-dependencies]
wasm-bindgen-test = "0.3.33"
@@ -26,16 +26,20 @@ serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
chrono = "0.4.31"
graphql_client = "0.14.0"
thiserror = "2.0.0"
- seed_hooks = { git = "https://github.com/wathiede/styles_hooks", package = "seed_hooks", branch = "main" }
gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
human_format = "1.1.0"
- build-info = "0.0.39"
+ build-info = "0.0.40"
wasm-bindgen = "=0.2.100"
uuid = { version = "1.13.1", features = [
"js",
] } # direct dep to set js feature, prevents Rng issues
- letterbox-shared = { version = "0.1.0", registry = "xinu" }
- letterbox-notmuch = { version = "0.1.0", path = "../notmuch", registry = "xinu" }
+ letterbox-shared = { version = "0.15.11", path = "../shared", registry = "xinu" }
+ letterbox-notmuch = { version = "0.15.11", path = "../notmuch", registry = "xinu" }
+ seed_hooks = { version = "0.4.0", registry = "xinu" }
strum_macros = "0.27.1"
gloo-console = "0.3.0"
[target.'cfg(target_arch = "wasm32")'.dependencies]
wasm-sockets = "1.0.0"

[package.metadata.wasm-pack.profile.release]
wasm-opt = ['-Os']
@@ -46,7 +50,10 @@ features = [
"Clipboard",
"DomRect",
"Element",
"History",
"MediaQueryList",
"Navigator",
"Performance",
"ScrollRestoration",
"Window",
]
@@ -6,9 +6,16 @@ release = false
address = "0.0.0.0"
port = 6758

[[proxy]]
ws = true
backend = "ws://localhost:9345/api/ws"

[[proxy]]
backend = "http://localhost:9345/api/"

[[proxy]]
backend = "http://localhost:9345/notification/"

[[hooks]]
stage = "pre_build"
command = "printf"
3 web/graphql/catchup.graphql Normal file
@@ -0,0 +1,3 @@
query CatchupQuery($query: String!) {
catchup(query: $query)
}
@@ -671,6 +671,30 @@
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "xOriginalTo",
"type": {
"kind": "OBJECT",
"name": "Email",
"ofType": null
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "deliveredTo",
"type": {
"kind": "OBJECT",
"name": "Email",
"ofType": null
}
},
{
"args": [],
"deprecationReason": null,
@@ -1268,6 +1292,45 @@
}
}
},
{
"args": [
{
"defaultValue": null,
"description": null,
"name": "query",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "catchup",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
}
},
{
"args": [
{
@@ -31,6 +31,14 @@ query ShowThreadQuery($threadId: String!) {
name
addr
}
xOriginalTo {
name
addr
}
deliveredTo {
name
addr
}
timestamp
body {
__typename
@@ -12,6 +12,14 @@ use serde::{de::DeserializeOwned, Serialize};
)]
pub struct FrontPageQuery;

#[derive(GraphQLQuery)]
#[graphql(
schema_path = "graphql/schema.json",
query_path = "graphql/catchup.graphql",
response_derives = "Debug"
)]
pub struct CatchupQuery;

#[derive(GraphQLQuery)]
#[graphql(
schema_path = "graphql/schema.json",
@@ -2,6 +2,8 @@
// - it's useful when you want to check your code with `cargo make verify`
// but some rules are too "annoying" or are not applicable for your case.)
#![allow(clippy::wildcard_imports)]
// Until https://github.com/rust-lang/rust/issues/138762 is addressed in dependencies
#![allow(wasm_c_abi)]

use log::Level;
use seed::App;
@@ -11,6 +13,7 @@ mod consts;
mod graphql;
mod state;
mod view;
mod websocket;

fn main() {
// This provides better error messages in debug mode.
@@ -18,6 +21,9 @@ fn main() {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();

#[cfg(debug_assertions)]
let lvl = Level::Debug;
#[cfg(not(debug_assertions))]
let lvl = Level::Info;
console_log::init_with_level(lvl).expect("failed to initialize console logging");
// Mount the `app` to the element with the `id` "app".
220 web/src/state.rs
@@ -1,6 +1,7 @@
use std::collections::HashSet;

use graphql_client::GraphQLQuery;
use letterbox_shared::WebsocketMessage;
use log::{debug, error, info, warn};
use seed::{prelude::*, *};
use thiserror::Error;
@@ -11,6 +12,7 @@ use crate::{
consts::SEARCH_RESULTS_PER_PAGE,
graphql,
graphql::{front_page_query::*, send_graphql, show_thread_query::*},
websocket,
};

/// Used to fake the unread string while in development
@@ -29,16 +31,21 @@ pub fn unread_query() -> &'static str {
pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
let version = letterbox_shared::build_version(bi);
info!("Build Info: {}", version);
// Disable restoring to scroll position when navigating
window()
.history()
.expect("couldn't get history")
.set_scroll_restoration(web_sys::ScrollRestoration::Manual)
.expect("failed to set scroll restoration to manual");
if url.hash().is_none() {
orders.request_url(urls::search(unread_query(), 0));
} else {
- orders.notify(subs::UrlRequested::new(url));
+ orders.request_url(url.clone());
};
orders.stream(streams::window_event(Ev::Resize, |_| Msg::OnResize));
// TODO(wathiede): only do this while viewing the index? Or maybe add a new message that force
// 'notmuch new' on the server periodically?
- orders.stream(streams::interval(30_000, || Msg::RefreshStart));
- orders.subscribe(on_url_changed);
+ //orders.stream(streams::interval(30_000, || Msg::RefreshStart));
+ orders.subscribe(Msg::OnUrlChanged);
orders.stream(streams::window_event(Ev::Scroll, |_| Msg::WindowScrolled));

build_info::build_info!(fn bi);
@@ -53,18 +60,24 @@ pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
client: version,
server: None,
},
catchup: None,
last_url: Url::current(),
websocket: websocket::init("/api/ws", &mut orders.proxy(Msg::WebSocket)),
}
}

- fn on_url_changed(uc: subs::UrlChanged) -> Msg {
- let mut url = uc.0;
+ fn on_url_changed(old: &Url, mut new: Url) -> Msg {
+ let did_change = *old != new;
+ let mut messages = Vec::new();
+ if did_change {
+ messages.push(Msg::ScrollToTop)
+ }
info!(
- "url changed '{}', history {}",
- url,
+ "url changed\nold '{old}'\nnew '{new}', history {}",
history().length().unwrap_or(0)
);
- let hpp = url.remaining_hash_path_parts();
- match hpp.as_slice() {
+ let hpp = new.remaining_hash_path_parts();
+ let msg = match hpp.as_slice() {
["t", tid] => Msg::ShowThreadRequest {
thread_id: tid.to_string(),
},
@@ -101,11 +114,14 @@ fn on_url_changed(uc: subs::UrlChanged) -> Msg {
last: None,
}
}
}
};
messages.push(msg);
Msg::MultiMsg(messages)
}

// `update` describes how to handle each `Msg`.
pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
info!("update({})", msg);
match msg {
Msg::Noop => {}
Msg::RefreshStart => {
@@ -131,7 +147,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
orders.perform_cmd(async move { Msg::Refresh });
}
Msg::Refresh => {
- orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
+ orders.request_url(Url::current());
}
Msg::Reload => {
window()
@@ -139,7 +155,10 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
.reload()
.expect("failed to reload window");
}
Msg::OnResize => (),
Msg::OnUrlChanged(new_url) => {
orders.send_msg(on_url_changed(&model.last_url, new_url.0.clone()));
model.last_url = new_url.0;
}

Msg::NextPage => {
match &model.context {
@@ -181,10 +200,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
};
}
Msg::GoToSearchResults => {
- let url = urls::search(&model.query, 0);
- info!("GoToSearchRestuls Start");
- orders.request_url(url);
- info!("GoToSearchRestuls End");
+ orders.send_msg(Msg::SearchQuery(model.query.clone()));
}

Msg::UpdateQuery(query) => model.query = query,
@@ -279,7 +295,9 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
)
});
}
- Msg::FrontPageResult(Err(e)) => error!("error FrontPageResult: {e:?}"),
+ Msg::FrontPageResult(Err(e)) => {
+ error!("error FrontPageResult: {e:?}");
+ }
Msg::FrontPageResult(Ok(graphql_client::Response {
data: None,
errors: None,
@@ -307,7 +325,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
})
.collect(),
);
- info!("pager {:#?}", data.search.page_info);
let selected_threads = 'context: {
if let Context::SearchResult {
results,
@@ -390,6 +407,38 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
Msg::ShowThreadResult(bad) => {
error!("show_thread_query error: {bad:#?}");
}
Msg::CatchupRequest { query } => {
orders.perform_cmd(async move {
Msg::CatchupResult(
send_graphql::<_, graphql::catchup_query::ResponseData>(
graphql::CatchupQuery::build_query(graphql::catchup_query::Variables {
query,
}),
)
.await,
)
});
}
Msg::CatchupResult(Ok(graphql_client::Response {
data: Some(data), ..
})) => {
let items = data.catchup;
if items.is_empty() {
orders.send_msg(Msg::GoToSearchResults);
model.catchup = None;
} else {
orders.request_url(urls::thread(&items[0]));
model.catchup = Some(Catchup {
items: items
.into_iter()
.map(|id| CatchupItem { id, seen: false })
.collect(),
});
}
}
Msg::CatchupResult(bad) => {
error!("catchup_query error: {bad:#?}");
}
Msg::SelectionSetNone => {
if let Context::SearchResult {
selected_threads, ..
@@ -503,8 +552,12 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
.expect("failed to copy to clipboard");
});
}
Msg::ScrollToTop => {
info!("scrolling to the top");
web_sys::window().unwrap().scroll_to_with_x_and_y(0., 0.);
}
Msg::WindowScrolled => {
info!("WindowScrolled");
// TODO: model.content_el doesn't go to None like it should when a DOM is recreated and the refrenced element goes away
if let Some(el) = model.content_el.get() {
let ih = window()
.inner_height()
@@ -513,7 +566,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
.value_of();

let r = el.get_bounding_client_rect();
- info!("r {r:?} ih {ih}");
if r.height() < ih {
// The whole content fits in the window, no scrollbar
orders.send_msg(Msg::SetProgress(0.));
@@ -545,7 +597,8 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
model.read_completion_ratio = ratio;
}
Msg::UpdateServerVersion(version) => {
- if version != model.versions.client {
+ // Only git versions contain dash, don't autoreload there
+ if !version.contains('-') && version != model.versions.client {
warn!(
"Server ({}) and client ({}) version mismatch, reloading",
version, model.versions.client
@@ -554,8 +607,94 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
}
model.versions.server = Some(version);
}

Msg::CatchupStart => {
let query = if model.query.contains("is:unread") {
model.query.to_string()
} else {
format!("{} is:unread", model.query)
};
info!("starting catchup mode w/ {}", query);
orders.send_msg(Msg::ScrollToTop);
orders.send_msg(Msg::CatchupRequest { query });
}
Msg::CatchupKeepUnread => {
orders.send_msg(Msg::CatchupNext);
}
Msg::CatchupMarkAsRead => {
if let Some(thread_id) = current_thread_id(&model.context) {
orders.send_msg(Msg::SetUnread(thread_id, false));
};
orders.send_msg(Msg::CatchupNext);
}
Msg::CatchupNext => {
orders.send_msg(Msg::ScrollToTop);
let Some(catchup) = &mut model.catchup else {
orders.send_msg(Msg::GoToSearchResults);
return;
};
let Some(thread_id) = current_thread_id(&model.context) else {
return;
};
let Some(idx) = catchup
.items
.iter()
.inspect(|i| info!("i {i:?} thread_id {thread_id}"))
.position(|i| i.id == thread_id)
else {
// All items have been seen
orders.send_msg(Msg::CatchupExit);
orders.send_msg(Msg::GoToSearchResults);
return;
};
catchup.items[idx].seen = true;
if idx < catchup.items.len() - 1 {
// Reached last item
orders.request_url(urls::thread(&catchup.items[idx + 1].id));
return;
} else {
orders.send_msg(Msg::CatchupExit);
orders.send_msg(Msg::GoToSearchResults);
return;
};
}
Msg::CatchupExit => {
orders.send_msg(Msg::ScrollToTop);
model.catchup = None;
}

Msg::WebSocket(ws) => {
websocket::update(ws, &mut model.websocket, &mut orders.proxy(Msg::WebSocket));
while let Some(msg) = model.websocket.updates.pop_front() {
orders.send_msg(Msg::WebsocketMessage(msg));
}
}
Msg::WebsocketMessage(msg) => {
match msg {
WebsocketMessage::RefreshMessages => orders.send_msg(Msg::Refresh),
};
}
}
}

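The Msg::CatchupNext arm above reduces to: find the current thread in the catchup list, mark it seen, and either move to the next id or leave catchup mode. A hedged standalone sketch of that bookkeeping, with the surrounding message dispatch stripped away and simplified types:

struct Item {
    id: String,
    seen: bool,
}

// Returns the id of the next thread to show, or None when catchup is finished
// (current id not found, or it was the last item in the list).
fn advance_catchup(items: &mut [Item], current_id: &str) -> Option<String> {
    let idx = items.iter().position(|i| i.id == current_id)?;
    items[idx].seen = true;
    if idx < items.len() - 1 {
        Some(items[idx + 1].id.clone())
    } else {
        None
    }
}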
fn current_thread_id(context: &Context) -> Option<String> {
match context {
Context::ThreadResult {
thread:
ShowThreadQueryThread::EmailThread(ShowThreadQueryThreadOnEmailThread {
thread_id, ..
}),
..
} => Some(thread_id.clone()),
Context::ThreadResult {
thread:
ShowThreadQueryThread::NewsPost(ShowThreadQueryThreadOnNewsPost { thread_id, .. }),
..
} => Some(thread_id.clone()),
_ => None,
}
}

// `Model` describes our app state.
pub struct Model {
pub query: String,
@@ -565,6 +704,9 @@ pub struct Model {
pub read_completion_ratio: f64,
pub content_el: ElRef<HtmlElement>,
pub versions: Version,
pub catchup: Option<Catchup>,
pub last_url: Url,
pub websocket: websocket::Model,
}

#[derive(Debug)]
@@ -601,6 +743,16 @@ pub enum Context {
},
}

pub struct Catchup {
pub items: Vec<CatchupItem>,
}

#[derive(Debug)]
pub struct CatchupItem {
pub id: String,
pub seen: bool,
}

pub struct Tag {
pub name: String,
pub bg_color: String,
@@ -614,20 +766,22 @@ pub enum RefreshingState {
Error(String),
}
// `Msg` describes the different events you can modify state with.
#[derive(strum_macros::Display)]
pub enum Msg {
Noop,
// Tell the client to refresh its state
Refresh,
// Tell the client to reload whole page from server
Reload,
// Window has changed size
OnResize,
// TODO: add GoToUrl
OnUrlChanged(subs::UrlChanged),
// Tell the server to update state
RefreshStart,
RefreshDone(Option<gloo_net::Error>),
NextPage,
PreviousPage,
GoToSearchResults,

UpdateQuery(String),
SearchQuery(String),

@@ -651,10 +805,14 @@ pub enum Msg {
ShowThreadResult(
Result<graphql_client::Response<graphql::show_thread_query::ResponseData>, gloo_net::Error>,
),
CatchupRequest {
query: String,
},
CatchupResult(
Result<graphql_client::Response<graphql::catchup_query::ResponseData>, gloo_net::Error>,
),

#[allow(dead_code)]
SelectionSetNone,
#[allow(dead_code)]
SelectionSetAll,
SelectionAddTag(String),
#[allow(dead_code)]
@@ -670,7 +828,17 @@ pub enum Msg {

CopyToClipboard(String),

ScrollToTop,
WindowScrolled,
SetProgress(f64),
UpdateServerVersion(String),

CatchupStart,
CatchupKeepUnread,
CatchupMarkAsRead,
CatchupNext,
CatchupExit,

WebSocket(websocket::Msg),
WebsocketMessage(WebsocketMessage),
}

File diff suppressed because it is too large
220 web/src/websocket.rs Normal file
@@ -0,0 +1,220 @@
use std::{collections::VecDeque, rc::Rc};

use letterbox_shared::WebsocketMessage;
use log::{error, info};
use seed::prelude::*;
use serde::{Deserialize, Serialize};
#[cfg(not(target_arch = "wasm32"))]
#[allow(dead_code)]
mod wasm_sockets {
use std::{cell::RefCell, rc::Rc};

use thiserror::Error;
use web_sys::{CloseEvent, ErrorEvent};

#[derive(Debug)]
pub struct JsValue;
#[derive(Debug)]
pub enum ConnectionStatus {
/// Connecting to a server
Connecting,
/// Connected to a server
Connected,
/// Disconnected from a server due to an error
Error,
/// Disconnected from a server without an error
Disconnected,
}
#[derive(Debug)]
pub struct EventClient {
pub status: Rc<RefCell<ConnectionStatus>>,
}
impl EventClient {
pub fn new(_: &str) -> Result<Self, WebSocketError> {
todo!("this is a mock")
}
pub fn send_string(&self, _essage: &str) -> Result<(), JsValue> {
todo!("this is a mock")
}
pub fn set_on_error(&mut self, _: Option<Box<dyn Fn(ErrorEvent)>>) {
todo!("this is a mock")
}
pub fn set_on_connection(&mut self, _: Option<Box<dyn Fn(&EventClient)>>) {
todo!("this is a mock")
}
pub fn set_on_close(&mut self, _: Option<Box<dyn Fn(CloseEvent)>>) {
todo!("this is a mock")
}
pub fn set_on_message(&mut self, _: Option<Box<dyn Fn(&EventClient, Message)>>) {
todo!("this is a mock")
}
}
#[derive(Debug, Clone)]
pub enum Message {
Text(String),
Binary(Vec<u8>),
}
#[derive(Debug, Clone, Error)]
pub enum WebSocketError {}
}
#[cfg(not(target_arch = "wasm32"))]
use wasm_sockets::{ConnectionStatus, EventClient, Message, WebSocketError};
#[cfg(target_arch = "wasm32")]
use wasm_sockets::{ConnectionStatus, EventClient, Message, WebSocketError};
use web_sys::CloseEvent;

/// Message from the server to the client.
#[derive(Serialize, Deserialize)]
pub struct ServerMessage {
pub id: usize,
pub text: String,
}

/// Message from the client to the server.
#[derive(Serialize, Deserialize)]
pub struct ClientMessage {
pub text: String,
}

//const WS_URL: &str = "wss://9000.z.xinu.tv/api/ws";
//const WS_URL: &str = "wss://9345.z.xinu.tv/api/graphql/ws";
//const WS_URL: &str = "wss://6758.z.xinu.tv/api/ws";

// ------ ------
// Model
// ------ ------

pub struct Model {
ws_url: String,
web_socket: EventClient,
web_socket_reconnector: Option<StreamHandle>,
pub updates: VecDeque<WebsocketMessage>,
}

// ------ ------
// Init
// ------ ------

pub fn init(ws_url: &str, orders: &mut impl Orders<Msg>) -> Model {
Model {
ws_url: ws_url.to_string(),
web_socket: create_websocket(ws_url, orders).unwrap(),
web_socket_reconnector: None,
updates: VecDeque::new(),
}
}

// ------ ------
// Update
// ------ ------

pub enum Msg {
WebSocketOpened,
TextMessageReceived(WebsocketMessage),
WebSocketClosed(CloseEvent),
WebSocketFailed,
ReconnectWebSocket(usize),
#[allow(dead_code)]
SendMessage(ClientMessage),
}

pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
match msg {
Msg::WebSocketOpened => {
model.web_socket_reconnector = None;
info!("WebSocket connection is open now");
}
Msg::TextMessageReceived(msg) => {
model.updates.push_back(msg);
}
Msg::WebSocketClosed(close_event) => {
info!(
r#"==================
WebSocket connection was closed:
Clean: {0}
Code: {1}
Reason: {2}
=================="#,
close_event.was_clean(),
close_event.code(),
close_event.reason()
);

// Chrome doesn't invoke `on_error` when the connection is lost.
if !close_event.was_clean() && model.web_socket_reconnector.is_none() {
model.web_socket_reconnector = Some(
orders.stream_with_handle(streams::backoff(None, Msg::ReconnectWebSocket)),
);
}
}
Msg::WebSocketFailed => {
info!("WebSocket failed");
if model.web_socket_reconnector.is_none() {
model.web_socket_reconnector = Some(
orders.stream_with_handle(streams::backoff(None, Msg::ReconnectWebSocket)),
);
}
}
Msg::ReconnectWebSocket(retries) => {
info!("Reconnect attempt: {}", retries);
model.web_socket = create_websocket(&model.ws_url, orders).unwrap();
}
Msg::SendMessage(msg) => {
let txt = serde_json::to_string(&msg).unwrap();
model.web_socket.send_string(&txt).unwrap();
}
}
}

fn create_websocket(url: &str, orders: &impl Orders<Msg>) -> Result<EventClient, WebSocketError> {
let msg_sender = orders.msg_sender();

let mut client = EventClient::new(url)?;

client.set_on_error(Some(Box::new(|error| {
gloo_console::error!("WS: ", error);
})));

let send = msg_sender.clone();
client.set_on_connection(Some(Box::new(move |client: &EventClient| {
info!("{:#?}", client.status);
let msg = match *client.status.borrow() {
ConnectionStatus::Connecting => {
info!("Connecting...");
None
}
ConnectionStatus::Connected => Some(Msg::WebSocketOpened),
ConnectionStatus::Error => Some(Msg::WebSocketFailed),
ConnectionStatus::Disconnected => {
info!("Disconnected");
None
}
};
send(msg);
})));

let send = msg_sender.clone();
client.set_on_close(Some(Box::new(move |ev| {
info!("WS: Connection closed");
send(Some(Msg::WebSocketClosed(ev)));
})));

let send = msg_sender.clone();
client.set_on_message(Some(Box::new(move |_: &EventClient, msg: Message| {
decode_message(msg, Rc::clone(&send))
})));

Ok(client)
}

fn decode_message(message: Message, msg_sender: Rc<dyn Fn(Option<Msg>)>) {
match message {
Message::Text(txt) => {
let msg: WebsocketMessage = serde_json::from_str(&txt).unwrap_or_else(|e| {
panic!("failed to parse json into WebsocketMessage: {e}\n'{txt}'")
});
msg_sender(Some(Msg::TextMessageReceived(msg)));
}
m => error!("unexpected message type received of {m:?}"),
}
}
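Both sides exchange WebsocketMessage values as JSON text frames via serde_json; a hedged round-trip example using a local stand-in enum (mirroring letterbox_shared::WebsocketMessage, which is assumed to derive Serialize and Deserialize) shows the wire format:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
enum WsMessage {
    RefreshMessages,
}

fn main() -> Result<(), serde_json::Error> {
    // A unit variant serializes to its bare name, so the frame body is just
    // the JSON string "RefreshMessages".
    let frame = serde_json::to_string(&WsMessage::RefreshMessages)?;
    assert_eq!(frame, r#""RefreshMessages""#);
    let decoded: WsMessage = serde_json::from_str(&frame)?;
    assert_eq!(decoded, WsMessage::RefreshMessages);
    Ok(())
}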
@@ -1,69 +1,84 @@
html {
background-color: black;
background-color: black;
}

.mail-thread a,
.mail-thread .content a,
.news-post a {
color: var(--color-link) !important;
text-decoration: underline;
color: var(--color-link) !important;
text-decoration: underline;
}

.mail-thread br,
.mail-thread .content br,
.news-post br {
display: block;
margin-top: 1em;
content: " ";
display: block;
margin-top: 1em;
content: " ";
}

.mail-thread h1,
.mail-thread h2,
.mail-thread h3,
.mail-thread h4,
.mail-thread .content h1,
.mail-thread .content h2,
.mail-thread .content h3,
.mail-thread .content h4,
.news-post h1,
.news-post h2,
.news-post h3,
.news-post h4 {
margin-top: 1em !important;
margin-bottom: 1em !important;
margin-top: 1em !important;
margin-bottom: 1em !important;
}

.mail-thread p,
.mail-thread .content p,
.news-post p {
margin-bottom: 1em;
margin-bottom: 1em;
}

.mail-thread pre,
.mail-thread code,
.news-post pre,
.mail-thread .content pre,
.news-post pre {
font-family: monospace;
background-color: #eee !important;
padding: 0.5em;
white-space: break-spaces;
}

.mail-thread .content code,
.news-post code {
font-family: monospace;
background-color: #eee !important;
padding: 0.5em !important;
font-family: monospace;
white-space: break-spaces;
background-color: #eee !important;
}

.mail-thread blockquote {
padding-left: 1em;
border-left: 2px solid #ddd;
.mail-thread .content blockquote {
padding-left: 1em;
border-left: 2px solid #ddd;
}

.mail-thread ol,
.mail-thread ul {
margin-left: 2em;
.mail-thread .content ol,
.mail-thread .content ul {
margin-left: 2em;
}

.mail-thread .content .noreply-news-bloomberg-com a {
background-color: initial !important;
}

.mail-thread .content .noreply-news-bloomberg-com h2 {
margin: 0 !important;
padding: 0 !important;
}

/* Hackaday figures have unreadable black on dark grey */
.news-post figcaption.wp-caption-text {
background-color: initial !important;
background-color: initial !important;
}

.news-post.site-nautilus .article-ad,
.news-post.site-nautilus .primis-ad {
display: none !important;
display: none !important;
}

.news-post.site-slashdot .story-byline {
display: block !important;
height: initial !important;
overflow: auto !important;
position: static !important;
}
display: block !important;
height: initial !important;
overflow: auto !important;
position: static !important;
}
