Compare commits
89 Commits
letterbox-
...
d0f4716d83
| SHA1 |
|---|
| d0f4716d83 |
| 59e35062e7 |
| 43827b4d87 |
| b29e92cd9c |
| 42bea43de9 |
| 4048edde11 |
| 90768d0d1b |
| 70e6271ca3 |
| 0bda21e5e9 |
| f987b4e4b4 |
| a873ec9208 |
| d8d26e1f59 |
| 1322dde5c5 |
| a2147081e8 |
| 8c6a24e400 |
| 8a08d97930 |
| d24a851cd7 |
| f6ff597f66 |
| 387d133f09 |
| a9674e8b7b |
| 457f9ac1c2 |
| d62759565f |
| 4fd97700f7 |
| 99b9a88663 |
| 56e6036892 |
| 232e436378 |
| e2bf4d890f |
| e9584785a8 |
| 7a4d2abdd5 |
| b764d725b1 |
| 7bac98762c |
| 2bedd92e1a |
| da72c09fa3 |
| 38c1942ebb |
| 05a7386dd1 |
| 477ffe8d82 |
| 5d80f32b49 |
| ae76bdf9a5 |
| 50e3c77e49 |
| e85a505775 |
| 86ea5a13f3 |
| a30bff925f |
| 6fdfbb1ee2 |
| 561316ddd4 |
| 495e495888 |
| ddb4c812ce |
| 1aaf914ac5 |
| 982b5dae2f |
| 8807c1b1f5 |
| fa23658ef0 |
| f175faed98 |
| 8971c16117 |
| fbecf564b5 |
| e5643c6fd0 |
| a8734269f7 |
| cab4e571f3 |
| 4d6c6af7d9 |
| cf08831ed1 |
| e1509c5978 |
| 13db8e6f1f |
| 136a837fa4 |
| 1ea058c664 |
| f4c11c5b3f |
| 8dc8f3a0f8 |
| 7b9450b65b |
| b5de0719dd |
| 58da28a19b |
| 75ad27ec2f |
| f904fa0001 |
| b94596bf65 |
| aa24599921 |
| c81a8c1cd3 |
| 7c3cfec3d1 |
| a2920fde3b |
| 8bc449ae6e |
| 0febd0535a |
| a9e00a54e4 |
| 6811c689ff |
| 8ba6b3d0b0 |
| a7c5585e80 |
| 4ef4d49113 |
| f8af303110 |
| fa5aac34ba |
| b58556254e |
| e365ced7dd |
| 93d569fb14 |
| f86a5f464d |
| 956c20b156 |
| 1eb498712b |
Cargo.lock (generated, 2022 lines)
File diff suppressed because it is too large
@@ -8,7 +8,7 @@ authors = ["Bill Thiede <git@xinu.tv>"]
edition = "2021"
license = "UNLICENSED"
publish = ["xinu"]
version = "0.17.20"
version = "0.17.27"
repository = "https://git.z.xinu.tv/wathiede/letterbox"

[profile.dev]

@@ -214,9 +214,8 @@ use std::{
    process::Command,
};

use log::{error, info};
use serde::{Deserialize, Serialize};
use tracing::instrument;
use tracing::{error, info, instrument, warn};

/// # Number of seconds since the Epoch
pub type UnixTime = isize;
@@ -718,6 +717,13 @@ impl Notmuch {
        cmd.args(args);
        info!("{:?}", &cmd);
        let out = cmd.output()?;
        if !out.stderr.is_empty() {
            warn!(
                "{:?}: STDERR:\n{}",
                &cmd,
                String::from_utf8_lossy(&out.stderr)
            );
        }
        Ok(out.stdout)
    }
}

@@ -19,21 +19,23 @@ async-graphql-axum = "7.0.16"
async-trait = "0.1.88"
axum = { version = "0.8.3", features = ["ws"] }
axum-macros = "0.5.0"
build-info = "0.0.40"
build-info = "0.0.41"
cacher = { version = "0.2.0", registry = "xinu" }
chrono = "0.4.40"
clap = { version = "4.5.37", features = ["derive"] }
css-inline = "0.14.4"
css-inline = "0.17.0"
flate2 = "1.1.2"
futures = "0.3.31"
headers = "0.4.0"
html-escape = "0.2.13"
letterbox-notmuch = { path = "../notmuch", version = "0.17.20", registry = "xinu" }
letterbox-shared = { path = "../shared", version = "0.17.20", registry = "xinu" }
letterbox-notmuch = { path = "../notmuch", version = "0.17.27", registry = "xinu" }
letterbox-shared = { path = "../shared", version = "0.17.27", registry = "xinu" }
linkify = "0.10.0"
lol_html = "2.3.0"
mailparse = "0.16.1"
maplit = "1.0.2"
memmap = "0.7.0"
quick-xml = { version = "0.38.1", features = ["serialize"] }
regex = "1.11.1"
reqwest = { version = "0.12.15", features = ["blocking"] }
scraper = "0.23.1"

@@ -50,9 +52,10 @@ urlencoding = "2.1.3"
#xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
#xtracing = { path = "../../xtracing" }
xtracing = { version = "0.3.2", registry = "xinu" }
zip = "4.3.0"

[build-dependencies]
build-info-build = "0.0.40"
build-info-build = "0.0.41"

[features]
#default = [ "tantivy" ]

server/migrations/20250623193718_disable-feed.down.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
-- Add down migration script here
ALTER TABLE feed DROP COLUMN IF EXISTS disabled;
server/migrations/20250623193718_disable-feed.up.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
-- Add up migration script here
ALTER TABLE feed ADD disabled boolean;
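The up migration adds `disabled` as a nullable boolean, so pre-existing `feed` rows keep `NULL` rather than `false`. A minimal sqlx sketch of how such a column is typically consumed; the `id` column type (`BIGINT`/`i64`) is a hypothetical assumption, not something stated in this diff:

```rust
use sqlx::PgPool;

// Hypothetical helper: `disabled IS NOT TRUE` matches both NULL (legacy rows)
// and FALSE, so feeds are only skipped once explicitly disabled.
async fn active_feed_ids(pool: &PgPool) -> Result<Vec<i64>, sqlx::Error> {
    sqlx::query_scalar("SELECT id FROM feed WHERE disabled IS NOT TRUE")
        .fetch_all(pool)
        .await
}
```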
@@ -352,6 +352,9 @@ fn slurp_contents_selectors() -> HashMap<String, Vec<Selector>> {
        "natwelch.com".to_string() => vec![
            Selector::parse("article div.prose").unwrap(),
        ],
        "seiya.me".to_string() => vec![
            Selector::parse("header + div").unwrap(),
        ],
        "rustacean-station.org".to_string() => vec![
            Selector::parse("article").unwrap(),
        ],

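The new "seiya.me" entry maps that site to a `header + div` CSS selector. A self-contained sketch of how such a selector is applied with the `scraper` crate; the HTML sample and `main` function are illustrative assumptions, not project code:

```rust
use scraper::{Html, Selector};

fn main() {
    // Illustrative page: the post body is the <div> immediately following <header>.
    let html = r#"<html><body><header>site nav</header><div><p>post body</p></div></body></html>"#;
    let document = Html::parse_document(html);
    let selector = Selector::parse("header + div").unwrap();
    if let Some(node) = document.select(&selector).next() {
        println!("{}", node.inner_html()); // prints: <p>post body</p>
    }
}
```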
server/src/nm.rs (272 changed lines)
@@ -1,14 +1,17 @@
use std::{
    collections::{HashMap, HashSet},
    fs::File,
    io::Cursor,
};

use letterbox_notmuch::Notmuch;
use letterbox_shared::{compute_color, Rule};
use mailparse::{parse_content_type, parse_mail, MailHeader, MailHeaderMap, ParsedMail};
use memmap::MmapOptions;
use quick_xml::de::from_str as xml_from_str;
use sqlx::{types::Json, PgPool};
use tracing::{error, info, info_span, instrument, warn};
use zip::ZipArchive;

use crate::{
    compute_offset_limit,
@@ -20,6 +23,7 @@ use crate::{
    linkify_html, InlineStyle, Query, SanitizeHtml, Transformer,
};

const APPLICATION_ZIP: &'static str = "application/zip";
const IMAGE_JPEG: &'static str = "image/jpeg";
const IMAGE_PJPEG: &'static str = "image/pjpeg";
const IMAGE_PNG: &'static str = "image/png";
@@ -447,6 +451,7 @@ fn extract_body(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, Ser
        MULTIPART_MIXED => extract_mixed(m, part_addr),
        MULTIPART_ALTERNATIVE => extract_alternative(m, part_addr),
        MULTIPART_RELATED => extract_related(m, part_addr),
        APPLICATION_ZIP => extract_zip(m),
        _ => extract_unhandled(m),
    };
    if let Err(err) = ret {
@@ -456,6 +461,69 @@ fn extract_body(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, Ser
    ret
}

const APPLICATION_GZIP: &'static str = "application/gzip";

fn extract_zip(m: &ParsedMail) -> Result<Body, ServerError> {
    if let Ok(zip_bytes) = m.get_body_raw() {
        if let Ok(mut archive) = ZipArchive::new(Cursor::new(&zip_bytes)) {
            for i in 0..archive.len() {
                if let Ok(mut file) = archive.by_index(i) {
                    let name = file.name().to_lowercase();
                    // Google DMARC reports are typically named like "google.com!example.com!...xml"
                    // and may or may not contain "dmarc" in the filename.
                    if name.ends_with(".xml") && (name.contains("dmarc") || name.starts_with("google.com!")) {
                        let mut xml = String::new();
                        use std::io::Read;
                        if file.read_to_string(&mut xml).is_ok() {
                            match parse_dmarc_report(&xml) {
                                Ok(report) => {
                                    return Ok(Body::html(format!(
                                        "<div class=\"dmarc-report\">Google DMARC report summary:<br>{}</div>",
                                        report
                                    )));
                                }
                                Err(e) => {
                                    return Ok(Body::html(format!(
                                        "<div class=\"dmarc-report-error\">Failed to parse DMARC report XML: {}</div>",
                                        e
                                    )));
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    // If no DMARC report found, fall through to unhandled
    extract_unhandled(m)
}

fn extract_gzip(m: &ParsedMail) -> Result<Body, ServerError> {
    if let Ok(gz_bytes) = m.get_body_raw() {
        let mut decoder = flate2::read::GzDecoder::new(&gz_bytes[..]);
        let mut xml = String::new();
        use std::io::Read;
        if decoder.read_to_string(&mut xml).is_ok() {
            match parse_dmarc_report(&xml) {
                Ok(report) => {
                    return Ok(Body::html(format!(
                        "<div class=\"dmarc-report\">Microsoft DMARC report summary:<br>{}</div>",
                        report
                    )));
                }
                Err(e) => {
                    return Ok(Body::html(format!(
                        "<div class=\"dmarc-report-error\">Failed to parse DMARC report XML: {}</div>",
                        e
                    )));
                }
            }
        }
    }
    extract_unhandled(m)
}

fn extract_unhandled(m: &ParsedMail) -> Result<Body, ServerError> {
    let msg = format!(
        "Unhandled body content type:\n{}\n{}",

@@ -525,6 +593,7 @@ fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, Se
        MULTIPART_RELATED,
        TEXT_HTML,
        TEXT_PLAIN,
        APPLICATION_GZIP,
    ];
    let mut unhandled_types: Vec<_> = m
        .subparts
@@ -568,6 +637,7 @@ fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, Se
                )));
            }
        }
        APPLICATION_GZIP => parts.push(extract_gzip(sp)?),
        mt => parts.push(unhandled_html(MULTIPART_MIXED, mt)),
    }
    part_addr.pop();
@@ -976,11 +1046,10 @@ pub async fn label_unprocessed(
    */
    info!("Loaded {} rules", rules.len());

    let ids = if let Some(limit) = limit {
        &ids[..limit]
    } else {
        &ids[..]
    };
    let limit = limit.unwrap_or(ids.len());
    let limit = limit.min(ids.len());
    let ids = &ids[..limit];

    let mut add_mutations = HashMap::new();
    let mut rm_mutations = HashMap::new();
    for id in ids {
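This hunk replaces the old `if let Some(limit)` slice with an explicit clamp: `&ids[..limit]` panics whenever the requested limit exceeds `ids.len()`, while `unwrap_or` plus `min` always yields a valid range. A small standalone sketch of the same pattern (hypothetical helper, not project code):

```rust
// Clamp an optional limit before slicing so an oversized request cannot panic.
fn take_at_most<T>(items: &[T], limit: Option<usize>) -> &[T] {
    let limit = limit.unwrap_or(items.len()).min(items.len());
    &items[..limit]
}

fn main() {
    let ids = vec![10, 20, 30];
    assert_eq!(take_at_most(&ids, Some(100)).len(), 3); // clamped instead of panicking
    assert_eq!(take_at_most(&ids, Some(2)), &[10, 20]);
    assert_eq!(take_at_most(&ids, None).len(), 3);
}
```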
@@ -1104,3 +1173,196 @@ fn find_tags<'a, 'b>(rules: &'a [Rule], headers: &'b [MailHeader]) -> (bool, Has
    }
    return (matched_rule, add_tags);
}

// Add this helper function to parse the DMARC XML and summarize it.
fn parse_dmarc_report(xml: &str) -> Result<String, ServerError> {
    #[derive(Debug, serde::Deserialize)]
    struct Feedback {
        report_metadata: Option<ReportMetadata>,
        policy_published: Option<PolicyPublished>,
        record: Option<Vec<Record>>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct ReportMetadata {
        org_name: Option<String>,
        email: Option<String>,
        report_id: Option<String>,
        date_range: Option<DateRange>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct DateRange {
        begin: Option<u64>,
        end: Option<u64>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct PolicyPublished {
        domain: Option<String>,
        adkim: Option<String>,
        aspf: Option<String>,
        p: Option<String>,
        sp: Option<String>,
        pct: Option<String>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct Record {
        row: Option<Row>,
        identifiers: Option<Identifiers>,
        auth_results: Option<AuthResults>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct Row {
        source_ip: Option<String>,
        count: Option<u64>,
        policy_evaluated: Option<PolicyEvaluated>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct PolicyEvaluated {
        disposition: Option<String>,
        dkim: Option<String>,
        spf: Option<String>,
        reason: Option<Vec<Reason>>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct Reason {
        #[serde(rename = "type")]
        reason_type: Option<String>,
        comment: Option<String>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct Identifiers {
        header_from: Option<String>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct AuthResults {
        dkim: Option<Vec<AuthDKIM>>,
        spf: Option<Vec<AuthSPF>>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct AuthDKIM {
        domain: Option<String>,
        result: Option<String>,
        selector: Option<String>,
    }
    #[derive(Debug, serde::Deserialize)]
    struct AuthSPF {
        domain: Option<String>,
        result: Option<String>,
        scope: Option<String>,
    }

    let feedback: Feedback = xml_from_str(xml)
        .map_err(|e| ServerError::StringError(format!("DMARC XML parse error: {e}")))?;
    let mut summary = String::new();
    if let Some(meta) = feedback.report_metadata {
        if let Some(org) = meta.org_name {
            summary += &format!("<b>Reporter:</b> {}<br>", org);
        }
        if let Some(email) = meta.email {
            summary += &format!("<b>Contact:</b> {}<br>", email);
        }
        if let Some(rid) = meta.report_id {
            summary += &format!("<b>Report ID:</b> {}<br>", rid);
        }
        if let Some(dr) = meta.date_range {
            if let (Some(begin), Some(end)) = (dr.begin, dr.end) {
                use chrono::{NaiveDateTime, TimeZone, Utc};
                let begin_dt = Utc.timestamp_opt(begin as i64, 0).single();
                let end_dt = Utc.timestamp_opt(end as i64, 0).single();
                summary += &format!("<b>Date range:</b> {} to {}<br>",
                    begin_dt.map(|d| d.format("%Y-%m-%d").to_string()).unwrap_or(begin.to_string()),
                    end_dt.map(|d| d.format("%Y-%m-%d").to_string()).unwrap_or(end.to_string())
                );
            }
        }
    }
    if let Some(pol) = feedback.policy_published {
        summary += "<b>Policy Published:</b><ul>";
        if let Some(domain) = pol.domain {
            summary += &format!("<li>Domain: {}</li>", domain);
        }
        if let Some(adkim) = pol.adkim {
            summary += &format!("<li>ADKIM: {}</li>", adkim);
        }
        if let Some(aspf) = pol.aspf {
            summary += &format!("<li>ASPF: {}</li>", aspf);
        }
        if let Some(p) = pol.p {
            summary += &format!("<li>Policy: {}</li>", p);
        }
        if let Some(sp) = pol.sp {
            summary += &format!("<li>Subdomain Policy: {}</li>", sp);
        }
        if let Some(pct) = pol.pct {
            summary += &format!("<li>Percent: {}</li>", pct);
        }
        summary += "</ul>";
    }
    if let Some(records) = feedback.record {
        summary += "<b>Records:</b><table style=\"border-collapse:collapse;width:100%;font-size:0.95em;\"><thead><tr style=\"background:#f0f0f0;\"><th style=\"border:1px solid #bbb;padding:4px 8px;\">Source IP</th><th style=\"border:1px solid #bbb;padding:4px 8px;\">Count</th><th style=\"border:1px solid #bbb;padding:4px 8px;\">Header From</th><th style=\"border:1px solid #bbb;padding:4px 8px;\">Disposition</th><th style=\"border:1px solid #bbb;padding:4px 8px;\">DKIM</th><th style=\"border:1px solid #bbb;padding:4px 8px;\">SPF</th><th style=\"border:1px solid #bbb;padding:4px 8px;\">Auth Results</th></tr></thead><tbody>";
        for rec in records {
            let mut row_html = String::new();
            let mut source_ip = String::new();
            let mut count = String::new();
            let mut header_from = String::new();
            let mut disposition = String::new();
            let mut dkim = String::new();
            let mut spf = String::new();
            if let Some(r) = &rec.row {
                if let Some(ref s) = r.source_ip {
                    source_ip = s.clone();
                }
                if let Some(c) = r.count {
                    count = c.to_string();
                }
                if let Some(ref pe) = r.policy_evaluated {
                    if let Some(ref disp) = pe.disposition {
                        disposition = disp.clone();
                    }
                    if let Some(ref d) = pe.dkim {
                        dkim = d.clone();
                    }
                    if let Some(ref s) = pe.spf {
                        spf = s.clone();
                    }
                }
            }
            if let Some(ids) = &rec.identifiers {
                if let Some(ref hf) = ids.header_from {
                    header_from = hf.clone();
                }
            }
            row_html += &format!("<tr><td style=\"border:1px solid #bbb;padding:4px 8px;\">{}</td><td style=\"border:1px solid #bbb;padding:4px 8px;\">{}</td><td style=\"border:1px solid #bbb;padding:4px 8px;\">{}</td><td style=\"border:1px solid #bbb;padding:4px 8px;\">{}</td><td style=\"border:1px solid #bbb;padding:4px 8px;\">{}</td><td style=\"border:1px solid #bbb;padding:4px 8px;\">{}</td><td style=\"border:1px solid #bbb;padding:4px 8px;\">",
                source_ip, count, header_from, disposition, dkim, spf);
            // Auth Results
            let mut auths = String::new();
            if let Some(auth) = &rec.auth_results {
                if let Some(dkims) = &auth.dkim {
                    for dkimres in dkims {
                        auths += &format!("<span style=\"white-space:nowrap;\">DKIM: domain=<b>{}</b> selector=<b>{}</b> result=<b>{}</b></span><br>",
                            dkimres.domain.as_deref().unwrap_or(""),
                            dkimres.selector.as_deref().unwrap_or(""),
                            dkimres.result.as_deref().unwrap_or("")
                        );
                    }
                }
                if let Some(spfs) = &auth.spf {
                    for spfres in spfs {
                        auths += &format!("<span style=\"white-space:nowrap;\">SPF: domain=<b>{}</b> scope=<b>{}</b> result=<b>{}</b></span><br>",
                            spfres.domain.as_deref().unwrap_or(""),
                            spfres.scope.as_deref().unwrap_or(""),
                            spfres.result.as_deref().unwrap_or("")
                        );
                    }
                }
            }
            row_html += &auths;
            row_html += "</td></tr>";
            summary += &row_html;
        }
        summary += "</tbody></table>";
    }
    if summary.is_empty() {
        summary = "No DMARC summary found.".to_string();
    }
    Ok(summary)
}

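`parse_dmarc_report` leans on quick-xml's serde integration: every element of the `<feedback>` document maps onto an `Option` field, so partially filled reports still deserialize. A self-contained sketch of that mapping, using a trimmed-down copy of the structs above and an invented XML snippet (not a test from this repository):

```rust
use quick_xml::de::from_str;
use serde::Deserialize;

// Trimmed-down mirror of the structs in the diff; fields are Options so
// missing elements simply deserialize to None.
#[derive(Debug, Deserialize)]
struct Feedback {
    report_metadata: Option<ReportMetadata>,
}

#[derive(Debug, Deserialize)]
struct ReportMetadata {
    org_name: Option<String>,
    report_id: Option<String>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Invented, minimal DMARC aggregate report.
    let xml = "<feedback><report_metadata><org_name>google.com</org_name><report_id>12345</report_id></report_metadata></feedback>";
    let fb: Feedback = from_str(xml)?;
    let meta = fb.report_metadata.expect("report_metadata element present");
    assert_eq!(meta.org_name.as_deref(), Some("google.com"));
    assert_eq!(meta.report_id.as_deref(), Some("12345"));
    Ok(())
}
```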
@@ -11,8 +11,8 @@ version.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
build-info = "0.0.40"
letterbox-notmuch = { path = "../notmuch", version = "0.17.20", registry = "xinu" }
build-info = "0.0.41"
letterbox-notmuch = { path = "../notmuch", version = "0.17.27", registry = "xinu" }
regex = "1.11.1"
serde = { version = "1.0.219", features = ["derive"] }
sqlx = "0.8.5"

@@ -9,7 +9,7 @@ repository.workspace = true
version.workspace = true

[build-dependencies]
build-info-build = "0.0.40"
build-info-build = "0.0.41"

[dev-dependencies]
wasm-bindgen-test = "0.3.50"

@@ -28,12 +28,12 @@ graphql_client = "0.14.0"
thiserror = "2.0.12"
gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
human_format = "1.1.0"
build-info = "0.0.40"
build-info = "0.0.41"
wasm-bindgen = "=0.2.100"
uuid = { version = "1.16.0", features = [
    "js",
] } # direct dep to set js feature, prevents Rng issues
letterbox-shared = { version = "0.17.9", registry = "xinu" }
letterbox-shared = { path = "../shared/", version = "0.17.27", registry = "xinu" }
seed_hooks = { version = "0.4.1", registry = "xinu" }
strum_macros = "0.27.1"
gloo-console = "0.3.0"

@@ -16,10 +16,11 @@
    <link data-trunk rel="css" href="static/vars.css" />
    <link data-trunk rel="tailwind-css" href="./src/tailwind.css" />
    <link data-trunk rel="css" href="static/overrides.css" />
    <link data-trunk rel="css" href="static/email-specific.css" />
</head>

<body>
    <section id="app"></section>
</body>

</html>
</html>

@@ -2,8 +2,6 @@
// - it's useful when you want to check your code with `cargo make verify`
// but some rules are too "annoying" or are not applicable for your case.)
#![allow(clippy::wildcard_imports)]
// Until https://github.com/rust-lang/rust/issues/138762 is addressed in dependencies
#![allow(wasm_c_abi)]

use log::Level;
use seed::App;

@@ -72,10 +72,6 @@ fn on_url_changed(old: &Url, mut new: Url) -> Msg {
    if did_change {
        messages.push(Msg::ScrollToTop)
    }
    info!(
        "url changed\nold '{old}'\nnew '{new}', history {}",
        history().length().unwrap_or(0)
    );
    let hpp = new.remaining_hash_path_parts();
    let msg = match hpp.as_slice() {
        ["t", tid] => Msg::ShowThreadRequest {
@@ -553,7 +549,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            });
        }
        Msg::ScrollToTop => {
            info!("scrolling to the top");
            web_sys::window().unwrap().scroll_to_with_x_and_y(0., 0.);
        }
        Msg::WindowScrolled => {
@@ -619,6 +614,36 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            orders.send_msg(Msg::CatchupRequest { query });
        }
        Msg::CatchupKeepUnread => {
            if let Some(thread_id) = current_thread_id(&model.context) {
                if let Context::ThreadResult {
                    thread:
                        ShowThreadQueryThread::EmailThread(ShowThreadQueryThreadOnEmailThread {
                            messages,
                            ..
                        }),
                    ..
                } = &model.context
                {
                    //orders.send_msg(Msg::SetUnread(thread_id, false));
                    let unread_messages: Vec<_> = messages
                        .iter()
                        .filter(|msg| msg.tags.iter().any(|t| t == "unread"))
                        .map(|msg| &msg.id)
                        .collect();
                    if unread_messages.is_empty() {
                        // All messages are read, so mark them all unread
                        orders.send_msg(Msg::SetUnread(thread_id, true));
                    } else {
                        // Do nothing if there are some messages unread
                    }
                } else {
                    // News post, not email, just mark unread
                    orders.send_msg(Msg::SetUnread(thread_id, true));
                };
            } else {
                // This shouldn't happen
                warn!("no current thread_id");
            }
            orders.send_msg(Msg::CatchupNext);
        }
        Msg::CatchupMarkAsRead => {

@@ -1025,7 +1025,7 @@ fn message_render(msg: &ShowThreadQueryThreadOnEmailThreadMessages, open: bool)
        ],
        IF!(open =>
            div![
                C!["content", "bg-white", "text-black", "p-4", "min-w-full", "w-0","overflow-x-auto", from],
                C!["content", "bg-white", "text-black", "p-4", "min-w-full", "w-0","overflow-x-auto", from.map(|f|format!("from-{f}"))],
                match &msg.body {
                    ShowThreadQueryThreadOnEmailThreadMessagesBody::UnhandledContentType(
                        ShowThreadQueryThreadOnEmailThreadMessagesBodyOnUnhandledContentType { contents ,content_tree},

@@ -1,7 +1,7 @@
use std::{collections::VecDeque, rc::Rc};

use letterbox_shared::WebsocketMessage;
use log::{error, info};
use log::{debug, error};
use seed::prelude::*;
use serde::{Deserialize, Serialize};
#[cfg(not(target_arch = "wasm32"))]
@@ -63,13 +63,6 @@ use wasm_sockets::{ConnectionStatus, EventClient, Message, WebSocketError};
use wasm_sockets::{ConnectionStatus, EventClient, Message, WebSocketError};
use web_sys::CloseEvent;

/// Message from the server to the client.
#[derive(Serialize, Deserialize)]
pub struct ServerMessage {
    pub id: usize,
    pub text: String,
}

/// Message from the client to the server.
#[derive(Serialize, Deserialize)]
pub struct ClientMessage {
@@ -122,13 +115,13 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
    match msg {
        Msg::WebSocketOpened => {
            model.web_socket_reconnector = None;
            info!("WebSocket connection is open now");
            debug!("WebSocket connection is open now");
        }
        Msg::TextMessageReceived(msg) => {
            model.updates.push_back(msg);
        }
        Msg::WebSocketClosed(close_event) => {
            info!(
            debug!(
                r#"==================
WebSocket connection was closed:
Clean: {0}

@@ -148,7 +141,7 @@ Reason: {2}
            }
        }
        Msg::WebSocketFailed => {
            info!("WebSocket failed");
            debug!("WebSocket failed");
            if model.web_socket_reconnector.is_none() {
                model.web_socket_reconnector = Some(
                    orders.stream_with_handle(streams::backoff(None, Msg::ReconnectWebSocket)),

@@ -156,7 +149,7 @@ Reason: {2}
            }
        }
        Msg::ReconnectWebSocket(retries) => {
            info!("Reconnect attempt: {}", retries);
            debug!("Reconnect attempt: {}", retries);
            model.web_socket = create_websocket(&model.ws_url, orders).unwrap();
        }
        Msg::SendMessage(msg) => {

@@ -177,16 +170,16 @@ fn create_websocket(url: &str, orders: &impl Orders<Msg>) -> Result<EventClient,

    let send = msg_sender.clone();
    client.set_on_connection(Some(Box::new(move |client: &EventClient| {
        info!("{:#?}", client.status);
        debug!("{:#?}", client.status);
        let msg = match *client.status.borrow() {
            ConnectionStatus::Connecting => {
                info!("Connecting...");
                debug!("Connecting...");
                None
            }
            ConnectionStatus::Connected => Some(Msg::WebSocketOpened),
            ConnectionStatus::Error => Some(Msg::WebSocketFailed),
            ConnectionStatus::Disconnected => {
                info!("Disconnected");
                debug!("Disconnected");
                None
            }
        };

@@ -195,7 +188,7 @@ fn create_websocket(url: &str, orders: &impl Orders<Msg>) -> Result<EventClient,

    let send = msg_sender.clone();
    client.set_on_close(Some(Box::new(move |ev| {
        info!("WS: Connection closed");
        debug!("WS: Connection closed");
        send(Some(Msg::WebSocketClosed(ev)));
    })));

@@ -57,15 +57,6 @@ html {
    margin-left: 2em;
}

.mail-thread .content .noreply-news-bloomberg-com a {
    background-color: initial !important;
}

.mail-thread .content .noreply-news-bloomberg-com h2 {
    margin: 0 !important;
    padding: 0 !important;
}

/* Hackaday figures have unreadable black on dark grey */
.news-post figcaption.wp-caption-text {
    background-color: initial !important;

@@ -76,6 +67,11 @@ html {
    display: none !important;
}

.news-post.site-seiya-me figure>pre,
.news-post.site-seiya-me figure>pre>code {
    background-color: black !important;
}

.news-post.site-slashdot .story-byline {
    display: block !important;
    height: initial !important;