Compare commits


No commits in common. "master" and "letterbox-shared-v0.17.40" have entirely different histories.

28 changed files with 1382 additions and 1917 deletions

View File

@@ -1,40 +0,0 @@
# Copilot/AI Agent Instructions for Letterbox
## Project Overview
- **Letterbox** is a Rust monorepo for a mail/newsreader system with a web frontend and a Rocket/GraphQL backend.
- Major crates: `server` (backend, Rocket+async-graphql), `web` (Seed-based WASM frontend), `notmuch` (mail integration), `shared` (common types), `procmail2notmuch` (migration/utility).
- Data flows: Email/news data is indexed and queried via the backend, exposed to the frontend via GraphQL. SQLx/Postgres is used for persistence. Notmuch and custom SQL are both used for mail storage/search.
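A minimal sketch of the persistence pattern, assuming only `sqlx` with the `postgres` and `runtime-tokio` features; the table and column names are illustrative, not taken from the actual schema:

```rust
use sqlx::PgPool;

// Hedged sketch: mark a post read by uid. The `post` table and its
// `is_read`/`uid` columns are assumptions for illustration only.
async fn mark_read(pool: &PgPool, uid: &str) -> Result<(), sqlx::Error> {
    sqlx::query("UPDATE post SET is_read = TRUE WHERE uid = $1")
        .bind(uid)
        .execute(pool)
        .await?;
    Ok(())
}
```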
## Key Workflows
- **Development**: Use `dev.sh` to launch a tmux session with live-reloading for both frontend (`trunk serve`) and backend (`cargo watch ... run`).
- **Build/Release**: Use `just patch|minor|major` for versioned releases (runs SQLx prepare, bumps versions, pushes). `Makefile`'s `release` target does similar steps.
- **Frontend**: In `web/`, use `cargo make serve` and `cargo make watch` for local dev. See `web/README.md` for Seed-specific details.
- **Backend**: In `server/`, run with `cargo run` or via the tmux/dev.sh workflow. SQL migrations are in `server/migrations/`.
## Project Conventions & Patterns
- **GraphQL**: All API boundaries are defined in `server/src/graphql.rs`. Use the `Query`, `Mutation`, and `Subscription` roots. Types are defined with `async-graphql` derive macros; a sketch of the pattern follows this list.
- **HTML Sanitization**: See `server/src/lib.rs` for custom HTML/CSS sanitization and transformation logic (e.g., `Transformer` trait, `sanitize_html`).
- **Tag/Query Parsing**: The `Query` struct in `server/src/lib.rs` parses user queries into filters for notmuch/newsreader/tantivy.
- **Shared Types**: Use the `shared` crate for types and helpers shared between frontend and backend.
- **Custom SQL**: Raw SQL queries are in `server/sql/`. Use these for complex queries not handled by SQLx macros.
- **Feature Flags**: The `tantivy` feature enables full-text search via Tantivy. Check for `#[cfg(feature = "tantivy")]` in backend code.
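A minimal sketch of the resolver and feature-flag conventions, assuming only `async-graphql` v7; `ping` and `search_fulltext` are hypothetical resolvers, not part of the real roots:

```rust
use async_graphql::{Context, Error, Object};

// Hypothetical root for illustration; the real `QueryRoot` lives in
// `server/src/graphql.rs`.
pub struct QueryRoot;

#[Object]
impl QueryRoot {
    /// Hypothetical resolver added in the conventional way.
    async fn ping(&self) -> String {
        "pong".to_string()
    }

    /// Feature-gated resolver, mirroring the `tantivy` flag convention.
    #[cfg(feature = "tantivy")]
    async fn search_fulltext(
        &self,
        _ctx: &Context<'_>,
        query: String,
    ) -> Result<Vec<String>, Error> {
        // Illustrative only: a real resolver would query the Tantivy index.
        Ok(vec![query])
    }
}
```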
## Integration Points
- **Notmuch**: Integrated via the `notmuch` crate for mail indexing/search.
- **Postgres**: Used for newsreader and other persistent data (see `server/migrations/`).
- **GraphQL**: All client-server communication is via GraphQL endpoints defined in the backend.
- **Seed/Trunk**: Frontend is built with Seed (Rust/WASM) and served via Trunk.
## Examples
- To add a new GraphQL query, update `server/src/graphql.rs` and expose it in the `QueryRoot`; the client-side wiring follows the `graphql_client` pattern shown after this list.
- To add a new frontend page, add a module in `web/src/` and register it in the Seed app's router.
- To run the full dev environment: `./dev.sh` (requires tmux, trunk, cargo-watch, etc.).
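As a hedged illustration of the client-side half, mirroring the `#[derive(GraphQLQuery)]` pattern already used in `web/src/graphql.rs` (`graphql/ping.graphql` is a hypothetical query file):

```rust
use graphql_client::GraphQLQuery;

// Hypothetical query struct; graphql_client checks the query text in
// `graphql/ping.graphql` against `graphql/schema.json` at build time.
#[derive(GraphQLQuery)]
#[graphql(
    schema_path = "graphql/schema.json",
    query_path = "graphql/ping.graphql",
    response_derives = "Debug"
)]
pub struct PingQuery;
```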
## References
- See `web/README.md` for frontend/Seed workflow details.
- See `Justfile` and `Makefile` for release/versioning automation.
- See `server/src/lib.rs` and `server/src/graphql.rs` for backend architecture and conventions.
- See `server/sql/` for custom SQL queries.
---
If any conventions or workflows are unclear, please ask for clarification or check the referenced files for examples.

Cargo.lock (generated, 2206 changed lines)

File diff suppressed because it is too large

View File

@@ -8,7 +8,7 @@ authors = ["Bill Thiede <git@xinu.tv>"]
edition = "2021"
license = "UNLICENSED"
publish = ["xinu"]
version = "0.17.55"
version = "0.17.40"
repository = "https://git.z.xinu.tv/wathiede/letterbox"
[profile.dev]

View File

@@ -13,8 +13,8 @@ version.workspace = true
[dependencies]
anyhow = "1.0.98"
clap = { version = "4.5.37", features = ["derive", "env"] }
letterbox-notmuch = { version = "0.17", registry = "xinu" }
letterbox-shared = { version = "0.17", registry = "xinu" }
letterbox-notmuch = { version = "0.17.9", registry = "xinu" }
letterbox-shared = { version = "0.17.9", registry = "xinu" }
serde = { version = "1.0.219", features = ["derive"] }
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio"] }
tokio = { version = "1.44.2", features = ["rt", "macros", "rt-multi-thread"] }

View File

@@ -3,11 +3,4 @@
"extends": [
"config:recommended"
]
,
"packageRules": [
{
"matchPackageNames": ["wasm-bindgen"],
"enabled": false
}
]
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n p.id,\n link,\n clean_summary\nFROM\n post AS p\nINNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts\nWHERE\n search_summary IS NULL\n -- TODO remove AND link ~ '^<'\nORDER BY\n ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)\nLIMIT 1000;\n",
"query": "SELECT\n p.id,\n link,\n clean_summary\nFROM\n post AS p\nINNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts\nWHERE\n search_summary IS NULL\n -- TODO remove AND link ~ '^<'\nORDER BY\n ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)\nLIMIT 100;\n",
"describe": {
"columns": [
{
@@ -28,5 +28,5 @@
true
]
},
"hash": "cf369e3d5547f400cb54004dd03783ef6998a000aec91c50a79405dcf1c53b17"
"hash": "3d271b404f06497a5dcde68cf6bf07291d70fa56058ea736ac24e91d33050c04"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM snooze WHERE id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": []
},
"hash": "77f79f981a9736d18ffd4b87d3aec34d6a048162154a3aba833370c58a860795"
}

View File

@@ -1,26 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\nSELECT id, message_id\nFROM snooze\nWHERE wake < NOW();\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "message_id",
"type_info": "Text"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false
]
},
"hash": "c8383663124a5cc5912b54553f18f7064d33087ebfdf3c0c1c43cbe6d3577084"
}

View File

@@ -1,15 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO snooze (message_id, wake)\n VALUES ($1, $2)\n ON CONFLICT (message_id) DO UPDATE\n SET wake = $2\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Timestamptz"
]
},
"nullable": []
},
"hash": "effd0d0d91e6ad84546f7177f1fd39d4fad736b471eb5e55fd5ac74f7adff664"
}

View File

@@ -13,27 +13,27 @@ version.workspace = true
[dependencies]
chrono-tz = "0.10"
html2text = "0.16"
html2text = "0.15"
ammonia = "4.1.0"
anyhow = "1.0.98"
askama = { version = "0.14.0", features = ["derive"] }
async-graphql = { version = "7", features = ["log", "chrono"] }
async-graphql = { version = "7", features = ["log"] }
async-graphql-axum = "7.0.16"
async-trait = "0.1.88"
axum = { version = "0.8.3", features = ["ws"] }
axum-macros = "0.5.0"
build-info = "0.0.42"
build-info = "0.0.41"
cacher = { version = "0.2.0", registry = "xinu" }
chrono = "0.4.40"
clap = { version = "4.5.37", features = ["derive"] }
css-inline = "0.18.0"
css-inline = "0.17.0"
flate2 = "1.1.2"
futures = "0.3.31"
headers = "0.4.0"
html-escape = "0.2.13"
ical = "0.11"
letterbox-notmuch = { path = "../notmuch", version = "0.17", registry = "xinu" }
letterbox-shared = { path = "../shared", version = "0.17", registry = "xinu" }
letterbox-notmuch = { path = "../notmuch", version = "0.17.40", registry = "xinu" }
letterbox-shared = { path = "../shared", version = "0.17.40", registry = "xinu" }
linkify = "0.10.0"
lol_html = "2.3.0"
mailparse = "0.16.1"
@@ -42,10 +42,10 @@ memmap = "0.7.0"
quick-xml = { version = "0.38.1", features = ["serialize"] }
regex = "1.11.1"
reqwest = { version = "0.12.15", features = ["blocking"] }
scraper = "0.25.0"
scraper = "0.24.0"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "chrono"] }
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "time"] }
tantivy = { version = "0.25.0", optional = true }
thiserror = "2.0.12"
tokio = "1.44.2"
@@ -56,11 +56,11 @@ urlencoding = "2.1.3"
#xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
#xtracing = { path = "../../xtracing" }
xtracing = { version = "0.3.2", registry = "xinu" }
zip = "6.0.0"
zip = "4.3.0"
[build-dependencies]
build-info-build = "0.0.42"
build-info-build = "0.0.41"
[features]
#default = [ "tantivy" ]

View File

@@ -1,2 +0,0 @@
-- Add down migration script here
DROP TABLE IF EXISTS snooze;

View File

@@ -1,6 +0,0 @@
-- Add up migration script here
CREATE TABLE IF NOT EXISTS snooze (
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
message_id text NOT NULL UNIQUE,
wake timestamptz NOT NULL
);

View File

@@ -10,4 +10,4 @@ WHERE
-- TODO remove AND link ~ '^<'
ORDER BY
ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)
LIMIT 1000;
LIMIT 100;

View File

@@ -1,7 +1,7 @@
use std::io::{Cursor, Read};
use askama::Template;
use chrono::{Datelike, LocalResult, TimeZone, Utc};
use chrono::{Datelike, Local, LocalResult, TimeZone, Utc};
use chrono_tz::Tz;
use mailparse::{parse_content_type, parse_mail, MailHeader, MailHeaderMap, ParsedMail};
use quick_xml::de::from_str as xml_from_str;
@@ -17,11 +17,9 @@ use crate::{
const APPLICATION_GZIP: &'static str = "application/gzip";
const APPLICATION_ZIP: &'static str = "application/zip";
const APPLICATION_TLSRPT_GZIP: &'static str = "application/tlsrpt+gzip";
const IMAGE_JPEG: &'static str = "image/jpeg";
const IMAGE_PJPEG: &'static str = "image/pjpeg";
const IMAGE_PNG: &'static str = "image/png";
const MESSAGE_DELIVERY_STATUS: &'static str = "message/delivery-status";
const MESSAGE_RFC822: &'static str = "message/rfc822";
const MULTIPART_ALTERNATIVE: &'static str = "multipart/alternative";
const MULTIPART_MIXED: &'static str = "multipart/mixed";
@@ -643,15 +641,19 @@ pub fn extract_gzip(m: &ParsedMail) -> Result<(Body, Option<String>), ServerErro
Ok((extract_unhandled(m)?, None))
}
pub fn extract_report(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
let mut parts = Vec::new();
for (idx, sp) in m.subparts.iter().enumerate() {
part_addr.push(idx.to_string());
pub fn extract_report(m: &ParsedMail, _part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
let mut html_part = None;
let mut tlsrpt_part = None;
for sp in &m.subparts {
match sp.ctype.mimetype.as_str() {
APPLICATION_TLSRPT_GZIP => {
let gz_bytes = sp.get_body_raw()?;
TEXT_HTML => html_part = Some(sp.get_body()?),
"application/tlsrpt+gzip" => tlsrpt_part = Some(sp.get_body_raw()?),
_ => {} // Ignore other parts for now
}
}
let tlsrpt_summary_html = if let Some(gz_bytes) = tlsrpt_part {
let mut decoder = flate2::read::GzDecoder::new(&gz_bytes[..]);
let mut buffer = Vec::new();
if decoder.read_to_end(&mut buffer).is_ok() {
@@ -664,9 +666,7 @@ pub fn extract_report(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Bod
start_datetime: tlsrpt.date_range.start_datetime,
end_datetime: tlsrpt.date_range.end_datetime,
},
contact_info: tlsrpt
.contact_info
.unwrap_or_else(|| "".to_string()),
contact_info: tlsrpt.contact_info.unwrap_or_else(|| "".to_string()),
report_id: tlsrpt.report_id,
policies: tlsrpt
.policies
@@ -682,20 +682,16 @@ pub fn extract_report(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Bod
.unwrap_or_else(|| Vec::new())
.into_iter()
.map(|mx| match mx {
MxHost::String(s) => {
FormattedTlsRptMxHost {
MxHost::String(s) => FormattedTlsRptMxHost {
hostname: s,
failure_count: 0,
result_type: "".to_string(),
}
}
MxHost::Object(o) => {
FormattedTlsRptMxHost {
},
MxHost::Object(o) => FormattedTlsRptMxHost {
hostname: o.hostname,
failure_count: o.failure_count,
result_type: o.result_type,
}
}
},
})
.collect(),
},
@@ -715,8 +711,7 @@ pub fn extract_report(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Bod
receiving_mx_hostname: detail
.receiving_mx_hostname
.unwrap_or_else(|| "".to_string()),
failed_session_count: detail
.failed_session_count,
failed_session_count: detail.failed_session_count,
additional_info: detail
.additional_info
.unwrap_or_else(|| "".to_string()),
@@ -731,98 +726,30 @@ pub fn extract_report(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Bod
let template = TlsReportTemplate {
report: &formatted_tlsrpt,
};
let html = template.render().unwrap_or_else(|e| format!("<div class=\"tlsrpt-error\">Failed to render TLS report template: {}</div>", e));
parts.push(Body::html(html));
template.render().unwrap_or_else(|e| format!("<div class=\"tlsrpt-error\">Failed to render TLS report template: {}</div>", e))
}
Err(e) => {
let html = format!(
Err(e) => format!(
"<div class=\"tlsrpt-error\">Failed to parse TLS report JSON: {}</div>",
e
);
parts.push(Body::html(html));
}
),
}
} else {
let html = format!("<div class=\"tlsrpt-error\">Failed to convert decompressed data to UTF-8.</div>");
parts.push(Body::html(html));
format!("<div class=\"tlsrpt-error\">Failed to convert decompressed data to UTF-8.</div>")
}
} else {
let html =
format!("<div class=\"tlsrpt-error\">Failed to decompress data.</div>");
parts.push(Body::html(html));
}
}
MESSAGE_RFC822 => {
parts.push(extract_rfc822(&sp, part_addr)?);
}
TEXT_HTML => {
let body = sp.get_body()?;
parts.push(Body::html(body));
}
MESSAGE_DELIVERY_STATUS => {
let body = extract_delivery_status(sp)?;
parts.push(body);
}
TEXT_PLAIN => {
let body = sp.get_body()?;
parts.push(Body::text(body));
}
_ => {
// For any other content type, try to extract the body using the general extract_body function
match extract_body(sp, part_addr) {
Ok(body) => parts.push(body),
Err(_) => {
// If extraction fails, create an unhandled content type body
let msg = format!(
"Unhandled report subpart content type: {}\n{}",
sp.ctype.mimetype,
sp.get_body()
.unwrap_or_else(|_| "Failed to get body".to_string())
);
parts.push(Body::UnhandledContentType(UnhandledContentType {
text: msg,
content_tree: render_content_type_tree(sp),
}));
}
}
}
format!("<div class=\"tlsrpt-error\">Failed to decompressed data.</div>")
}
} else {
"".to_string()
};
part_addr.pop();
}
let final_html = if let Some(html) = html_part {
format!("{}<hr>{} ", html, tlsrpt_summary_html)
} else {
tlsrpt_summary_html
};
if parts.is_empty() {
return Ok(Body::html(
"<div class=\"report-error\">No report content found</div>".to_string(),
));
}
// Add <hr> tags between subparts for better visual separation
let html = parts
.iter()
.map(|p| match p {
Body::PlainText(PlainText { text, .. }) => {
format!(
r#"<p class="view-part-text-plain font-mono whitespace-pre-line">{}</p>"#,
linkify_html(&html_escape::encode_text(text).trim_matches('\n'))
)
}
Body::Html(Html { html, .. }) => html.clone(),
Body::UnhandledContentType(UnhandledContentType { text, .. }) => {
format!(
r#"<p class="view-part-unhandled">{}</p>"#,
linkify_html(&html_escape::encode_text(text).trim_matches('\n'))
)
}
})
.collect::<Vec<_>>()
.join("<hr>\n");
Ok(Body::html(html))
}
pub fn extract_delivery_status(m: &ParsedMail) -> Result<Body, ServerError> {
Ok(Body::text(m.get_body()?))
Ok(Body::html(final_html))
}
pub fn extract_unhandled(m: &ParsedMail) -> Result<Body, ServerError> {
@@ -1910,42 +1837,39 @@ pub fn render_ical_summary(ical_data: &str) -> Result<String, ServerError> {
}
}
// Always use America/Los_Angeles for Google Calendar events if no TZID is present
let event_tz: Tz = tzid
.as_deref()
.unwrap_or("America/Los_Angeles")
.parse()
.unwrap_or(chrono_tz::America::Los_Angeles);
// Parse start/end as chrono DateTime
let (local_fmt_start, local_fmt_end, event_days, recurrence_display) =
if let Some(dtstart) = dtstart {
let tz: Tz = tzid
.as_deref()
.unwrap_or("UTC")
.parse()
.unwrap_or(chrono_tz::UTC);
let fallback = chrono::DateTime::<chrono::Utc>::from_timestamp(0, 0)
.map(|dt| dt.with_timezone(&event_tz))
.map(|dt| dt.with_timezone(&tz))
.unwrap_or_else(|| {
event_tz
.with_ymd_and_hms(1970, 1, 1, 0, 0, 0)
tz.with_ymd_and_hms(1970, 1, 1, 0, 0, 0)
.single()
.unwrap_or_else(|| event_tz.timestamp_opt(0, 0).single().unwrap())
.unwrap_or_else(|| tz.timestamp_opt(0, 0).single().unwrap())
});
let start = parse_ical_datetime_tz(dtstart, event_tz).unwrap_or(fallback);
let start = parse_ical_datetime_tz(dtstart, tz).unwrap_or(fallback);
let end = dtend
.and_then(|d| parse_ical_datetime_tz(d, event_tz))
.and_then(|d| parse_ical_datetime_tz(d, tz))
.unwrap_or(start);
// Use the event's TZ for all calendar grid/highlighting logic
let local_start = start.with_timezone(&Local);
let local_end = end.with_timezone(&Local);
let allday =
dtstart.len() == 8 && (dtend.map(|s| s.len() == 8).unwrap_or(false));
let fmt_start = if allday {
start.format("%a %b %e, %Y").to_string()
local_start.format("%a %b %e, %Y").to_string()
} else {
start.format("%-I:%M %p %a %b %e, %Y").to_string()
local_start.format("%-I:%M %p %a %b %e, %Y").to_string()
};
let fmt_end = if allday {
end.format("%a %b %e, %Y").to_string()
local_end.format("%a %b %e, %Y").to_string()
} else {
end.format("%-I:%M %p %a %b %e, %Y").to_string()
local_end.format("%-I:%M %p %a %b %e, %Y").to_string()
};
// All calendar grid and event_days logic below uses start/end in event's TZ
// Recurrence support: parse RRULE and generate event_days accordingly
let mut days = vec![];
@@ -2220,39 +2144,6 @@ fn parse_ical_datetime_tz(dt: &str, tz: Tz) -> Option<chrono::DateTime<Tz>> {
#[cfg(test)]
mod tests {
#[test]
fn google_calendar_email_thursday_highlights_thursday() {
use mailparse::parse_mail;
let raw_email = include_str!("../../server/testdata/google-calendar-example-thursday.eml");
let parsed = parse_mail(raw_email.as_bytes()).expect("parse_mail");
let mut part_addr = vec![];
let body = extract_body(&parsed, &mut part_addr).expect("extract_body");
let meta = extract_calendar_metadata_from_mail(&parsed, &body);
// Assert detection as Google Calendar
assert!(meta.is_google_calendar_event);
let html = meta.body_html.expect("body_html");
// Print event date info for debugging
for part in parsed.subparts.iter() {
if part.ctype.mimetype == TEXT_CALENDAR {
if let Ok(ical) = part.get_body() {
println!("ICAL data: {}", ical);
if let Some(start) = ical.lines().find(|l| l.starts_with("DTSTART:")) {
println!("Start date: {}", start);
}
}
}
}
println!("Rendered HTML: {}", html);
// Look for September 11 (Thursday) being highlighted
// The calendar should show Sept 11 highlighted with background:#ffd700 and the correct data-event-day
assert!(html.contains(r#"data-event-day="2025-09-11""#));
assert!(html.contains(r#"background:#ffd700"#));
// Since 1:00 AM UTC on Friday 9/12 is 6:00 PM PDT on Thursday 9/11, verify times are correct
assert!(html.contains("6:00 PM Thu Sep 11, 2025"));
}
use super::*;
#[test]
fn google_calendar_email_3_single_event_metadata() {

View File

@@ -7,7 +7,6 @@ use async_graphql::{
Union,
};
use cacher::FilesystemCacher;
use chrono::{DateTime, Utc};
use futures::stream;
use letterbox_notmuch::Notmuch;
use serde::{Deserialize, Serialize};
@@ -629,42 +628,6 @@ impl MutationRoot {
nm.tag_remove(&tag, &query)?;
Ok(true)
}
#[instrument(skip_all, fields(query=query, wake_time=wake_time.to_string(), rid=request_id()))]
async fn snooze<'ctx>(
&self,
ctx: &Context<'ctx>,
query: String,
wake_time: DateTime<Utc>,
) -> Result<bool, Error> {
info!("TODO snooze {query} until {wake_time})");
let pool = ctx.data_unchecked::<PgPool>();
sqlx::query!(
r#"
INSERT INTO snooze (message_id, wake)
VALUES ($1, $2)
ON CONFLICT (message_id) DO UPDATE
SET wake = $2
"#,
query,
wake_time
)
.execute(pool)
.await?;
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
#[cfg(feature = "tantivy")]
let tantivy = ctx.data_unchecked::<TantivyConnection>();
let unread = false;
let query: Query = query.parse()?;
newsreader::set_read_status(pool, &query, unread).await?;
#[cfg(feature = "tantivy")]
tantivy.reindex_thread(pool, &query).await?;
nm::set_read_status(nm, &query, unread).await?;
Ok(true)
}
/// Drop and recreate tantivy index. Warning this is slow
#[cfg(feature = "tantivy")]
async fn drop_and_load_index<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
@@ -676,18 +639,6 @@ impl MutationRoot {
Ok(true)
}
#[instrument(skip_all, fields(rid=request_id()))]
async fn label_unprocessed<'ctx>(
&self,
ctx: &Context<'ctx>,
limit: Option<usize>,
) -> Result<bool, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
label_unprocessed(&nm, &pool, false, limit, "tag:unprocessed").await?;
Ok(true)
}
#[instrument(skip_all, fields(rid=request_id()))]
async fn refresh<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
@@ -697,10 +648,7 @@ impl MutationRoot {
newsreader::refresh(pool, cacher).await?;
// Process email labels
label_unprocessed(&nm, &pool, false, Some(1000), "tag:unprocessed").await?;
// Look for snoozed messages and mark unread
wakeup(&nm, &pool).await?;
label_unprocessed(&nm, &pool, false, Some(10), "tag:unprocessed").await?;
#[cfg(feature = "tantivy")]
{
@@ -722,33 +670,6 @@ impl SubscriptionRoot {
pub type GraphqlSchema = Schema<QueryRoot, MutationRoot, SubscriptionRoot>;
#[instrument(name = "wakeup", skip_all)]
pub async fn wakeup(nm: &Notmuch, pool: &PgPool) -> Result<(), Error> {
for row in sqlx::query!(
r#"
SELECT id, message_id
FROM snooze
WHERE wake < NOW();
"#
)
.fetch_all(pool)
.await?
{
let query: Query = row.message_id.parse()?;
info!("need to wake {query}");
let unread = true;
newsreader::set_read_status(pool, &query, unread).await?;
#[cfg(feature = "tantivy")]
tantivy.reindex_thread(pool, &query).await?;
nm::set_read_status(nm, &query, unread).await?;
sqlx::query!("DELETE FROM snooze WHERE id = $1", row.id)
.execute(pool)
.await?;
}
Ok(())
}
#[instrument(skip_all, fields(query=query))]
pub async fn compute_catchup_ids(
nm: &Notmuch,

View File

@@ -19,7 +19,6 @@ use std::{
use async_trait::async_trait;
use cacher::{Cacher, FilesystemCacher};
use chrono::NaiveDateTime;
use css_inline::{CSSInliner, InlineError, InlineOptions};
pub use error::ServerError;
use linkify::{LinkFinder, LinkKind};
@@ -31,6 +30,7 @@ use maplit::{hashmap, hashset};
use regex::Regex;
use reqwest::StatusCode;
use scraper::{Html, Selector};
use sqlx::types::time::PrimitiveDateTime;
use thiserror::Error;
use tracing::{debug, error, info, warn};
use url::Url;
@@ -754,7 +754,6 @@ pub struct Query {
pub is_notmuch: bool,
pub is_newsreader: bool,
pub is_tantivy: bool,
pub is_snoozed: bool,
pub corpus: Option<Corpus>,
}
@@ -778,9 +777,6 @@ impl fmt::Display for Query {
if self.is_newsreader {
write!(f, "is:news ")?;
}
if self.is_snoozed {
write!(f, "is:snoozed ")?;
}
match self.corpus {
Some(c) => write!(f, "corpus:{c:?}")?,
_ => (),
@@ -837,7 +833,6 @@ impl FromStr for Query {
let mut is_notmuch = false;
let mut is_newsreader = false;
let mut is_tantivy = false;
let mut is_snoozed = false;
let mut corpus = None;
for word in s.split_whitespace() {
if word == "is:unread" {
@@ -877,8 +872,6 @@
is_newsreader = true;
} else if word == "is:newsreader" {
is_newsreader = true;
} else if word == "is:snoozed" {
is_snoozed = true;
} else {
remainder.push(word.to_string());
}
@@ -897,14 +890,13 @@
is_notmuch,
is_newsreader,
is_tantivy,
is_snoozed,
corpus,
})
}
}
pub struct ThreadSummaryRecord {
pub site: Option<String>,
pub date: Option<NaiveDateTime>,
pub date: Option<PrimitiveDateTime>,
pub is_read: Option<bool>,
pub title: Option<String>,
pub uid: String,
@@ -922,7 +914,11 @@ async fn thread_summary_from_row(r: ThreadSummaryRecord) -> ThreadSummary {
title = clean_title(&title).await.expect("failed to clean title");
ThreadSummary {
thread: format!("{NEWSREADER_THREAD_PREFIX}{}", r.uid),
timestamp: r.date.expect("post missing date").and_utc().timestamp() as isize,
timestamp: r
.date
.expect("post missing date")
.assume_utc()
.unix_timestamp() as isize,
date_relative: format!("{:?}", r.date),
//date_relative: "TODO date_relative".to_string(),
matched: 0,

View File

@@ -6,7 +6,7 @@ use letterbox_shared::compute_color;
use maplit::hashmap;
use scraper::Selector;
use sqlx::postgres::PgPool;
use tracing::{error, info, instrument, warn};
use tracing::{error, info, instrument};
use url::Url;
use crate::{
@@ -86,10 +86,6 @@ pub async fn search(
query: &Query,
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
info!("search({after:?} {before:?} {first:?} {last:?} {query:?}");
if query.is_snoozed {
warn!("TODO implement snooze for newsreader::search");
return Ok(Vec::new());
}
if !is_newsreader_query(query) {
return Ok(Vec::new());
}
@@ -215,7 +211,11 @@ pub async fn thread(
}
let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
let is_read = r.is_read.unwrap_or(false);
let timestamp = r.date.expect("post missing date").and_utc().timestamp();
let timestamp = r
.date
.expect("post missing date")
.assume_utc()
.unix_timestamp();
Ok(Thread::News(NewsPost {
thread_id,
is_read,

View File

@@ -64,10 +64,6 @@ pub async fn search(
last: Option<i32>,
query: &Query,
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
if query.is_snoozed {
warn!("TODO implement snooze for nm::search");
return Ok(Vec::new());
}
if !is_notmuch_query(query) {
return Ok(Vec::new());
}

View File

@@ -74,7 +74,13 @@
{% for week in all_days|batch(7) %}
<tr>
{% for day in week %}
{% if event_days.contains(day) %}
{% if event_days.contains(day) && today.is_some() && today.unwrap() == day %}
<td
data-event-day="{{ day.format("%Y-%m-%d") }}"
style="background:#ffd700; color:#222; font-weight:bold; border:2px solid #2196f3; border-radius:4px; text-align:center; box-shadow:0 0 0 2px #2196f3;">
{{ day.day() }}
</td>
{% elif event_days.contains(day) %}
<td
data-event-day="{{ day.format("%Y-%m-%d") }}"
style="background:#ffd700; color:#222; font-weight:bold; border:1px solid #aaa; border-radius:4px; text-align:center;">

View File

@@ -1,175 +0,0 @@
Return-Path: <couchmoney+caf_=gmail=xinu.tv@gmail.com>
Delivered-To: bill@xinu.tv
Received: from phx.xinu.tv [74.207.253.222]
by nixos-01.h.xinu.tv with IMAP (fetchmail-6.5.1)
for <wathiede@localhost> (single-drop); Thu, 11 Sep 2025 12:27:35 -0700 (PDT)
Received: from phx.xinu.tv
by phx.xinu.tv with LMTP
id CqRrBqciw2hiKicAJR8clQ
(envelope-from <couchmoney+caf_=gmail=xinu.tv@gmail.com>)
for <bill@xinu.tv>; Thu, 11 Sep 2025 12:27:35 -0700
X-Original-To: gmail@xinu.tv
Received-SPF: Pass (mailfrom) identity=mailfrom; client-ip=2a00:1450:4864:20::130; helo=mail-lf1-x130.google.com; envelope-from=couchmoney+caf_=gmail=xinu.tv@gmail.com; receiver=xinu.tv
Authentication-Results: phx.xinu.tv;
dkim=pass (2048-bit key; unprotected) header.d=google.com header.i=@google.com header.a=rsa-sha256 header.s=20230601 header.b=dc+iKaXd;
dkim=pass (2048-bit key; unprotected) header.d=gmail.com header.i=@gmail.com header.a=rsa-sha256 header.s=20230601 header.b=kf8o8wAd
Received: from mail-lf1-x130.google.com (mail-lf1-x130.google.com [IPv6:2a00:1450:4864:20::130])
by phx.xinu.tv (Postfix) with ESMTPS id D7E2D80037
for <gmail@xinu.tv>; Thu, 11 Sep 2025 12:27:33 -0700 (PDT)
Received: by mail-lf1-x130.google.com with SMTP id 2adb3069b0e04-55f716e25d9so1141446e87.1
for <gmail@xinu.tv>; Thu, 11 Sep 2025 12:27:33 -0700 (PDT)
ARC-Seal: i=2; a=rsa-sha256; t=1757618852; cv=pass;
d=google.com; s=arc-20240605;
b=MZ+1JfQuPR9luCCxiZNUeqSEpjt1vLuM3bTRCaal/W0NBxkCH0y5v9WfPR0KJ2BPb1
Rtnt/5ayDtmsLf8l6yTTVsBlFYW70ehqXWMD10MMcDEMvnib4KKDAacGaSmijAK4cYGq
FOU9CGNY986OMXMk54TD9NF3fkKDIKcAoh81D6at5/DE3Puuxofq0vZmtmVqQBNKG169
REkhcDpkXTMs/4rJpmZwXp2HbjD84avusBwSlYIQUWsBgO4g7THHjoR4Uk56cek9aEds
ip8IkTO6KRFe6u8FebQsZ/Q9sSAK3pheMExWFVMha9Y0XhACVOZiV600zRCPS9MNHhYw
XEaA==
ARC-Message-Signature: i=2; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
:mime-version:dkim-signature:dkim-signature:delivered-to;
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
fh=WnbwIlqFRbBot/H7TyqablNBDgXRuegsgjC3piothTI=;
b=aYMo5f7VI2b4CiAvLELRJ9zM3dF7ZH8FEqmoAtCcfPHrT9kLLCnriuyXG1R6sC3eoR
++boT29xoScVroIlfcI77Ty7N5X1fawOABkVDWWt7z5w4WhiesT0klxw5nINj9hnLBiK
22nrMevpRpFtmuDO7cle78lSAFZoZuyv+aXCK9RnLKvIm2JuXRrvU8LivxbbpNB4gNl0
hE1jsGuZm1SOJ54SRLwwa4HpSiOJV2x2txTtPCzmvE/LZvNESPjfi3Y2u7gaR87OzkNs
gNi5Xoc+D908zBsmcYKpUYiQcPL79s3DfNwYFIs/rR8Z2xgaHbFD/YmqRUmCEeNLv7o2
RR8g==;
darn=xinu.tv
ARC-Authentication-Results: i=2; mx.google.com;
dkim=pass header.i=@google.com header.s=20230601 header.b=dc+iKaXd;
dkim=pass header.i=@gmail.com header.s=20230601 header.b=kf8o8wAd;
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
dara=pass header.i=@gmail.com
X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
d=1e100.net; s=20230601; t=1757618852; x=1758223652;
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
:mime-version:dkim-signature:dkim-signature:delivered-to
:x-forwarded-for:x-forwarded-to:x-gm-message-state:from:to:cc
:subject:date:message-id:reply-to;
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
b=GKJkb+LmE79XIMEhHRvoCodKS+GBTOCShzMe06Q+zKxUZFHi6XMg8GqteuXQO9LVbw
nPUVN4QO2Hvqch0xzjbc0ryyMOD0u7HqpDUAEZCzamFXIfsX6hZXKLhFqy4YomtsG3os
TCOWBGLqwu7KalfOVg2p+csOR68i0mGyBII1sKcL9vUv9kIQJZxQKHGkuIc48cf6tbUB
L+mkVbMwXLSbpuTJszPmIVZV5o0K52KN+2QoLcmXGfw0mUOnjNI0oSovdbPg4SSDZ3cw
iIsC9vjvtCSFS3pf+Fp807s+Zjh5P6xeSxGU57qhC+HT9kTzIioh5EqKnGqcskDTqrI1
uCiQ==
X-Forwarded-Encrypted: i=2; AJvYcCUfSSA2sT31daRt2+W7dAD9YPx1gqa4JFpVuqCtxVtjqbKfKhOX/EcDQiECQ4BEWjmAP+IqTQ==@xinu.tv
X-Gm-Message-State: AOJu0Ywn7D0BjTaGiM/UFG0WhGuyYGfpLijg+ouhrOaGZzSREyTcRa37
XA3bzQ/LKTpzWhhh01GMwnigmELbWdIVr/BeRLVCuJdh+m+JBMgnAjBTIDs9RF3/xfR7rpG7VOB
6k+ugF+8QRKB4BcL2t8MvfJD03CkrzuhhvUtFTRHopcSZrkqzh8GOJayq42VveQ==
X-Received: by 2002:a05:6512:3b24:b0:55f:6580:818c with SMTP id 2adb3069b0e04-57050fe2fa3mr165340e87.46.1757618851553;
Thu, 11 Sep 2025 12:27:31 -0700 (PDT)
X-Forwarded-To: gmail@xinu.tv
X-Forwarded-For: couchmoney@gmail.com gmail@xinu.tv
Delivered-To: couchmoney@gmail.com
Received: by 2002:a05:6504:d09:b0:2c3:f6c4:ad72 with SMTP id c9csp3388833lty;
Thu, 11 Sep 2025 12:27:29 -0700 (PDT)
X-Received: by 2002:a05:6602:36ce:b0:889:b536:779b with SMTP id ca18e2360f4ac-8903378d714mr78653239f.7.1757618849269;
Thu, 11 Sep 2025 12:27:29 -0700 (PDT)
ARC-Seal: i=1; a=rsa-sha256; t=1757618849; cv=none;
d=google.com; s=arc-20240605;
b=Ln2bufZfSNhR/NmMPrG2QFdtvupjJtLDQnFvsL8HTPn+Dlrt5ff+6k6Wpupab/5mS7
hXjtVD0jnryGUiM5h+SNjxwzNPM3PBoueTpAzzBkjHQqMxJVpspgsGJUVOWAVRBWtWo
39qFyoP0vhzGRWDAuAFV+4VDhsvH7GL8lTrZCSMzrngTadmEdJ5haUIQOa50KFUn5HrK
1r12gayb+TaGaWfQfDo0Me689T8MQnS0ITUuzgvFxfgHZBz3h+IPnC0hrlhdziGovETo
GvHzgCCtiVzu6rop6VMLjLuAYmmT9+jZ3GjSRb+078C9cJR17YpguOC14Cyv4od1Tf7y
RFiQ==;
dara=google.com
ARC-Message-Signature: i=1; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
:mime-version:dkim-signature:dkim-signature;
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
fh=mbzrMIWIgWMC0ni1xEx+ViW4J0RLAdLdPT2cX81nTlk=;
b=JRkHr3CKSkCrafdLzBRtaBOGNl3/0ZSTtgubaNXtvhAiIqRqiQYocfLnVM6N/9sH7O
byTXYaRoaRLw/35WM+QTFGP3zUGRkM3eO4UVS/utVIss1IVLDjfmZHalqLYl8RokW5br
89Z/xYIyjTE7WUdy6uMSrExCNm5VWjO/qcMKsE5s5oDbXdSLaUYxLTurICM3LQksGkCY
wiAWaDDqK14+uhEhW5AyEnebDSYhL9U8UadIv+eK6Ng9q1kwOUzxICRQXEyUtnKhaDKJ
eZ1Qe1mp1CjCulr+I15fz3VwUJ6W1cv6cytcxPbu4p5GPn2gb2hS1eR81HVTL6V1Sp5G
NdDQ==;
dara=google.com
ARC-Authentication-Results: i=1; mx.google.com;
dkim=pass header.i=@google.com header.s=20230601 header.b=dc+iKaXd;
dkim=pass header.i=@gmail.com header.s=20230601 header.b=kf8o8wAd;
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
dara=pass header.i=@gmail.com
Received: from mail-sor-f73.google.com (mail-sor-f73.google.com. [209.85.220.73])
by mx.google.com with SMTPS id ca18e2360f4ac-88f2ea1122asor117632339f.3.2025.09.11.12.27.29
for <couchmoney@gmail.com>
(Google Transport Security);
Thu, 11 Sep 2025 12:27:29 -0700 (PDT)
Received-SPF: pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) client-ip=209.85.220.73;
Authentication-Results: mx.google.com;
dkim=pass header.i=@google.com header.s=20230601 header.b=dc+iKaXd;
dkim=pass header.i=@gmail.com header.s=20230601 header.b=kf8o8wAd;
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
dara=pass header.i=@gmail.com
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
d=google.com; s=20230601; t=1757618849; x=1758223649; dara=google.com;
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
:mime-version:from:to:cc:subject:date:message-id:reply-to;
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
b=dc+iKaXdFyqu6K0MIgk848QuwpQXvwzwlEVkxmjuCWvn9DzanMbYn5QJRyRTKilRna
BZ7gJSPriHUHcJd4fVKgGuCaQg0TxenCwm+0R64oB1xcDLfonayo/nCrFqEcCLHNmi7x
lTyWGJ0rLw6nKazxtcCdIbDhVgiE7/fXNI89w6XFp6pcKLl48yFIoCG1f6uY4iQ7QqNU
hLHzjmlzjTi58xFLao7SizZ0lr7E5cHXKHp1Ls/hkDzzcY0Y+O5+3r+NQw4MtpHTcY6/
kQlg6OhyMx8PTu4cuepQKXLHV4aFaNJbDQTp8wew4xPIgi7pm2p6hb6C3GgwY6ptOvLd
wuag==
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
d=gmail.com; s=20230601; t=1757618849; x=1758223649; dara=google.com;
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
:mime-version:from:to:cc:subject:date:message-id:reply-to;
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
b=kf8o8wAd5DSU/NC7SDiuIoohCu+/7wTjWyQqDYbBjUFGaBaYdj6aD5JWNQ1KEA2W8o
E+Qy2ymyrzodKa1eOsQX2UDAYKOKpdxMWvx1u19+SC3Dp8DP4puRMrL2ObiSEMLCuOvz
Mxmkd+ZUP72EhVuQwK1iSm04/cjQaMsSiPhvSBaxXMaaarwlKeOoCoIo+qC/Z9emiBBv
Gk0sQcLA+CByvsxuvD9GInSA0rdoZ0ijhSb0Y475Hieam1QQqy/fhe8lgujzhXNFoIbR
5EA9GE0VV9PDoNanaT+u954YeOFBL2YZ5gm2gHltw8tBI98LKnC42Pa3qyMznBa2dI2Q
A0RQ==
X-Google-Smtp-Source: AGHT+IGmC5/03nTVMeYJBoq1R/BiA19iH0DFaZyyImB3W8mtgjdn+XqIFK1fC8aTwWRXQmsr71Xo0cmkgx6hjPvicQ/d
MIME-Version: 1.0
X-Received: by 2002:a05:6602:380d:b0:887:4c93:f12c with SMTP id
ca18e2360f4ac-8903596aca3mr58994639f.17.1757618848817; Thu, 11 Sep 2025
12:27:28 -0700 (PDT)
Reply-To: tconvertino@gmail.com
Sender: Google Calendar <calendar-notification@google.com>
Auto-Submitted: auto-generated
Message-ID: <calendar-01d5e8a0-fad7-450b-9758-a16472bf2aa8@google.com>
Date: Thu, 11 Sep 2025 19:27:28 +0000
Subject: Canceled event: Scout Babysits @ Thu Sep 11, 2025 6pm - 9pm (PDT) (Family)
From: tconvertino@gmail.com
To: couchmoney@gmail.com
Content-Type: multipart/mixed; boundary="000000000000226b77063e8b878d"
--000000000000226b77063e8b878d
Content-Type: text/calendar; charset="UTF-8"; method=CANCEL
Content-Transfer-Encoding: 7bit
BEGIN:VCALENDAR
PRODID:-//Google Inc//Google Calendar 70.9054//EN
VERSION:2.0
CALSCALE:GREGORIAN
METHOD:CANCEL
X-GOOGLE-CALID:g66m0feuqsao8l1c767pvvcg4k@group.calendar.google.com
BEGIN:VEVENT
DTSTART:20250912T010000Z
DTEND:20250912T040000Z
DTSTAMP:20250911T192728Z
UID:4ang6172d1t7782sn2hmi30fgi@google.com
CREATED:20250901T224707Z
DESCRIPTION:
LAST-MODIFIED:20250911T192728Z
LOCATION:
SEQUENCE:1
STATUS:CANCELLED
SUMMARY:Scout Babysits
TRANSP:OPAQUE
END:VEVENT
END:VCALENDAR
--000000000000226b77063e8b878d--

View File

@@ -11,8 +11,8 @@ version.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
build-info = "0.0.42"
letterbox-notmuch = { path = "../notmuch", version = "0.17", registry = "xinu" }
build-info = "0.0.41"
letterbox-notmuch = { path = "../notmuch", version = "0.17.40", registry = "xinu" }
regex = "1.11.1"
serde = { version = "1.0.219", features = ["derive"] }
sqlx = "0.8.5"

View File

@@ -9,10 +9,10 @@ repository.workspace = true
version.workspace = true
[build-dependencies]
build-info-build = "0.0.42"
build-info-build = "0.0.41"
[dev-dependencies]
#wasm-bindgen-test = "0.3.50"
wasm-bindgen-test = "0.3.50"
[dependencies]
console_error_panic_hook = "0.1.7"
@@ -24,16 +24,16 @@ serde = { version = "1.0.219", features = ["derive"] }
itertools = "0.14.0"
serde_json = { version = "1.0.140", features = ["unbounded_depth"] }
chrono = "0.4.40"
graphql_client = "0.15.0"
graphql_client = "0.14.0"
thiserror = "2.0.12"
gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
human_format = "1.1.0"
build-info = "0.0.42"
build-info = "0.0.41"
wasm-bindgen = "=0.2.100"
uuid = { version = "1.16.0", features = [
"js",
] } # direct dep to set js feature, prevents Rng issues
letterbox-shared = { path = "../shared/", version = "0.17", registry = "xinu" }
letterbox-shared = { path = "../shared/", version = "0.17.40", registry = "xinu" }
seed_hooks = { version = "0.4.1", registry = "xinu" }
strum_macros = "0.27.1"
gloo-console = "0.3.0"

View File

@@ -51,7 +51,7 @@
},
{
"args": [],
"description": "Indicates that an Input Object is a OneOf Input Object (and thus requires exactly one of its field be provided)",
"description": "Indicates that an Input Object is a OneOf Input Object (and thus requires\n exactly one of its field be provided)",
"locations": [
"INPUT_OBJECT"
],
@@ -107,14 +107,12 @@
}
],
"mutationType": {
"name": "MutationRoot"
"name": "Mutation"
},
"queryType": {
"name": "QueryRoot"
},
"subscriptionType": {
"name": "SubscriptionRoot"
},
"subscriptionType": null,
"types": [
{
"description": null,
@@ -316,16 +314,6 @@
"name": "Corpus",
"possibleTypes": null
},
{
"description": "Implement the DateTime<Utc> scalar\n\nThe input/output is a string in RFC3339 format.",
"enumValues": null,
"fields": null,
"inputFields": null,
"interfaces": null,
"kind": "SCALAR",
"name": "DateTime",
"possibleTypes": null
},
{
"description": null,
"enumValues": [
@@ -981,51 +969,6 @@
}
}
},
{
"args": [
{
"defaultValue": null,
"description": null,
"name": "query",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"defaultValue": null,
"description": null,
"name": "wakeTime",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "DateTime",
"ofType": null
}
}
}
],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "snooze",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
@@ -1046,7 +989,7 @@
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"name": "MutationRoot",
"name": "Mutation",
"possibleTypes": null
},
{
@@ -1531,33 +1474,6 @@
"name": "String",
"possibleTypes": null
},
{
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "values",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
}
}
],
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"name": "SubscriptionRoot",
"possibleTypes": null
},
{
"description": null,
"enumValues": null,

View File

@@ -1,4 +0,0 @@
mutation SnoozeMutation($query: String!, $wakeTime: DateTime!) {
snooze(query: $query, wakeTime: $wakeTime)
}

View File

@@ -1,4 +1,4 @@
DEV_HOST=localhost
DEV_PORT=9345
graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql/ --output schema.json
graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql --output schema.json
git diff schema.json

View File

@@ -1,9 +1,7 @@
use chrono::Utc;
use gloo_net::{http::Request, Error};
use graphql_client::GraphQLQuery;
use serde::{de::DeserializeOwned, Serialize};
type DateTime = chrono::DateTime<Utc>;
// The paths are relative to the directory where your `Cargo.toml` is located.
// Both json and the GraphQL schema language are supported as sources for the schema
#[derive(GraphQLQuery)]
@@ -54,14 +52,6 @@ pub struct AddTagMutation;
)]
pub struct RemoveTagMutation;
#[derive(GraphQLQuery)]
#[graphql(
schema_path = "graphql/schema.json",
query_path = "graphql/snooze.graphql",
response_derives = "Debug"
)]
pub struct SnoozeMutation;
#[derive(GraphQLQuery)]
#[graphql(
schema_path = "graphql/schema.json",

View File

@@ -1,6 +1,5 @@
use std::collections::HashSet;
use chrono::{DateTime, Utc};
use graphql_client::GraphQLQuery;
use letterbox_shared::WebsocketMessage;
use log::{debug, error, info, warn};
@@ -260,29 +259,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
Msg::GoToSearchResults
});
}
Msg::Snooze(query, wake_time) => {
let is_catchup = model.catchup.is_some();
orders.skip().perform_cmd(async move {
let res: Result<
graphql_client::Response<graphql::snooze_mutation::ResponseData>,
gloo_net::Error,
> = send_graphql(graphql::SnoozeMutation::build_query(
graphql::snooze_mutation::Variables {
query: query.clone(),
wake_time,
},
))
.await;
if let Err(e) = res {
error!("Failed to snooze {query} until {wake_time}: {e}");
}
if is_catchup {
Msg::CatchupMarkAsRead
} else {
Msg::GoToSearchResults
}
});
}
Msg::FrontPageRequest {
query,
@@ -291,7 +267,6 @@
first,
last,
} => {
model.refreshing_state = RefreshingState::Loading;
let (after, before, first, last) = match (after.as_ref(), before.as_ref(), first, last)
{
// If no pagination set, set reasonable defaults
@@ -317,32 +292,25 @@
});
}
Msg::FrontPageResult(Err(e)) => {
let msg = format!("error FrontPageResult: {e:?}");
error!("{msg}");
model.refreshing_state = RefreshingState::Error(msg);
error!("error FrontPageResult: {e:?}");
}
Msg::FrontPageResult(Ok(graphql_client::Response {
data: None,
errors: None,
..
})) => {
let msg = format!("FrontPageResult no data or errors, should not happen");
error!("{msg}");
model.refreshing_state = RefreshingState::Error(msg);
error!("FrontPageResult no data or errors, should not happen");
}
Msg::FrontPageResult(Ok(graphql_client::Response {
data: None,
errors: Some(e),
..
})) => {
let msg = format!("FrontPageResult error: {e:?}");
error!("{msg}");
model.refreshing_state = RefreshingState::Error(msg);
error!("FrontPageResult error: {e:?}");
}
Msg::FrontPageResult(Ok(graphql_client::Response {
data: Some(data), ..
})) => {
model.refreshing_state = RefreshingState::None;
model.tags = Some(
data.tags
.into_iter()
@@ -382,7 +350,6 @@
}
Msg::ShowThreadRequest { thread_id } => {
model.refreshing_state = RefreshingState::Loading;
orders.skip().perform_cmd(async move {
Msg::ShowThreadResult(
send_graphql(graphql::ShowThreadQuery::build_query(
@@ -395,7 +362,6 @@
Msg::ShowThreadResult(Ok(graphql_client::Response {
data: Some(data), ..
})) => {
model.refreshing_state = RefreshingState::None;
model.tags = Some(
data.tags
.into_iter()
@@ -435,12 +401,9 @@
orders.send_msg(Msg::WindowScrolled);
}
Msg::ShowThreadResult(bad) => {
let msg = format!("show_thread_query error: {bad:#?}");
error!("{msg}");
model.refreshing_state = RefreshingState::Error(msg);
error!("show_thread_query error: {bad:#?}");
}
Msg::CatchupRequest { query } => {
model.refreshing_state = RefreshingState::Loading;
orders.perform_cmd(async move {
Msg::CatchupResult(
send_graphql::<_, graphql::catchup_query::ResponseData>(
@ -455,7 +418,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
Msg::CatchupResult(Ok(graphql_client::Response {
data: Some(data), ..
})) => {
model.refreshing_state = RefreshingState::None;
let items = data.catchup;
if items.is_empty() {
orders.send_msg(Msg::GoToSearchResults);
@@ -471,9 +433,7 @@
}
}
Msg::CatchupResult(bad) => {
let msg = format!("catchup_query error: {bad:#?}");
error!("{msg}");
model.refreshing_state = RefreshingState::Error(msg);
error!("catchup_query error: {bad:#?}");
}
Msg::SelectionSetNone => {
if let Context::SearchResult {
@@ -853,7 +813,6 @@ pub enum Msg {
SetUnread(String, bool),
AddTag(String, String),
RemoveTag(String, String),
Snooze(String, DateTime<Utc>),
FrontPageRequest {
query: String,

View File

@@ -78,16 +78,13 @@ mod tw_classes {
}
pub fn view(model: &Model) -> Node<Msg> {
let is_loading = match model.refreshing_state {
RefreshingState::Loading => true,
_ => false,
};
match &model.context {
Context::None => normal_view(
div![h1!["Loading"]],
&model.versions,
&model.query,
&model.refreshing_state,
model.read_completion_ratio,
&model.tags,
),
Context::ThreadResult {
@@ -96,23 +93,17 @@ pub fn view(model: &Model) -> Node<Msg> {
} => {
if let Some(catchup) = &model.catchup {
catchup_view(
thread(thread_data, open_messages, &model.content_el, true, 0.),
thread(thread_data, open_messages, &model.content_el, true),
&catchup.items,
is_loading,
model.read_completion_ratio,
)
} else {
normal_view(
thread(
thread_data,
open_messages,
&model.content_el,
false,
model.read_completion_ratio,
),
thread(thread_data, open_messages, &model.content_el, false),
&model.versions,
&model.query,
&model.refreshing_state,
model.read_completion_ratio,
&model.tags,
)
}
@@ -123,17 +114,17 @@ pub fn view(model: &Model) -> Node<Msg> {
} => {
if let Some(catchup) = &model.catchup {
catchup_view(
news_post(post, &model.content_el, true, 0.),
news_post(post, &model.content_el, true),
&catchup.items,
is_loading,
model.read_completion_ratio,
)
} else {
normal_view(
news_post(post, &model.content_el, false, model.read_completion_ratio),
news_post(post, &model.content_el, false),
&model.versions,
&model.query,
&model.refreshing_state,
model.read_completion_ratio,
&model.tags,
)
}
@@ -149,6 +140,7 @@ pub fn view(model: &Model) -> Node<Msg> {
&model.versions,
&model.query,
&model.refreshing_state,
model.read_completion_ratio,
&model.tags,
),
}
@@ -159,6 +151,7 @@ fn normal_view(
versions: &Version,
query: &str,
refreshing_state: &RefreshingState,
read_completion_ratio: f64,
tags: &Option<Vec<Tag>>,
) -> Node<Msg> {
div![
@@ -185,13 +178,13 @@ fn normal_view(
content,
view_header(query, refreshing_state, false),
],
reading_progress(read_completion_ratio),
]
}
fn catchup_view(
content: Node<Msg>,
items: &[CatchupItem],
is_loading: bool,
read_completion_ratio: f64,
) -> Node<Msg> {
div![
@@ -207,35 +200,14 @@ fn catchup_view(
"border-gray-500",
"bg-black/50",
],
div![
C!["absolute", "top-0", "left-4", "text-green-200", "p-4"],
IF!(is_loading=>span![i![C!["animate-spin", "fas", "fa-spinner"]]])
],
h1![
C!["text-center"],
format!("{} left ", items.iter().filter(|i| !i.seen).count(),)
],
div![
C!["absolute", "top-0", "right-4", "text-gray-500", "p-4"],
span![i![C!["fas", "fa-x"]]],
ev(Ev::Click, move |_| Msg::CatchupExit)
],
div![
C![
"absolute",
"left-0",
"right-0",
"bottom-0",
"w-full",
"h-1",
"bg-gray-200"
],
div![
C!["h-1", "bg-green-500"],
style! {
St::Width => format!("{}%", read_completion_ratio*100.)
}
]
h1![
C!["text-center"],
format!("{} left ", items.iter().filter(|i| !i.seen).count(),)
]
],
div![C!["mt-12", "mb-20"], content],
@@ -275,6 +247,7 @@ fn catchup_view(
ev(Ev::Click, |_| Msg::CatchupMarkAsRead)
]
],
reading_progress(read_completion_ratio)
]
}
@@ -351,8 +324,8 @@ fn search_results(
attrs! {
At::Href => urls::thread(&tid)
},
div![C!["line-clamp-2"], title_break, &r.subject],
span![C!["line-clamp-2", "text-xs"], pretty_authors(&r.authors)],
div![title_break, &r.subject],
span![C!["text-xs"], pretty_authors(&r.authors)],
div![
C!["flex", "flex-wrap", "justify-between"],
span![tags_chiclet(&tags)],
@@ -754,11 +727,9 @@ fn render_open_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<
C!["flex", "p-4", "bg-neutral-800"],
div![avatar],
div![
C!["px-4", "flex-1"],
div![
C!["flex"],
div![
C!["font-semibold", "text-sm", "flex-1"],
C!["px-4", "mr-auto"],
span![
C!["font-semibold", "text-sm"],
from_detail.as_ref().map(|addr| attrs! {
At::Title => addr
}),
@@ -766,8 +737,6 @@
" ",
from_detail.as_ref().map(|text| copy_text_widget(&text))
],
snooze_buttons(msg.timestamp, &id),
],
IF!(!msg.to.is_empty() =>div![
C!["text-xs"],
span![
@@ -1089,8 +1058,6 @@ fn message_render(msg: &ShowThreadQueryThreadOnEmailThreadMessages, open: bool)
},
) => div![
C!["view-part-text-html"],
// If there isn't any HTML tags, treat more like plain text
IF!(!(contents.contains('<') && contents.contains('>')) => C!["whitespace-pre-line"]),
raw![contents],
IF!(!msg.attachments.is_empty() => render_attachements(&msg.attachments)),
view_content_tree(&content_tree),
@@ -1175,7 +1142,6 @@ fn thread(
open_messages: &HashSet<String>,
content_el: &ElRef<HtmlElement>,
catchup_mode: bool,
read_completion_ratio: f64,
) -> Node<Msg> {
// TODO(wathiede): show per-message subject if it changes significantly from top-level subject
let subject = if thread.subject.is_empty() {
@@ -1260,8 +1226,7 @@ fn thread(
el_ref(content_el),
messages,
IF!(!catchup_mode => click_to_top())
],
reading_progress(read_completion_ratio)
]
]
}
@@ -1404,7 +1369,7 @@ pub fn view_tags(tags: &Option<Vec<Tag>>) -> Node<Msg> {
},
],
a![
C![indent_cls, "grow", "truncate"],
C!["grow", "truncate"],
attrs! {
At::Href => href
},
@@ -1505,7 +1470,6 @@ fn news_post(
post: &ShowThreadQueryThreadOnNewsPost,
content_el: &ElRef<HtmlElement>,
catchup_mode: bool,
read_completion_ratio: f64,
) -> Node<Msg> {
let subject = &post.title;
set_title(subject);
@@ -1593,7 +1557,6 @@ fn news_post(
]
],
IF!(!catchup_mode => click_to_top()),
reading_progress(read_completion_ratio)
]
}
fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg> {
@@ -1629,13 +1592,9 @@ fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg>
C!["flex", "p-4", "bg-neutral-800"],
div![favicon],
div![
C!["px-4", "mr-auto", "flex-1"],
C!["px-4", "mr-auto"],
div![
div![
C!["flex"],
div![C!["font-semibold", "text-sm", "flex-1"], from],
snooze_buttons(Some(post.timestamp), &id),
],
div![C!["font-semibold", "text-sm"], from],
div![
C!["flex", "gap-2", "pt-2", "text-sm"],
a![
@@ -1730,47 +1689,3 @@ fn click_to_top() -> Node<Msg> {
ev(Ev::Click, |_| Msg::ScrollToTop)
]
}
fn snooze_buttons(timestamp: Option<i64>, id: &str) -> Node<Msg> {
div![
span![C!["px-2"], ""],
button![
tw_classes::button(),
C!["rounded-r-none"],
"1d",
ev(Ev::Click, {
let id = id.to_string();
move |e| {
e.stop_propagation();
Msg::Snooze(id, Utc::now() + chrono::Days::new(1))
}
})
],
button![
tw_classes::button(),
C!["rounded-none"],
"7d",
ev(Ev::Click, {
let id = id.to_string();
move |e| {
e.stop_propagation();
Msg::Snooze(id, Utc::now() + chrono::Days::new(7))
}
})
],
timestamp.map(
|ts| chrono::DateTime::from_timestamp(ts, 0).map(|ts| button![
tw_classes::button(),
C!["rounded-l-none"],
"+6m",
ev(Ev::Click, {
let id = id.to_string();
move |e| {
e.stop_propagation();
Msg::Snooze(id, ts + chrono::Days::new(180))
}
})
])
),
]
}