Compare commits

master..letterbox-web-v0.17.45

No commits in common. "master" and "letterbox-web-v0.17.45" have entirely different histories.

23 changed files with 822 additions and 1084 deletions

Cargo.lock (generated, 1384 lines changed): file diff suppressed because it is too large.

@@ -8,7 +8,7 @@ authors = ["Bill Thiede <git@xinu.tv>"]
 edition = "2021"
 license = "UNLICENSED"
 publish = ["xinu"]
-version = "0.17.55"
+version = "0.17.45"
 repository = "https://git.z.xinu.tv/wathiede/letterbox"
 [profile.dev]

@@ -13,8 +13,8 @@ version.workspace = true
 [dependencies]
 anyhow = "1.0.98"
 clap = { version = "4.5.37", features = ["derive", "env"] }
-letterbox-notmuch = { version = "0.17", registry = "xinu" }
-letterbox-shared = { version = "0.17", registry = "xinu" }
+letterbox-notmuch = { version = "0.17.9", registry = "xinu" }
+letterbox-shared = { version = "0.17.9", registry = "xinu" }
 serde = { version = "1.0.219", features = ["derive"] }
 sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio"] }
 tokio = { version = "1.44.2", features = ["rt", "macros", "rt-multi-thread"] }

@@ -1,6 +1,6 @@
 {
 "db_name": "PostgreSQL",
-"query": "SELECT\n p.id,\n link,\n clean_summary\nFROM\n post AS p\nINNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts\nWHERE\n search_summary IS NULL\n -- TODO remove AND link ~ '^<'\nORDER BY\n ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)\nLIMIT 1000;\n",
+"query": "SELECT\n p.id,\n link,\n clean_summary\nFROM\n post AS p\nINNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts\nWHERE\n search_summary IS NULL\n -- TODO remove AND link ~ '^<'\nORDER BY\n ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)\nLIMIT 100;\n",
 "describe": {
 "columns": [
 {
@@ -28,5 +28,5 @@
 true
 ]
 },
-"hash": "cf369e3d5547f400cb54004dd03783ef6998a000aec91c50a79405dcf1c53b17"
+"hash": "3d271b404f06497a5dcde68cf6bf07291d70fa56058ea736ac24e91d33050c04"
 }

@@ -1,14 +0,0 @@
-{
-"db_name": "PostgreSQL",
-"query": "DELETE FROM snooze WHERE id = $1",
-"describe": {
-"columns": [],
-"parameters": {
-"Left": [
-"Int4"
-]
-},
-"nullable": []
-},
-"hash": "77f79f981a9736d18ffd4b87d3aec34d6a048162154a3aba833370c58a860795"
-}

@@ -1,26 +0,0 @@
-{
-"db_name": "PostgreSQL",
-"query": "\nSELECT id, message_id\nFROM snooze\nWHERE wake < NOW();\n ",
-"describe": {
-"columns": [
-{
-"ordinal": 0,
-"name": "id",
-"type_info": "Int4"
-},
-{
-"ordinal": 1,
-"name": "message_id",
-"type_info": "Text"
-}
-],
-"parameters": {
-"Left": []
-},
-"nullable": [
-false,
-false
-]
-},
-"hash": "c8383663124a5cc5912b54553f18f7064d33087ebfdf3c0c1c43cbe6d3577084"
-}

@@ -1,15 +0,0 @@
-{
-"db_name": "PostgreSQL",
-"query": "\n INSERT INTO snooze (message_id, wake)\n VALUES ($1, $2)\n ON CONFLICT (message_id) DO UPDATE\n SET wake = $2\n ",
-"describe": {
-"columns": [],
-"parameters": {
-"Left": [
-"Text",
-"Timestamptz"
-]
-},
-"nullable": []
-},
-"hash": "effd0d0d91e6ad84546f7177f1fd39d4fad736b471eb5e55fd5ac74f7adff664"
-}

@@ -13,27 +13,27 @@ version.workspace = true
 [dependencies]
 chrono-tz = "0.10"
-html2text = "0.16"
+html2text = "0.15"
 ammonia = "4.1.0"
 anyhow = "1.0.98"
 askama = { version = "0.14.0", features = ["derive"] }
-async-graphql = { version = "7", features = ["log", "chrono"] }
+async-graphql = { version = "7", features = ["log"] }
 async-graphql-axum = "7.0.16"
 async-trait = "0.1.88"
 axum = { version = "0.8.3", features = ["ws"] }
 axum-macros = "0.5.0"
-build-info = "0.0.42"
+build-info = "0.0.41"
 cacher = { version = "0.2.0", registry = "xinu" }
 chrono = "0.4.40"
 clap = { version = "4.5.37", features = ["derive"] }
-css-inline = "0.18.0"
+css-inline = "0.17.0"
 flate2 = "1.1.2"
 futures = "0.3.31"
 headers = "0.4.0"
 html-escape = "0.2.13"
 ical = "0.11"
-letterbox-notmuch = { path = "../notmuch", version = "0.17", registry = "xinu" }
-letterbox-shared = { path = "../shared", version = "0.17", registry = "xinu" }
+letterbox-notmuch = { path = "../notmuch", version = "0.17.45", registry = "xinu" }
+letterbox-shared = { path = "../shared", version = "0.17.45", registry = "xinu" }
 linkify = "0.10.0"
 lol_html = "2.3.0"
 mailparse = "0.16.1"
@@ -42,10 +42,10 @@ memmap = "0.7.0"
 quick-xml = { version = "0.38.1", features = ["serialize"] }
 regex = "1.11.1"
 reqwest = { version = "0.12.15", features = ["blocking"] }
-scraper = "0.25.0"
+scraper = "0.24.0"
 serde = { version = "1.0.219", features = ["derive"] }
 serde_json = "1.0.140"
-sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "chrono"] }
+sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "time"] }
 tantivy = { version = "0.25.0", optional = true }
 thiserror = "2.0.12"
 tokio = "1.44.2"
@@ -56,11 +56,11 @@ urlencoding = "2.1.3"
 #xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
 #xtracing = { path = "../../xtracing" }
 xtracing = { version = "0.3.2", registry = "xinu" }
-zip = "6.0.0"
+zip = "5.0.0"
 [build-dependencies]
-build-info-build = "0.0.42"
+build-info-build = "0.0.41"
 [features]
 #default = [ "tantivy" ]

@@ -1,2 +0,0 @@
--- Add down migration script here
-DROP TABLE IF EXISTS snooze;

@@ -1,6 +0,0 @@
--- Add up migration script here
-CREATE TABLE IF NOT EXISTS snooze (
-id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
-message_id text NOT NULL UNIQUE,
-wake timestamptz NOT NULL
-);

@@ -10,4 +10,4 @@ WHERE
 -- TODO remove AND link ~ '^<'
 ORDER BY
 ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)
-LIMIT 1000;
+LIMIT 100;

@@ -7,7 +7,6 @@ use async_graphql::{
 Union,
 };
 use cacher::FilesystemCacher;
-use chrono::{DateTime, Utc};
 use futures::stream;
 use letterbox_notmuch::Notmuch;
 use serde::{Deserialize, Serialize};
@@ -629,42 +628,6 @@ impl MutationRoot {
 nm.tag_remove(&tag, &query)?;
 Ok(true)
 }
-#[instrument(skip_all, fields(query=query, wake_time=wake_time.to_string(), rid=request_id()))]
-async fn snooze<'ctx>(
-&self,
-ctx: &Context<'ctx>,
-query: String,
-wake_time: DateTime<Utc>,
-) -> Result<bool, Error> {
-info!("TODO snooze {query} until {wake_time})");
-let pool = ctx.data_unchecked::<PgPool>();
-sqlx::query!(
-r#"
-INSERT INTO snooze (message_id, wake)
-VALUES ($1, $2)
-ON CONFLICT (message_id) DO UPDATE
-SET wake = $2
-"#,
-query,
-wake_time
-)
-.execute(pool)
-.await?;
-let nm = ctx.data_unchecked::<Notmuch>();
-let pool = ctx.data_unchecked::<PgPool>();
-#[cfg(feature = "tantivy")]
-let tantivy = ctx.data_unchecked::<TantivyConnection>();
-let unread = false;
-let query: Query = query.parse()?;
-newsreader::set_read_status(pool, &query, unread).await?;
-#[cfg(feature = "tantivy")]
-tantivy.reindex_thread(pool, &query).await?;
-nm::set_read_status(nm, &query, unread).await?;
-Ok(true)
-}
 /// Drop and recreate tantivy index. Warning this is slow
 #[cfg(feature = "tantivy")]
 async fn drop_and_load_index<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
@@ -676,18 +639,6 @@ impl MutationRoot {
 Ok(true)
 }
-#[instrument(skip_all, fields(rid=request_id()))]
-async fn label_unprocessed<'ctx>(
-&self,
-ctx: &Context<'ctx>,
-limit: Option<usize>,
-) -> Result<bool, Error> {
-let nm = ctx.data_unchecked::<Notmuch>();
-let pool = ctx.data_unchecked::<PgPool>();
-label_unprocessed(&nm, &pool, false, limit, "tag:unprocessed").await?;
-Ok(true)
-}
 #[instrument(skip_all, fields(rid=request_id()))]
 async fn refresh<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
 let nm = ctx.data_unchecked::<Notmuch>();
@@ -697,10 +648,7 @@ impl MutationRoot {
 newsreader::refresh(pool, cacher).await?;
 // Process email labels
-label_unprocessed(&nm, &pool, false, Some(1000), "tag:unprocessed").await?;
-// Look for snoozed messages and mark unread
-wakeup(&nm, &pool).await?;
+label_unprocessed(&nm, &pool, false, Some(10), "tag:unprocessed").await?;
 #[cfg(feature = "tantivy")]
 {
@@ -722,33 +670,6 @@ impl SubscriptionRoot {
 pub type GraphqlSchema = Schema<QueryRoot, MutationRoot, SubscriptionRoot>;
-#[instrument(name = "wakeup", skip_all)]
-pub async fn wakeup(nm: &Notmuch, pool: &PgPool) -> Result<(), Error> {
-for row in sqlx::query!(
-r#"
-SELECT id, message_id
-FROM snooze
-WHERE wake < NOW();
-"#
-)
-.fetch_all(pool)
-.await?
-{
-let query: Query = row.message_id.parse()?;
-info!("need to wake {query}");
-let unread = true;
-newsreader::set_read_status(pool, &query, unread).await?;
-#[cfg(feature = "tantivy")]
-tantivy.reindex_thread(pool, &query).await?;
-nm::set_read_status(nm, &query, unread).await?;
-sqlx::query!("DELETE FROM snooze WHERE id = $1", row.id)
-.execute(pool)
-.await?;
-}
-Ok(())
-}
 #[instrument(skip_all, fields(query=query))]
 pub async fn compute_catchup_ids(
 nm: &Notmuch,

@@ -19,7 +19,6 @@ use std::{
 use async_trait::async_trait;
 use cacher::{Cacher, FilesystemCacher};
-use chrono::NaiveDateTime;
 use css_inline::{CSSInliner, InlineError, InlineOptions};
 pub use error::ServerError;
 use linkify::{LinkFinder, LinkKind};
@@ -31,6 +30,7 @@ use maplit::{hashmap, hashset};
 use regex::Regex;
 use reqwest::StatusCode;
 use scraper::{Html, Selector};
+use sqlx::types::time::PrimitiveDateTime;
 use thiserror::Error;
 use tracing::{debug, error, info, warn};
 use url::Url;
@@ -754,7 +754,6 @@ pub struct Query {
 pub is_notmuch: bool,
 pub is_newsreader: bool,
 pub is_tantivy: bool,
-pub is_snoozed: bool,
 pub corpus: Option<Corpus>,
 }
@@ -778,9 +777,6 @@ impl fmt::Display for Query {
 if self.is_newsreader {
 write!(f, "is:news ")?;
 }
-if self.is_snoozed {
-write!(f, "is:snoozed ")?;
-}
 match self.corpus {
 Some(c) => write!(f, "corpus:{c:?}")?,
 _ => (),
@@ -837,7 +833,6 @@ impl FromStr for Query {
 let mut is_notmuch = false;
 let mut is_newsreader = false;
 let mut is_tantivy = false;
-let mut is_snoozed = false;
 let mut corpus = None;
 for word in s.split_whitespace() {
 if word == "is:unread" {
@@ -877,8 +872,6 @@
 is_newsreader = true;
 } else if word == "is:newsreader" {
 is_newsreader = true;
-} else if word == "is:snoozed" {
-is_snoozed = true;
 } else {
 remainder.push(word.to_string());
 }
@@ -897,14 +890,13 @@
 is_notmuch,
 is_newsreader,
 is_tantivy,
-is_snoozed,
 corpus,
 })
 }
 }
 pub struct ThreadSummaryRecord {
 pub site: Option<String>,
-pub date: Option<NaiveDateTime>,
+pub date: Option<PrimitiveDateTime>,
 pub is_read: Option<bool>,
 pub title: Option<String>,
 pub uid: String,
@@ -922,7 +914,11 @@ async fn thread_summary_from_row(r: ThreadSummaryRecord) -> ThreadSummary {
 title = clean_title(&title).await.expect("failed to clean title");
 ThreadSummary {
 thread: format!("{NEWSREADER_THREAD_PREFIX}{}", r.uid),
-timestamp: r.date.expect("post missing date").and_utc().timestamp() as isize,
+timestamp: r
+.date
+.expect("post missing date")
+.assume_utc()
+.unix_timestamp() as isize,
 date_relative: format!("{:?}", r.date),
 //date_relative: "TODO date_relative".to_string(),
 matched: 0,

@@ -6,7 +6,7 @@ use letterbox_shared::compute_color;
 use maplit::hashmap;
 use scraper::Selector;
 use sqlx::postgres::PgPool;
-use tracing::{error, info, instrument, warn};
+use tracing::{error, info, instrument};
 use url::Url;
 use crate::{
@@ -86,10 +86,6 @@ pub async fn search(
 query: &Query,
 ) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
 info!("search({after:?} {before:?} {first:?} {last:?} {query:?}");
-if query.is_snoozed {
-warn!("TODO implement snooze for newsreader::search");
-return Ok(Vec::new());
-}
 if !is_newsreader_query(query) {
 return Ok(Vec::new());
 }
@@ -215,7 +211,11 @@ pub async fn thread(
 }
 let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
 let is_read = r.is_read.unwrap_or(false);
-let timestamp = r.date.expect("post missing date").and_utc().timestamp();
+let timestamp = r
+.date
+.expect("post missing date")
+.assume_utc()
+.unix_timestamp();
 Ok(Thread::News(NewsPost {
 thread_id,
 is_read,

@@ -64,10 +64,6 @@ pub async fn search(
 last: Option<i32>,
 query: &Query,
 ) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
-if query.is_snoozed {
-warn!("TODO implement snooze for nm::search");
-return Ok(Vec::new());
-}
 if !is_notmuch_query(query) {
 return Ok(Vec::new());
 }

@@ -11,8 +11,8 @@ version.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [dependencies]
-build-info = "0.0.42"
-letterbox-notmuch = { path = "../notmuch", version = "0.17", registry = "xinu" }
+build-info = "0.0.41"
+letterbox-notmuch = { path = "../notmuch", version = "0.17.45", registry = "xinu" }
 regex = "1.11.1"
 serde = { version = "1.0.219", features = ["derive"] }
 sqlx = "0.8.5"

@@ -9,7 +9,7 @@ repository.workspace = true
 version.workspace = true
 [build-dependencies]
-build-info-build = "0.0.42"
+build-info-build = "0.0.41"
 [dev-dependencies]
 #wasm-bindgen-test = "0.3.50"
@@ -24,16 +24,16 @@ serde = { version = "1.0.219", features = ["derive"] }
 itertools = "0.14.0"
 serde_json = { version = "1.0.140", features = ["unbounded_depth"] }
 chrono = "0.4.40"
-graphql_client = "0.15.0"
+graphql_client = "0.14.0"
 thiserror = "2.0.12"
 gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
 human_format = "1.1.0"
-build-info = "0.0.42"
+build-info = "0.0.41"
 wasm-bindgen = "=0.2.100"
 uuid = { version = "1.16.0", features = [
 "js",
 ] } # direct dep to set js feature, prevents Rng issues
-letterbox-shared = { path = "../shared/", version = "0.17", registry = "xinu" }
+letterbox-shared = { path = "../shared/", version = "0.17.45", registry = "xinu" }
 seed_hooks = { version = "0.4.1", registry = "xinu" }
 strum_macros = "0.27.1"
 gloo-console = "0.3.0"

@@ -51,7 +51,7 @@
 },
 {
 "args": [],
-"description": "Indicates that an Input Object is a OneOf Input Object (and thus requires exactly one of its field be provided)",
+"description": "Indicates that an Input Object is a OneOf Input Object (and thus requires\n exactly one of its field be provided)",
 "locations": [
 "INPUT_OBJECT"
 ],
@@ -107,14 +107,12 @@
 }
 ],
 "mutationType": {
-"name": "MutationRoot"
+"name": "Mutation"
 },
 "queryType": {
 "name": "QueryRoot"
 },
-"subscriptionType": {
-"name": "SubscriptionRoot"
-},
+"subscriptionType": null,
 "types": [
 {
 "description": null,
@@ -316,16 +314,6 @@
 "name": "Corpus",
 "possibleTypes": null
 },
-{
-"description": "Implement the DateTime<Utc> scalar\n\nThe input/output is a string in RFC3339 format.",
-"enumValues": null,
-"fields": null,
-"inputFields": null,
-"interfaces": null,
-"kind": "SCALAR",
-"name": "DateTime",
-"possibleTypes": null
-},
 {
 "description": null,
 "enumValues": [
@@ -981,51 +969,6 @@
 }
 }
 },
-{
-"args": [
-{
-"defaultValue": null,
-"description": null,
-"name": "query",
-"type": {
-"kind": "NON_NULL",
-"name": null,
-"ofType": {
-"kind": "SCALAR",
-"name": "String",
-"ofType": null
-}
-}
-},
-{
-"defaultValue": null,
-"description": null,
-"name": "wakeTime",
-"type": {
-"kind": "NON_NULL",
-"name": null,
-"ofType": {
-"kind": "SCALAR",
-"name": "DateTime",
-"ofType": null
-}
-}
-}
-],
-"deprecationReason": null,
-"description": null,
-"isDeprecated": false,
-"name": "snooze",
-"type": {
-"kind": "NON_NULL",
-"name": null,
-"ofType": {
-"kind": "SCALAR",
-"name": "Boolean",
-"ofType": null
-}
-}
-},
 {
 "args": [],
 "deprecationReason": null,
@@ -1046,7 +989,7 @@
 "inputFields": null,
 "interfaces": [],
 "kind": "OBJECT",
-"name": "MutationRoot",
+"name": "Mutation",
 "possibleTypes": null
 },
 {
@@ -1531,33 +1474,6 @@
 "name": "String",
 "possibleTypes": null
 },
-{
-"description": null,
-"enumValues": null,
-"fields": [
-{
-"args": [],
-"deprecationReason": null,
-"description": null,
-"isDeprecated": false,
-"name": "values",
-"type": {
-"kind": "NON_NULL",
-"name": null,
-"ofType": {
-"kind": "SCALAR",
-"name": "Int",
-"ofType": null
-}
-}
-}
-],
-"inputFields": null,
-"interfaces": [],
-"kind": "OBJECT",
-"name": "SubscriptionRoot",
-"possibleTypes": null
-},
 {
 "description": null,
 "enumValues": null,

@@ -1,4 +0,0 @@
-mutation SnoozeMutation($query: String!, $wakeTime: DateTime!) {
-snooze(query: $query, wakeTime: $wakeTime)
-}

@@ -1,4 +1,4 @@
 DEV_HOST=localhost
 DEV_PORT=9345
-graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql/ --output schema.json
+graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql --output schema.json
 git diff schema.json

@@ -1,9 +1,7 @@
-use chrono::Utc;
 use gloo_net::{http::Request, Error};
 use graphql_client::GraphQLQuery;
 use serde::{de::DeserializeOwned, Serialize};
-type DateTime = chrono::DateTime<Utc>;
 // The paths are relative to the directory where your `Cargo.toml` is located.
 // Both json and the GraphQL schema language are supported as sources for the schema
 #[derive(GraphQLQuery)]
@@ -54,14 +52,6 @@ pub struct AddTagMutation;
 )]
 pub struct RemoveTagMutation;
-#[derive(GraphQLQuery)]
-#[graphql(
-schema_path = "graphql/schema.json",
-query_path = "graphql/snooze.graphql",
-response_derives = "Debug"
-)]
-pub struct SnoozeMutation;
 #[derive(GraphQLQuery)]
 #[graphql(
 schema_path = "graphql/schema.json",

@@ -1,6 +1,5 @@
 use std::collections::HashSet;
-use chrono::{DateTime, Utc};
 use graphql_client::GraphQLQuery;
 use letterbox_shared::WebsocketMessage;
 use log::{debug, error, info, warn};
@@ -260,29 +259,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
 Msg::GoToSearchResults
 });
 }
-Msg::Snooze(query, wake_time) => {
-let is_catchup = model.catchup.is_some();
-orders.skip().perform_cmd(async move {
-let res: Result<
-graphql_client::Response<graphql::snooze_mutation::ResponseData>,
-gloo_net::Error,
-> = send_graphql(graphql::SnoozeMutation::build_query(
-graphql::snooze_mutation::Variables {
-query: query.clone(),
-wake_time,
-},
-))
-.await;
-if let Err(e) = res {
-error!("Failed to snooze {query} until {wake_time}: {e}");
-}
-if is_catchup {
-Msg::CatchupMarkAsRead
-} else {
-Msg::GoToSearchResults
-}
-});
-}
 Msg::FrontPageRequest {
 query,
@@ -291,7 +267,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
 first,
 last,
 } => {
-model.refreshing_state = RefreshingState::Loading;
 let (after, before, first, last) = match (after.as_ref(), before.as_ref(), first, last)
 {
 // If no pagination set, set reasonable defaults
@@ -317,32 +292,25 @@
 });
 }
 Msg::FrontPageResult(Err(e)) => {
-let msg = format!("error FrontPageResult: {e:?}");
-error!("{msg}");
-model.refreshing_state = RefreshingState::Error(msg);
+error!("error FrontPageResult: {e:?}");
 }
 Msg::FrontPageResult(Ok(graphql_client::Response {
 data: None,
 errors: None,
 ..
 })) => {
-let msg = format!("FrontPageResult no data or errors, should not happen");
-error!("{msg}");
-model.refreshing_state = RefreshingState::Error(msg);
+error!("FrontPageResult no data or errors, should not happen");
 }
 Msg::FrontPageResult(Ok(graphql_client::Response {
 data: None,
 errors: Some(e),
 ..
 })) => {
-let msg = format!("FrontPageResult error: {e:?}");
-error!("{msg}");
-model.refreshing_state = RefreshingState::Error(msg);
+error!("FrontPageResult error: {e:?}");
 }
 Msg::FrontPageResult(Ok(graphql_client::Response {
 data: Some(data), ..
 })) => {
-model.refreshing_state = RefreshingState::None;
 model.tags = Some(
 data.tags
 .into_iter()
@@ -382,7 +350,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
 }
 Msg::ShowThreadRequest { thread_id } => {
-model.refreshing_state = RefreshingState::Loading;
 orders.skip().perform_cmd(async move {
 Msg::ShowThreadResult(
 send_graphql(graphql::ShowThreadQuery::build_query(
@@ -395,7 +362,6 @@
 Msg::ShowThreadResult(Ok(graphql_client::Response {
 data: Some(data), ..
 })) => {
-model.refreshing_state = RefreshingState::None;
 model.tags = Some(
 data.tags
 .into_iter()
@@ -435,12 +401,9 @@
 orders.send_msg(Msg::WindowScrolled);
 }
 Msg::ShowThreadResult(bad) => {
-let msg = format!("show_thread_query error: {bad:#?}");
-error!("{msg}");
-model.refreshing_state = RefreshingState::Error(msg);
+error!("show_thread_query error: {bad:#?}");
 }
 Msg::CatchupRequest { query } => {
-model.refreshing_state = RefreshingState::Loading;
 orders.perform_cmd(async move {
 Msg::CatchupResult(
 send_graphql::<_, graphql::catchup_query::ResponseData>(
@@ -455,7 +418,6 @@
 Msg::CatchupResult(Ok(graphql_client::Response {
 data: Some(data), ..
 })) => {
-model.refreshing_state = RefreshingState::None;
 let items = data.catchup;
 if items.is_empty() {
 orders.send_msg(Msg::GoToSearchResults);
@@ -471,9 +433,7 @@
 }
 }
 Msg::CatchupResult(bad) => {
-let msg = format!("catchup_query error: {bad:#?}");
-error!("{msg}");
-model.refreshing_state = RefreshingState::Error(msg);
+error!("catchup_query error: {bad:#?}");
 }
 Msg::SelectionSetNone => {
 if let Context::SearchResult {
@@ -853,7 +813,6 @@ pub enum Msg {
 SetUnread(String, bool),
 AddTag(String, String),
 RemoveTag(String, String),
-Snooze(String, DateTime<Utc>),
 FrontPageRequest {
 query: String,

@@ -78,16 +78,13 @@ mod tw_classes {
 }
 pub fn view(model: &Model) -> Node<Msg> {
-let is_loading = match model.refreshing_state {
-RefreshingState::Loading => true,
-_ => false,
-};
 match &model.context {
 Context::None => normal_view(
 div![h1!["Loading"]],
 &model.versions,
 &model.query,
 &model.refreshing_state,
+model.read_completion_ratio,
 &model.tags,
 ),
 Context::ThreadResult {
@@ -96,23 +93,17 @@ pub fn view(model: &Model) -> Node<Msg> {
 } => {
 if let Some(catchup) = &model.catchup {
 catchup_view(
-thread(thread_data, open_messages, &model.content_el, true, 0.),
+thread(thread_data, open_messages, &model.content_el, true),
 &catchup.items,
-is_loading,
 model.read_completion_ratio,
 )
 } else {
 normal_view(
-thread(
-thread_data,
-open_messages,
-&model.content_el,
-false,
-model.read_completion_ratio,
-),
+thread(thread_data, open_messages, &model.content_el, false),
 &model.versions,
 &model.query,
 &model.refreshing_state,
+model.read_completion_ratio,
 &model.tags,
 )
 }
@@ -123,17 +114,17 @@ pub fn view(model: &Model) -> Node<Msg> {
 } => {
 if let Some(catchup) = &model.catchup {
 catchup_view(
-news_post(post, &model.content_el, true, 0.),
+news_post(post, &model.content_el, true),
 &catchup.items,
-is_loading,
 model.read_completion_ratio,
 )
 } else {
 normal_view(
-news_post(post, &model.content_el, false, model.read_completion_ratio),
+news_post(post, &model.content_el, false),
 &model.versions,
 &model.query,
 &model.refreshing_state,
+model.read_completion_ratio,
 &model.tags,
 )
 }
@@ -149,6 +140,7 @@ pub fn view(model: &Model) -> Node<Msg> {
 &model.versions,
 &model.query,
 &model.refreshing_state,
+model.read_completion_ratio,
 &model.tags,
 ),
 }
@@ -159,6 +151,7 @@ fn normal_view(
 versions: &Version,
 query: &str,
 refreshing_state: &RefreshingState,
+read_completion_ratio: f64,
 tags: &Option<Vec<Tag>>,
 ) -> Node<Msg> {
 div![
@@ -185,13 +178,13 @@
 content,
 view_header(query, refreshing_state, false),
 ],
+reading_progress(read_completion_ratio),
 ]
 }
 fn catchup_view(
 content: Node<Msg>,
 items: &[CatchupItem],
-is_loading: bool,
 read_completion_ratio: f64,
 ) -> Node<Msg> {
 div![
@@ -207,35 +200,14 @@
 "border-gray-500",
 "bg-black/50",
 ],
-div![
-C!["absolute", "top-0", "left-4", "text-green-200", "p-4"],
-IF!(is_loading=>span![i![C!["animate-spin", "fas", "fa-spinner"]]])
-],
-h1![
-C!["text-center"],
-format!("{} left ", items.iter().filter(|i| !i.seen).count(),)
-],
 div![
 C!["absolute", "top-0", "right-4", "text-gray-500", "p-4"],
 span![i![C!["fas", "fa-x"]]],
 ev(Ev::Click, move |_| Msg::CatchupExit)
 ],
-div![
-C![
-"absolute",
-"left-0",
-"right-0",
-"bottom-0",
-"w-full",
-"h-1",
-"bg-gray-200"
-],
-div![
-C!["h-1", "bg-green-500"],
-style! {
-St::Width => format!("{}%", read_completion_ratio*100.)
-}
-]
+h1![
+C!["text-center"],
+format!("{} left ", items.iter().filter(|i| !i.seen).count(),)
 ]
 ],
 div![C!["mt-12", "mb-20"], content],
@@ -275,6 +247,7 @@ fn catchup_view(
 ev(Ev::Click, |_| Msg::CatchupMarkAsRead)
 ]
 ],
+reading_progress(read_completion_ratio)
 ]
 }
@@ -351,8 +324,8 @@ fn search_results(
 attrs! {
 At::Href => urls::thread(&tid)
 },
-div![C!["line-clamp-2"], title_break, &r.subject],
-span![C!["line-clamp-2", "text-xs"], pretty_authors(&r.authors)],
+div![title_break, &r.subject],
+span![C!["text-xs"], pretty_authors(&r.authors)],
 div![
 C!["flex", "flex-wrap", "justify-between"],
 span![tags_chiclet(&tags)],
@@ -754,19 +727,15 @@ fn render_open_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<
 C!["flex", "p-4", "bg-neutral-800"],
 div![avatar],
 div![
-C!["px-4", "flex-1"],
-div![
-C!["flex"],
-div![
-C!["font-semibold", "text-sm", "flex-1"],
-from_detail.as_ref().map(|addr| attrs! {
-At::Title => addr
-}),
-&from,
-" ",
-from_detail.as_ref().map(|text| copy_text_widget(&text))
-],
-snooze_buttons(msg.timestamp, &id),
+C!["px-4", "mr-auto"],
+span![
+C!["font-semibold", "text-sm"],
+from_detail.as_ref().map(|addr| attrs! {
+At::Title => addr
+}),
+&from,
+" ",
+from_detail.as_ref().map(|text| copy_text_widget(&text))
 ],
 IF!(!msg.to.is_empty() =>div![
 C!["text-xs"],
@@ -1175,7 +1144,6 @@ fn thread(
 open_messages: &HashSet<String>,
 content_el: &ElRef<HtmlElement>,
 catchup_mode: bool,
-read_completion_ratio: f64,
 ) -> Node<Msg> {
 // TODO(wathiede): show per-message subject if it changes significantly from top-level subject
 let subject = if thread.subject.is_empty() {
@@ -1260,8 +1228,7 @@
 el_ref(content_el),
 messages,
 IF!(!catchup_mode => click_to_top())
-],
-reading_progress(read_completion_ratio)
+]
 ]
 }
@@ -1404,7 +1371,7 @@ pub fn view_tags(tags: &Option<Vec<Tag>>) -> Node<Msg> {
 },
 ],
 a![
-C![indent_cls, "grow", "truncate"],
+C!["grow", "truncate"],
 attrs! {
 At::Href => href
 },
@@ -1505,7 +1472,6 @@ fn news_post(
 post: &ShowThreadQueryThreadOnNewsPost,
 content_el: &ElRef<HtmlElement>,
 catchup_mode: bool,
-read_completion_ratio: f64,
 ) -> Node<Msg> {
 let subject = &post.title;
 set_title(subject);
@@ -1593,7 +1559,6 @@
 ]
 ],
 IF!(!catchup_mode => click_to_top()),
-reading_progress(read_completion_ratio)
 ]
 }
 fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg> {
@@ -1629,13 +1594,9 @@ fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg>
 C!["flex", "p-4", "bg-neutral-800"],
 div![favicon],
 div![
-C!["px-4", "mr-auto", "flex-1"],
+C!["px-4", "mr-auto"],
 div![
-div![
-C!["flex"],
-div![C!["font-semibold", "text-sm", "flex-1"], from],
-snooze_buttons(Some(post.timestamp), &id),
-],
+div![C!["font-semibold", "text-sm"], from],
 div![
 C!["flex", "gap-2", "pt-2", "text-sm"],
 a![
@@ -1730,47 +1691,3 @@ fn click_to_top() -> Node<Msg> {
 ev(Ev::Click, |_| Msg::ScrollToTop)
 ]
 }
-fn snooze_buttons(timestamp: Option<i64>, id: &str) -> Node<Msg> {
-div![
-span![C!["px-2"], ""],
-button![
-tw_classes::button(),
-C!["rounded-r-none"],
-"1d",
-ev(Ev::Click, {
-let id = id.to_string();
-move |e| {
-e.stop_propagation();
-Msg::Snooze(id, Utc::now() + chrono::Days::new(1))
-}
-})
-],
-button![
-tw_classes::button(),
-C!["rounded-none"],
-"7d",
-ev(Ev::Click, {
-let id = id.to_string();
-move |e| {
-e.stop_propagation();
-Msg::Snooze(id, Utc::now() + chrono::Days::new(7))
-}
-})
-],
-timestamp.map(
-|ts| chrono::DateTime::from_timestamp(ts, 0).map(|ts| button![
-tw_classes::button(),
-C!["rounded-l-none"],
-"+6m",
-ev(Ev::Click, {
-let id = id.to_string();
-move |e| {
-e.stop_propagation();
-Msg::Snooze(id, ts + chrono::Days::new(180))
-}
-})
-])
-),
-]
-}