Compare commits
6edad4e8f2...letterbox-
62 Commits
| SHA1 |
|---|
| e1681edda3 |
| 25ee8522ad |
| df356e8711 |
| 2e43700cd7 |
| b3769d99bf |
| 2aa85a03f8 |
| c0982e82c6 |
| 8971fe3b6b |
| 243e35ec15 |
| 4cf1f882b8 |
| a8129e4685 |
| 50a4bfcac7 |
| 90ac9a1e43 |
| 52b19365d7 |
| 399865f5f7 |
| 2eb4784e83 |
| be2085b397 |
| 2837ea835a |
| a84e673d88 |
| 2bc840a4e2 |
| dd2062f719 |
| 616623e477 |
| 593a20f621 |
| 584ccba5bd |
| e7a01e9d70 |
| 727599c12c |
| 17ad5b3b0b |
| 285b2f1591 |
| 1537333e76 |
| 285ff1d098 |
| 1563bf05a3 |
| 458aab3167 |
| 492e420337 |
| 330f9b1763 |
| ad904ac1c0 |
| 20f125bda5 |
| cf99e75ab8 |
| 54fc1e7962 |
| b187edc23b |
| fdafba3eeb |
| c5fe9f67d2 |
| ff970acf79 |
| 2f9bc17873 |
| 7e82f4ce97 |
| 5bb4f010d3 |
| 0af630acbe |
| d3d350e159 |
| 4013e4a7bf |
| b63171ea98 |
| 1c6ef02d11 |
| 32e5837dbf |
| 38234d4d18 |
| f609a3c122 |
| 440a630414 |
| ebda258750 |
| f766b3d529 |
| 96d927d416 |
| 60543b7e5d |
| 97a7bb6083 |
| c493857188 |
| 21f344b01c |
| 78f6d87c03 |
Cargo.lock (generated): 1377 changed lines. File diff suppressed because it is too large.
@@ -8,7 +8,7 @@ authors = ["Bill Thiede <git@xinu.tv>"]
 edition = "2021"
 license = "UNLICENSED"
 publish = ["xinu"]
-version = "0.17.44"
+version = "0.17.49"
 repository = "https://git.z.xinu.tv/wathiede/letterbox"

 [profile.dev]
server/.sqlx/query-77f79f981a9736d18ffd4b87d3aec34d6a048162154a3aba833370c58a860795.json (generated, new file, 14 lines)
@@ -0,0 +1,14 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "DELETE FROM snooze WHERE id = $1",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int4"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "77f79f981a9736d18ffd4b87d3aec34d6a048162154a3aba833370c58a860795"
+}
server/.sqlx/query-c8383663124a5cc5912b54553f18f7064d33087ebfdf3c0c1c43cbe6d3577084.json (generated, new file, 26 lines)
@@ -0,0 +1,26 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\nSELECT id, message_id\nFROM snooze\nWHERE wake < NOW();\n    ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Int4"
+      },
+      {
+        "ordinal": 1,
+        "name": "message_id",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": []
+    },
+    "nullable": [
+      false,
+      false
+    ]
+  },
+  "hash": "c8383663124a5cc5912b54553f18f7064d33087ebfdf3c0c1c43cbe6d3577084"
+}
@@ -1,6 +1,6 @@
 {
   "db_name": "PostgreSQL",
-  "query": "SELECT\n    p.id,\n    link,\n    clean_summary\nFROM\n    post AS p\nINNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts\nWHERE\n    search_summary IS NULL\n    -- TODO remove AND link ~ '^<'\nORDER BY\n    ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)\nLIMIT 100;\n",
+  "query": "SELECT\n    p.id,\n    link,\n    clean_summary\nFROM\n    post AS p\nINNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts\nWHERE\n    search_summary IS NULL\n    -- TODO remove AND link ~ '^<'\nORDER BY\n    ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)\nLIMIT 1000;\n",
   "describe": {
     "columns": [
       {
@@ -28,5 +28,5 @@
       true
     ]
   },
-  "hash": "3d271b404f06497a5dcde68cf6bf07291d70fa56058ea736ac24e91d33050c04"
+  "hash": "cf369e3d5547f400cb54004dd03783ef6998a000aec91c50a79405dcf1c53b17"
 }
server/.sqlx/query-effd0d0d91e6ad84546f7177f1fd39d4fad736b471eb5e55fd5ac74f7adff664.json (generated, new file, 15 lines)
@@ -0,0 +1,15 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n        INSERT INTO snooze (message_id, wake)\n        VALUES ($1, $2)\n        ON CONFLICT (message_id) DO UPDATE\n        SET wake = $2\n        ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Text",
+        "Timestamptz"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "effd0d0d91e6ad84546f7177f1fd39d4fad736b471eb5e55fd5ac74f7adff664"
+}
@@ -13,27 +13,27 @@ version.workspace = true

 [dependencies]
 chrono-tz = "0.10"
-html2text = "0.15"
+html2text = "0.16"
 ammonia = "4.1.0"
 anyhow = "1.0.98"
 askama = { version = "0.14.0", features = ["derive"] }
-async-graphql = { version = "7", features = ["log"] }
+async-graphql = { version = "7", features = ["log", "chrono"] }
 async-graphql-axum = "7.0.16"
 async-trait = "0.1.88"
 axum = { version = "0.8.3", features = ["ws"] }
 axum-macros = "0.5.0"
-build-info = "0.0.41"
+build-info = "0.0.42"
 cacher = { version = "0.2.0", registry = "xinu" }
 chrono = "0.4.40"
 clap = { version = "4.5.37", features = ["derive"] }
-css-inline = "0.17.0"
+css-inline = "0.18.0"
 flate2 = "1.1.2"
 futures = "0.3.31"
 headers = "0.4.0"
 html-escape = "0.2.13"
 ical = "0.11"
-letterbox-notmuch = { path = "../notmuch", version = "0.17.44", registry = "xinu" }
-letterbox-shared = { path = "../shared", version = "0.17.44", registry = "xinu" }
+letterbox-notmuch = { path = "../notmuch", version = "0.17.49", registry = "xinu" }
+letterbox-shared = { path = "../shared", version = "0.17.49", registry = "xinu" }
 linkify = "0.10.0"
 lol_html = "2.3.0"
 mailparse = "0.16.1"
@@ -45,7 +45,7 @@ reqwest = { version = "0.12.15", features = ["blocking"] }
 scraper = "0.24.0"
 serde = { version = "1.0.219", features = ["derive"] }
 serde_json = "1.0.140"
-sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "time"] }
+sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "chrono"] }
 tantivy = { version = "0.25.0", optional = true }
 thiserror = "2.0.12"
 tokio = "1.44.2"
@@ -60,7 +60,7 @@ zip = "5.0.0"


 [build-dependencies]
-build-info-build = "0.0.41"
+build-info-build = "0.0.42"

 [features]
 #default = [ "tantivy" ]
server/migrations/20250630023836_snooze.down.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
+-- Add down migration script here
+DROP TABLE IF EXISTS snooze;

server/migrations/20250630023836_snooze.up.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
+-- Add up migration script here
+CREATE TABLE IF NOT EXISTS snooze (
+    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
+    message_id text NOT NULL UNIQUE,
+    wake timestamptz NOT NULL
+);
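The UNIQUE constraint on message_id is what the server-side upsert added later in this compare relies on: snoozing an already-snoozed message just moves its wake time, and rows whose wake has passed are deleted once they are woken. The sketch below is not part of the diff; it only exercises the new table with plain sqlx calls (the real code uses the sqlx::query! macros shown further down), and it assumes a reachable Postgres pool plus the sqlx "chrono" feature enabled elsewhere in this compare.

```rust
// Sketch only: mirrors the snooze upsert and wakeup scan used by the server,
// against the table created by 20250630023836_snooze.up.sql.
use sqlx::postgres::PgPool;

async fn snooze_and_wake(
    pool: &PgPool,
    message_id: &str,
    wake: chrono::DateTime<chrono::Utc>,
) -> sqlx::Result<()> {
    // UNIQUE (message_id) makes this ON CONFLICT upsert well defined:
    // a second snooze of the same message only updates its wake time.
    sqlx::query(
        "INSERT INTO snooze (message_id, wake) VALUES ($1, $2)
         ON CONFLICT (message_id) DO UPDATE SET wake = $2",
    )
    .bind(message_id)
    .bind(wake)
    .execute(pool)
    .await?;

    // Rows whose wake time has passed are due; wake them, then delete the row.
    let due: Vec<(i32, String)> =
        sqlx::query_as("SELECT id, message_id FROM snooze WHERE wake < NOW()")
            .fetch_all(pool)
            .await?;
    for (id, _message_id) in due {
        sqlx::query("DELETE FROM snooze WHERE id = $1")
            .bind(id)
            .execute(pool)
            .await?;
    }
    Ok(())
}
```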
@@ -10,4 +10,4 @@ WHERE
     -- TODO remove AND link ~ '^<'
 ORDER BY
     ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)
-LIMIT 100;
+LIMIT 1000;
@@ -17,9 +17,11 @@ use crate::{
 const APPLICATION_GZIP: &'static str = "application/gzip";

 const APPLICATION_ZIP: &'static str = "application/zip";
+const APPLICATION_TLSRPT_GZIP: &'static str = "application/tlsrpt+gzip";
 const IMAGE_JPEG: &'static str = "image/jpeg";
 const IMAGE_PJPEG: &'static str = "image/pjpeg";
 const IMAGE_PNG: &'static str = "image/png";
+const MESSAGE_DELIVERY_STATUS: &'static str = "message/delivery-status";
 const MESSAGE_RFC822: &'static str = "message/rfc822";
 const MULTIPART_ALTERNATIVE: &'static str = "multipart/alternative";
 const MULTIPART_MIXED: &'static str = "multipart/mixed";
@@ -641,115 +643,186 @@ pub fn extract_gzip(m: &ParsedMail) -> Result<(Body, Option<String>), ServerError>

This hunk rewrites extract_report() and adds a small extract_delivery_status() helper. The old extract_report() ignored its part_addr argument, scanned m.subparts for at most one TEXT_HTML part and one "application/tlsrpt+gzip" part, rendered the TLS report into a single tlsrpt_summary_html string (or an error <div> / empty string on failure), and returned that string joined to the HTML part with <hr>. The replacement, reconstructed below from the capture (long builder chains re-wrapped onto single lines, behavior unchanged), walks every subpart, tracks part_addr, collects one Body per subpart, and joins the rendered parts with <hr>:

pub fn extract_report(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    let mut parts = Vec::new();
    for (idx, sp) in m.subparts.iter().enumerate() {
        part_addr.push(idx.to_string());
        match sp.ctype.mimetype.as_str() {
            APPLICATION_TLSRPT_GZIP => {
                let gz_bytes = sp.get_body_raw()?;
                let mut decoder = flate2::read::GzDecoder::new(&gz_bytes[..]);
                let mut buffer = Vec::new();
                if decoder.read_to_end(&mut buffer).is_ok() {
                    if let Ok(json_str) = String::from_utf8(buffer) {
                        match serde_json::from_str::<TlsRpt>(&json_str) {
                            Ok(tlsrpt) => {
                                let formatted_tlsrpt = FormattedTlsRpt {
                                    organization_name: tlsrpt.organization_name,
                                    date_range: FormattedTlsRptDateRange {
                                        start_datetime: tlsrpt.date_range.start_datetime,
                                        end_datetime: tlsrpt.date_range.end_datetime,
                                    },
                                    contact_info: tlsrpt.contact_info.unwrap_or_else(|| "".to_string()),
                                    report_id: tlsrpt.report_id,
                                    policies: tlsrpt.policies.into_iter().map(|policy| FormattedTlsRptPolicy {
                                        policy: FormattedTlsRptPolicyDetails {
                                            policy_type: policy.policy.policy_type,
                                            policy_string: policy.policy.policy_string,
                                            policy_domain: policy.policy.policy_domain,
                                            mx_host: policy.policy.mx_host.unwrap_or_else(|| Vec::new()).into_iter().map(|mx| match mx {
                                                MxHost::String(s) => FormattedTlsRptMxHost {
                                                    hostname: s,
                                                    failure_count: 0,
                                                    result_type: "".to_string(),
                                                },
                                                MxHost::Object(o) => FormattedTlsRptMxHost {
                                                    hostname: o.hostname,
                                                    failure_count: o.failure_count,
                                                    result_type: o.result_type,
                                                },
                                            }).collect(),
                                        },
                                        summary: policy.summary,
                                        failure_details: policy.failure_details.unwrap_or_else(|| Vec::new()).into_iter().map(|detail| FormattedTlsRptFailureDetails {
                                            result_type: detail.result_type,
                                            sending_mta_ip: detail.sending_mta_ip.unwrap_or_else(|| "".to_string()),
                                            receiving_ip: detail.receiving_ip.unwrap_or_else(|| "".to_string()),
                                            receiving_mx_hostname: detail.receiving_mx_hostname.unwrap_or_else(|| "".to_string()),
                                            failed_session_count: detail.failed_session_count,
                                            additional_info: detail.additional_info.unwrap_or_else(|| "".to_string()),
                                            failure_reason_code: detail.failure_reason_code.unwrap_or_else(|| "".to_string()),
                                        }).collect(),
                                    }).collect(),
                                };
                                let template = TlsReportTemplate {
                                    report: &formatted_tlsrpt,
                                };
                                let html = template.render().unwrap_or_else(|e| format!("<div class=\"tlsrpt-error\">Failed to render TLS report template: {}</div>", e));
                                parts.push(Body::html(html));
                            }
                            Err(e) => {
                                let html = format!(
                                    "<div class=\"tlsrpt-error\">Failed to parse TLS report JSON: {}</div>",
                                    e
                                );
                                parts.push(Body::html(html));
                            }
                        }
                    } else {
                        let html = format!("<div class=\"tlsrpt-error\">Failed to convert decompressed data to UTF-8.</div>");
                        parts.push(Body::html(html));
                    }
                } else {
                    let html =
                        format!("<div class=\"tlsrpt-error\">Failed to decompress data.</div>");
                    parts.push(Body::html(html));
                }
            }
            MESSAGE_RFC822 => {
                parts.push(extract_rfc822(&sp, part_addr)?);
            }
            TEXT_HTML => {
                let body = sp.get_body()?;
                parts.push(Body::html(body));
            }
            MESSAGE_DELIVERY_STATUS => {
                let body = extract_delivery_status(sp)?;
                parts.push(body);
            }
            TEXT_PLAIN => {
                let body = sp.get_body()?;
                parts.push(Body::text(body));
            }
            _ => {
                // For any other content type, try to extract the body using the general extract_body function
                match extract_body(sp, part_addr) {
                    Ok(body) => parts.push(body),
                    Err(_) => {
                        // If extraction fails, create an unhandled content type body
                        let msg = format!(
                            "Unhandled report subpart content type: {}\n{}",
                            sp.ctype.mimetype,
                            sp.get_body()
                                .unwrap_or_else(|_| "Failed to get body".to_string())
                        );
                        parts.push(Body::UnhandledContentType(UnhandledContentType {
                            text: msg,
                            content_tree: render_content_type_tree(sp),
                        }));
                    }
                }
            }
        }
        part_addr.pop();
    }

    if parts.is_empty() {
        return Ok(Body::html(
            "<div class=\"report-error\">No report content found</div>".to_string(),
        ));
    }

    // Add <hr> tags between subparts for better visual separation
    let html = parts
        .iter()
        .map(|p| match p {
            Body::PlainText(PlainText { text, .. }) => {
                format!(
                    r#"<p class="view-part-text-plain font-mono whitespace-pre-line">{}</p>"#,
                    linkify_html(&html_escape::encode_text(text).trim_matches('\n'))
                )
            }
            Body::Html(Html { html, .. }) => html.clone(),
            Body::UnhandledContentType(UnhandledContentType { text, .. }) => {
                format!(
                    r#"<p class="view-part-unhandled">{}</p>"#,
                    linkify_html(&html_escape::encode_text(text).trim_matches('\n'))
                )
            }
        })
        .collect::<Vec<_>>()
        .join("<hr>\n");

    Ok(Body::html(html))
}

pub fn extract_delivery_status(m: &ParsedMail) -> Result<Body, ServerError> {
    Ok(Body::text(m.get_body()?))
}
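The TlsRpt, MxHost, and related types that extract_report() deserializes are not part of this compare. For orientation only, here is one plausible shape, inferred from the field accesses above and from the SMTP TLS Reporting JSON format (RFC 8460); the project's real definitions may differ, so treat every name and type below as an assumption.

```rust
// Assumed sketch: NOT taken from the repository. Field names follow the
// kebab-case keys of the TLSRPT JSON format (RFC 8460); optionality mirrors
// the unwrap_or_else() calls seen in extract_report() above.
use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
struct TlsRpt {
    organization_name: String,
    date_range: TlsRptDateRange,
    contact_info: Option<String>,
    report_id: String,
    policies: Vec<TlsRptPolicy>,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
struct TlsRptDateRange {
    start_datetime: String,
    end_datetime: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
struct TlsRptPolicy {
    policy: TlsRptPolicyDetails,
    // The summary's concrete type is not visible in this compare.
    summary: serde_json::Value,
    failure_details: Option<Vec<TlsRptFailureDetails>>,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
struct TlsRptPolicyDetails {
    policy_type: String,
    policy_string: Vec<String>,
    policy_domain: String,
    mx_host: Option<Vec<MxHost>>,
}

// mx-host entries appear either as bare hostnames or as objects, which is why
// extract_report() matches on MxHost::String vs MxHost::Object.
#[derive(Deserialize)]
#[serde(untagged)]
enum MxHost {
    String(String),
    Object(MxHostObject),
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
struct MxHostObject {
    hostname: String,
    failure_count: u64,
    result_type: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
struct TlsRptFailureDetails {
    result_type: String,
    sending_mta_ip: Option<String>,
    receiving_ip: Option<String>,
    receiving_mx_hostname: Option<String>,
    failed_session_count: u64,
    additional_info: Option<String>,
    failure_reason_code: Option<String>,
}
```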
@@ -7,6 +7,7 @@ use async_graphql::{
     Union,
 };
 use cacher::FilesystemCacher;
+use chrono::{DateTime, Utc};
 use futures::stream;
 use letterbox_notmuch::Notmuch;
 use serde::{Deserialize, Serialize};
@@ -628,6 +629,42 @@ impl MutationRoot {
         nm.tag_remove(&tag, &query)?;
         Ok(true)
     }
+    #[instrument(skip_all, fields(query=query, wake_time=wake_time.to_string(), rid=request_id()))]
+    async fn snooze<'ctx>(
+        &self,
+        ctx: &Context<'ctx>,
+        query: String,
+        wake_time: DateTime<Utc>,
+    ) -> Result<bool, Error> {
+        info!("TODO snooze {query} until {wake_time})");
+        let pool = ctx.data_unchecked::<PgPool>();
+        sqlx::query!(
+            r#"
+        INSERT INTO snooze (message_id, wake)
+        VALUES ($1, $2)
+        ON CONFLICT (message_id) DO UPDATE
+        SET wake = $2
+        "#,
+            query,
+            wake_time
+        )
+        .execute(pool)
+        .await?;
+
+        let nm = ctx.data_unchecked::<Notmuch>();
+        let pool = ctx.data_unchecked::<PgPool>();
+        #[cfg(feature = "tantivy")]
+        let tantivy = ctx.data_unchecked::<TantivyConnection>();
+
+        let unread = false;
+        let query: Query = query.parse()?;
+        newsreader::set_read_status(pool, &query, unread).await?;
+        #[cfg(feature = "tantivy")]
+        tantivy.reindex_thread(pool, &query).await?;
+        nm::set_read_status(nm, &query, unread).await?;
+
+        Ok(true)
+    }
     /// Drop and recreate tantivy index. Warning this is slow
     #[cfg(feature = "tantivy")]
     async fn drop_and_load_index<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
@@ -639,6 +676,18 @@ impl MutationRoot {

         Ok(true)
     }
+    #[instrument(skip_all, fields(rid=request_id()))]
+    async fn label_unprocessed<'ctx>(
+        &self,
+        ctx: &Context<'ctx>,
+        limit: Option<usize>,
+    ) -> Result<bool, Error> {
+        let nm = ctx.data_unchecked::<Notmuch>();
+        let pool = ctx.data_unchecked::<PgPool>();
+        label_unprocessed(&nm, &pool, false, limit, "tag:unprocessed").await?;
+        Ok(true)
+    }
+
     #[instrument(skip_all, fields(rid=request_id()))]
     async fn refresh<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
         let nm = ctx.data_unchecked::<Notmuch>();
@@ -648,7 +697,10 @@ impl MutationRoot {
         newsreader::refresh(pool, cacher).await?;

         // Process email labels
-        label_unprocessed(&nm, &pool, false, Some(10), "tag:unprocessed").await?;
+        label_unprocessed(&nm, &pool, false, Some(1000), "tag:unprocessed").await?;
+
+        // Look for snoozed messages and mark unread
+        wakeup(&nm, &pool).await?;

         #[cfg(feature = "tantivy")]
         {
@@ -670,6 +722,33 @@ impl SubscriptionRoot {

 pub type GraphqlSchema = Schema<QueryRoot, MutationRoot, SubscriptionRoot>;

+#[instrument(name = "wakeup", skip_all)]
+pub async fn wakeup(nm: &Notmuch, pool: &PgPool) -> Result<(), Error> {
+    for row in sqlx::query!(
+        r#"
+SELECT id, message_id
+FROM snooze
+WHERE wake < NOW();
+    "#
+    )
+    .fetch_all(pool)
+    .await?
+    {
+        let query: Query = row.message_id.parse()?;
+        info!("need to wake {query}");
+        let unread = true;
+        newsreader::set_read_status(pool, &query, unread).await?;
+        #[cfg(feature = "tantivy")]
+        tantivy.reindex_thread(pool, &query).await?;
+        nm::set_read_status(nm, &query, unread).await?;
+
+        sqlx::query!("DELETE FROM snooze WHERE id = $1", row.id)
+            .execute(pool)
+            .await?;
+    }
+    Ok(())
+}
+
 #[instrument(skip_all, fields(query=query))]
 pub async fn compute_catchup_ids(
     nm: &Notmuch,
@@ -19,6 +19,7 @@ use std::{

 use async_trait::async_trait;
 use cacher::{Cacher, FilesystemCacher};
+use chrono::NaiveDateTime;
 use css_inline::{CSSInliner, InlineError, InlineOptions};
 pub use error::ServerError;
 use linkify::{LinkFinder, LinkKind};
@@ -30,7 +31,6 @@ use maplit::{hashmap, hashset};
 use regex::Regex;
 use reqwest::StatusCode;
 use scraper::{Html, Selector};
-use sqlx::types::time::PrimitiveDateTime;
 use thiserror::Error;
 use tracing::{debug, error, info, warn};
 use url::Url;
@@ -754,6 +754,7 @@ pub struct Query {
     pub is_notmuch: bool,
     pub is_newsreader: bool,
     pub is_tantivy: bool,
+    pub is_snoozed: bool,
     pub corpus: Option<Corpus>,
 }

@@ -777,6 +778,9 @@ impl fmt::Display for Query {
         if self.is_newsreader {
             write!(f, "is:news ")?;
         }
+        if self.is_snoozed {
+            write!(f, "is:snoozed ")?;
+        }
         match self.corpus {
             Some(c) => write!(f, "corpus:{c:?}")?,
             _ => (),
@@ -833,6 +837,7 @@ impl FromStr for Query {
         let mut is_notmuch = false;
         let mut is_newsreader = false;
         let mut is_tantivy = false;
+        let mut is_snoozed = false;
         let mut corpus = None;
         for word in s.split_whitespace() {
             if word == "is:unread" {
@@ -872,6 +877,8 @@ impl FromStr for Query {
                 is_newsreader = true;
             } else if word == "is:newsreader" {
                 is_newsreader = true;
+            } else if word == "is:snoozed" {
+                is_snoozed = true;
             } else {
                 remainder.push(word.to_string());
             }
@@ -890,13 +897,14 @@ impl FromStr for Query {
             is_notmuch,
             is_newsreader,
             is_tantivy,
+            is_snoozed,
             corpus,
         })
     }
 }
 pub struct ThreadSummaryRecord {
     pub site: Option<String>,
-    pub date: Option<PrimitiveDateTime>,
+    pub date: Option<NaiveDateTime>,
     pub is_read: Option<bool>,
     pub title: Option<String>,
     pub uid: String,
@@ -914,11 +922,7 @@ async fn thread_summary_from_row(r: ThreadSummaryRecord) -> ThreadSummary {
     title = clean_title(&title).await.expect("failed to clean title");
     ThreadSummary {
         thread: format!("{NEWSREADER_THREAD_PREFIX}{}", r.uid),
-        timestamp: r
-            .date
-            .expect("post missing date")
-            .assume_utc()
-            .unix_timestamp() as isize,
+        timestamp: r.date.expect("post missing date").and_utc().timestamp() as isize,
         date_relative: format!("{:?}", r.date),
         //date_relative: "TODO date_relative".to_string(),
         matched: 0,
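A quick way to see the new flag in action is a round trip through FromStr and Display. The test below is not in the compare; it assumes the Query type above is in scope and that its FromStr error type implements Debug (needed for expect).

```rust
// Sketch of a round-trip check for the new is:snoozed token.
#[cfg(test)]
mod snoozed_query_tests {
    use super::*;

    #[test]
    fn is_snoozed_round_trips() {
        // Parsing "is:snoozed" should set the flag added in this compare...
        let q: Query = "is:snoozed".parse().expect("query should parse");
        assert!(q.is_snoozed);
        // ...and Display should write the token back out, with the trailing
        // space used by the other is:* flags in the impl above.
        assert!(q.to_string().contains("is:snoozed "));
    }
}
```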
@@ -6,7 +6,7 @@ use letterbox_shared::compute_color;
 use maplit::hashmap;
 use scraper::Selector;
 use sqlx::postgres::PgPool;
-use tracing::{error, info, instrument};
+use tracing::{error, info, instrument, warn};
 use url::Url;

 use crate::{
@@ -86,6 +86,10 @@ pub async fn search(
     query: &Query,
 ) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
     info!("search({after:?} {before:?} {first:?} {last:?} {query:?}");
+    if query.is_snoozed {
+        warn!("TODO implement snooze for newsreader::search");
+        return Ok(Vec::new());
+    }
     if !is_newsreader_query(query) {
         return Ok(Vec::new());
     }
@@ -211,11 +215,7 @@ pub async fn thread(
     }
     let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
     let is_read = r.is_read.unwrap_or(false);
-    let timestamp = r
-        .date
-        .expect("post missing date")
-        .assume_utc()
-        .unix_timestamp();
+    let timestamp = r.date.expect("post missing date").and_utc().timestamp();
     Ok(Thread::News(NewsPost {
         thread_id,
         is_read,
@@ -64,6 +64,10 @@ pub async fn search(
     last: Option<i32>,
     query: &Query,
 ) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
+    if query.is_snoozed {
+        warn!("TODO implement snooze for nm::search");
+        return Ok(Vec::new());
+    }
     if !is_notmuch_query(query) {
         return Ok(Vec::new());
     }
@@ -11,8 +11,8 @@ version.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-build-info = "0.0.41"
-letterbox-notmuch = { path = "../notmuch", version = "0.17.44", registry = "xinu" }
+build-info = "0.0.42"
+letterbox-notmuch = { path = "../notmuch", version = "0.17.49", registry = "xinu" }
 regex = "1.11.1"
 serde = { version = "1.0.219", features = ["derive"] }
 sqlx = "0.8.5"
@@ -9,10 +9,10 @@ repository.workspace = true
 version.workspace = true

 [build-dependencies]
-build-info-build = "0.0.41"
+build-info-build = "0.0.42"

 [dev-dependencies]
-wasm-bindgen-test = "0.3.50"
+#wasm-bindgen-test = "0.3.50"

 [dependencies]
 console_error_panic_hook = "0.1.7"
@@ -28,12 +28,12 @@ graphql_client = "0.14.0"
 thiserror = "2.0.12"
 gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
 human_format = "1.1.0"
-build-info = "0.0.41"
+build-info = "0.0.42"
 wasm-bindgen = "=0.2.100"
 uuid = { version = "1.16.0", features = [
     "js",
 ] } # direct dep to set js feature, prevents Rng issues
-letterbox-shared = { path = "../shared/", version = "0.17.44", registry = "xinu" }
+letterbox-shared = { path = "../shared/", version = "0.17.49", registry = "xinu" }
 seed_hooks = { version = "0.4.1", registry = "xinu" }
 strum_macros = "0.27.1"
 gloo-console = "0.3.0"
@@ -51,7 +51,7 @@
     },
     {
      "args": [],
-      "description": "Indicates that an Input Object is a OneOf Input Object (and thus requires\n exactly one of its field be provided)",
+      "description": "Indicates that an Input Object is a OneOf Input Object (and thus requires exactly one of its field be provided)",
       "locations": [
         "INPUT_OBJECT"
       ],
@@ -107,12 +107,14 @@
       }
     ],
     "mutationType": {
-      "name": "Mutation"
+      "name": "MutationRoot"
     },
     "queryType": {
       "name": "QueryRoot"
     },
-    "subscriptionType": null,
+    "subscriptionType": {
+      "name": "SubscriptionRoot"
+    },
     "types": [
       {
         "description": null,
@@ -314,6 +316,16 @@
         "name": "Corpus",
         "possibleTypes": null
       },
+      {
+        "description": "Implement the DateTime<Utc> scalar\n\nThe input/output is a string in RFC3339 format.",
+        "enumValues": null,
+        "fields": null,
+        "inputFields": null,
+        "interfaces": null,
+        "kind": "SCALAR",
+        "name": "DateTime",
+        "possibleTypes": null
+      },
       {
         "description": null,
         "enumValues": [
@@ -969,6 +981,51 @@
             }
           }
         },
+        {
+          "args": [
+            {
+              "defaultValue": null,
+              "description": null,
+              "name": "query",
+              "type": {
+                "kind": "NON_NULL",
+                "name": null,
+                "ofType": {
+                  "kind": "SCALAR",
+                  "name": "String",
+                  "ofType": null
+                }
+              }
+            },
+            {
+              "defaultValue": null,
+              "description": null,
+              "name": "wakeTime",
+              "type": {
+                "kind": "NON_NULL",
+                "name": null,
+                "ofType": {
+                  "kind": "SCALAR",
+                  "name": "DateTime",
+                  "ofType": null
+                }
+              }
+            }
+          ],
+          "deprecationReason": null,
+          "description": null,
+          "isDeprecated": false,
+          "name": "snooze",
+          "type": {
+            "kind": "NON_NULL",
+            "name": null,
+            "ofType": {
+              "kind": "SCALAR",
+              "name": "Boolean",
+              "ofType": null
+            }
+          }
+        },
         {
           "args": [],
           "deprecationReason": null,
@@ -989,7 +1046,7 @@
       "inputFields": null,
       "interfaces": [],
       "kind": "OBJECT",
-      "name": "Mutation",
+      "name": "MutationRoot",
       "possibleTypes": null
     },
     {
@@ -1474,6 +1531,33 @@
       "name": "String",
       "possibleTypes": null
     },
+    {
+      "description": null,
+      "enumValues": null,
+      "fields": [
+        {
+          "args": [],
+          "deprecationReason": null,
+          "description": null,
+          "isDeprecated": false,
+          "name": "values",
+          "type": {
+            "kind": "NON_NULL",
+            "name": null,
+            "ofType": {
+              "kind": "SCALAR",
+              "name": "Int",
+              "ofType": null
+            }
+          }
+        }
+      ],
+      "inputFields": null,
+      "interfaces": [],
+      "kind": "OBJECT",
+      "name": "SubscriptionRoot",
+      "possibleTypes": null
+    },
     {
       "description": null,
       "enumValues": null,
web/graphql/snooze.graphql (new file, 4 lines)
@@ -0,0 +1,4 @@
+
+mutation SnoozeMutation($query: String!, $wakeTime: DateTime!) {
+    snooze(query: $query, wakeTime: $wakeTime)
+}
@@ -1,4 +1,4 @@
 DEV_HOST=localhost
 DEV_PORT=9345
-graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql --output schema.json
+graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql/ --output schema.json
 git diff schema.json
@@ -1,7 +1,9 @@
+use chrono::Utc;
 use gloo_net::{http::Request, Error};
 use graphql_client::GraphQLQuery;
 use serde::{de::DeserializeOwned, Serialize};

+type DateTime = chrono::DateTime<Utc>;
 // The paths are relative to the directory where your `Cargo.toml` is located.
 // Both json and the GraphQL schema language are supported as sources for the schema
 #[derive(GraphQLQuery)]
@@ -52,6 +54,14 @@ pub struct AddTagMutation;
 )]
 pub struct RemoveTagMutation;

+#[derive(GraphQLQuery)]
+#[graphql(
+    schema_path = "graphql/schema.json",
+    query_path = "graphql/snooze.graphql",
+    response_derives = "Debug"
+)]
+pub struct SnoozeMutation;
+
 #[derive(GraphQLQuery)]
 #[graphql(
     schema_path = "graphql/schema.json",
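graphql_client resolves a custom scalar by looking for a Rust type with the scalar's name in the surrounding scope, which appears to be why the `type DateTime = chrono::DateTime<Utc>;` alias is added next to the derives above. A minimal usage sketch follows; it is not part of the diff, and `snooze_mutation`/`SnoozeMutation` are the items generated by the derive shown above (the one-day offset mirrors the update handler later in this compare).

```rust
use chrono::Utc;
use graphql_client::GraphQLQuery;

// Build the request body for the snooze mutation; wake_time is a
// chrono::DateTime<Utc> because of the DateTime alias in scope.
fn snooze_in_one_day_body(query: &str) -> graphql_client::QueryBody<snooze_mutation::Variables> {
    SnoozeMutation::build_query(snooze_mutation::Variables {
        query: query.to_string(),
        wake_time: Utc::now() + chrono::Days::new(1),
    })
}
```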
@@ -1,5 +1,6 @@
 use std::collections::HashSet;

+use chrono::{DateTime, Utc};
 use graphql_client::GraphQLQuery;
 use letterbox_shared::WebsocketMessage;
 use log::{debug, error, info, warn};
@@ -259,6 +260,29 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
                 Msg::GoToSearchResults
             });
         }
+        Msg::Snooze(query, wake_time) => {
+            let is_catchup = model.catchup.is_some();
+            orders.skip().perform_cmd(async move {
+                let res: Result<
+                    graphql_client::Response<graphql::snooze_mutation::ResponseData>,
+                    gloo_net::Error,
+                > = send_graphql(graphql::SnoozeMutation::build_query(
+                    graphql::snooze_mutation::Variables {
+                        query: query.clone(),
+                        wake_time,
+                    },
+                ))
+                .await;
+                if let Err(e) = res {
+                    error!("Failed to snooze {query} until {wake_time}: {e}");
+                }
+                if is_catchup {
+                    Msg::CatchupMarkAsRead
+                } else {
+                    Msg::GoToSearchResults
+                }
+            });
+        }

         Msg::FrontPageRequest {
             query,
@@ -813,6 +837,7 @@ pub enum Msg {
     SetUnread(String, bool),
     AddTag(String, String),
     RemoveTag(String, String),
+    Snooze(String, DateTime<Utc>),

     FrontPageRequest {
         query: String,
@@ -727,15 +727,19 @@ fn render_open_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<Msg> {
         C!["flex", "p-4", "bg-neutral-800"],
         div![avatar],
         div![
-            C!["px-4", "mr-auto"],
-            span![
-                C!["font-semibold", "text-sm"],
-                from_detail.as_ref().map(|addr| attrs! {
-                    At::Title => addr
-                }),
-                &from,
-                " ",
-                from_detail.as_ref().map(|text| copy_text_widget(&text))
+            C!["px-4", "flex-1"],
+            div![
+                C!["flex"],
+                div![
+                    C!["font-semibold", "text-sm", "flex-1"],
+                    from_detail.as_ref().map(|addr| attrs! {
+                        At::Title => addr
+                    }),
+                    &from,
+                    " ",
+                    from_detail.as_ref().map(|text| copy_text_widget(&text))
+                ],
+                snooze_buttons(msg.timestamp, &id),
             ],
             IF!(!msg.to.is_empty() =>div![
                 C!["text-xs"],
@@ -1371,7 +1375,7 @@ pub fn view_tags(tags: &Option<Vec<Tag>>) -> Node<Msg> {
             },
         ],
         a![
-            C!["grow", "truncate"],
+            C![indent_cls, "grow", "truncate"],
             attrs! {
                 At::Href => href
             },
@@ -1594,9 +1598,13 @@ fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg> {
         C!["flex", "p-4", "bg-neutral-800"],
         div![favicon],
         div![
-            C!["px-4", "mr-auto"],
+            C!["px-4", "mr-auto", "flex-1"],
             div![
-                div![C!["font-semibold", "text-sm"], from],
+                div![
+                    C!["flex"],
+                    div![C!["font-semibold", "text-sm", "flex-1"], from],
+                    snooze_buttons(Some(post.timestamp), &id),
+                ],
                 div![
                     C!["flex", "gap-2", "pt-2", "text-sm"],
                     a![
@@ -1691,3 +1699,47 @@ fn click_to_top() -> Node<Msg> {
         ev(Ev::Click, |_| Msg::ScrollToTop)
     ]
 }
+
+fn snooze_buttons(timestamp: Option<i64>, id: &str) -> Node<Msg> {
+    div![
+        span![C!["px-2"], "⏰"],
+        button![
+            tw_classes::button(),
+            C!["rounded-r-none"],
+            "1d",
+            ev(Ev::Click, {
+                let id = id.to_string();
+                move |e| {
+                    e.stop_propagation();
+                    Msg::Snooze(id, Utc::now() + chrono::Days::new(1))
+                }
+            })
+        ],
+        button![
+            tw_classes::button(),
+            C!["rounded-none"],
+            "7d",
+            ev(Ev::Click, {
+                let id = id.to_string();
+                move |e| {
+                    e.stop_propagation();
+                    Msg::Snooze(id, Utc::now() + chrono::Days::new(7))
+                }
+            })
+        ],
+        timestamp.map(
+            |ts| chrono::DateTime::from_timestamp(ts, 0).map(|ts| button![
+                tw_classes::button(),
+                C!["rounded-l-none"],
+                "+6m",
+                ev(Ev::Click, {
+                    let id = id.to_string();
+                    move |e| {
+                        e.stop_propagation();
+                        Msg::Snooze(id, ts + chrono::Days::new(180))
+                    }
+                })
+            ])
+        ),
+    ]
+}