Compare commits
1 Commits
master
...
slurp-toml
| Author | SHA1 | Date | |
|---|---|---|---|
| 86805f38e3 |
@ -2,8 +2,3 @@
|
|||||||
[build]
|
[build]
|
||||||
rustflags = [ "--cfg=web_sys_unstable_apis" ]
|
rustflags = [ "--cfg=web_sys_unstable_apis" ]
|
||||||
|
|
||||||
[registry]
|
|
||||||
global-credential-providers = ["cargo:token"]
|
|
||||||
|
|
||||||
[registries.xinu]
|
|
||||||
index = "sparse+https://git.z.xinu.tv/api/packages/wathiede/cargo/"
|
|
||||||
|
|||||||
@ -1,67 +0,0 @@
|
|||||||
on: [push]
|
|
||||||
|
|
||||||
name: Continuous integration
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check:
|
|
||||||
name: Check
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
- run: cargo check
|
|
||||||
|
|
||||||
test:
|
|
||||||
name: Test Suite
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
- run: cargo test
|
|
||||||
|
|
||||||
trunk:
|
|
||||||
name: Trunk
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
with:
|
|
||||||
toolchain: nightly
|
|
||||||
target: wasm32-unknown-unknown
|
|
||||||
- run: cargo install trunk
|
|
||||||
- run: cd web; trunk build
|
|
||||||
|
|
||||||
fmt:
|
|
||||||
name: Rustfmt
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
with:
|
|
||||||
components: rustfmt
|
|
||||||
- name: Rustfmt Check
|
|
||||||
uses: actions-rust-lang/rustfmt@v1
|
|
||||||
|
|
||||||
build:
|
|
||||||
name: build
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
- run: cargo build
|
|
||||||
|
|
||||||
udeps:
|
|
||||||
name: Disallow unused dependencies
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
with:
|
|
||||||
toolchain: nightly
|
|
||||||
|
|
||||||
- name: Run cargo-udeps
|
|
||||||
uses: aig787/cargo-udeps-action@v1
|
|
||||||
with:
|
|
||||||
version: 'latest'
|
|
||||||
args: '--all-targets'
|
|
||||||
|
|
||||||
40
.github/copilot-instructions.md
vendored
40
.github/copilot-instructions.md
vendored
@ -1,40 +0,0 @@
|
|||||||
# Copilot/AI Agent Instructions for Letterbox
|
|
||||||
|
|
||||||
## Project Overview
|
|
||||||
- **Letterbox** is a Rust monorepo for a mail/newsreader system with a web frontend and a Rocket/GraphQL backend.
|
|
||||||
- Major crates: `server` (backend, Rocket+async-graphql), `web` (Seed-based WASM frontend), `notmuch` (mail integration), `shared` (common types), `procmail2notmuch` (migration/utility).
|
|
||||||
- Data flows: Email/news data is indexed and queried via the backend, exposed to the frontend via GraphQL. SQLx/Postgres is used for persistence. Notmuch and custom SQL are both used for mail storage/search.
|
|
||||||
|
|
||||||
## Key Workflows
|
|
||||||
- **Development**: Use `dev.sh` to launch a tmux session with live-reloading for both frontend (`trunk serve`) and backend (`cargo watch ... run`).
|
|
||||||
- **Build/Release**: Use `just patch|minor|major` for versioned releases (runs SQLx prepare, bumps versions, pushes). `Makefile`'s `release` target does similar steps.
|
|
||||||
- **Frontend**: In `web/`, use `cargo make serve` and `cargo make watch` for local dev. See `web/README.md` for Seed-specific details.
|
|
||||||
- **Backend**: In `server/`, run with `cargo run` or via the tmux/dev.sh workflow. SQL migrations are in `server/migrations/`.
|
|
||||||
|
|
||||||
## Project Conventions & Patterns
|
|
||||||
- **GraphQL**: All API boundaries are defined in `server/src/graphql.rs`. Use the `Query`, `Mutation`, and `Subscription` roots. Types are defined with `async-graphql` derive macros.
|
|
||||||
- **HTML Sanitization**: See `server/src/lib.rs` for custom HTML/CSS sanitization and transformation logic (e.g., `Transformer` trait, `sanitize_html`).
|
|
||||||
- **Tag/Query Parsing**: The `Query` struct in `server/src/lib.rs` parses user queries into filters for notmuch/newsreader/tantivy.
|
|
||||||
- **Shared Types**: Use the `shared` crate for types and helpers shared between frontend and backend.
|
|
||||||
- **Custom SQL**: Raw SQL queries are in `server/sql/`. Use these for complex queries not handled by SQLx macros.
|
|
||||||
- **Feature Flags**: The `tantivy` feature enables full-text search via Tantivy. Check for `#[cfg(feature = "tantivy")]` in backend code.
|
|
||||||
|
|
||||||
## Integration Points
|
|
||||||
- **Notmuch**: Integrated via the `notmuch` crate for mail indexing/search.
|
|
||||||
- **Postgres**: Used for newsreader and other persistent data (see `server/migrations/`).
|
|
||||||
- **GraphQL**: All client-server communication is via GraphQL endpoints defined in the backend.
|
|
||||||
- **Seed/Trunk**: Frontend is built with Seed (Rust/WASM) and served via Trunk.
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
- To add a new GraphQL query, update `server/src/graphql.rs` and expose it in the `QueryRoot`.
|
|
||||||
- To add a new frontend page, add a module in `web/src/` and register it in the Seed app's router.
|
|
||||||
- To run the full dev environment: `./dev.sh` (requires tmux, trunk, cargo-watch, etc.).
|
|
||||||
|
|
||||||
## References
|
|
||||||
- See `web/README.md` for frontend/Seed workflow details.
|
|
||||||
- See `Justfile` and `Makefile` for release/versioning automation.
|
|
||||||
- See `server/src/lib.rs` and `server/src/graphql.rs` for backend architecture and conventions.
|
|
||||||
- See `server/sql/` for custom SQL queries.
|
|
||||||
|
|
||||||
---
|
|
||||||
If any conventions or workflows are unclear, please ask for clarification or check the referenced files for examples.
|
|
||||||
4983
Cargo.lock
generated
4983
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
23
Cargo.toml
23
Cargo.toml
@ -1,18 +1,15 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
default-members = ["server"]
|
default-members = [
|
||||||
members = ["web", "server", "notmuch", "procmail2notmuch", "shared"]
|
"server"
|
||||||
|
]
|
||||||
[workspace.package]
|
members = [
|
||||||
authors = ["Bill Thiede <git@xinu.tv>"]
|
"web",
|
||||||
edition = "2021"
|
"server",
|
||||||
license = "UNLICENSED"
|
"notmuch",
|
||||||
publish = ["xinu"]
|
"procmail2notmuch",
|
||||||
version = "0.17.55"
|
"shared"
|
||||||
repository = "https://git.z.xinu.tv/wathiede/letterbox"
|
]
|
||||||
|
|
||||||
[profile.dev]
|
|
||||||
opt-level = 1
|
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
lto = true
|
lto = true
|
||||||
|
|||||||
19
Justfile
19
Justfile
@ -1,19 +0,0 @@
|
|||||||
export CARGO_INCREMENTAL := "0"
|
|
||||||
export RUSTFLAGS := "-D warnings"
|
|
||||||
|
|
||||||
default:
|
|
||||||
@echo "Run: just patch|minor|major"
|
|
||||||
|
|
||||||
major: (_release "major")
|
|
||||||
minor: (_release "minor")
|
|
||||||
patch: (_release "patch")
|
|
||||||
|
|
||||||
sqlx-prepare:
|
|
||||||
cd server; cargo sqlx prepare && git add .sqlx; git commit -m "cargo sqlx prepare" .sqlx || true
|
|
||||||
|
|
||||||
pull:
|
|
||||||
git pull
|
|
||||||
|
|
||||||
|
|
||||||
_release level: pull sqlx-prepare
|
|
||||||
cargo-release release -x {{ level }} --workspace --no-confirm --registry=xinu
|
|
||||||
1
Makefile
1
Makefile
@ -1,6 +1,5 @@
|
|||||||
.PHONEY: release
|
.PHONEY: release
|
||||||
release:
|
release:
|
||||||
(cd server; cargo sqlx prepare && git add .sqlx; git commit -m "cargo sqlx prepare" .sqlx || true)
|
|
||||||
bash scripts/update-crate-version.sh
|
bash scripts/update-crate-version.sh
|
||||||
git push
|
git push
|
||||||
|
|
||||||
|
|||||||
2
dev.sh
2
dev.sh
@ -3,5 +3,5 @@ tmux new-session -d -s letterbox-dev
|
|||||||
tmux rename-window web
|
tmux rename-window web
|
||||||
tmux send-keys "cd web; trunk serve -w ../.git -w ../shared -w ../notmuch -w ./" C-m
|
tmux send-keys "cd web; trunk serve -w ../.git -w ../shared -w ../notmuch -w ./" C-m
|
||||||
tmux new-window -n server
|
tmux new-window -n server
|
||||||
tmux send-keys "cd server; cargo watch -c -w ../.git -w ../shared -w ../notmuch -w ./ -x 'run postgres://newsreader@nixos-07.h.xinu.tv/newsreader ../target/database/newsreader /tmp/letterbox/slurp'" C-m
|
tmux send-keys "cd server; cargo watch -c -x run -w ../.git -w ../shared -w ../notmuch -w ./" C-m
|
||||||
tmux attach -d -t letterbox-dev
|
tmux attach -d -t letterbox-dev
|
||||||
|
|||||||
@ -1,24 +1,17 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "letterbox-notmuch"
|
name = "notmuch"
|
||||||
exclude = ["/testdata"]
|
version = "0.0.29"
|
||||||
description = "Wrapper for calling notmuch cli"
|
edition = "2021"
|
||||||
authors.workspace = true
|
|
||||||
edition.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
publish.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
version.workspace = true
|
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
log = "0.4.27"
|
log = "0.4.14"
|
||||||
mailparse = "0.16.1"
|
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
serde_json = { version = "1.0", features = ["unbounded_depth"] }
|
serde_json = { version = "1.0", features = ["unbounded_depth"] }
|
||||||
thiserror = "2.0.12"
|
thiserror = "1.0.30"
|
||||||
tracing = "0.1.41"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
itertools = "0.14.0"
|
itertools = "0.10.1"
|
||||||
pretty_assertions = "1"
|
pretty_assertions = "1"
|
||||||
rayon = "1.10"
|
rayon = "1.5"
|
||||||
|
|||||||
@ -207,15 +207,14 @@
|
|||||||
//! ```
|
//! ```
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashMap,
|
|
||||||
ffi::OsStr,
|
ffi::OsStr,
|
||||||
io::{self},
|
io::{self},
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
process::Command,
|
process::Command,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use log::info;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use tracing::{error, info, instrument, warn};
|
|
||||||
|
|
||||||
/// # Number of seconds since the Epoch
|
/// # Number of seconds since the Epoch
|
||||||
pub type UnixTime = isize;
|
pub type UnixTime = isize;
|
||||||
@ -270,12 +269,6 @@ pub struct Headers {
|
|||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
pub bcc: Option<String>,
|
pub bcc: Option<String>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
#[serde(alias = "Delivered-To")]
|
|
||||||
pub delivered_to: Option<String>,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
#[serde(alias = "X-Original-To")]
|
|
||||||
pub x_original_to: Option<String>,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub reply_to: Option<String>,
|
pub reply_to: Option<String>,
|
||||||
pub date: String,
|
pub date: String,
|
||||||
}
|
}
|
||||||
@ -465,17 +458,13 @@ pub enum NotmuchError {
|
|||||||
StringUtf8Error(#[from] std::string::FromUtf8Error),
|
StringUtf8Error(#[from] std::string::FromUtf8Error),
|
||||||
#[error("failed to parse str as int")]
|
#[error("failed to parse str as int")]
|
||||||
ParseIntError(#[from] std::num::ParseIntError),
|
ParseIntError(#[from] std::num::ParseIntError),
|
||||||
#[error("failed to parse mail: {0}")]
|
|
||||||
MailParseError(#[from] mailparse::MailParseError),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Default)]
|
||||||
pub struct Notmuch {
|
pub struct Notmuch {
|
||||||
config_path: Option<PathBuf>,
|
config_path: Option<PathBuf>,
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: rewrite to use tokio::process::Command and make everything async to see if that helps with
|
|
||||||
// concurrency being more parallel.
|
|
||||||
impl Notmuch {
|
impl Notmuch {
|
||||||
pub fn with_config<P: AsRef<Path>>(config_path: P) -> Notmuch {
|
pub fn with_config<P: AsRef<Path>>(config_path: P) -> Notmuch {
|
||||||
Notmuch {
|
Notmuch {
|
||||||
@ -483,7 +472,6 @@ impl Notmuch {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all)]
|
|
||||||
pub fn new(&self) -> Result<Vec<u8>, NotmuchError> {
|
pub fn new(&self) -> Result<Vec<u8>, NotmuchError> {
|
||||||
self.run_notmuch(["new"])
|
self.run_notmuch(["new"])
|
||||||
}
|
}
|
||||||
@ -492,7 +480,6 @@ impl Notmuch {
|
|||||||
self.run_notmuch(std::iter::empty::<&str>())
|
self.run_notmuch(std::iter::empty::<&str>())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(query=query))]
|
|
||||||
pub fn tags_for_query(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
|
pub fn tags_for_query(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
|
||||||
let res = self.run_notmuch(["search", "--format=json", "--output=tags", query])?;
|
let res = self.run_notmuch(["search", "--format=json", "--output=tags", query])?;
|
||||||
Ok(serde_json::from_slice(&res)?)
|
Ok(serde_json::from_slice(&res)?)
|
||||||
@ -503,31 +490,15 @@ impl Notmuch {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn tag_add(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
|
pub fn tag_add(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
|
||||||
self.tags_add(tag, &[search_term])
|
self.run_notmuch(["tag", &format!("+{tag}"), search_term])?;
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip_all, fields(tag=tag,search_term=?search_term))]
|
|
||||||
pub fn tags_add(&self, tag: &str, search_term: &[&str]) -> Result<(), NotmuchError> {
|
|
||||||
let tag = format!("+{tag}");
|
|
||||||
let mut args = vec!["tag", &tag];
|
|
||||||
args.extend(search_term);
|
|
||||||
self.run_notmuch(&args)?;
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn tag_remove(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
|
pub fn tag_remove(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
|
||||||
self.tags_remove(tag, &[search_term])
|
self.run_notmuch(["tag", &format!("-{tag}"), search_term])?;
|
||||||
}
|
|
||||||
#[instrument(skip_all, fields(tag=tag,search_term=?search_term))]
|
|
||||||
pub fn tags_remove(&self, tag: &str, search_term: &[&str]) -> Result<(), NotmuchError> {
|
|
||||||
let tag = format!("-{tag}");
|
|
||||||
let mut args = vec!["tag", &tag];
|
|
||||||
args.extend(search_term);
|
|
||||||
self.run_notmuch(&args)?;
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(query=query,offset=offset,limit=limit))]
|
|
||||||
pub fn search(
|
pub fn search(
|
||||||
&self,
|
&self,
|
||||||
query: &str,
|
query: &str,
|
||||||
@ -536,35 +507,24 @@ impl Notmuch {
|
|||||||
) -> Result<SearchSummary, NotmuchError> {
|
) -> Result<SearchSummary, NotmuchError> {
|
||||||
let query = if query.is_empty() { "*" } else { query };
|
let query = if query.is_empty() { "*" } else { query };
|
||||||
|
|
||||||
let res = self
|
let res = self.run_notmuch([
|
||||||
.run_notmuch([
|
"search",
|
||||||
"search",
|
"--format=json",
|
||||||
"--format=json",
|
&format!("--offset={offset}"),
|
||||||
&format!("--offset={offset}"),
|
&format!("--limit={limit}"),
|
||||||
&format!("--limit={limit}"),
|
query,
|
||||||
query,
|
])?;
|
||||||
])
|
Ok(serde_json::from_slice(&res)?)
|
||||||
.inspect_err(|err| error!("failed to notmuch search for query '{query}': {err}"))?;
|
|
||||||
Ok(serde_json::from_slice(&res).unwrap_or_else(|err| {
|
|
||||||
error!("failed to decode search result for query '{query}': {err}");
|
|
||||||
SearchSummary(Vec::new())
|
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(query=query))]
|
|
||||||
pub fn count(&self, query: &str) -> Result<usize, NotmuchError> {
|
pub fn count(&self, query: &str) -> Result<usize, NotmuchError> {
|
||||||
// NOTE: --output=threads is technically more correct, but really slow
|
// TODO: compare speed of notmuch count for * w/ and w/o --output=threads
|
||||||
// TODO: find a fast thread count path
|
let res = self.run_notmuch(["count", "--output=threads", query])?;
|
||||||
// let res = self.run_notmuch(["count", "--output=threads", query])?;
|
|
||||||
let res = self.run_notmuch(["count", query])?;
|
|
||||||
// Strip '\n' from res.
|
// Strip '\n' from res.
|
||||||
let s = std::str::from_utf8(&res)?.trim();
|
let s = std::str::from_utf8(&res[..res.len() - 1])?;
|
||||||
Ok(s.parse()
|
Ok(s.parse()?)
|
||||||
.inspect_err(|err| error!("failed to parse count for query '{query}': {err}"))
|
|
||||||
.unwrap_or(0))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(query=query))]
|
|
||||||
pub fn show(&self, query: &str) -> Result<ThreadSet, NotmuchError> {
|
pub fn show(&self, query: &str) -> Result<ThreadSet, NotmuchError> {
|
||||||
let slice = self.run_notmuch([
|
let slice = self.run_notmuch([
|
||||||
"show",
|
"show",
|
||||||
@ -583,7 +543,6 @@ impl Notmuch {
|
|||||||
Ok(val)
|
Ok(val)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(query=query,part=part))]
|
|
||||||
pub fn show_part(&self, query: &str, part: usize) -> Result<Part, NotmuchError> {
|
pub fn show_part(&self, query: &str, part: usize) -> Result<Part, NotmuchError> {
|
||||||
let slice = self.run_notmuch([
|
let slice = self.run_notmuch([
|
||||||
"show",
|
"show",
|
||||||
@ -603,108 +562,25 @@ impl Notmuch {
|
|||||||
Ok(val)
|
Ok(val)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(id=id))]
|
|
||||||
pub fn show_original(&self, id: &MessageId) -> Result<Vec<u8>, NotmuchError> {
|
pub fn show_original(&self, id: &MessageId) -> Result<Vec<u8>, NotmuchError> {
|
||||||
self.show_original_part(id, 0)
|
self.show_original_part(id, 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(id=id,part=part))]
|
|
||||||
pub fn show_original_part(&self, id: &MessageId, part: usize) -> Result<Vec<u8>, NotmuchError> {
|
pub fn show_original_part(&self, id: &MessageId, part: usize) -> Result<Vec<u8>, NotmuchError> {
|
||||||
let id = if id.starts_with("id:") {
|
|
||||||
id
|
|
||||||
} else {
|
|
||||||
&format!("id:{id}")
|
|
||||||
};
|
|
||||||
let res = self.run_notmuch(["show", "--part", &part.to_string(), id])?;
|
let res = self.run_notmuch(["show", "--part", &part.to_string(), id])?;
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(query=query))]
|
|
||||||
pub fn message_ids(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
|
pub fn message_ids(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
|
||||||
let res = self.run_notmuch(["search", "--output=messages", "--format=json", query])?;
|
let res = self.run_notmuch(["search", "--output=messages", "--format=json", query])?;
|
||||||
Ok(serde_json::from_slice(&res)?)
|
Ok(serde_json::from_slice(&res)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(query=query))]
|
|
||||||
pub fn files(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
|
pub fn files(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
|
||||||
let res = self.run_notmuch(["search", "--output=files", "--format=json", query])?;
|
let res = self.run_notmuch(["search", "--output=files", "--format=json", query])?;
|
||||||
Ok(serde_json::from_slice(&res)?)
|
Ok(serde_json::from_slice(&res)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all)]
|
|
||||||
pub fn unread_recipients(&self) -> Result<HashMap<String, usize>, NotmuchError> {
|
|
||||||
let slice = self.run_notmuch([
|
|
||||||
"show",
|
|
||||||
"--include-html=false",
|
|
||||||
"--entire-thread=false",
|
|
||||||
"--body=false",
|
|
||||||
"--format=json",
|
|
||||||
// Arbitrary limit to prevent too much work
|
|
||||||
"--limit=1000",
|
|
||||||
"is:unread",
|
|
||||||
])?;
|
|
||||||
// Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
|
|
||||||
// use that for parsing in rust.
|
|
||||||
let s = String::from_utf8_lossy(&slice);
|
|
||||||
let mut deserializer = serde_json::Deserializer::from_str(&s);
|
|
||||||
deserializer.disable_recursion_limit();
|
|
||||||
let ts: ThreadSet = serde::de::Deserialize::deserialize(&mut deserializer)?;
|
|
||||||
deserializer.end()?;
|
|
||||||
let mut r = HashMap::new();
|
|
||||||
fn collect_from_thread_node(
|
|
||||||
r: &mut HashMap<String, usize>,
|
|
||||||
tn: &ThreadNode,
|
|
||||||
) -> Result<(), NotmuchError> {
|
|
||||||
let Some(msg) = &tn.0 else {
|
|
||||||
return Ok(());
|
|
||||||
};
|
|
||||||
let mut addrs = vec![];
|
|
||||||
let hdr = &msg.headers.to;
|
|
||||||
if let Some(to) = hdr {
|
|
||||||
addrs.push(to);
|
|
||||||
} else {
|
|
||||||
let hdr = &msg.headers.x_original_to;
|
|
||||||
if let Some(to) = hdr {
|
|
||||||
addrs.push(to);
|
|
||||||
} else {
|
|
||||||
let hdr = &msg.headers.delivered_to;
|
|
||||||
if let Some(to) = hdr {
|
|
||||||
addrs.push(to);
|
|
||||||
};
|
|
||||||
};
|
|
||||||
};
|
|
||||||
let hdr = &msg.headers.cc;
|
|
||||||
if let Some(cc) = hdr {
|
|
||||||
addrs.push(cc);
|
|
||||||
};
|
|
||||||
for recipient in addrs {
|
|
||||||
mailparse::addrparse(&recipient)?
|
|
||||||
.into_inner()
|
|
||||||
.iter()
|
|
||||||
.for_each(|a| {
|
|
||||||
let mailparse::MailAddr::Single(si) = a else {
|
|
||||||
return;
|
|
||||||
};
|
|
||||||
let addr = &si.addr;
|
|
||||||
|
|
||||||
if addr == "couchmoney@gmail.com" || addr.ends_with("@xinu.tv") {
|
|
||||||
*r.entry(addr.to_lowercase()).or_default() += 1;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
for t in ts.0 {
|
|
||||||
for tn in t.0 {
|
|
||||||
collect_from_thread_node(&mut r, &tn)?;
|
|
||||||
for sub_tn in tn.1 {
|
|
||||||
collect_from_thread_node(&mut r, &sub_tn)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(r)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run_notmuch<I, S>(&self, args: I) -> Result<Vec<u8>, NotmuchError>
|
fn run_notmuch<I, S>(&self, args: I) -> Result<Vec<u8>, NotmuchError>
|
||||||
where
|
where
|
||||||
I: IntoIterator<Item = S>,
|
I: IntoIterator<Item = S>,
|
||||||
@ -717,13 +593,6 @@ impl Notmuch {
|
|||||||
cmd.args(args);
|
cmd.args(args);
|
||||||
info!("{:?}", &cmd);
|
info!("{:?}", &cmd);
|
||||||
let out = cmd.output()?;
|
let out = cmd.output()?;
|
||||||
if !out.stderr.is_empty() {
|
|
||||||
warn!(
|
|
||||||
"{:?}: STDERR:\n{}",
|
|
||||||
&cmd,
|
|
||||||
String::from_utf8_lossy(&out.stderr)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
Ok(out.stdout)
|
Ok(out.stdout)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -4,7 +4,7 @@ use std::{
|
|||||||
time::Instant,
|
time::Instant,
|
||||||
};
|
};
|
||||||
|
|
||||||
use letterbox_notmuch::Notmuch;
|
use notmuch::Notmuch;
|
||||||
use rayon::iter::{ParallelBridge, ParallelIterator};
|
use rayon::iter::{ParallelBridge, ParallelIterator};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@ -1,20 +1,9 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "letterbox-procmail2notmuch"
|
name = "procmail2notmuch"
|
||||||
description = "Tool for generating notmuch rules from procmail"
|
version = "0.0.29"
|
||||||
authors.workspace = true
|
edition = "2021"
|
||||||
edition.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
publish.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
version.workspace = true
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = "1.0.98"
|
anyhow = "1.0.69"
|
||||||
clap = { version = "4.5.37", features = ["derive", "env"] }
|
|
||||||
letterbox-notmuch = { version = "0.17", registry = "xinu" }
|
|
||||||
letterbox-shared = { version = "0.17", registry = "xinu" }
|
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
|
||||||
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio"] }
|
|
||||||
tokio = { version = "1.44.2", features = ["rt", "macros", "rt-multi-thread"] }
|
|
||||||
|
|||||||
@ -1,36 +1,210 @@
|
|||||||
use std::{collections::HashMap, io::Write};
|
use std::{convert::Infallible, io::Write, str::FromStr};
|
||||||
|
|
||||||
use clap::{Parser, Subcommand};
|
#[derive(Debug, Default)]
|
||||||
use letterbox_shared::{cleanup_match, Match, MatchType, Rule};
|
enum MatchType {
|
||||||
use sqlx::{types::Json, PgPool};
|
From,
|
||||||
|
Sender,
|
||||||
#[derive(Debug, Subcommand)]
|
To,
|
||||||
enum Mode {
|
Cc,
|
||||||
Debug,
|
Subject,
|
||||||
Notmuchrc,
|
List,
|
||||||
LoadSql {
|
DeliveredTo,
|
||||||
#[arg(short, long)]
|
XForwardedTo,
|
||||||
dsn: String,
|
ReplyTo,
|
||||||
},
|
XOriginalTo,
|
||||||
|
XSpam,
|
||||||
|
Body,
|
||||||
|
#[default]
|
||||||
|
Unknown,
|
||||||
|
}
|
||||||
|
#[derive(Debug, Default)]
|
||||||
|
struct Match {
|
||||||
|
match_type: MatchType,
|
||||||
|
needle: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Simple program to greet a person
|
#[derive(Debug, Default)]
|
||||||
#[derive(Parser, Debug)]
|
struct Rule {
|
||||||
#[command(version, about, long_about = None)]
|
matches: Vec<Match>,
|
||||||
struct Args {
|
tags: Vec<String>,
|
||||||
#[arg(short, long, default_value = "/home/wathiede/dotfiles/procmailrc")]
|
|
||||||
input: String,
|
|
||||||
|
|
||||||
#[command(subcommand)]
|
|
||||||
mode: Mode,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::main]
|
fn unescape(s: &str) -> String {
|
||||||
async fn main() -> anyhow::Result<()> {
|
s.replace('\\', "")
|
||||||
let args = Args::parse();
|
}
|
||||||
|
|
||||||
|
fn cleanup_match(prefix: &str, s: &str) -> String {
|
||||||
|
unescape(&s[prefix.len()..]).replace(".*", "")
|
||||||
|
}
|
||||||
|
|
||||||
|
mod matches {
|
||||||
|
pub const TO: &'static str = "TO";
|
||||||
|
pub const CC: &'static str = "Cc";
|
||||||
|
pub const TOCC: &'static str = "(TO|Cc)";
|
||||||
|
pub const FROM: &'static str = "From";
|
||||||
|
pub const SENDER: &'static str = "Sender";
|
||||||
|
pub const SUBJECT: &'static str = "Subject";
|
||||||
|
pub const DELIVERED_TO: &'static str = "Delivered-To";
|
||||||
|
pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
|
||||||
|
pub const REPLY_TO: &'static str = "Reply-To";
|
||||||
|
pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
|
||||||
|
pub const LIST_ID: &'static str = "List-ID";
|
||||||
|
pub const X_SPAM: &'static str = "X-Spam";
|
||||||
|
pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for Match {
|
||||||
|
type Err = Infallible;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
// Examples:
|
||||||
|
// "* 1^0 ^TOsonyrewards.com@xinu.tv"
|
||||||
|
// "* ^TOsonyrewards.com@xinu.tv"
|
||||||
|
let mut it = s.split_whitespace().skip(1);
|
||||||
|
let mut needle = it.next().unwrap();
|
||||||
|
if needle == "1^0" {
|
||||||
|
needle = it.next().unwrap();
|
||||||
|
}
|
||||||
|
let mut needle = vec![needle];
|
||||||
|
needle.extend(it);
|
||||||
|
let needle = needle.join(" ");
|
||||||
|
let first = needle.chars().nth(0).unwrap_or(' ');
|
||||||
|
use matches::*;
|
||||||
|
if first == '^' {
|
||||||
|
let needle = &needle[1..];
|
||||||
|
if needle.starts_with(TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::To,
|
||||||
|
needle: cleanup_match(TO, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(FROM) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::From,
|
||||||
|
needle: cleanup_match(FROM, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(CC) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::Cc,
|
||||||
|
needle: cleanup_match(CC, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(TOCC) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::To,
|
||||||
|
needle: cleanup_match(TOCC, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(SENDER) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::Sender,
|
||||||
|
needle: cleanup_match(SENDER, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(SUBJECT) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::Subject,
|
||||||
|
needle: cleanup_match(SUBJECT, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(X_ORIGINAL_TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::XOriginalTo,
|
||||||
|
needle: cleanup_match(X_ORIGINAL_TO, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(LIST_ID) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::List,
|
||||||
|
needle: cleanup_match(LIST_ID, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(REPLY_TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::ReplyTo,
|
||||||
|
needle: cleanup_match(REPLY_TO, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(X_SPAM_FLAG) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::XSpam,
|
||||||
|
needle: '*'.to_string(),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(X_SPAM) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::XSpam,
|
||||||
|
needle: '*'.to_string(),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(DELIVERED_TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::DeliveredTo,
|
||||||
|
needle: cleanup_match(DELIVERED_TO, needle),
|
||||||
|
});
|
||||||
|
} else if needle.starts_with(X_FORWARDED_TO) {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::XForwardedTo,
|
||||||
|
needle: cleanup_match(X_FORWARDED_TO, needle),
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
unreachable!("needle: '{needle}'")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Ok(Match {
|
||||||
|
match_type: MatchType::Body,
|
||||||
|
needle: cleanup_match("", &needle),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()> {
|
||||||
|
// TODO(wathiede): if reindexing this many tags is too slow, see if combining rules per tag is
|
||||||
|
// faster.
|
||||||
|
let mut lines = Vec::new();
|
||||||
|
for r in rules {
|
||||||
|
for m in &r.matches {
|
||||||
|
for t in &r.tags {
|
||||||
|
if let MatchType::Unknown = m.match_type {
|
||||||
|
eprintln!("rule has unknown match {:?}", r);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let rule = match m.match_type {
|
||||||
|
MatchType::From => "from:",
|
||||||
|
// TODO(wathiede): something more specific?
|
||||||
|
MatchType::Sender => "from:",
|
||||||
|
MatchType::To => "to:",
|
||||||
|
MatchType::Cc => "to:",
|
||||||
|
MatchType::Subject => "subject:",
|
||||||
|
MatchType::List => "List-ID:",
|
||||||
|
MatchType::Body => "",
|
||||||
|
// TODO(wathiede): these will probably require adding fields to notmuch
|
||||||
|
// index. Handle them later.
|
||||||
|
MatchType::DeliveredTo
|
||||||
|
| MatchType::XForwardedTo
|
||||||
|
| MatchType::ReplyTo
|
||||||
|
| MatchType::XOriginalTo
|
||||||
|
| MatchType::XSpam => continue,
|
||||||
|
MatchType::Unknown => unreachable!(),
|
||||||
|
};
|
||||||
|
// Preserve unread status if run with --remove-all
|
||||||
|
lines.push(format!(
|
||||||
|
r#"-unprocessed +{} +unread -- is:unread tag:unprocessed {}"{}""#,
|
||||||
|
t, rule, m.needle
|
||||||
|
));
|
||||||
|
lines.push(format!(
|
||||||
|
// TODO(wathiede): this assumes `notmuch new` is configured to add
|
||||||
|
// `tag:unprocessed` to all new mail.
|
||||||
|
r#"-unprocessed +{} -- tag:unprocessed {}"{}""#,
|
||||||
|
t, rule, m.needle
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
lines.sort();
|
||||||
|
for l in lines {
|
||||||
|
writeln!(w, "{l}")?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() -> anyhow::Result<()> {
|
||||||
|
let input = "/home/wathiede/dotfiles/procmailrc";
|
||||||
let mut rules = Vec::new();
|
let mut rules = Vec::new();
|
||||||
let mut cur_rule = Rule::default();
|
let mut cur_rule = Rule::default();
|
||||||
for l in std::fs::read_to_string(args.input)?.lines() {
|
for l in std::fs::read_to_string(input)?.lines() {
|
||||||
let l = if let Some(idx) = l.find('#') {
|
let l = if let Some(idx) = l.find('#') {
|
||||||
&l[..idx]
|
&l[..idx]
|
||||||
} else {
|
} else {
|
||||||
@ -48,9 +222,6 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
match first {
|
match first {
|
||||||
':' => {
|
':' => {
|
||||||
// start of rule
|
// start of rule
|
||||||
|
|
||||||
// If carbon-copy flag present, don't stop on match
|
|
||||||
cur_rule.stop_on_match = !l.contains('c');
|
|
||||||
}
|
}
|
||||||
'*' => {
|
'*' => {
|
||||||
// add to current rule
|
// add to current rule
|
||||||
@ -59,119 +230,26 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
}
|
}
|
||||||
'.' => {
|
'.' => {
|
||||||
// delivery to folder
|
// delivery to folder
|
||||||
cur_rule.tag = cleanup_match(
|
cur_rule.tags.push(cleanup_match(
|
||||||
"",
|
"",
|
||||||
&l.replace('.', "/")
|
&l.replace('.', "/")
|
||||||
.replace(' ', "")
|
.replace(' ', "")
|
||||||
.trim_matches('/')
|
.trim_matches('/')
|
||||||
.to_string(),
|
.to_string(),
|
||||||
);
|
));
|
||||||
rules.push(cur_rule);
|
rules.push(cur_rule);
|
||||||
cur_rule = Rule::default();
|
cur_rule = Rule::default();
|
||||||
}
|
}
|
||||||
'/' => cur_rule = Rule::default(), // Ex. /dev/null
|
|
||||||
'|' => cur_rule = Rule::default(), // external command
|
'|' => cur_rule = Rule::default(), // external command
|
||||||
'$' => {
|
'$' => {
|
||||||
// TODO(wathiede): tag messages with no other tag as 'inbox'
|
// TODO(wathiede): tag messages with no other tag as 'inbox'
|
||||||
cur_rule.tag = cleanup_match("", "inbox");
|
cur_rule.tags.push(cleanup_match("", "inbox"));
|
||||||
rules.push(cur_rule);
|
rules.push(cur_rule);
|
||||||
cur_rule = Rule::default();
|
cur_rule = Rule::default();
|
||||||
} // variable, should only be $DEFAULT in my config
|
} // variable, should only be $DEFAULT in my config
|
||||||
_ => panic!("Unhandled first character '{}'\nLine: {}", first, l),
|
_ => panic!("Unhandled first character '{}' {}", first, l),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
match args.mode {
|
notmuch_from_rules(std::io::stdout(), &rules)?;
|
||||||
Mode::Debug => print_rules(&rules),
|
|
||||||
Mode::Notmuchrc => notmuch_from_rules(std::io::stdout(), &rules)?,
|
|
||||||
Mode::LoadSql { dsn } => load_sql(&dsn, &rules).await?,
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn print_rules(rules: &[Rule]) {
|
|
||||||
let mut tally = HashMap::new();
|
|
||||||
for r in rules {
|
|
||||||
for m in &r.matches {
|
|
||||||
*tally.entry(m.match_type).or_insert(0) += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let mut sorted: Vec<_> = tally.iter().map(|(k, v)| (v, k)).collect();
|
|
||||||
sorted.sort();
|
|
||||||
sorted.reverse();
|
|
||||||
for (v, k) in sorted {
|
|
||||||
println!("{k:?}: {v}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()> {
|
|
||||||
// TODO(wathiede): if reindexing this many tags is too slow, see if combining rules per tag is
|
|
||||||
// faster.
|
|
||||||
let mut lines = Vec::new();
|
|
||||||
for r in rules {
|
|
||||||
for m in &r.matches {
|
|
||||||
let t = &r.tag;
|
|
||||||
if let MatchType::Unknown = m.match_type {
|
|
||||||
eprintln!("rule has unknown match {:?}", r);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let rule = match m.match_type {
|
|
||||||
MatchType::From => "from:",
|
|
||||||
// TODO(wathiede): something more specific?
|
|
||||||
MatchType::Sender => "from:",
|
|
||||||
MatchType::To => "to:",
|
|
||||||
MatchType::Cc => "to:",
|
|
||||||
MatchType::Subject => "subject:",
|
|
||||||
MatchType::ListId => "List-ID:",
|
|
||||||
MatchType::Body => "",
|
|
||||||
// TODO(wathiede): these will probably require adding fields to notmuch
|
|
||||||
// index. Handle them later.
|
|
||||||
MatchType::DeliveredTo
|
|
||||||
| MatchType::XForwardedTo
|
|
||||||
| MatchType::ReplyTo
|
|
||||||
| MatchType::XOriginalTo
|
|
||||||
| MatchType::XSpam => continue,
|
|
||||||
MatchType::Unknown => unreachable!(),
|
|
||||||
};
|
|
||||||
// Preserve unread status if run with --remove-all
|
|
||||||
lines.push(format!(
|
|
||||||
r#"-unprocessed +{} +unread -- is:unread tag:unprocessed {}"{}""#,
|
|
||||||
t, rule, m.needle
|
|
||||||
));
|
|
||||||
lines.push(format!(
|
|
||||||
// TODO(wathiede): this assumes `notmuch new` is configured to add
|
|
||||||
// `tag:unprocessed` to all new mail.
|
|
||||||
r#"-unprocessed +{} -- tag:unprocessed {}"{}""#,
|
|
||||||
t, rule, m.needle
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
lines.sort();
|
|
||||||
for l in lines {
|
|
||||||
writeln!(w, "{l}")?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn load_sql(dsn: &str, rules: &[Rule]) -> anyhow::Result<()> {
|
|
||||||
let pool = PgPool::connect(dsn).await?;
|
|
||||||
println!("clearing email_rule table");
|
|
||||||
sqlx::query!("DELETE FROM email_rule")
|
|
||||||
.execute(&pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
for (order, rule) in rules.iter().enumerate() {
|
|
||||||
println!("inserting {order}: {rule:?}");
|
|
||||||
sqlx::query!(
|
|
||||||
r#"
|
|
||||||
INSERT INTO email_rule (sort_order, rule)
|
|
||||||
VALUES ($1, $2)
|
|
||||||
"#,
|
|
||||||
order as i32,
|
|
||||||
Json(rule) as _
|
|
||||||
)
|
|
||||||
.execute(&pool)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,13 +0,0 @@
|
|||||||
{
|
|
||||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
|
||||||
"extends": [
|
|
||||||
"config:recommended"
|
|
||||||
]
|
|
||||||
,
|
|
||||||
"packageRules": [
|
|
||||||
{
|
|
||||||
"matchPackageNames": ["wasm-bindgen"],
|
|
||||||
"enabled": false
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"db_name": "PostgreSQL",
|
"db_name": "PostgreSQL",
|
||||||
"query": "SELECT\n date,\n is_read,\n link,\n site,\n summary,\n clean_summary,\n title,\n name,\n homepage\nFROM\n post AS p\nINNER JOIN feed AS f ON p.site = f.slug\nWHERE\n uid = $1\n",
|
"query": "SELECT\n date,\n is_read,\n link,\n site,\n summary,\n title,\n name,\n homepage\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n uid = $1\n",
|
||||||
"describe": {
|
"describe": {
|
||||||
"columns": [
|
"columns": [
|
||||||
{
|
{
|
||||||
@ -30,21 +30,16 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 5,
|
"ordinal": 5,
|
||||||
"name": "clean_summary",
|
|
||||||
"type_info": "Text"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"ordinal": 6,
|
|
||||||
"name": "title",
|
"name": "title",
|
||||||
"type_info": "Text"
|
"type_info": "Text"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 7,
|
"ordinal": 6,
|
||||||
"name": "name",
|
"name": "name",
|
||||||
"type_info": "Text"
|
"type_info": "Text"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 8,
|
"ordinal": 7,
|
||||||
"name": "homepage",
|
"name": "homepage",
|
||||||
"type_info": "Text"
|
"type_info": "Text"
|
||||||
}
|
}
|
||||||
@ -57,7 +52,6 @@
|
|||||||
"nullable": [
|
"nullable": [
|
||||||
true,
|
true,
|
||||||
true,
|
true,
|
||||||
false,
|
|
||||||
true,
|
true,
|
||||||
true,
|
true,
|
||||||
true,
|
true,
|
||||||
@ -66,5 +60,5 @@
|
|||||||
true
|
true
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"hash": "383221a94bc3746322ba78e41cde37994440ee67dc32e88d2394c51211bde6cd"
|
"hash": "113694cd5bf0d2582ff3a635776daa608fe88abe1185958c4215646c92335afb"
|
||||||
}
|
}
|
||||||
62
server/.sqlx/query-1b2244c9b9b64a1395d8d266f5df5352242bbe5efe481b0852e1c1d4b40584a7.json
generated
Normal file
62
server/.sqlx/query-1b2244c9b9b64a1395d8d266f5df5352242bbe5efe481b0852e1c1d4b40584a7.json
generated
Normal file
@ -0,0 +1,62 @@
|
|||||||
|
{
|
||||||
|
"db_name": "PostgreSQL",
|
||||||
|
"query": "SELECT\n site,\n title,\n summary,\n link,\n date,\n is_read,\n uid,\n id\nFROM post\n",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"ordinal": 0,
|
||||||
|
"name": "site",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 1,
|
||||||
|
"name": "title",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 2,
|
||||||
|
"name": "summary",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 3,
|
||||||
|
"name": "link",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 4,
|
||||||
|
"name": "date",
|
||||||
|
"type_info": "Timestamp"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 5,
|
||||||
|
"name": "is_read",
|
||||||
|
"type_info": "Bool"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 6,
|
||||||
|
"name": "uid",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 7,
|
||||||
|
"name": "id",
|
||||||
|
"type_info": "Int4"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Left": []
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "1b2244c9b9b64a1395d8d266f5df5352242bbe5efe481b0852e1c1d4b40584a7"
|
||||||
|
}
|
||||||
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"db_name": "PostgreSQL",
|
"db_name": "PostgreSQL",
|
||||||
"query": "SELECT\n site,\n date,\n is_read,\n title,\n uid,\n name\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n ($1::text IS NULL OR site = $1)\n AND (\n NOT $2\n OR NOT is_read\n )\n AND (\n $5 :: text IS NULL\n OR to_tsvector('english', search_summary) @@ websearch_to_tsquery('english', $5)\n )\nORDER BY\n date DESC,\n title OFFSET $3\nLIMIT\n $4\n",
|
"query": "SELECT\n site,\n date,\n is_read,\n title,\n uid,\n name\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n ($1::text IS NULL OR site = $1)\n AND (\n NOT $2\n OR NOT is_read\n )\nORDER BY\n date DESC,\n title OFFSET $3\nLIMIT\n $4\n",
|
||||||
"describe": {
|
"describe": {
|
||||||
"columns": [
|
"columns": [
|
||||||
{
|
{
|
||||||
@ -39,8 +39,7 @@
|
|||||||
"Text",
|
"Text",
|
||||||
"Bool",
|
"Bool",
|
||||||
"Int8",
|
"Int8",
|
||||||
"Int8",
|
"Int8"
|
||||||
"Text"
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"nullable": [
|
"nullable": [
|
||||||
@ -52,5 +51,5 @@
|
|||||||
true
|
true
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"hash": "fc4607f02cc76a5f3a6629cce4507c74f52ae44820897b47365da3f339d1da06"
|
"hash": "2c1954b6db3cbcabf9b878cd1c8ea01c607f46dc43a85b58e19217e7633cf337"
|
||||||
}
|
}
|
||||||
@ -1,14 +0,0 @@
|
|||||||
{
|
|
||||||
"db_name": "PostgreSQL",
|
|
||||||
"query": "DELETE FROM snooze WHERE id = $1",
|
|
||||||
"describe": {
|
|
||||||
"columns": [],
|
|
||||||
"parameters": {
|
|
||||||
"Left": [
|
|
||||||
"Int4"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"nullable": []
|
|
||||||
},
|
|
||||||
"hash": "77f79f981a9736d18ffd4b87d3aec34d6a048162154a3aba833370c58a860795"
|
|
||||||
}
|
|
||||||
@ -1,24 +0,0 @@
|
|||||||
{
|
|
||||||
"db_name": "PostgreSQL",
|
|
||||||
"query": "SELECT COUNT(*) AS count\nFROM\n post\nWHERE\n (\n $1::text IS NULL\n OR site = $1\n )\n AND (\n NOT $2\n OR NOT is_read\n )\n AND (\n $3::text IS NULL\n OR TO_TSVECTOR('english', search_summary)\n @@ WEBSEARCH_TO_TSQUERY('english', $3)\n )\n",
|
|
||||||
"describe": {
|
|
||||||
"columns": [
|
|
||||||
{
|
|
||||||
"ordinal": 0,
|
|
||||||
"name": "count",
|
|
||||||
"type_info": "Int8"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"parameters": {
|
|
||||||
"Left": [
|
|
||||||
"Text",
|
|
||||||
"Bool",
|
|
||||||
"Text"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"nullable": [
|
|
||||||
null
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"hash": "8c1b3c78649135e98b89092237750088433f7ff1b7c2ddeedec553406ea9f203"
|
|
||||||
}
|
|
||||||
@ -1,26 +0,0 @@
|
|||||||
{
|
|
||||||
"db_name": "PostgreSQL",
|
|
||||||
"query": "\nSELECT id, message_id\nFROM snooze\nWHERE wake < NOW();\n ",
|
|
||||||
"describe": {
|
|
||||||
"columns": [
|
|
||||||
{
|
|
||||||
"ordinal": 0,
|
|
||||||
"name": "id",
|
|
||||||
"type_info": "Int4"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"ordinal": 1,
|
|
||||||
"name": "message_id",
|
|
||||||
"type_info": "Text"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"parameters": {
|
|
||||||
"Left": []
|
|
||||||
},
|
|
||||||
"nullable": [
|
|
||||||
false,
|
|
||||||
false
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"hash": "c8383663124a5cc5912b54553f18f7064d33087ebfdf3c0c1c43cbe6d3577084"
|
|
||||||
}
|
|
||||||
@ -1,32 +0,0 @@
|
|||||||
{
|
|
||||||
"db_name": "PostgreSQL",
|
|
||||||
"query": "SELECT\n p.id,\n link,\n clean_summary\nFROM\n post AS p\nINNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts\nWHERE\n search_summary IS NULL\n -- TODO remove AND link ~ '^<'\nORDER BY\n ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)\nLIMIT 1000;\n",
|
|
||||||
"describe": {
|
|
||||||
"columns": [
|
|
||||||
{
|
|
||||||
"ordinal": 0,
|
|
||||||
"name": "id",
|
|
||||||
"type_info": "Int4"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"ordinal": 1,
|
|
||||||
"name": "link",
|
|
||||||
"type_info": "Text"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"ordinal": 2,
|
|
||||||
"name": "clean_summary",
|
|
||||||
"type_info": "Text"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"parameters": {
|
|
||||||
"Left": []
|
|
||||||
},
|
|
||||||
"nullable": [
|
|
||||||
false,
|
|
||||||
false,
|
|
||||||
true
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"hash": "cf369e3d5547f400cb54004dd03783ef6998a000aec91c50a79405dcf1c53b17"
|
|
||||||
}
|
|
||||||
23
server/.sqlx/query-e28b890e308f483aa6bd08617548ae66294ae1e99b1cab49f5f4211e0fd7d419.json
generated
Normal file
23
server/.sqlx/query-e28b890e308f483aa6bd08617548ae66294ae1e99b1cab49f5f4211e0fd7d419.json
generated
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"db_name": "PostgreSQL",
|
||||||
|
"query": "SELECT\n COUNT(*) count\nFROM\n post\nWHERE\n ($1::text IS NULL OR site = $1)\n AND (\n NOT $2\n OR NOT is_read\n )\n",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"ordinal": 0,
|
||||||
|
"name": "count",
|
||||||
|
"type_info": "Int8"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Left": [
|
||||||
|
"Text",
|
||||||
|
"Bool"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
null
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "e28b890e308f483aa6bd08617548ae66294ae1e99b1cab49f5f4211e0fd7d419"
|
||||||
|
}
|
||||||
@ -1,15 +0,0 @@
|
|||||||
{
|
|
||||||
"db_name": "PostgreSQL",
|
|
||||||
"query": "UPDATE post SET search_summary = $1 WHERE id = $2",
|
|
||||||
"describe": {
|
|
||||||
"columns": [],
|
|
||||||
"parameters": {
|
|
||||||
"Left": [
|
|
||||||
"Text",
|
|
||||||
"Int4"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"nullable": []
|
|
||||||
},
|
|
||||||
"hash": "ef8327f039dbfa8f4e59b7a77a6411252a346bf51cf940024a17d9fbb2df173c"
|
|
||||||
}
|
|
||||||
@ -1,15 +0,0 @@
|
|||||||
{
|
|
||||||
"db_name": "PostgreSQL",
|
|
||||||
"query": "\n INSERT INTO snooze (message_id, wake)\n VALUES ($1, $2)\n ON CONFLICT (message_id) DO UPDATE\n SET wake = $2\n ",
|
|
||||||
"describe": {
|
|
||||||
"columns": [],
|
|
||||||
"parameters": {
|
|
||||||
"Left": [
|
|
||||||
"Text",
|
|
||||||
"Timestamptz"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"nullable": []
|
|
||||||
},
|
|
||||||
"hash": "effd0d0d91e6ad84546f7177f1fd39d4fad736b471eb5e55fd5ac74f7adff664"
|
|
||||||
}
|
|
||||||
@ -1,67 +1,42 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "letterbox-server"
|
name = "server"
|
||||||
default-run = "letterbox-server"
|
version = "0.0.29"
|
||||||
description = "Backend for letterbox"
|
edition = "2021"
|
||||||
authors.workspace = true
|
default-run = "server"
|
||||||
edition.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
publish.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
version.workspace = true
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
chrono-tz = "0.10"
|
ammonia = "3.3.0"
|
||||||
html2text = "0.16"
|
anyhow = "1.0.79"
|
||||||
ammonia = "4.1.0"
|
async-graphql = { version = "6.0.11", features = ["log"] }
|
||||||
anyhow = "1.0.98"
|
async-graphql-rocket = "6.0.11"
|
||||||
askama = { version = "0.14.0", features = ["derive"] }
|
async-trait = "0.1.81"
|
||||||
async-graphql = { version = "7", features = ["log", "chrono"] }
|
build-info = "0.0.38"
|
||||||
async-graphql-axum = "7.0.16"
|
cacher = {git = "http://git-private.h.xinu.tv/wathiede/cacher.git"}
|
||||||
async-trait = "0.1.88"
|
css-inline = "0.13.0"
|
||||||
axum = { version = "0.8.3", features = ["ws"] }
|
glog = "0.1.0"
|
||||||
axum-macros = "0.5.0"
|
|
||||||
build-info = "0.0.42"
|
|
||||||
cacher = { version = "0.2.0", registry = "xinu" }
|
|
||||||
chrono = "0.4.40"
|
|
||||||
clap = { version = "4.5.37", features = ["derive"] }
|
|
||||||
css-inline = "0.18.0"
|
|
||||||
flate2 = "1.1.2"
|
|
||||||
futures = "0.3.31"
|
|
||||||
headers = "0.4.0"
|
|
||||||
html-escape = "0.2.13"
|
html-escape = "0.2.13"
|
||||||
ical = "0.11"
|
|
||||||
letterbox-notmuch = { path = "../notmuch", version = "0.17", registry = "xinu" }
|
|
||||||
letterbox-shared = { path = "../shared", version = "0.17", registry = "xinu" }
|
|
||||||
linkify = "0.10.0"
|
linkify = "0.10.0"
|
||||||
lol_html = "2.3.0"
|
log = "0.4.17"
|
||||||
mailparse = "0.16.1"
|
lol_html = "1.2.0"
|
||||||
|
mailparse = "0.15.0"
|
||||||
maplit = "1.0.2"
|
maplit = "1.0.2"
|
||||||
memmap = "0.7.0"
|
memmap = "0.7.0"
|
||||||
quick-xml = { version = "0.38.1", features = ["serialize"] }
|
notmuch = { path = "../notmuch" }
|
||||||
regex = "1.11.1"
|
reqwest = { version = "0.12.7", features = ["blocking"] }
|
||||||
reqwest = { version = "0.12.15", features = ["blocking"] }
|
rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
|
||||||
scraper = "0.25.0"
|
rocket_cors = "0.6.0"
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
scraper = "0.20.0"
|
||||||
serde_json = "1.0.140"
|
serde = { version = "1.0.147", features = ["derive"] }
|
||||||
sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "chrono"] }
|
serde_json = "1.0.87"
|
||||||
tantivy = { version = "0.25.0", optional = true }
|
shared = { path = "../shared" }
|
||||||
thiserror = "2.0.12"
|
sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio", "time"] }
|
||||||
tokio = "1.44.2"
|
tantivy = "0.22.0"
|
||||||
tower-http = { version = "0.6.2", features = ["trace"] }
|
thiserror = "1.0.37"
|
||||||
tracing = "0.1.41"
|
tokio = "1.26.0"
|
||||||
url = "2.5.4"
|
url = "2.5.2"
|
||||||
urlencoding = "2.1.3"
|
urlencoding = "2.1.3"
|
||||||
#xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
|
|
||||||
#xtracing = { path = "../../xtracing" }
|
|
||||||
xtracing = { version = "0.3.2", registry = "xinu" }
|
|
||||||
zip = "6.0.0"
|
|
||||||
|
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
build-info-build = "0.0.42"
|
build-info-build = "0.0.38"
|
||||||
|
|
||||||
[features]
|
|
||||||
#default = [ "tantivy" ]
|
|
||||||
tantivy = ["dep:tantivy"]
|
|
||||||
|
|||||||
@ -5,9 +5,50 @@ newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
|
|||||||
newsreader_tantivy_db_path = "../target/database/newsreader"
|
newsreader_tantivy_db_path = "../target/database/newsreader"
|
||||||
|
|
||||||
[debug]
|
[debug]
|
||||||
|
address = "0.0.0.0"
|
||||||
port = 9345
|
port = 9345
|
||||||
# Uncomment to make it production like.
|
# Uncomment to make it production like.
|
||||||
#log_level = "critical"
|
#log_level = "critical"
|
||||||
newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
|
newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
|
||||||
newsreader_tantivy_db_path = "../target/database/newsreader"
|
newsreader_tantivy_db_path = "../target/database/newsreader"
|
||||||
slurp_cache_path = "/tmp/letterbox/slurp"
|
slurp_cache_path = "/net/nasx/x/letterbox/slurp"
|
||||||
|
|
||||||
|
[debug.slurp_site_selectors]
|
||||||
|
"atmeta.com" = [
|
||||||
|
"div.entry-content"
|
||||||
|
]
|
||||||
|
"blog.prusa3d.com" = [
|
||||||
|
"article.content .post-block"
|
||||||
|
]
|
||||||
|
"blog.cloudflare.com" = [
|
||||||
|
".author-lists .author-name-tooltip",
|
||||||
|
".post-full-content"
|
||||||
|
]
|
||||||
|
"blog.zsa.io" = [
|
||||||
|
"section.blog-article"
|
||||||
|
]
|
||||||
|
"engineering.fb.com" = [
|
||||||
|
"article"
|
||||||
|
]
|
||||||
|
"hackaday.com" = [
|
||||||
|
"div.entry-featured-image",
|
||||||
|
"div.entry-content"
|
||||||
|
]
|
||||||
|
"mitchellh.com" = [
|
||||||
|
"div.w-full"
|
||||||
|
]
|
||||||
|
"natwelch.com" = [
|
||||||
|
"article div.prose"
|
||||||
|
]
|
||||||
|
"slashdot.org" = [
|
||||||
|
"span.story-byline",
|
||||||
|
"div.p"
|
||||||
|
]
|
||||||
|
"www.redox-os.org" = [
|
||||||
|
"div.content"
|
||||||
|
]
|
||||||
|
"www.smbc-comics.com" = [
|
||||||
|
"img#cc-comic",
|
||||||
|
"div#aftercomic img"
|
||||||
|
]
|
||||||
|
|
||||||
|
|||||||
@ -1,6 +1,5 @@
|
|||||||
fn main() {
|
fn main() {
|
||||||
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
|
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
|
||||||
// and `build_info::format!` in the main program.
|
// and `build_info::format!` in the main program.
|
||||||
build_info_build::build_script();
|
build_info_build::build_script();
|
||||||
println!("cargo:rerun-if-changed=templates");
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,3 +0,0 @@
|
|||||||
DROP INDEX IF EXISTS post_summary_idx;
|
|
||||||
DROP INDEX IF EXISTS post_site_idx;
|
|
||||||
DROP INDEX IF EXISTS post_title_idx;
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
CREATE INDEX post_summary_idx ON post USING GIN (to_tsvector('english', summary));
|
|
||||||
CREATE INDEX post_site_idx ON post USING GIN (to_tsvector('english', site));
|
|
||||||
CREATE INDEX post_title_idx ON post USING GIN (to_tsvector('english', title));
|
|
||||||
@ -1,24 +0,0 @@
|
|||||||
BEGIN;
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Email" DROP CONSTRAINT IF EXISTS email_avatar_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."EmailDisplayName" DROP CONSTRAINT IF EXISTS email_id_fk;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_to_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_cc_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_from_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_header_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_file_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_body_id_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_thread_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_tag_fkey;
|
|
||||||
|
|
||||||
DROP TABLE IF EXISTS public."Email";
|
|
||||||
DROP TABLE IF EXISTS public."EmailDisplayName";
|
|
||||||
DROP TABLE IF EXISTS public."Message";
|
|
||||||
DROP TABLE IF EXISTS public."Header";
|
|
||||||
DROP TABLE IF EXISTS public."File";
|
|
||||||
DROP TABLE IF EXISTS public."Avatar";
|
|
||||||
DROP TABLE IF EXISTS public."Body";
|
|
||||||
DROP TABLE IF EXISTS public."Thread";
|
|
||||||
DROP TABLE IF EXISTS public."Tag";
|
|
||||||
|
|
||||||
END;
|
|
||||||
@ -1,174 +0,0 @@
|
|||||||
-- This script was generated by the ERD tool in pgAdmin 4.
|
|
||||||
-- Please log an issue at https://github.com/pgadmin-org/pgadmin4/issues/new/choose if you find any bugs, including reproduction steps.
|
|
||||||
BEGIN;
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Email" DROP CONSTRAINT IF EXISTS email_avatar_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."EmailDisplayName" DROP CONSTRAINT IF EXISTS email_id_fk;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_to_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_cc_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_from_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_header_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_file_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_body_id_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_thread_fkey;
|
|
||||||
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_tag_fkey;
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."Email"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
address text NOT NULL,
|
|
||||||
avatar_id integer,
|
|
||||||
PRIMARY KEY (id),
|
|
||||||
CONSTRAINT avatar_id UNIQUE (avatar_id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."EmailDisplayName"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
email_id integer NOT NULL,
|
|
||||||
PRIMARY KEY (id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."Message"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
subject text,
|
|
||||||
"from" integer,
|
|
||||||
"to" integer,
|
|
||||||
cc integer,
|
|
||||||
header_id integer,
|
|
||||||
hash text NOT NULL,
|
|
||||||
file_id integer NOT NULL,
|
|
||||||
date timestamp with time zone NOT NULL,
|
|
||||||
unread boolean NOT NULL,
|
|
||||||
body_id integer NOT NULL,
|
|
||||||
thread_id integer NOT NULL,
|
|
||||||
tag_id integer,
|
|
||||||
CONSTRAINT body_id UNIQUE (body_id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."Header"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
key text NOT NULL,
|
|
||||||
value text NOT NULL,
|
|
||||||
PRIMARY KEY (id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."File"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
path text NOT NULL,
|
|
||||||
PRIMARY KEY (id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."Avatar"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
url text NOT NULL,
|
|
||||||
PRIMARY KEY (id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."Body"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
text text NOT NULL,
|
|
||||||
PRIMARY KEY (id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."Thread"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
PRIMARY KEY (id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public."Tag"
|
|
||||||
(
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
name text NOT NULL,
|
|
||||||
display text,
|
|
||||||
fg_color integer,
|
|
||||||
bg_color integer,
|
|
||||||
PRIMARY KEY (id)
|
|
||||||
);
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Email"
|
|
||||||
ADD CONSTRAINT email_avatar_fkey FOREIGN KEY (avatar_id)
|
|
||||||
REFERENCES public."Avatar" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."EmailDisplayName"
|
|
||||||
ADD CONSTRAINT email_id_fk FOREIGN KEY (email_id)
|
|
||||||
REFERENCES public."Email" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Message"
|
|
||||||
ADD CONSTRAINT message_to_fkey FOREIGN KEY ("to")
|
|
||||||
REFERENCES public."Email" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Message"
|
|
||||||
ADD CONSTRAINT message_cc_fkey FOREIGN KEY (cc)
|
|
||||||
REFERENCES public."Email" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Message"
|
|
||||||
ADD CONSTRAINT message_from_fkey FOREIGN KEY ("from")
|
|
||||||
REFERENCES public."Email" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Message"
|
|
||||||
ADD CONSTRAINT message_header_fkey FOREIGN KEY (header_id)
|
|
||||||
REFERENCES public."Header" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Message"
|
|
||||||
ADD CONSTRAINT message_file_fkey FOREIGN KEY (file_id)
|
|
||||||
REFERENCES public."File" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Message"
|
|
||||||
ADD CONSTRAINT message_body_id_fkey FOREIGN KEY (body_id)
|
|
||||||
REFERENCES public."Body" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Message"
|
|
||||||
ADD CONSTRAINT message_thread_fkey FOREIGN KEY (thread_id)
|
|
||||||
REFERENCES public."Thread" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
|
|
||||||
ALTER TABLE IF EXISTS public."Message"
|
|
||||||
ADD CONSTRAINT message_tag_fkey FOREIGN KEY (tag_id)
|
|
||||||
REFERENCES public."Tag" (id) MATCH SIMPLE
|
|
||||||
ON UPDATE NO ACTION
|
|
||||||
ON DELETE NO ACTION
|
|
||||||
NOT VALID;
|
|
||||||
|
|
||||||
END;
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
-- Add down migration script here
|
|
||||||
ALTER TABLE
|
|
||||||
post DROP CONSTRAINT post_link_key;
|
|
||||||
@ -1,28 +0,0 @@
|
|||||||
WITH dupes AS (
|
|
||||||
SELECT
|
|
||||||
uid,
|
|
||||||
link,
|
|
||||||
Row_number() over(
|
|
||||||
PARTITION by link
|
|
||||||
ORDER BY
|
|
||||||
link
|
|
||||||
) AS RowNumber
|
|
||||||
FROM
|
|
||||||
post
|
|
||||||
)
|
|
||||||
DELETE FROM
|
|
||||||
post
|
|
||||||
WHERE
|
|
||||||
uid IN (
|
|
||||||
SELECT
|
|
||||||
uid
|
|
||||||
FROM
|
|
||||||
dupes
|
|
||||||
WHERE
|
|
||||||
RowNumber > 1
|
|
||||||
);
|
|
||||||
|
|
||||||
ALTER TABLE
|
|
||||||
post
|
|
||||||
ADD
|
|
||||||
UNIQUE (link);
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
ALTER TABLE
|
|
||||||
post
|
|
||||||
ALTER COLUMN
|
|
||||||
link DROP NOT NULL;
|
|
||||||
|
|
||||||
ALTER TABLE
|
|
||||||
post DROP CONSTRAINT link;
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
DELETE FROM
|
|
||||||
post
|
|
||||||
WHERE
|
|
||||||
link IS NULL
|
|
||||||
OR link = '';
|
|
||||||
|
|
||||||
ALTER TABLE
|
|
||||||
post
|
|
||||||
ALTER COLUMN
|
|
||||||
link
|
|
||||||
SET
|
|
||||||
NOT NULL;
|
|
||||||
|
|
||||||
ALTER TABLE
|
|
||||||
post
|
|
||||||
ADD
|
|
||||||
CONSTRAINT link CHECK (link <> '');
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
DROP TABLE IF EXISTS email_address;
|
|
||||||
DROP TABLE IF EXISTS photo;
|
|
||||||
DROP TABLE IF EXISTS google_person;
|
|
||||||
@ -1,19 +0,0 @@
|
|||||||
-- Add up migration script here
|
|
||||||
CREATE TABLE IF NOT EXISTS google_person (
|
|
||||||
id SERIAL PRIMARY KEY,
|
|
||||||
resource_name TEXT NOT NULL UNIQUE,
|
|
||||||
display_name TEXT NOT NULL
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS email_photo (
|
|
||||||
id SERIAL PRIMARY KEY,
|
|
||||||
google_person_id INTEGER REFERENCES google_person (id) UNIQUE,
|
|
||||||
url TEXT NOT NULL
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS email_address (
|
|
||||||
id SERIAL PRIMARY KEY,
|
|
||||||
address TEXT NOT NULL UNIQUE,
|
|
||||||
email_photo_id INTEGER REFERENCES email_photo (id),
|
|
||||||
google_person_id INTEGER REFERENCES google_person (id)
|
|
||||||
);
|
|
||||||
@ -1,5 +0,0 @@
|
|||||||
-- Add down migration script here
|
|
||||||
DROP INDEX post_summary_idx;
|
|
||||||
CREATE INDEX post_summary_idx ON post USING gin (
|
|
||||||
to_tsvector('english', summary)
|
|
||||||
);
|
|
||||||
@ -1,11 +0,0 @@
|
|||||||
-- Something like this around summary in the idx w/ tsvector
|
|
||||||
DROP INDEX post_summary_idx;
|
|
||||||
CREATE INDEX post_summary_idx ON post USING gin (to_tsvector(
|
|
||||||
'english',
|
|
||||||
regexp_replace(
|
|
||||||
regexp_replace(summary, '<[^>]+>', ' ', 'g'),
|
|
||||||
'\s+',
|
|
||||||
' ',
|
|
||||||
'g'
|
|
||||||
)
|
|
||||||
));
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
-- Add down migration script here
|
|
||||||
DROP INDEX nzb_posts_created_at_idx;
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
-- Add up migration script here
|
|
||||||
CREATE INDEX nzb_posts_created_at_idx ON nzb_posts USING btree (created_at);
|
|
||||||
@ -1,15 +0,0 @@
|
|||||||
-- Add down migration script here
|
|
||||||
BEGIN;
|
|
||||||
DROP INDEX IF EXISTS post_search_summary_idx;
|
|
||||||
ALTER TABLE post DROP search_summary;
|
|
||||||
|
|
||||||
-- CREATE INDEX post_summary_idx ON post USING gin (to_tsvector(
|
|
||||||
-- 'english',
|
|
||||||
-- regexp_replace(
|
|
||||||
-- regexp_replace(summary, '<[^>]+>', ' ', 'g'),
|
|
||||||
-- '\s+',
|
|
||||||
-- ' ',
|
|
||||||
-- 'g'
|
|
||||||
-- )
|
|
||||||
-- ));
|
|
||||||
COMMIT;
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
-- Add up migration script here
|
|
||||||
BEGIN;
|
|
||||||
DROP INDEX IF EXISTS post_summary_idx;
|
|
||||||
ALTER TABLE post ADD search_summary TEXT;
|
|
||||||
CREATE INDEX post_search_summary_idx ON post USING gin (
|
|
||||||
to_tsvector('english', search_summary)
|
|
||||||
);
|
|
||||||
UPDATE post SET search_summary = regexp_replace(
|
|
||||||
regexp_replace(summary, '<[^>]+>', ' ', 'g'),
|
|
||||||
'\s+',
|
|
||||||
' ',
|
|
||||||
'g'
|
|
||||||
);
|
|
||||||
COMMIT;
|
|
||||||
@ -1,20 +0,0 @@
|
|||||||
-- Bad examples:
|
|
||||||
-- https://nzbfinder.ws/getnzb/d2c3e5a08abadd985dccc6a574122892030b6a9a.nzb&i=95972&r=b55082d289937c050dedc203c9653850
|
|
||||||
-- https://nzbfinder.ws/getnzb?id=45add174-7da4-4445-bf2b-a67dbbfc07fe.nzb&r=b55082d289937c050dedc203c9653850
|
|
||||||
-- https://nzbfinder.ws/api/v1/getnzb?id=82486020-c192-4fa0-a7e7-798d7d72e973.nzb&r=b55082d289937c050dedc203c9653850
|
|
||||||
UPDATE nzb_posts
|
|
||||||
SET link =
|
|
||||||
regexp_replace(
|
|
||||||
regexp_replace(
|
|
||||||
regexp_replace(
|
|
||||||
link,
|
|
||||||
'https://nzbfinder.ws/getnzb/',
|
|
||||||
'https://nzbfinder.ws/api/v1/getnzb?id='
|
|
||||||
),
|
|
||||||
'https://nzbfinder.ws/getnzb',
|
|
||||||
'https://nzbfinder.ws/api/v1/getnzb'
|
|
||||||
),
|
|
||||||
'&r=',
|
|
||||||
'&apikey='
|
|
||||||
)
|
|
||||||
;
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
DROP TABLE IF NOT EXISTS email_rule;
|
|
||||||
|
|
||||||
-- Add down migration script here
|
|
||||||
@ -1,5 +0,0 @@
|
|||||||
CREATE TABLE IF NOT EXISTS email_rule (
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
sort_order integer NOT NULL,
|
|
||||||
rule jsonb NOT NULL
|
|
||||||
);
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
-- Add down migration script here
|
|
||||||
ALTER TABLE feed DROP COLUMN IF EXISTS disabled;
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
-- Add up migration script here
|
|
||||||
ALTER TABLE feed ADD disabled boolean;
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
-- Add down migration script here
|
|
||||||
DROP TABLE IF EXISTS snooze;
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
-- Add up migration script here
|
|
||||||
CREATE TABLE IF NOT EXISTS snooze (
|
|
||||||
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
|
|
||||||
message_id text NOT NULL UNIQUE,
|
|
||||||
wake timestamptz NOT NULL
|
|
||||||
);
|
|
||||||
@ -6,9 +6,5 @@ SELECT
|
|||||||
date,
|
date,
|
||||||
is_read,
|
is_read,
|
||||||
uid,
|
uid,
|
||||||
p.id id
|
id
|
||||||
FROM
|
FROM post
|
||||||
post AS p
|
|
||||||
JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts
|
|
||||||
ORDER BY
|
|
||||||
date DESC;
|
|
||||||
|
|||||||
@ -1,6 +0,0 @@
|
|||||||
SELECT
|
|
||||||
uid
|
|
||||||
FROM
|
|
||||||
post AS p
|
|
||||||
JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts
|
|
||||||
;
|
|
||||||
@ -1,17 +1,10 @@
|
|||||||
SELECT COUNT(*) AS count
|
SELECT
|
||||||
|
COUNT(*) count
|
||||||
FROM
|
FROM
|
||||||
post
|
post
|
||||||
WHERE
|
WHERE
|
||||||
(
|
($1::text IS NULL OR site = $1)
|
||||||
$1::text IS NULL
|
|
||||||
OR site = $1
|
|
||||||
)
|
|
||||||
AND (
|
AND (
|
||||||
NOT $2
|
NOT $2
|
||||||
OR NOT is_read
|
OR NOT is_read
|
||||||
)
|
)
|
||||||
AND (
|
|
||||||
$3::text IS NULL
|
|
||||||
OR TO_TSVECTOR('english', search_summary)
|
|
||||||
@@ WEBSEARCH_TO_TSQUERY('english', $3)
|
|
||||||
)
|
|
||||||
|
|||||||
@ -1 +0,0 @@
|
|||||||
SELECT rule as "rule: Json<Rule>" FROM email_rule ORDER BY sort_order
|
|
||||||
@ -1,13 +0,0 @@
|
|||||||
SELECT
|
|
||||||
p.id,
|
|
||||||
link,
|
|
||||||
clean_summary
|
|
||||||
FROM
|
|
||||||
post AS p
|
|
||||||
INNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts
|
|
||||||
WHERE
|
|
||||||
search_summary IS NULL
|
|
||||||
-- TODO remove AND link ~ '^<'
|
|
||||||
ORDER BY
|
|
||||||
ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)
|
|
||||||
LIMIT 1000;
|
|
||||||
@ -1 +0,0 @@
|
|||||||
SELECT url FROM email_photo ep JOIN email_address ea ON ep.id = ea.email_photo_id WHERE address = $1
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
SELECT
|
|
||||||
site AS "site!",
|
|
||||||
title AS "title!",
|
|
||||||
summary AS "summary!",
|
|
||||||
link AS "link!",
|
|
||||||
date AS "date!",
|
|
||||||
is_read AS "is_read!",
|
|
||||||
uid AS "uid!",
|
|
||||||
p.id id
|
|
||||||
FROM
|
|
||||||
post p
|
|
||||||
JOIN feed f ON p.site = f.slug
|
|
||||||
WHERE
|
|
||||||
uid = ANY ($1);
|
|
||||||
@ -4,12 +4,11 @@ SELECT
|
|||||||
link,
|
link,
|
||||||
site,
|
site,
|
||||||
summary,
|
summary,
|
||||||
clean_summary,
|
|
||||||
title,
|
title,
|
||||||
name,
|
name,
|
||||||
homepage
|
homepage
|
||||||
FROM
|
FROM
|
||||||
post AS p
|
post p
|
||||||
INNER JOIN feed AS f ON p.site = f.slug
|
JOIN feed f ON p.site = f.slug
|
||||||
WHERE
|
WHERE
|
||||||
uid = $1
|
uid = $1
|
||||||
|
|||||||
@ -1,14 +0,0 @@
|
|||||||
SELECT
|
|
||||||
site,
|
|
||||||
date,
|
|
||||||
is_read,
|
|
||||||
title,
|
|
||||||
uid,
|
|
||||||
name
|
|
||||||
FROM
|
|
||||||
post p
|
|
||||||
JOIN feed f ON p.site = f.slug
|
|
||||||
WHERE
|
|
||||||
uid = ANY ($1)
|
|
||||||
ORDER BY
|
|
||||||
date DESC;
|
|
||||||
@ -14,10 +14,6 @@ WHERE
|
|||||||
NOT $2
|
NOT $2
|
||||||
OR NOT is_read
|
OR NOT is_read
|
||||||
)
|
)
|
||||||
AND (
|
|
||||||
$5 :: text IS NULL
|
|
||||||
OR to_tsvector('english', search_summary) @@ websearch_to_tsquery('english', $5)
|
|
||||||
)
|
|
||||||
ORDER BY
|
ORDER BY
|
||||||
date DESC,
|
date DESC,
|
||||||
title OFFSET $3
|
title OFFSET $3
|
||||||
|
|||||||
@ -1,13 +0,0 @@
|
|||||||
select t.id, tt.tokid, tt.alias, length(t.token), t.token from (
|
|
||||||
select id, (ts_parse('default',
|
|
||||||
-- regexp_replace(
|
|
||||||
-- regexp_replace(summary, '<[^>]+>', ' ', 'g'),
|
|
||||||
-- '\s+',
|
|
||||||
-- ' ',
|
|
||||||
-- 'g'
|
|
||||||
-- )
|
|
||||||
summary
|
|
||||||
)).* from post) t
|
|
||||||
inner join ts_token_type('default') tt
|
|
||||||
on t.tokid = tt.tokid
|
|
||||||
where length(token) >= 2*1024;
|
|
||||||
@ -1,6 +1,6 @@
|
|||||||
use std::fs;
|
use std::fs;
|
||||||
|
|
||||||
use letterbox_server::sanitize_html;
|
use server::sanitize_html;
|
||||||
|
|
||||||
fn main() -> anyhow::Result<()> {
|
fn main() -> anyhow::Result<()> {
|
||||||
let mut args = std::env::args().skip(1);
|
let mut args = std::env::args().skip(1);
|
||||||
|
|||||||
@ -1,21 +0,0 @@
|
|||||||
use std::fs;
|
|
||||||
|
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
fn main() -> anyhow::Result<()> {
|
|
||||||
println!("PWD: {}", std::env::current_dir()?.display());
|
|
||||||
let _url = "https://slashdot.org/story/25/01/24/1813201/walgreens-replaced-fridge-doors-with-smart-screens-its-now-a-200-million-fiasco?utm_source=rss1.0mainlinkanon&utm_medium=feed";
|
|
||||||
let _url = "https://hackaday.com/2025/01/24/hackaday-podcast-episode-305-caustic-clocks-practice-bones-and-brick-layers/";
|
|
||||||
let _url = "https://theonion.com/monster-devastated-to-see-film-depicting-things-he-told-guillermo-del-toro-in-confidence/";
|
|
||||||
let _url = "https://trofi.github.io/posts/330-another-nix-language-nondeterminism-example.html";
|
|
||||||
let _url = "https://blog.cloudflare.com/ddos-threat-report-for-2024-q4/";
|
|
||||||
let url = "https://trofi.github.io/posts/330-another-nix-language-nondeterminism-example.html";
|
|
||||||
let body = reqwest::blocking::get(url)?.text()?;
|
|
||||||
let output = "/tmp/h2md/output.html";
|
|
||||||
let inliner = css_inline::CSSInliner::options()
|
|
||||||
.base_url(Url::parse(url).ok())
|
|
||||||
.build();
|
|
||||||
let inlined = inliner.inline(&body)?;
|
|
||||||
fs::write(output, inlined)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@ -1,344 +0,0 @@
|
|||||||
// Rocket generates a lot of warnings for handlers
|
|
||||||
// TODO: figure out why
|
|
||||||
#![allow(unreachable_patterns)]
|
|
||||||
use std::{error::Error, net::SocketAddr, sync::Arc, time::Duration};
|
|
||||||
|
|
||||||
use async_graphql::{extensions, http::GraphiQLSource, Schema};
|
|
||||||
use async_graphql_axum::{GraphQL, GraphQLSubscription};
|
|
||||||
//allows to extract the IP of connecting user
|
|
||||||
use axum::extract::connect_info::ConnectInfo;
|
|
||||||
use axum::{
|
|
||||||
extract::{self, ws::WebSocketUpgrade, Query, State},
|
|
||||||
http::{header, StatusCode},
|
|
||||||
response::{self, IntoResponse, Response},
|
|
||||||
routing::{any, get, post},
|
|
||||||
Router,
|
|
||||||
};
|
|
||||||
use cacher::FilesystemCacher;
|
|
||||||
use clap::Parser;
|
|
||||||
use letterbox_notmuch::Notmuch;
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
use letterbox_server::tantivy::TantivyConnection;
|
|
||||||
use letterbox_server::{
|
|
||||||
graphql::{compute_catchup_ids, Attachment, MutationRoot, QueryRoot, SubscriptionRoot},
|
|
||||||
nm::{attachment_bytes, cid_attachment_bytes, label_unprocessed},
|
|
||||||
ws::ConnectionTracker,
|
|
||||||
};
|
|
||||||
use letterbox_shared::WebsocketMessage;
|
|
||||||
use serde::Deserialize;
|
|
||||||
use sqlx::postgres::PgPool;
|
|
||||||
use tokio::{net::TcpListener, sync::Mutex};
|
|
||||||
use tower_http::trace::{DefaultMakeSpan, TraceLayer};
|
|
||||||
use tracing::{error, info};
|
|
||||||
|
|
||||||
// Make our own error that wraps `ServerError`.
|
|
||||||
struct AppError(letterbox_server::ServerError);
|
|
||||||
|
|
||||||
// Tell axum how to convert `AppError` into a response.
|
|
||||||
impl IntoResponse for AppError {
|
|
||||||
fn into_response(self) -> Response {
|
|
||||||
(
|
|
||||||
StatusCode::INTERNAL_SERVER_ERROR,
|
|
||||||
format!("Something went wrong: {}", self.0),
|
|
||||||
)
|
|
||||||
.into_response()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// This enables using `?` on functions that return `Result<_, letterbox_server::Error>` to turn them into
|
|
||||||
// `Result<_, AppError>`. That way you don't need to do that manually.
|
|
||||||
impl<E> From<E> for AppError
|
|
||||||
where
|
|
||||||
E: Into<letterbox_server::ServerError>,
|
|
||||||
{
|
|
||||||
fn from(err: E) -> Self {
|
|
||||||
Self(err.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn inline_attachment_response(attachment: Attachment) -> impl IntoResponse {
|
|
||||||
info!("attachment filename {:?}", attachment.filename);
|
|
||||||
let mut hdr_map = headers::HeaderMap::new();
|
|
||||||
if let Some(filename) = attachment.filename {
|
|
||||||
hdr_map.insert(
|
|
||||||
header::CONTENT_DISPOSITION,
|
|
||||||
format!(r#"inline; filename="{}""#, filename)
|
|
||||||
.parse()
|
|
||||||
.unwrap(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if let Some(ct) = attachment.content_type {
|
|
||||||
hdr_map.insert(header::CONTENT_TYPE, ct.parse().unwrap());
|
|
||||||
}
|
|
||||||
info!("hdr_map {hdr_map:?}");
|
|
||||||
(hdr_map, attachment.bytes).into_response()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn download_attachment_response(attachment: Attachment) -> impl IntoResponse {
|
|
||||||
info!("attachment filename {:?}", attachment.filename);
|
|
||||||
let mut hdr_map = headers::HeaderMap::new();
|
|
||||||
if let Some(filename) = attachment.filename {
|
|
||||||
hdr_map.insert(
|
|
||||||
header::CONTENT_DISPOSITION,
|
|
||||||
format!(r#"attachment; filename="{}""#, filename)
|
|
||||||
.parse()
|
|
||||||
.unwrap(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if let Some(ct) = attachment.content_type {
|
|
||||||
hdr_map.insert(header::CONTENT_TYPE, ct.parse().unwrap());
|
|
||||||
}
|
|
||||||
info!("hdr_map {hdr_map:?}");
|
|
||||||
(hdr_map, attachment.bytes).into_response()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[axum_macros::debug_handler]
|
|
||||||
async fn view_attachment(
|
|
||||||
State(AppState { nm, .. }): State<AppState>,
|
|
||||||
extract::Path((id, idx, _)): extract::Path<(String, String, String)>,
|
|
||||||
) -> Result<impl IntoResponse, AppError> {
|
|
||||||
let mid = if id.starts_with("id:") {
|
|
||||||
id.to_string()
|
|
||||||
} else {
|
|
||||||
format!("id:{}", id)
|
|
||||||
};
|
|
||||||
info!("view attachment {mid} {idx}");
|
|
||||||
let idx: Vec<_> = idx
|
|
||||||
.split('.')
|
|
||||||
.map(|s| s.parse().expect("not a usize"))
|
|
||||||
.collect();
|
|
||||||
let attachment = attachment_bytes(&nm, &mid, &idx)?;
|
|
||||||
Ok(inline_attachment_response(attachment))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn download_attachment(
|
|
||||||
State(AppState { nm, .. }): State<AppState>,
|
|
||||||
extract::Path((id, idx, _)): extract::Path<(String, String, String)>,
|
|
||||||
) -> Result<impl IntoResponse, AppError> {
|
|
||||||
let mid = if id.starts_with("id:") {
|
|
||||||
id.to_string()
|
|
||||||
} else {
|
|
||||||
format!("id:{}", id)
|
|
||||||
};
|
|
||||||
info!("download attachment message id '{mid}' idx '{idx}'");
|
|
||||||
let idx: Vec<_> = idx
|
|
||||||
.split('.')
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.map(|s| s.parse().expect("not a usize"))
|
|
||||||
.collect();
|
|
||||||
let attachment = attachment_bytes(&nm, &mid, &idx)?;
|
|
||||||
Ok(download_attachment_response(attachment))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn view_cid(
|
|
||||||
State(AppState { nm, .. }): State<AppState>,
|
|
||||||
extract::Path((id, cid)): extract::Path<(String, String)>,
|
|
||||||
) -> Result<impl IntoResponse, AppError> {
|
|
||||||
let mid = if id.starts_with("id:") {
|
|
||||||
id.to_string()
|
|
||||||
} else {
|
|
||||||
format!("id:{}", id)
|
|
||||||
};
|
|
||||||
info!("view cid attachment {mid} {cid}");
|
|
||||||
let attachment = cid_attachment_bytes(&nm, &mid, &cid)?;
|
|
||||||
Ok(inline_attachment_response(attachment))
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO make this work with gitea message ids like `wathiede/letterbox/pulls/91@git.z.xinu.tv`
|
|
||||||
async fn view_original(
|
|
||||||
State(AppState { nm, .. }): State<AppState>,
|
|
||||||
extract::Path(id): extract::Path<String>,
|
|
||||||
) -> Result<impl IntoResponse, AppError> {
|
|
||||||
info!("view_original {id}");
|
|
||||||
let bytes = nm.show_original(&id)?;
|
|
||||||
let s = String::from_utf8_lossy(&bytes).to_string();
|
|
||||||
Ok(s.into_response())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn graphiql() -> impl IntoResponse {
|
|
||||||
response::Html(
|
|
||||||
GraphiQLSource::build()
|
|
||||||
.endpoint("/api/graphql/")
|
|
||||||
.subscription_endpoint("/api/graphql/ws")
|
|
||||||
.finish(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn start_ws(
|
|
||||||
ws: WebSocketUpgrade,
|
|
||||||
ConnectInfo(addr): ConnectInfo<SocketAddr>,
|
|
||||||
State(AppState {
|
|
||||||
connection_tracker, ..
|
|
||||||
}): State<AppState>,
|
|
||||||
) -> impl IntoResponse {
|
|
||||||
info!("intiating websocket connection for {addr}");
|
|
||||||
ws.on_upgrade(async move |socket| connection_tracker.lock().await.add_peer(socket, addr).await)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
|
||||||
struct NotificationParams {
|
|
||||||
delay_ms: Option<u64>,
|
|
||||||
num_unprocessed: Option<usize>,
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn send_refresh_websocket_handler(
|
|
||||||
State(AppState {
|
|
||||||
nm,
|
|
||||||
pool,
|
|
||||||
connection_tracker,
|
|
||||||
..
|
|
||||||
}): State<AppState>,
|
|
||||||
params: Query<NotificationParams>,
|
|
||||||
) -> impl IntoResponse {
|
|
||||||
info!("send_refresh_websocket_handler params {params:?}");
|
|
||||||
if let Some(delay_ms) = params.delay_ms {
|
|
||||||
let delay = Duration::from_millis(delay_ms);
|
|
||||||
info!("sleeping {delay:?}");
|
|
||||||
tokio::time::sleep(delay).await;
|
|
||||||
}
|
|
||||||
let limit = match params.num_unprocessed {
|
|
||||||
Some(0) => None,
|
|
||||||
Some(limit) => Some(limit),
|
|
||||||
None => Some(10),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut ids = None;
|
|
||||||
match label_unprocessed(&nm, &pool, false, limit, "tag:unprocessed").await {
|
|
||||||
Ok(i) => ids = Some(i),
|
|
||||||
Err(err) => error!("Failed to label_unprocessed: {err:?}"),
|
|
||||||
};
|
|
||||||
connection_tracker
|
|
||||||
.lock()
|
|
||||||
.await
|
|
||||||
.send_message_all(WebsocketMessage::RefreshMessages)
|
|
||||||
.await;
|
|
||||||
if let Some(ids) = ids {
|
|
||||||
format!("{ids:?}")
|
|
||||||
} else {
|
|
||||||
"refresh triggered".to_string()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn watch_new(
|
|
||||||
nm: Notmuch,
|
|
||||||
pool: PgPool,
|
|
||||||
conn_tracker: Arc<Mutex<ConnectionTracker>>,
|
|
||||||
poll_time: Duration,
|
|
||||||
) -> Result<(), async_graphql::Error> {
|
|
||||||
async fn watch_new_iteration(
|
|
||||||
nm: &Notmuch,
|
|
||||||
pool: &PgPool,
|
|
||||||
conn_tracker: Arc<Mutex<ConnectionTracker>>,
|
|
||||||
old_ids: &[String],
|
|
||||||
) -> Result<Vec<String>, async_graphql::Error> {
|
|
||||||
let ids = compute_catchup_ids(&nm, &pool, "is:unread").await?;
|
|
||||||
info!("old_ids: {} ids: {}", old_ids.len(), ids.len());
|
|
||||||
if old_ids != ids {
|
|
||||||
label_unprocessed(&nm, &pool, false, Some(100), "tag:unprocessed").await?;
|
|
||||||
conn_tracker
|
|
||||||
.lock()
|
|
||||||
.await
|
|
||||||
.send_message_all(WebsocketMessage::RefreshMessages)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
Ok(ids)
|
|
||||||
}
|
|
||||||
let mut old_ids = Vec::new();
|
|
||||||
loop {
|
|
||||||
old_ids = match watch_new_iteration(&nm, &pool, conn_tracker.clone(), &old_ids).await {
|
|
||||||
Ok(old_ids) => old_ids,
|
|
||||||
Err(err) => {
|
|
||||||
error!("watch_new_iteration failed: {err:?}");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
tokio::time::sleep(poll_time).await;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
struct AppState {
|
|
||||||
nm: Notmuch,
|
|
||||||
pool: PgPool,
|
|
||||||
connection_tracker: Arc<Mutex<ConnectionTracker>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(version, about, long_about = None)]
|
|
||||||
struct Cli {
|
|
||||||
#[arg(short, long, default_value = "0.0.0.0:9345")]
|
|
||||||
addr: SocketAddr,
|
|
||||||
newsreader_database_url: String,
|
|
||||||
newsreader_tantivy_db_path: String,
|
|
||||||
slurp_cache_path: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() -> Result<(), Box<dyn Error>> {
|
|
||||||
let cli = Cli::parse();
|
|
||||||
let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
|
|
||||||
build_info::build_info!(fn bi);
|
|
||||||
info!("Build Info: {}", letterbox_shared::build_version(bi));
|
|
||||||
if !std::fs::exists(&cli.slurp_cache_path)? {
|
|
||||||
info!("Creating slurp cache @ '{}'", &cli.slurp_cache_path);
|
|
||||||
std::fs::create_dir_all(&cli.slurp_cache_path)?;
|
|
||||||
}
|
|
||||||
let pool = PgPool::connect(&cli.newsreader_database_url).await?;
|
|
||||||
let nm = Notmuch::default();
|
|
||||||
sqlx::migrate!("./migrations").run(&pool).await?;
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
let tantivy_conn = TantivyConnection::new(&cli.newsreader_tantivy_db_path)?;
|
|
||||||
|
|
||||||
let cacher = FilesystemCacher::new(&cli.slurp_cache_path)?;
|
|
||||||
let schema = Schema::build(QueryRoot, MutationRoot, SubscriptionRoot)
|
|
||||||
.data(nm.clone())
|
|
||||||
.data(cacher)
|
|
||||||
.data(pool.clone());
|
|
||||||
|
|
||||||
let schema = schema.extension(extensions::Logger).finish();
|
|
||||||
|
|
||||||
let connection_tracker = Arc::new(Mutex::new(ConnectionTracker::default()));
|
|
||||||
let ct = Arc::clone(&connection_tracker);
|
|
||||||
let poll_time = Duration::from_secs(60);
|
|
||||||
let _h = tokio::spawn(watch_new(nm.clone(), pool.clone(), ct, poll_time));
|
|
||||||
|
|
||||||
let api_routes = Router::new()
|
|
||||||
.route(
|
|
||||||
"/download/attachment/{id}/{idx}/{*rest}",
|
|
||||||
get(download_attachment),
|
|
||||||
)
|
|
||||||
.route("/view/attachment/{id}/{idx}/{*rest}", get(view_attachment))
|
|
||||||
.route("/original/{id}", get(view_original))
|
|
||||||
.route("/cid/{id}/{cid}", get(view_cid))
|
|
||||||
.route("/ws", any(start_ws))
|
|
||||||
.route_service("/graphql/ws", GraphQLSubscription::new(schema.clone()))
|
|
||||||
.route(
|
|
||||||
"/graphql/",
|
|
||||||
get(graphiql).post_service(GraphQL::new(schema.clone())),
|
|
||||||
);
|
|
||||||
|
|
||||||
let notification_routes = Router::new()
|
|
||||||
.route("/mail", post(send_refresh_websocket_handler))
|
|
||||||
.route("/news", post(send_refresh_websocket_handler));
|
|
||||||
let app = Router::new()
|
|
||||||
.nest("/api", api_routes)
|
|
||||||
.nest("/notification", notification_routes)
|
|
||||||
.with_state(AppState {
|
|
||||||
nm,
|
|
||||||
pool,
|
|
||||||
connection_tracker,
|
|
||||||
})
|
|
||||||
.layer(
|
|
||||||
TraceLayer::new_for_http()
|
|
||||||
.make_span_with(DefaultMakeSpan::default().include_headers(true)),
|
|
||||||
);
|
|
||||||
|
|
||||||
let listener = TcpListener::bind(cli.addr).await.unwrap();
|
|
||||||
tracing::info!("listening on {}", listener.local_addr().unwrap());
|
|
||||||
axum::serve(
|
|
||||||
listener,
|
|
||||||
app.into_make_service_with_connect_info::<SocketAddr>(),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
381
server/src/bin/server.rs
Normal file
381
server/src/bin/server.rs
Normal file
@ -0,0 +1,381 @@
|
|||||||
|
// Rocket generates a lot of warnings for handlers
|
||||||
|
// TODO: figure out why
|
||||||
|
#![allow(unreachable_patterns)]
|
||||||
|
#[macro_use]
|
||||||
|
extern crate rocket;
|
||||||
|
use std::{error::Error, io::Cursor, str::FromStr};
|
||||||
|
|
||||||
|
use async_graphql::{http::GraphiQLSource, EmptySubscription, Schema};
|
||||||
|
use async_graphql_rocket::{GraphQLQuery, GraphQLRequest, GraphQLResponse};
|
||||||
|
use glog::Flags;
|
||||||
|
use notmuch::{Notmuch, NotmuchError, ThreadSet};
|
||||||
|
use rocket::{
|
||||||
|
fairing::AdHoc,
|
||||||
|
http::{ContentType, Header},
|
||||||
|
request::Request,
|
||||||
|
response::{content, Debug, Responder},
|
||||||
|
serde::json::Json,
|
||||||
|
Response, State,
|
||||||
|
};
|
||||||
|
use rocket_cors::{AllowedHeaders, AllowedOrigins};
|
||||||
|
use server::{
|
||||||
|
config::Config,
|
||||||
|
error::ServerError,
|
||||||
|
graphql::{Attachment, GraphqlSchema, Mutation, QueryRoot},
|
||||||
|
nm::{attachment_bytes, cid_attachment_bytes},
|
||||||
|
};
|
||||||
|
use sqlx::postgres::PgPool;
|
||||||
|
use tantivy::{Index, IndexWriter};
|
||||||
|
|
||||||
|
#[get("/refresh")]
|
||||||
|
async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
|
||||||
|
Ok(Json(String::from_utf8_lossy(&nm.new()?).to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/show/<query>/pretty")]
|
||||||
|
async fn show_pretty(
|
||||||
|
nm: &State<Notmuch>,
|
||||||
|
query: &str,
|
||||||
|
) -> Result<Json<ThreadSet>, Debug<ServerError>> {
|
||||||
|
let query = urlencoding::decode(query).map_err(|e| ServerError::from(NotmuchError::from(e)))?;
|
||||||
|
let res = nm.show(&query).map_err(ServerError::from)?;
|
||||||
|
Ok(Json(res))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/show/<query>")]
|
||||||
|
async fn show(nm: &State<Notmuch>, query: &str) -> Result<Json<ThreadSet>, Debug<NotmuchError>> {
|
||||||
|
let query = urlencoding::decode(query).map_err(NotmuchError::from)?;
|
||||||
|
let res = nm.show(&query)?;
|
||||||
|
Ok(Json(res))
|
||||||
|
}
|
||||||
|
|
||||||
|
struct InlineAttachmentResponder(Attachment);
|
||||||
|
|
||||||
|
impl<'r, 'o: 'r> Responder<'r, 'o> for InlineAttachmentResponder {
|
||||||
|
fn respond_to(self, _: &'r Request<'_>) -> rocket::response::Result<'o> {
|
||||||
|
let mut resp = Response::build();
|
||||||
|
if let Some(filename) = self.0.filename {
|
||||||
|
resp.header(Header::new(
|
||||||
|
"Content-Disposition",
|
||||||
|
format!(r#"inline; filename="{}""#, filename),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if let Some(content_type) = self.0.content_type {
|
||||||
|
if let Some(ct) = ContentType::parse_flexible(&content_type) {
|
||||||
|
resp.header(ct);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
resp.sized_body(self.0.bytes.len(), Cursor::new(self.0.bytes))
|
||||||
|
.ok()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct DownloadAttachmentResponder(Attachment);
|
||||||
|
|
||||||
|
impl<'r, 'o: 'r> Responder<'r, 'o> for DownloadAttachmentResponder {
|
||||||
|
fn respond_to(self, _: &'r Request<'_>) -> rocket::response::Result<'o> {
|
||||||
|
let mut resp = Response::build();
|
||||||
|
if let Some(filename) = self.0.filename {
|
||||||
|
resp.header(Header::new(
|
||||||
|
"Content-Disposition",
|
||||||
|
format!(r#"attachment; filename="{}""#, filename),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if let Some(content_type) = self.0.content_type {
|
||||||
|
if let Some(ct) = ContentType::parse_flexible(&content_type) {
|
||||||
|
resp.header(ct);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
resp.sized_body(self.0.bytes.len(), Cursor::new(self.0.bytes))
|
||||||
|
.ok()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/cid/<id>/<cid>")]
|
||||||
|
async fn view_cid(
|
||||||
|
nm: &State<Notmuch>,
|
||||||
|
id: &str,
|
||||||
|
cid: &str,
|
||||||
|
) -> Result<InlineAttachmentResponder, Debug<ServerError>> {
|
||||||
|
let mid = if id.starts_with("id:") {
|
||||||
|
id.to_string()
|
||||||
|
} else {
|
||||||
|
format!("id:{}", id)
|
||||||
|
};
|
||||||
|
info!("view cid attachment {mid} {cid}");
|
||||||
|
let attachment = cid_attachment_bytes(nm, &mid, &cid)?;
|
||||||
|
Ok(InlineAttachmentResponder(attachment))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/view/attachment/<id>/<idx>/<_>")]
|
||||||
|
async fn view_attachment(
|
||||||
|
nm: &State<Notmuch>,
|
||||||
|
id: &str,
|
||||||
|
idx: &str,
|
||||||
|
) -> Result<InlineAttachmentResponder, Debug<ServerError>> {
|
||||||
|
let mid = if id.starts_with("id:") {
|
||||||
|
id.to_string()
|
||||||
|
} else {
|
||||||
|
format!("id:{}", id)
|
||||||
|
};
|
||||||
|
info!("view attachment {mid} {idx}");
|
||||||
|
let idx: Vec<_> = idx
|
||||||
|
.split('.')
|
||||||
|
.map(|s| s.parse().expect("not a usize"))
|
||||||
|
.collect();
|
||||||
|
let attachment = attachment_bytes(nm, &mid, &idx)?;
|
||||||
|
Ok(InlineAttachmentResponder(attachment))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/download/attachment/<id>/<idx>/<_>")]
|
||||||
|
async fn download_attachment(
|
||||||
|
nm: &State<Notmuch>,
|
||||||
|
id: &str,
|
||||||
|
idx: &str,
|
||||||
|
) -> Result<DownloadAttachmentResponder, Debug<ServerError>> {
|
||||||
|
let mid = if id.starts_with("id:") {
|
||||||
|
id.to_string()
|
||||||
|
} else {
|
||||||
|
format!("id:{}", id)
|
||||||
|
};
|
||||||
|
info!("download attachment {mid} {idx}");
|
||||||
|
let idx: Vec<_> = idx
|
||||||
|
.split('.')
|
||||||
|
.map(|s| s.parse().expect("not a usize"))
|
||||||
|
.collect();
|
||||||
|
let attachment = attachment_bytes(nm, &mid, &idx)?;
|
||||||
|
Ok(DownloadAttachmentResponder(attachment))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/original/<id>")]
|
||||||
|
async fn original(
|
||||||
|
nm: &State<Notmuch>,
|
||||||
|
id: &str,
|
||||||
|
) -> Result<(ContentType, Vec<u8>), Debug<NotmuchError>> {
|
||||||
|
let mid = if id.starts_with("id:") {
|
||||||
|
id.to_string()
|
||||||
|
} else {
|
||||||
|
format!("id:{}", id)
|
||||||
|
};
|
||||||
|
let res = nm.show_original(&mid)?;
|
||||||
|
Ok((ContentType::Plain, res))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::get("/")]
|
||||||
|
fn graphiql() -> content::RawHtml<String> {
|
||||||
|
content::RawHtml(GraphiQLSource::build().endpoint("/api/graphql").finish())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::post("/create-news-db")]
|
||||||
|
fn create_news_db(config: &State<Config>) -> Result<String, Debug<ServerError>> {
|
||||||
|
create_news_db_impl(config)?;
|
||||||
|
Ok(format!(
|
||||||
|
"DB created in {}\n",
|
||||||
|
config.newsreader_tantivy_db_path
|
||||||
|
))
|
||||||
|
}
|
||||||
|
fn create_news_db_impl(config: &Config) -> Result<(), ServerError> {
|
||||||
|
std::fs::remove_dir_all(&config.newsreader_tantivy_db_path).map_err(ServerError::from)?;
|
||||||
|
std::fs::create_dir_all(&config.newsreader_tantivy_db_path).map_err(ServerError::from)?;
|
||||||
|
use tantivy::schema::*;
|
||||||
|
let mut schema_builder = Schema::builder();
|
||||||
|
schema_builder.add_text_field("site", STRING | STORED);
|
||||||
|
schema_builder.add_text_field("title", TEXT | STORED);
|
||||||
|
schema_builder.add_text_field("summary", TEXT);
|
||||||
|
schema_builder.add_text_field("link", STRING | STORED);
|
||||||
|
schema_builder.add_date_field("date", FAST);
|
||||||
|
schema_builder.add_bool_field("is_read", FAST);
|
||||||
|
schema_builder.add_text_field("uid", STRING | STORED);
|
||||||
|
schema_builder.add_i64_field("id", FAST);
|
||||||
|
|
||||||
|
let schema = schema_builder.build();
|
||||||
|
Index::create_in_dir(&config.newsreader_tantivy_db_path, schema).map_err(ServerError::from)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::post("/reindex-news-db")]
|
||||||
|
async fn reindex_news_db(
|
||||||
|
pool: &State<PgPool>,
|
||||||
|
config: &State<Config>,
|
||||||
|
) -> Result<String, Debug<ServerError>> {
|
||||||
|
use tantivy::{doc, Term};
|
||||||
|
|
||||||
|
let start_time = std::time::Instant::now();
|
||||||
|
let pool: &PgPool = pool;
|
||||||
|
|
||||||
|
let index =
|
||||||
|
Index::open_in_dir(&config.newsreader_tantivy_db_path).map_err(ServerError::from)?;
|
||||||
|
let mut index_writer = index.writer(50_000_000).map_err(ServerError::from)?;
|
||||||
|
let schema = index.schema();
|
||||||
|
let site = schema.get_field("site").map_err(ServerError::from)?;
|
||||||
|
let title = schema.get_field("title").map_err(ServerError::from)?;
|
||||||
|
let summary = schema.get_field("summary").map_err(ServerError::from)?;
|
||||||
|
let link = schema.get_field("link").map_err(ServerError::from)?;
|
||||||
|
let date = schema.get_field("date").map_err(ServerError::from)?;
|
||||||
|
let is_read = schema.get_field("is_read").map_err(ServerError::from)?;
|
||||||
|
let uid = schema.get_field("uid").map_err(ServerError::from)?;
|
||||||
|
let id = schema.get_field("id").map_err(ServerError::from)?;
|
||||||
|
|
||||||
|
let rows = sqlx::query_file!("sql/all-posts.sql")
|
||||||
|
.fetch_all(pool)
|
||||||
|
.await
|
||||||
|
.map_err(ServerError::from)?;
|
||||||
|
|
||||||
|
let total = rows.len();
|
||||||
|
for (i, r) in rows.into_iter().enumerate() {
|
||||||
|
if i % 10_000 == 0 {
|
||||||
|
info!(
|
||||||
|
"{i}/{total} processed, elapsed {:.2}s",
|
||||||
|
start_time.elapsed().as_secs_f32()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let id_term = Term::from_field_text(uid, &r.uid);
|
||||||
|
index_writer.delete_term(id_term);
|
||||||
|
index_writer
|
||||||
|
.add_document(doc!(
|
||||||
|
site => r.site.expect("UNKOWN_SITE"),
|
||||||
|
title => r.title.expect("UNKOWN_TITLE"),
|
||||||
|
// TODO: clean and extract text from HTML
|
||||||
|
summary => r.summary.expect("UNKNOWN_SUMMARY"),
|
||||||
|
link => r.link.expect("link"),
|
||||||
|
date => tantivy::DateTime::from_primitive(r.date.expect("date")),
|
||||||
|
is_read => r.is_read.expect("is_read"),
|
||||||
|
uid => r.uid,
|
||||||
|
id => r.id as i64,
|
||||||
|
))
|
||||||
|
.map_err(ServerError::from)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
index_writer.commit().map_err(ServerError::from)?;
|
||||||
|
|
||||||
|
info!("took {:.2}s to reindex", start_time.elapsed().as_secs_f32());
|
||||||
|
Ok(format!(
|
||||||
|
"DB openned in {}\n",
|
||||||
|
config.newsreader_tantivy_db_path
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::get("/search-news-db")]
|
||||||
|
fn search_news_db(
|
||||||
|
index: &State<tantivy::Index>,
|
||||||
|
reader: &State<tantivy::IndexReader>,
|
||||||
|
) -> Result<String, Debug<ServerError>> {
|
||||||
|
use tantivy::{collector::TopDocs, query::QueryParser, Document, TantivyDocument};
|
||||||
|
|
||||||
|
let searcher = reader.searcher();
|
||||||
|
let schema = index.schema();
|
||||||
|
let site = schema.get_field("site").map_err(ServerError::from)?;
|
||||||
|
let title = schema.get_field("title").map_err(ServerError::from)?;
|
||||||
|
let summary = schema.get_field("summary").map_err(ServerError::from)?;
|
||||||
|
let query_parser = QueryParser::for_index(&index, vec![site, title, summary]);
|
||||||
|
|
||||||
|
let query = query_parser
|
||||||
|
.parse_query("grapheme")
|
||||||
|
.map_err(ServerError::from)?;
|
||||||
|
let top_docs = searcher
|
||||||
|
.search(&query, &TopDocs::with_limit(10))
|
||||||
|
.map_err(ServerError::from)?;
|
||||||
|
let mut results = vec![];
|
||||||
|
info!("search found {} docs", top_docs.len());
|
||||||
|
for (_score, doc_address) in top_docs {
|
||||||
|
let retrieved_doc: TantivyDocument =
|
||||||
|
searcher.doc(doc_address).map_err(ServerError::from)?;
|
||||||
|
results.push(format!("{}", retrieved_doc.to_json(&schema)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(format!("{}", results.join(" ")))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::get("/graphql?<query..>")]
|
||||||
|
async fn graphql_query(schema: &State<GraphqlSchema>, query: GraphQLQuery) -> GraphQLResponse {
|
||||||
|
query.execute(schema.inner()).await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::post("/graphql", data = "<request>", format = "application/json")]
|
||||||
|
async fn graphql_request(
|
||||||
|
schema: &State<GraphqlSchema>,
|
||||||
|
request: GraphQLRequest,
|
||||||
|
) -> GraphQLResponse {
|
||||||
|
request.execute(schema.inner()).await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::main]
|
||||||
|
async fn main() -> Result<(), Box<dyn Error>> {
|
||||||
|
glog::new()
|
||||||
|
.init(Flags {
|
||||||
|
colorlogtostderr: true,
|
||||||
|
//alsologtostderr: true, // use logtostderr to only write to stderr and not to files
|
||||||
|
logtostderr: true,
|
||||||
|
..Default::default()
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
build_info::build_info!(fn bi);
|
||||||
|
info!("Build Info: {}", shared::build_version(bi));
|
||||||
|
let allowed_origins = AllowedOrigins::all();
|
||||||
|
let cors = rocket_cors::CorsOptions {
|
||||||
|
allowed_origins,
|
||||||
|
allowed_methods: vec!["Get"]
|
||||||
|
.into_iter()
|
||||||
|
.map(|s| FromStr::from_str(s).unwrap())
|
||||||
|
.collect(),
|
||||||
|
allowed_headers: AllowedHeaders::some(&["Authorization", "Accept"]),
|
||||||
|
allow_credentials: true,
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
.to_cors()?;
|
||||||
|
|
||||||
|
let rkt = rocket::build()
|
||||||
|
.mount(
|
||||||
|
shared::urls::MOUNT_POINT,
|
||||||
|
routes![
|
||||||
|
create_news_db,
|
||||||
|
reindex_news_db,
|
||||||
|
search_news_db,
|
||||||
|
original,
|
||||||
|
refresh,
|
||||||
|
show_pretty,
|
||||||
|
show,
|
||||||
|
graphql_query,
|
||||||
|
graphql_request,
|
||||||
|
graphiql,
|
||||||
|
view_cid,
|
||||||
|
view_attachment,
|
||||||
|
download_attachment,
|
||||||
|
],
|
||||||
|
)
|
||||||
|
.attach(cors)
|
||||||
|
.attach(AdHoc::config::<Config>());
|
||||||
|
|
||||||
|
let config: Config = rkt.figment().extract()?;
|
||||||
|
info!("Config:\n{config:#?}");
|
||||||
|
if !std::fs::exists(&config.slurp_cache_path)? {
|
||||||
|
info!("Creating slurp cache @ '{}'", &config.slurp_cache_path);
|
||||||
|
std::fs::create_dir_all(&config.slurp_cache_path)?;
|
||||||
|
}
|
||||||
|
let pool = PgPool::connect(&config.newsreader_database_url).await?;
|
||||||
|
let tantivy_newsreader_index = match Index::open_in_dir(&config.newsreader_tantivy_db_path) {
|
||||||
|
Ok(idx) => idx,
|
||||||
|
Err(_) => {
|
||||||
|
create_news_db_impl(&config)?;
|
||||||
|
Index::open_in_dir(&config.newsreader_tantivy_db_path)?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let tantivy_newsreader_reader = tantivy_newsreader_index.reader()?;
|
||||||
|
let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
|
||||||
|
.data(Notmuch::default())
|
||||||
|
.data(config)
|
||||||
|
.data(pool.clone())
|
||||||
|
.extension(async_graphql::extensions::Logger)
|
||||||
|
.finish();
|
||||||
|
|
||||||
|
let rkt = rkt
|
||||||
|
.manage(schema)
|
||||||
|
.manage(pool)
|
||||||
|
.manage(Notmuch::default())
|
||||||
|
.manage(tantivy_newsreader_index)
|
||||||
|
.manage(tantivy_newsreader_reader);
|
||||||
|
//.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))
|
||||||
|
|
||||||
|
rkt.launch().await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
@ -1,39 +0,0 @@
|
|||||||
use std::error::Error;
|
|
||||||
|
|
||||||
use clap::Parser;
|
|
||||||
use letterbox_notmuch::Notmuch;
|
|
||||||
use letterbox_server::nm::label_unprocessed;
|
|
||||||
use sqlx::postgres::PgPool;
|
|
||||||
use tracing::info;
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(version, about, long_about = None)]
|
|
||||||
struct Cli {
|
|
||||||
#[arg(short, long)]
|
|
||||||
newsreader_database_url: String,
|
|
||||||
#[arg(short, long, default_value = "10")]
|
|
||||||
/// Set to 0 to process all matches
|
|
||||||
messages_to_process: usize,
|
|
||||||
#[arg(short, long, default_value = "false")]
|
|
||||||
execute: bool,
|
|
||||||
/// Process messages matching this notmuch query
|
|
||||||
#[arg(short, long, default_value = "tag:unprocessed")]
|
|
||||||
query: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() -> Result<(), Box<dyn Error>> {
|
|
||||||
let cli = Cli::parse();
|
|
||||||
let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
|
|
||||||
build_info::build_info!(fn bi);
|
|
||||||
info!("Build Info: {}", letterbox_shared::build_version(bi));
|
|
||||||
let pool = PgPool::connect(&cli.newsreader_database_url).await?;
|
|
||||||
let nm = Notmuch::default();
|
|
||||||
let limit = if cli.messages_to_process > 0 {
|
|
||||||
Some(cli.messages_to_process)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
label_unprocessed(&nm, &pool, !cli.execute, limit, &cli.query).await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@ -1,7 +1,23 @@
|
|||||||
use serde::Deserialize;
|
use std::{collections::HashMap, fmt::Display, str::FromStr};
|
||||||
#[derive(Deserialize)]
|
|
||||||
|
use scraper::Selector;
|
||||||
|
use serde::{de, Deserialize, Deserializer};
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct DeSelector(pub Selector);
|
||||||
|
impl<'de> Deserialize<'de> for DeSelector {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||||
|
where
|
||||||
|
D: Deserializer<'de>,
|
||||||
|
{
|
||||||
|
let s = String::deserialize(deserializer)?;
|
||||||
|
Ok(DeSelector(Selector::parse(&s).map_err(de::Error::custom)?))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
pub struct Config {
|
pub struct Config {
|
||||||
pub newsreader_database_url: String,
|
pub newsreader_database_url: String,
|
||||||
pub newsreader_tantivy_db_path: String,
|
pub newsreader_tantivy_db_path: String,
|
||||||
pub slurp_cache_path: String,
|
pub slurp_cache_path: String,
|
||||||
|
pub slurp_site_selectors: HashMap<String, Vec<DeSelector>>,
|
||||||
}
|
}
|
||||||
|
|||||||
8
server/src/custom.css
Normal file
8
server/src/custom.css
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
pre {
|
||||||
|
background-color: var(--color-bg);
|
||||||
|
color: var(--color-text);
|
||||||
|
}
|
||||||
|
|
||||||
|
code {
|
||||||
|
background-color: var(--color-bg-secondary);
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
@ -1,8 +1,8 @@
|
|||||||
use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error};
|
use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error};
|
||||||
|
|
||||||
use mailparse::MailParseError;
|
use mailparse::MailParseError;
|
||||||
#[cfg(feature = "tantivy")]
|
use tantivy::TantivyError;
|
||||||
use tantivy::{query::QueryParserError, TantivyError};
|
use tantivy::query::QueryParserError;
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
use crate::TransformError;
|
use crate::TransformError;
|
||||||
@ -10,7 +10,7 @@ use crate::TransformError;
|
|||||||
#[derive(Error, Debug)]
|
#[derive(Error, Debug)]
|
||||||
pub enum ServerError {
|
pub enum ServerError {
|
||||||
#[error("notmuch: {0}")]
|
#[error("notmuch: {0}")]
|
||||||
NotmuchError(#[from] letterbox_notmuch::NotmuchError),
|
NotmuchError(#[from] notmuch::NotmuchError),
|
||||||
#[error("flatten")]
|
#[error("flatten")]
|
||||||
FlattenError,
|
FlattenError,
|
||||||
#[error("mail parse error: {0}")]
|
#[error("mail parse error: {0}")]
|
||||||
@ -31,20 +31,10 @@ pub enum ServerError {
|
|||||||
StringError(String),
|
StringError(String),
|
||||||
#[error("invalid url: {0}")]
|
#[error("invalid url: {0}")]
|
||||||
UrlParseError(#[from] url::ParseError),
|
UrlParseError(#[from] url::ParseError),
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
#[error("tantivy error: {0}")]
|
#[error("tantivy error: {0}")]
|
||||||
TantivyError(#[from] TantivyError),
|
TantivyError(#[from] TantivyError),
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
#[error("tantivy query parse error: {0}")]
|
#[error("tantivy query parse error: {0}")]
|
||||||
QueryParseError(#[from] QueryParserError),
|
QueryParseError(#[from] QueryParserError),
|
||||||
#[error("impossible: {0}")]
|
#[error("impossible: {0}")]
|
||||||
InfaillibleError(#[from] Infallible),
|
InfaillibleError(#[from] Infallible),
|
||||||
#[error("askama error: {0}")]
|
|
||||||
AskamaError(#[from] askama::Error),
|
|
||||||
#[error("xml error: {0}")]
|
|
||||||
XmlError(#[from] quick_xml::Error),
|
|
||||||
#[error("xml encoding error: {0}")]
|
|
||||||
XmlEncodingError(#[from] quick_xml::encoding::EncodingError),
|
|
||||||
#[error("html to text error: {0}")]
|
|
||||||
Html2TextError(#[from] html2text::Error),
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,23 +1,14 @@
|
|||||||
use std::{fmt, str::FromStr};
|
|
||||||
|
|
||||||
use async_graphql::{
|
use async_graphql::{
|
||||||
connection::{self, Connection, Edge, OpaqueCursor},
|
connection::{self, Connection, Edge, OpaqueCursor},
|
||||||
futures_util::Stream,
|
Context, EmptySubscription, Enum, Error, FieldResult, InputObject, Object, Schema,
|
||||||
Context, Enum, Error, FieldResult, InputObject, Object, Schema, SimpleObject, Subscription,
|
SimpleObject, Union,
|
||||||
Union,
|
|
||||||
};
|
};
|
||||||
use cacher::FilesystemCacher;
|
use log::info;
|
||||||
use chrono::{DateTime, Utc};
|
use notmuch::Notmuch;
|
||||||
use futures::stream;
|
|
||||||
use letterbox_notmuch::Notmuch;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::postgres::PgPool;
|
use sqlx::postgres::PgPool;
|
||||||
use tokio::join;
|
|
||||||
use tracing::{info, instrument};
|
|
||||||
|
|
||||||
#[cfg(feature = "tantivy")]
|
use crate::{config::Config, newsreader, nm, Query};
|
||||||
use crate::tantivy::TantivyConnection;
|
|
||||||
use crate::{newsreader, nm, nm::label_unprocessed, Query};
|
|
||||||
|
|
||||||
/// # Number of seconds since the Epoch
|
/// # Number of seconds since the Epoch
|
||||||
pub type UnixTime = isize;
|
pub type UnixTime = isize;
|
||||||
@ -25,26 +16,6 @@ pub type UnixTime = isize;
|
|||||||
/// # Thread ID, sans "thread:"
|
/// # Thread ID, sans "thread:"
|
||||||
pub type ThreadId = String;
|
pub type ThreadId = String;
|
||||||
|
|
||||||
#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
|
|
||||||
pub enum Corpus {
|
|
||||||
Notmuch,
|
|
||||||
Newsreader,
|
|
||||||
Tantivy,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for Corpus {
|
|
||||||
type Err = String;
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
Ok(match s {
|
|
||||||
"notmuch" => Corpus::Notmuch,
|
|
||||||
"newsreader" => Corpus::Newsreader,
|
|
||||||
"tantivy" => Corpus::Tantivy,
|
|
||||||
s => return Err(format!("unknown corpus: '{s}'")),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: add is_read field and remove all use of 'tag:unread'
|
|
||||||
#[derive(Debug, SimpleObject)]
|
#[derive(Debug, SimpleObject)]
|
||||||
pub struct ThreadSummary {
|
pub struct ThreadSummary {
|
||||||
pub thread: ThreadId,
|
pub thread: ThreadId,
|
||||||
@ -59,7 +30,6 @@ pub struct ThreadSummary {
|
|||||||
pub authors: String,
|
pub authors: String,
|
||||||
pub subject: String,
|
pub subject: String,
|
||||||
pub tags: Vec<String>,
|
pub tags: Vec<String>,
|
||||||
pub corpus: Corpus,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Union)]
|
#[derive(Debug, Union)]
|
||||||
@ -97,10 +67,6 @@ pub struct Message {
|
|||||||
pub to: Vec<Email>,
|
pub to: Vec<Email>,
|
||||||
// All CC headers found in email
|
// All CC headers found in email
|
||||||
pub cc: Vec<Email>,
|
pub cc: Vec<Email>,
|
||||||
// X-Original-To header found in email
|
|
||||||
pub x_original_to: Option<Email>,
|
|
||||||
// Delivered-To header found in email
|
|
||||||
pub delivered_to: Option<Email>,
|
|
||||||
// First Subject header found in email
|
// First Subject header found in email
|
||||||
pub subject: Option<String>,
|
pub subject: Option<String>,
|
||||||
// Parsed Date header, if found and valid
|
// Parsed Date header, if found and valid
|
||||||
@ -238,43 +204,12 @@ impl Body {
|
|||||||
content_tree: "".to_string(),
|
content_tree: "".to_string(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn to_html(&self) -> Option<String> {
|
|
||||||
match self {
|
|
||||||
Body::Html(h) => Some(h.html.clone()),
|
|
||||||
Body::PlainText(p) => Some(format!("<pre>{}</pre>", html_escape::encode_text(&p.text))),
|
|
||||||
Body::UnhandledContentType(u) => {
|
|
||||||
Some(format!("<pre>{}</pre>", html_escape::encode_text(&u.text)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn to_html_content_tree(&self) -> Option<String> {
|
|
||||||
match self {
|
|
||||||
Body::Html(h) => Some(h.content_tree.clone()),
|
|
||||||
Body::PlainText(p) => Some(p.content_tree.clone()),
|
|
||||||
Body::UnhandledContentType(u) => Some(u.content_tree.clone()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, SimpleObject)]
|
#[derive(Debug, SimpleObject)]
|
||||||
pub struct Email {
|
pub struct Email {
|
||||||
pub name: Option<String>,
|
pub name: Option<String>,
|
||||||
pub addr: Option<String>,
|
pub addr: Option<String>,
|
||||||
pub photo_url: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Email {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
|
|
||||||
match (&self.name, &self.addr) {
|
|
||||||
(Some(name), Some(addr)) => write!(f, "{name} <{addr}>")?,
|
|
||||||
(Some(name), None) => write!(f, "{name}")?,
|
|
||||||
(None, Some(addr)) => write!(f, "{addr}")?,
|
|
||||||
(None, None) => write!(f, "<UNKNOWN>")?,
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(SimpleObject)]
|
#[derive(SimpleObject)]
|
||||||
@ -289,17 +224,6 @@ pub struct Tag {
|
|||||||
struct SearchCursor {
|
struct SearchCursor {
|
||||||
newsreader_offset: i32,
|
newsreader_offset: i32,
|
||||||
notmuch_offset: i32,
|
notmuch_offset: i32,
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
tantivy_offset: i32,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn request_id() -> String {
|
|
||||||
let now = std::time::SystemTime::now();
|
|
||||||
let nanos = now
|
|
||||||
.duration_since(std::time::SystemTime::UNIX_EPOCH)
|
|
||||||
.unwrap_or_default()
|
|
||||||
.as_nanos();
|
|
||||||
format!("{nanos:x}")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct QueryRoot;
|
pub struct QueryRoot;
|
||||||
@ -307,42 +231,20 @@ pub struct QueryRoot;
|
|||||||
impl QueryRoot {
|
impl QueryRoot {
|
||||||
async fn version<'ctx>(&self, _ctx: &Context<'ctx>) -> Result<String, Error> {
|
async fn version<'ctx>(&self, _ctx: &Context<'ctx>) -> Result<String, Error> {
|
||||||
build_info::build_info!(fn bi);
|
build_info::build_info!(fn bi);
|
||||||
Ok(letterbox_shared::build_version(bi))
|
Ok(shared::build_version(bi))
|
||||||
}
|
}
|
||||||
#[instrument(skip_all, fields(query=query, rid=request_id()))]
|
|
||||||
async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
|
async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
let nm = ctx.data_unchecked::<Notmuch>();
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
let pool = ctx.data_unchecked::<PgPool>();
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
|
||||||
|
|
||||||
let newsreader_query: Query = query.parse()?;
|
let newsreader_query: Query = query.parse()?;
|
||||||
|
|
||||||
let newsreader_count = newsreader::count(pool, &newsreader_query).await?;
|
let newsreader_count = newsreader::count(pool, &newsreader_query).await?;
|
||||||
let notmuch_count = nm::count(nm, &newsreader_query).await?;
|
let notmuch_count = nm::count(nm, &newsreader_query.to_notmuch()).await?;
|
||||||
#[cfg(feature = "tantivy")]
|
info!("count {newsreader_query:?} newsreader count {newsreader_count} notmuch count {notmuch_count}");
|
||||||
let tantivy_count = tantivy.count(&newsreader_query).await?;
|
Ok(newsreader_count + notmuch_count)
|
||||||
#[cfg(not(feature = "tantivy"))]
|
|
||||||
let tantivy_count = 0;
|
|
||||||
|
|
||||||
let total = newsreader_count + notmuch_count + tantivy_count;
|
|
||||||
info!("count {newsreader_query:?} newsreader count {newsreader_count} notmuch count {notmuch_count} tantivy count {tantivy_count} total {total}");
|
|
||||||
Ok(total)
|
|
||||||
}
|
|
||||||
#[instrument(skip_all, fields(query=query, rid=request_id()))]
|
|
||||||
async fn catchup<'ctx>(
|
|
||||||
&self,
|
|
||||||
ctx: &Context<'ctx>,
|
|
||||||
query: String,
|
|
||||||
) -> Result<Vec<String>, Error> {
|
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
|
||||||
compute_catchup_ids(nm, pool, &query).await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: this function doesn't get parallelism, possibly because notmuch is sync and blocks,
|
|
||||||
// rewrite that with tokio::process:Command
|
|
||||||
#[instrument(skip_all, fields(query=query, rid=request_id()))]
|
|
||||||
async fn search<'ctx>(
|
async fn search<'ctx>(
|
||||||
&self,
|
&self,
|
||||||
ctx: &Context<'ctx>,
|
ctx: &Context<'ctx>,
|
||||||
@ -352,12 +254,15 @@ impl QueryRoot {
|
|||||||
last: Option<i32>,
|
last: Option<i32>,
|
||||||
query: String,
|
query: String,
|
||||||
) -> Result<Connection<OpaqueCursor<SearchCursor>, ThreadSummary>, Error> {
|
) -> Result<Connection<OpaqueCursor<SearchCursor>, ThreadSummary>, Error> {
|
||||||
|
// TODO: add keywords to limit search to one corpus, i.e. is:news or is:mail
|
||||||
info!("search({after:?} {before:?} {first:?} {last:?} {query:?})",);
|
info!("search({after:?} {before:?} {first:?} {last:?} {query:?})",);
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
let nm = ctx.data_unchecked::<Notmuch>();
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
let pool = ctx.data_unchecked::<PgPool>();
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
|
||||||
|
|
||||||
|
enum ThreadSummaryCursor {
|
||||||
|
Newsreader(i32, ThreadSummary),
|
||||||
|
Notmuch(i32, ThreadSummary),
|
||||||
|
}
|
||||||
Ok(connection::query(
|
Ok(connection::query(
|
||||||
after,
|
after,
|
||||||
before,
|
before,
|
||||||
@ -368,79 +273,64 @@ impl QueryRoot {
|
|||||||
first: Option<usize>,
|
first: Option<usize>,
|
||||||
last: Option<usize>| async move {
|
last: Option<usize>| async move {
|
||||||
info!(
|
info!(
|
||||||
"search(after {:?} before {:?} first {first:?} last {last:?} query: {query:?})",
|
"search({:?} {:?} {first:?} {last:?} {query:?})",
|
||||||
after.as_ref().map(|v| &v.0),
|
after.as_ref().map(|v| &v.0),
|
||||||
before.as_ref().map(|v| &v.0)
|
before.as_ref().map(|v| &v.0)
|
||||||
);
|
);
|
||||||
let newsreader_after = after.as_ref().map(|sc| sc.newsreader_offset);
|
let newsreader_after = after.as_ref().map(|sc| sc.newsreader_offset);
|
||||||
let notmuch_after = after.as_ref().map(|sc| sc.notmuch_offset);
|
let notmuch_after = after.as_ref().map(|sc| sc.notmuch_offset);
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
let tantivy_after = after.as_ref().map(|sc| sc.tantivy_offset);
|
|
||||||
|
|
||||||
let newsreader_before = before.as_ref().map(|sc| sc.newsreader_offset);
|
let newsreader_before = before.as_ref().map(|sc| sc.newsreader_offset);
|
||||||
let notmuch_before = before.as_ref().map(|sc| sc.notmuch_offset);
|
let notmuch_before = before.as_ref().map(|sc| sc.notmuch_offset);
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
let tantivy_before = before.as_ref().map(|sc| sc.tantivy_offset);
|
|
||||||
let first = first.map(|v| v as i32);
|
|
||||||
let last = last.map(|v| v as i32);
|
|
||||||
|
|
||||||
let query: Query = query.parse()?;
|
let newsreader_query: Query = query.parse()?;
|
||||||
info!("newsreader_query {query:?}");
|
info!("newsreader_query {newsreader_query:?}");
|
||||||
|
let newsreader_results = if newsreader_query.is_newsreader {
|
||||||
|
newsreader::search(
|
||||||
|
pool,
|
||||||
|
newsreader_after,
|
||||||
|
newsreader_before,
|
||||||
|
first.map(|v| v as i32),
|
||||||
|
last.map(|v| v as i32),
|
||||||
|
&newsreader_query,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.map(|(cur, ts)| ThreadSummaryCursor::Newsreader(cur, ts))
|
||||||
|
.collect()
|
||||||
|
} else {
|
||||||
|
Vec::new()
|
||||||
|
};
|
||||||
|
|
||||||
let newsreader_fut = newsreader_search(
|
let notmuch_results = if newsreader_query.is_notmuch {
|
||||||
pool,
|
nm::search(
|
||||||
newsreader_after,
|
nm,
|
||||||
newsreader_before,
|
notmuch_after,
|
||||||
first,
|
notmuch_before,
|
||||||
last,
|
first.map(|v| v as i32),
|
||||||
&query,
|
last.map(|v| v as i32),
|
||||||
);
|
newsreader_query.to_notmuch(),
|
||||||
let notmuch_fut =
|
)
|
||||||
notmuch_search(nm, notmuch_after, notmuch_before, first, last, &query);
|
.await?
|
||||||
#[cfg(feature = "tantivy")]
|
.into_iter()
|
||||||
let tantivy_fut = tantivy_search(
|
.map(|(cur, ts)| ThreadSummaryCursor::Notmuch(cur, ts))
|
||||||
tantivy,
|
.collect()
|
||||||
pool,
|
} else {
|
||||||
tantivy_after,
|
Vec::new()
|
||||||
tantivy_before,
|
};
|
||||||
first,
|
|
||||||
last,
|
|
||||||
&query,
|
|
||||||
);
|
|
||||||
#[cfg(not(feature = "tantivy"))]
|
|
||||||
let tantivy_fut =
|
|
||||||
async { Ok::<Vec<ThreadSummaryCursor>, async_graphql::Error>(Vec::new()) };
|
|
||||||
|
|
||||||
let (newsreader_results, notmuch_results, tantivy_results) =
|
|
||||||
join!(newsreader_fut, notmuch_fut, tantivy_fut);
|
|
||||||
|
|
||||||
let newsreader_results = newsreader_results?;
|
|
||||||
let notmuch_results = notmuch_results?;
|
|
||||||
let tantivy_results = tantivy_results?;
|
|
||||||
info!(
|
|
||||||
"newsreader_results ({}) notmuch_results ({}) tantivy_results ({})",
|
|
||||||
newsreader_results.len(),
|
|
||||||
notmuch_results.len(),
|
|
||||||
tantivy_results.len()
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut results: Vec<_> = newsreader_results
|
let mut results: Vec<_> = newsreader_results
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain(notmuch_results)
|
.chain(notmuch_results)
|
||||||
.chain(tantivy_results)
|
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// The leading '-' is to reverse sort
|
// The leading '-' is to reverse sort
|
||||||
results.sort_by_key(|item| match item {
|
results.sort_by_key(|item| match item {
|
||||||
ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
|
ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
|
||||||
ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
|
ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
ThreadSummaryCursor::Tantivy(_, ts) => -ts.timestamp,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
let mut has_next_page = before.is_some();
|
let mut has_next_page = before.is_some();
|
||||||
if let Some(first) = first {
|
if let Some(first) = first {
|
||||||
let first = first as usize;
|
|
||||||
if results.len() > first {
|
if results.len() > first {
|
||||||
has_next_page = true;
|
has_next_page = true;
|
||||||
results.truncate(first);
|
results.truncate(first);
|
||||||
@ -449,7 +339,6 @@ impl QueryRoot {
|
|||||||
|
|
||||||
let mut has_previous_page = after.is_some();
|
let mut has_previous_page = after.is_some();
|
||||||
if let Some(last) = last {
|
if let Some(last) = last {
|
||||||
let last = last as usize;
|
|
||||||
if results.len() > last {
|
if results.len() > last {
|
||||||
has_previous_page = true;
|
has_previous_page = true;
|
||||||
results.truncate(last);
|
results.truncate(last);
|
||||||
@ -457,17 +346,8 @@ impl QueryRoot {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let mut connection = Connection::new(has_previous_page, has_next_page);
|
let mut connection = Connection::new(has_previous_page, has_next_page);
|
||||||
// Set starting offset as the value from cursor to preserve state if no results from a corpus survived the truncation
|
let mut newsreader_offset = 0;
|
||||||
let mut newsreader_offset =
|
let mut notmuch_offset = 0;
|
||||||
after.as_ref().map(|sc| sc.newsreader_offset).unwrap_or(0);
|
|
||||||
let mut notmuch_offset = after.as_ref().map(|sc| sc.notmuch_offset).unwrap_or(0);
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
let tantivy_offset = after.as_ref().map(|sc| sc.tantivy_offset).unwrap_or(0);
|
|
||||||
|
|
||||||
info!(
|
|
||||||
"newsreader_offset ({}) notmuch_offset ({})",
|
|
||||||
newsreader_offset, notmuch_offset,
|
|
||||||
);
|
|
||||||
|
|
||||||
connection.edges.extend(results.into_iter().map(|item| {
|
connection.edges.extend(results.into_iter().map(|item| {
|
||||||
let thread_summary;
|
let thread_summary;
|
||||||
@ -480,17 +360,10 @@ impl QueryRoot {
|
|||||||
thread_summary = ts;
|
thread_summary = ts;
|
||||||
notmuch_offset = offset;
|
notmuch_offset = offset;
|
||||||
}
|
}
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
ThreadSummaryCursor::Tantivy(offset, ts) => {
|
|
||||||
thread_summary = ts;
|
|
||||||
tantivy_offset = offset;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
let cur = OpaqueCursor(SearchCursor {
|
let cur = OpaqueCursor(SearchCursor {
|
||||||
newsreader_offset,
|
newsreader_offset,
|
||||||
notmuch_offset,
|
notmuch_offset,
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
tantivy_offset,
|
|
||||||
});
|
});
|
||||||
Edge::new(cur, thread_summary)
|
Edge::new(cur, thread_summary)
|
||||||
}));
|
}));
|
||||||
@ -500,7 +373,6 @@ impl QueryRoot {
|
|||||||
.await?)
|
.await?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, fields(rid=request_id()))]
|
|
||||||
async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
|
async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
let nm = ctx.data_unchecked::<Notmuch>();
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
let pool = ctx.data_unchecked::<PgPool>();
|
||||||
@ -509,84 +381,28 @@ impl QueryRoot {
|
|||||||
tags.append(&mut nm::tags(nm, needs_unread)?);
|
tags.append(&mut nm::tags(nm, needs_unread)?);
|
||||||
Ok(tags)
|
Ok(tags)
|
||||||
}
|
}
|
||||||
#[instrument(skip_all, fields(thread_id=thread_id, rid=request_id()))]
|
|
||||||
async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
|
async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
let nm = ctx.data_unchecked::<Notmuch>();
|
||||||
let cacher = ctx.data_unchecked::<FilesystemCacher>();
|
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
let pool = ctx.data_unchecked::<PgPool>();
|
||||||
|
let config = ctx.data_unchecked::<Config>();
|
||||||
let debug_content_tree = ctx
|
let debug_content_tree = ctx
|
||||||
.look_ahead()
|
.look_ahead()
|
||||||
.field("messages")
|
.field("messages")
|
||||||
.field("body")
|
.field("body")
|
||||||
.field("contentTree")
|
.field("contentTree")
|
||||||
.exists();
|
.exists();
|
||||||
|
// TODO: look at thread_id and conditionally load newsreader
|
||||||
if newsreader::is_newsreader_thread(&thread_id) {
|
if newsreader::is_newsreader_thread(&thread_id) {
|
||||||
Ok(newsreader::thread(cacher, pool, thread_id).await?)
|
Ok(newsreader::thread(config, pool, thread_id).await?)
|
||||||
} else {
|
} else {
|
||||||
Ok(nm::thread(nm, pool, thread_id, debug_content_tree).await?)
|
Ok(nm::thread(nm, thread_id, debug_content_tree).await?)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
pub struct Mutation;
|
||||||
enum ThreadSummaryCursor {
|
|
||||||
Newsreader(i32, ThreadSummary),
|
|
||||||
Notmuch(i32, ThreadSummary),
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
Tantivy(i32, ThreadSummary),
|
|
||||||
}
|
|
||||||
async fn newsreader_search(
|
|
||||||
pool: &PgPool,
|
|
||||||
after: Option<i32>,
|
|
||||||
before: Option<i32>,
|
|
||||||
first: Option<i32>,
|
|
||||||
last: Option<i32>,
|
|
||||||
query: &Query,
|
|
||||||
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
|
|
||||||
Ok(newsreader::search(pool, after, before, first, last, &query)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(|(cur, ts)| ThreadSummaryCursor::Newsreader(cur, ts))
|
|
||||||
.collect())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn notmuch_search(
|
|
||||||
nm: &Notmuch,
|
|
||||||
after: Option<i32>,
|
|
||||||
before: Option<i32>,
|
|
||||||
first: Option<i32>,
|
|
||||||
last: Option<i32>,
|
|
||||||
query: &Query,
|
|
||||||
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
|
|
||||||
Ok(nm::search(nm, after, before, first, last, &query)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(|(cur, ts)| ThreadSummaryCursor::Notmuch(cur, ts))
|
|
||||||
.collect())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
async fn tantivy_search(
|
|
||||||
tantivy: &TantivyConnection,
|
|
||||||
pool: &PgPool,
|
|
||||||
after: Option<i32>,
|
|
||||||
before: Option<i32>,
|
|
||||||
first: Option<i32>,
|
|
||||||
last: Option<i32>,
|
|
||||||
query: &Query,
|
|
||||||
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
|
|
||||||
Ok(tantivy
|
|
||||||
.search(pool, after, before, first, last, &query)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(|(cur, ts)| ThreadSummaryCursor::Tantivy(cur, ts))
|
|
||||||
.collect())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct MutationRoot;
|
|
||||||
#[Object]
|
#[Object]
|
||||||
impl MutationRoot {
|
impl Mutation {
|
||||||
#[instrument(skip_all, fields(query=query, unread=unread, rid=request_id()))]
|
|
||||||
async fn set_read_status<'ctx>(
|
async fn set_read_status<'ctx>(
|
||||||
&self,
|
&self,
|
||||||
ctx: &Context<'ctx>,
|
ctx: &Context<'ctx>,
|
||||||
@ -595,17 +411,16 @@ impl MutationRoot {
|
|||||||
) -> Result<bool, Error> {
|
) -> Result<bool, Error> {
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
let nm = ctx.data_unchecked::<Notmuch>();
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
let pool = ctx.data_unchecked::<PgPool>();
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
|
||||||
|
|
||||||
let query: Query = query.parse()?;
|
for q in query.split_whitespace() {
|
||||||
newsreader::set_read_status(pool, &query, unread).await?;
|
if newsreader::is_newsreader_thread(&q) {
|
||||||
#[cfg(feature = "tantivy")]
|
newsreader::set_read_status(pool, &q, unread).await?;
|
||||||
tantivy.reindex_thread(pool, &query).await?;
|
} else {
|
||||||
nm::set_read_status(nm, &query, unread).await?;
|
nm::set_read_status(nm, q, unread).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
Ok(true)
|
Ok(true)
|
||||||
}
|
}
|
||||||
#[instrument(skip_all, fields(query=query, tag=tag, rid=request_id()))]
|
|
||||||
async fn tag_add<'ctx>(
|
async fn tag_add<'ctx>(
|
||||||
&self,
|
&self,
|
||||||
ctx: &Context<'ctx>,
|
ctx: &Context<'ctx>,
|
||||||
@ -617,7 +432,6 @@ impl MutationRoot {
|
|||||||
nm.tag_add(&tag, &query)?;
|
nm.tag_add(&tag, &query)?;
|
||||||
Ok(true)
|
Ok(true)
|
||||||
}
|
}
|
||||||
#[instrument(skip_all, fields(query=query, tag=tag, rid=request_id()))]
|
|
||||||
async fn tag_remove<'ctx>(
|
async fn tag_remove<'ctx>(
|
||||||
&self,
|
&self,
|
||||||
ctx: &Context<'ctx>,
|
ctx: &Context<'ctx>,
|
||||||
@ -629,161 +443,6 @@ impl MutationRoot {
|
|||||||
nm.tag_remove(&tag, &query)?;
|
nm.tag_remove(&tag, &query)?;
|
||||||
Ok(true)
|
Ok(true)
|
||||||
}
|
}
|
||||||
#[instrument(skip_all, fields(query=query, wake_time=wake_time.to_string(), rid=request_id()))]
|
|
||||||
async fn snooze<'ctx>(
|
|
||||||
&self,
|
|
||||||
ctx: &Context<'ctx>,
|
|
||||||
query: String,
|
|
||||||
wake_time: DateTime<Utc>,
|
|
||||||
) -> Result<bool, Error> {
|
|
||||||
info!("TODO snooze {query} until {wake_time})");
|
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
|
||||||
sqlx::query!(
|
|
||||||
r#"
|
|
||||||
INSERT INTO snooze (message_id, wake)
|
|
||||||
VALUES ($1, $2)
|
|
||||||
ON CONFLICT (message_id) DO UPDATE
|
|
||||||
SET wake = $2
|
|
||||||
"#,
|
|
||||||
query,
|
|
||||||
wake_time
|
|
||||||
)
|
|
||||||
.execute(pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
|
||||||
|
|
||||||
let unread = false;
|
|
||||||
let query: Query = query.parse()?;
|
|
||||||
newsreader::set_read_status(pool, &query, unread).await?;
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
tantivy.reindex_thread(pool, &query).await?;
|
|
||||||
nm::set_read_status(nm, &query, unread).await?;
|
|
||||||
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
/// Drop and recreate tantivy index. Warning this is slow
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
async fn drop_and_load_index<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
|
|
||||||
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
|
||||||
|
|
||||||
tantivy.drop_and_load_index()?;
|
|
||||||
tantivy.reindex_all(pool).await?;
|
|
||||||
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
#[instrument(skip_all, fields(rid=request_id()))]
|
|
||||||
async fn label_unprocessed<'ctx>(
|
|
||||||
&self,
|
|
||||||
ctx: &Context<'ctx>,
|
|
||||||
limit: Option<usize>,
|
|
||||||
) -> Result<bool, Error> {
|
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
|
||||||
label_unprocessed(&nm, &pool, false, limit, "tag:unprocessed").await?;
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip_all, fields(rid=request_id()))]
|
|
||||||
async fn refresh<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
|
|
||||||
let nm = ctx.data_unchecked::<Notmuch>();
|
|
||||||
let cacher = ctx.data_unchecked::<FilesystemCacher>();
|
|
||||||
let pool = ctx.data_unchecked::<PgPool>();
|
|
||||||
info!("{}", String::from_utf8_lossy(&nm.new()?));
|
|
||||||
newsreader::refresh(pool, cacher).await?;
|
|
||||||
|
|
||||||
// Process email labels
|
|
||||||
label_unprocessed(&nm, &pool, false, Some(1000), "tag:unprocessed").await?;
|
|
||||||
|
|
||||||
// Look for snoozed messages and mark unread
|
|
||||||
wakeup(&nm, &pool).await?;
|
|
||||||
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
{
|
|
||||||
let tantivy = ctx.data_unchecked::<TantivyConnection>();
|
|
||||||
// TODO: parallelize
|
|
||||||
tantivy.refresh(pool).await?;
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct SubscriptionRoot;
|
pub type GraphqlSchema = Schema<QueryRoot, Mutation, EmptySubscription>;
|
||||||
#[Subscription]
|
|
||||||
impl SubscriptionRoot {
|
|
||||||
async fn values(&self, _ctx: &Context<'_>) -> Result<impl Stream<Item = usize>, Error> {
|
|
||||||
Ok(stream::iter(0..10))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub type GraphqlSchema = Schema<QueryRoot, MutationRoot, SubscriptionRoot>;
|
|
||||||
|
|
||||||
#[instrument(name = "wakeup", skip_all)]
|
|
||||||
pub async fn wakeup(nm: &Notmuch, pool: &PgPool) -> Result<(), Error> {
|
|
||||||
for row in sqlx::query!(
|
|
||||||
r#"
|
|
||||||
SELECT id, message_id
|
|
||||||
FROM snooze
|
|
||||||
WHERE wake < NOW();
|
|
||||||
"#
|
|
||||||
)
|
|
||||||
.fetch_all(pool)
|
|
||||||
.await?
|
|
||||||
{
|
|
||||||
let query: Query = row.message_id.parse()?;
|
|
||||||
info!("need to wake {query}");
|
|
||||||
let unread = true;
|
|
||||||
newsreader::set_read_status(pool, &query, unread).await?;
|
|
||||||
#[cfg(feature = "tantivy")]
|
|
||||||
tantivy.reindex_thread(pool, &query).await?;
|
|
||||||
nm::set_read_status(nm, &query, unread).await?;
|
|
||||||
|
|
||||||
sqlx::query!("DELETE FROM snooze WHERE id = $1", row.id)
|
|
||||||
.execute(pool)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip_all, fields(query=query))]
|
|
||||||
pub async fn compute_catchup_ids(
|
|
||||||
nm: &Notmuch,
|
|
||||||
pool: &PgPool,
|
|
||||||
query: &str,
|
|
||||||
) -> Result<Vec<String>, Error> {
|
|
||||||
let query: Query = query.parse()?;
|
|
||||||
// TODO: implement optimized versions of fetching just IDs
|
|
||||||
let newsreader_fut = newsreader_search(pool, None, None, None, None, &query);
|
|
||||||
let notmuch_fut = notmuch_search(nm, None, None, None, None, &query);
|
|
||||||
let (newsreader_results, notmuch_results) = join!(newsreader_fut, notmuch_fut);
|
|
||||||
|
|
||||||
let newsreader_results = newsreader_results?;
|
|
||||||
let notmuch_results = notmuch_results?;
|
|
||||||
info!(
|
|
||||||
"newsreader_results ({}) notmuch_results ({})",
|
|
||||||
newsreader_results.len(),
|
|
||||||
notmuch_results.len(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut results: Vec<_> = newsreader_results
|
|
||||||
.into_iter()
|
|
||||||
.chain(notmuch_results)
|
|
||||||
.collect();
|
|
||||||
// The leading '-' is to reverse sort
|
|
||||||
results.sort_by_key(|item| match item {
|
|
||||||
ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
|
|
||||||
ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
|
|
||||||
});
|
|
||||||
let ids = results
|
|
||||||
.into_iter()
|
|
||||||
.map(|r| match r {
|
|
||||||
ThreadSummaryCursor::Newsreader(_, ts) => ts.thread,
|
|
||||||
ThreadSummaryCursor::Notmuch(_, ts) => ts.thread,
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
Ok(ids)
|
|
||||||
}
|
|
||||||
|
|||||||
@ -1,49 +1,31 @@
|
|||||||
pub mod config;
|
pub mod config;
|
||||||
pub mod email_extract;
|
|
||||||
pub mod error;
|
pub mod error;
|
||||||
pub mod graphql;
|
pub mod graphql;
|
||||||
pub mod newsreader;
|
pub mod newsreader;
|
||||||
pub mod nm;
|
pub mod nm;
|
||||||
pub mod ws;
|
|
||||||
|
|
||||||
#[cfg(feature = "tantivy")]
|
use std::{collections::HashMap, convert::Infallible, str::FromStr, sync::Arc};
|
||||||
pub mod tantivy;
|
|
||||||
|
|
||||||
use std::{
|
|
||||||
collections::{HashMap, HashSet},
|
|
||||||
convert::Infallible,
|
|
||||||
fmt,
|
|
||||||
str::FromStr,
|
|
||||||
sync::Arc,
|
|
||||||
};
|
|
||||||
|
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use cacher::{Cacher, FilesystemCacher};
|
use cacher::{Cacher, FilesystemCacher};
|
||||||
use chrono::NaiveDateTime;
|
|
||||||
use css_inline::{CSSInliner, InlineError, InlineOptions};
|
use css_inline::{CSSInliner, InlineError, InlineOptions};
|
||||||
pub use error::ServerError;
|
|
||||||
use linkify::{LinkFinder, LinkKind};
|
use linkify::{LinkFinder, LinkKind};
|
||||||
|
use log::{error, info, warn};
|
||||||
use lol_html::{
|
use lol_html::{
|
||||||
element, errors::RewritingError, html_content::ContentType, rewrite_str, text,
|
element, errors::RewritingError, html_content::ContentType, rewrite_str, text,
|
||||||
RewriteStrSettings,
|
RewriteStrSettings,
|
||||||
};
|
};
|
||||||
use maplit::{hashmap, hashset};
|
use maplit::{hashmap, hashset};
|
||||||
use regex::Regex;
|
|
||||||
use reqwest::StatusCode;
|
|
||||||
use scraper::{Html, Selector};
|
use scraper::{Html, Selector};
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
use tracing::{debug, error, info, warn};
|
use tokio::sync::Mutex;
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
graphql::{Corpus, ThreadSummary},
|
config::DeSelector,
|
||||||
newsreader::is_newsreader_thread,
|
newsreader::{extract_thread_id, is_newsreader_thread},
|
||||||
nm::is_notmuch_thread_or_id,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const NEWSREADER_TAG_PREFIX: &'static str = "News/";
|
|
||||||
const NEWSREADER_THREAD_PREFIX: &'static str = "news:";
|
|
||||||
|
|
||||||
// TODO: figure out how to use Cow
|
// TODO: figure out how to use Cow
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
trait Transformer: Send + Sync {
|
trait Transformer: Send + Sync {
|
||||||
@ -67,8 +49,6 @@ pub enum TransformError {
|
|||||||
ReqwestError(#[from] reqwest::Error),
|
ReqwestError(#[from] reqwest::Error),
|
||||||
#[error("failed to parse HTML: {0}")]
|
#[error("failed to parse HTML: {0}")]
|
||||||
HtmlParsingError(String),
|
HtmlParsingError(String),
|
||||||
#[error("got a retryable error code {0} for {1}")]
|
|
||||||
RetryableHttpStatusError(StatusCode, String),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
struct SanitizeHtml<'a> {
|
struct SanitizeHtml<'a> {
|
||||||
@ -99,44 +79,23 @@ struct StripHtml;
|
|||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl Transformer for StripHtml {
|
impl Transformer for StripHtml {
|
||||||
fn should_run(&self, link: &Option<Url>, html: &str) -> bool {
|
fn should_run(&self, _: &Option<Url>, html: &str) -> bool {
|
||||||
debug!("StripHtml should_run {link:?} {}", html.contains("<"));
|
|
||||||
// Lame test
|
// Lame test
|
||||||
html.contains("<")
|
html.contains("<")
|
||||||
}
|
}
|
||||||
async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
debug!("StripHtml {link:?}");
|
|
||||||
let mut text = String::new();
|
let mut text = String::new();
|
||||||
let element_content_handlers = vec![
|
let element_content_handlers = vec![text!("*", |t| {
|
||||||
element!("style", |el| {
|
text += t.as_str();
|
||||||
el.remove();
|
Ok(())
|
||||||
Ok(())
|
})];
|
||||||
}),
|
let _ = rewrite_str(
|
||||||
element!("script", |el| {
|
|
||||||
el.remove();
|
|
||||||
Ok(())
|
|
||||||
}),
|
|
||||||
];
|
|
||||||
let html = rewrite_str(
|
|
||||||
html,
|
html,
|
||||||
RewriteStrSettings {
|
RewriteStrSettings {
|
||||||
element_content_handlers,
|
element_content_handlers,
|
||||||
..RewriteStrSettings::default()
|
..RewriteStrSettings::default()
|
||||||
},
|
},
|
||||||
)?;
|
)?;
|
||||||
let element_content_handlers = vec![text!("*", |t| {
|
|
||||||
text += t.as_str();
|
|
||||||
Ok(())
|
|
||||||
})];
|
|
||||||
let _ = rewrite_str(
|
|
||||||
&html,
|
|
||||||
RewriteStrSettings {
|
|
||||||
element_content_handlers,
|
|
||||||
..RewriteStrSettings::default()
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
let re = Regex::new(r"\s+").expect("failed to parse regex");
|
|
||||||
let text = re.replace_all(&text, " ").to_string();
|
|
||||||
|
|
||||||
Ok(text)
|
Ok(text)
|
||||||
}
|
}
|
||||||
@ -150,10 +109,10 @@ impl Transformer for InlineStyle {
|
|||||||
let css = concat!(
|
let css = concat!(
|
||||||
"/* chrome-default.css */\n",
|
"/* chrome-default.css */\n",
|
||||||
include_str!("chrome-default.css"),
|
include_str!("chrome-default.css"),
|
||||||
//"\n/* mvp.css */\n",
|
"\n/* mvp.css */\n",
|
||||||
//include_str!("mvp.css"),
|
include_str!("mvp.css"),
|
||||||
//"\n/* Xinu Specific overrides */\n",
|
"\n/* Xinu Specific overrides */\n",
|
||||||
//include_str!("custom.css"),
|
include_str!("custom.css"),
|
||||||
);
|
);
|
||||||
let inline_opts = InlineOptions {
|
let inline_opts = InlineOptions {
|
||||||
inline_style_tags: true,
|
inline_style_tags: true,
|
||||||
@ -259,14 +218,13 @@ impl Transformer for AddOutlink {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct SlurpContents<'c> {
|
struct SlurpContents<'h> {
|
||||||
cacher: &'c FilesystemCacher,
|
cacher: Arc<Mutex<FilesystemCacher>>,
|
||||||
inline_css: bool,
|
site_selectors: &'h HashMap<String, Vec<DeSelector>>,
|
||||||
site_selectors: HashMap<String, Vec<Selector>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'c> SlurpContents<'c> {
|
impl<'h> SlurpContents<'h> {
|
||||||
fn get_selectors(&self, link: &Url) -> Option<&[Selector]> {
|
fn get_selectors(&self, link: &Url) -> Option<&[DeSelector]> {
|
||||||
for (host, selector) in self.site_selectors.iter() {
|
for (host, selector) in self.site_selectors.iter() {
|
||||||
if link.host_str().map(|h| h.contains(host)).unwrap_or(false) {
|
if link.host_str().map(|h| h.contains(host)).unwrap_or(false) {
|
||||||
return Some(&selector);
|
return Some(&selector);
|
||||||
@ -277,117 +235,40 @@ impl<'c> SlurpContents<'c> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl<'c> Transformer for SlurpContents<'c> {
|
impl<'h> Transformer for SlurpContents<'h> {
|
||||||
fn should_run(&self, link: &Option<Url>, html: &str) -> bool {
|
fn should_run(&self, link: &Option<Url>, _: &str) -> bool {
|
||||||
debug!("SlurpContents should_run {link:?}");
|
|
||||||
let mut will_slurp = false;
|
|
||||||
if let Some(link) = link {
|
if let Some(link) = link {
|
||||||
will_slurp = self.get_selectors(link).is_some();
|
return self.get_selectors(link).is_some();
|
||||||
}
|
}
|
||||||
if !will_slurp && self.inline_css {
|
false
|
||||||
return InlineStyle {}.should_run(link, html);
|
|
||||||
}
|
|
||||||
will_slurp
|
|
||||||
}
|
}
|
||||||
async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
debug!("SlurpContents {link:?}");
|
|
||||||
let retryable_status: HashSet<StatusCode> = vec![
|
|
||||||
StatusCode::UNAUTHORIZED,
|
|
||||||
StatusCode::FORBIDDEN,
|
|
||||||
StatusCode::REQUEST_TIMEOUT,
|
|
||||||
StatusCode::TOO_MANY_REQUESTS,
|
|
||||||
]
|
|
||||||
.into_iter()
|
|
||||||
.collect();
|
|
||||||
if let Some(test_link) = link {
|
|
||||||
// If SlurpContents is configured for inline CSS, but no
|
|
||||||
// configuration found for this site, use the local InlineStyle
|
|
||||||
// transform.
|
|
||||||
if self.inline_css && self.get_selectors(test_link).is_none() {
|
|
||||||
debug!("local inline CSS for {link:?}");
|
|
||||||
return InlineStyle {}.transform(link, html).await;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let Some(link) = link else {
|
let Some(link) = link else {
|
||||||
return Ok(html.to_string());
|
return Ok(html.to_string());
|
||||||
};
|
};
|
||||||
let Some(selectors) = self.get_selectors(&link) else {
|
let Some(selectors) = self.get_selectors(&link) else {
|
||||||
return Ok(html.to_string());
|
return Ok(html.to_string());
|
||||||
};
|
};
|
||||||
let cacher = self.cacher;
|
let mut cacher = self.cacher.lock().await;
|
||||||
let body = if let Some(body) = cacher.get(link.as_str()) {
|
let body = if let Some(body) = cacher.get(link.as_str()) {
|
||||||
|
info!("cache hit for {link}");
|
||||||
String::from_utf8_lossy(&body).to_string()
|
String::from_utf8_lossy(&body).to_string()
|
||||||
} else {
|
} else {
|
||||||
let resp = reqwest::get(link.as_str()).await?;
|
let body = reqwest::get(link.as_str()).await?.text().await?;
|
||||||
let status = resp.status();
|
|
||||||
if status.is_server_error() {
|
|
||||||
error!("status error for {link}: {status}");
|
|
||||||
return Ok(html.to_string());
|
|
||||||
}
|
|
||||||
if retryable_status.contains(&status) {
|
|
||||||
error!("retryable error for {link}: {status}");
|
|
||||||
return Ok(html.to_string());
|
|
||||||
}
|
|
||||||
if !status.is_success() {
|
|
||||||
error!("unsuccessful for {link}: {status}");
|
|
||||||
return Ok(html.to_string());
|
|
||||||
}
|
|
||||||
let body = resp.text().await?;
|
|
||||||
cacher.set(link.as_str(), body.as_bytes());
|
cacher.set(link.as_str(), body.as_bytes());
|
||||||
body
|
body
|
||||||
};
|
};
|
||||||
let body = Arc::new(body);
|
|
||||||
let base_url = Some(link.clone());
|
|
||||||
let body = if self.inline_css {
|
|
||||||
debug!("inlining CSS for {link}");
|
|
||||||
let inner_body = Arc::clone(&body);
|
|
||||||
let res = tokio::task::spawn_blocking(move || {
|
|
||||||
let css = concat!(
|
|
||||||
"/* chrome-default.css */\n",
|
|
||||||
include_str!("chrome-default.css"),
|
|
||||||
"\n/* vars.css */\n",
|
|
||||||
include_str!("../static/vars.css"),
|
|
||||||
//"\n/* Xinu Specific overrides */\n",
|
|
||||||
//include_str!("custom.css"),
|
|
||||||
);
|
|
||||||
let res = CSSInliner::options()
|
|
||||||
.base_url(base_url)
|
|
||||||
.extra_css(Some(std::borrow::Cow::Borrowed(css)))
|
|
||||||
.build()
|
|
||||||
.inline(&inner_body);
|
|
||||||
|
|
||||||
match res {
|
|
||||||
Ok(inlined_html) => inlined_html,
|
|
||||||
Err(err) => {
|
|
||||||
error!("failed to inline remote CSS: {err}");
|
|
||||||
Arc::into_inner(inner_body).expect("failed to take body out of Arc")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
match res {
|
|
||||||
Ok(inlined_html) => inlined_html,
|
|
||||||
Err(err) => {
|
|
||||||
error!("failed to spawn inline remote CSS: {err}");
|
|
||||||
Arc::into_inner(body).expect("failed to take body out of Arc")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
debug!("using body as-is for {link:?}");
|
|
||||||
Arc::into_inner(body).expect("failed to take body out of Arc")
|
|
||||||
};
|
|
||||||
|
|
||||||
let doc = Html::parse_document(&body);
|
let doc = Html::parse_document(&body);
|
||||||
|
|
||||||
let mut results = Vec::new();
|
let mut results = Vec::new();
|
||||||
for selector in selectors {
|
for selector in selectors {
|
||||||
for frag in doc.select(&selector) {
|
for frag in doc.select(&selector.0) {
|
||||||
results.push(frag.html())
|
results.push(frag.html())
|
||||||
// TODO: figure out how to warn if there were no hits
|
// TODO: figure out how to warn if there were no hits
|
||||||
//warn!("couldn't find '{:?}' in {}", selector, link);
|
//warn!("couldn't find '{:?}' in {}", selector, link);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(results.join("<br>"))
|
Ok(results.join(""))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -443,34 +324,6 @@ pub fn sanitize_html(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
let mut element_content_handlers = vec![
|
let mut element_content_handlers = vec![
|
||||||
// Remove width and height attributes on elements
|
|
||||||
element!("[width],[height]", |el| {
|
|
||||||
el.remove_attribute("width");
|
|
||||||
el.remove_attribute("height");
|
|
||||||
Ok(())
|
|
||||||
}),
|
|
||||||
// Remove width and height values from inline styles
|
|
||||||
element!("[style]", |el| {
|
|
||||||
let style = el.get_attribute("style").unwrap();
|
|
||||||
let style = style
|
|
||||||
.split(";")
|
|
||||||
.filter(|s| {
|
|
||||||
let Some((k, _)) = s.split_once(':') else {
|
|
||||||
return true;
|
|
||||||
};
|
|
||||||
match k {
|
|
||||||
"width" | "max-width" | "min-width" | "height" | "max-height"
|
|
||||||
| "min-height" => false,
|
|
||||||
_ => true,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join(";");
|
|
||||||
if let Err(e) = el.set_attribute("style", &style) {
|
|
||||||
error!("Failed to set style attribute: {e}");
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}),
|
|
||||||
// Open links in new tab
|
// Open links in new tab
|
||||||
element!("a[href]", |el| {
|
element!("a[href]", |el| {
|
||||||
el.set_attribute("target", "_blank").unwrap();
|
el.set_attribute("target", "_blank").unwrap();
|
||||||
@ -715,7 +568,7 @@ fn compute_offset_limit(
|
|||||||
first: Option<i32>,
|
first: Option<i32>,
|
||||||
last: Option<i32>,
|
last: Option<i32>,
|
||||||
) -> (i32, i32) {
|
) -> (i32, i32) {
|
||||||
let default_page_size = 10000;
|
let default_page_size = 100;
|
||||||
match (after, before, first, last) {
|
match (after, before, first, last) {
|
||||||
// Reasonable defaults
|
// Reasonable defaults
|
||||||
(None, None, None, None) => (0, default_page_size),
|
(None, None, None, None) => (0, default_page_size),
|
||||||
@ -745,51 +598,14 @@ fn compute_offset_limit(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug)]
|
||||||
pub struct Query {
|
pub struct Query {
|
||||||
pub unread_only: bool,
|
pub unread_only: bool,
|
||||||
pub tags: Vec<String>,
|
pub tag: Option<String>,
|
||||||
pub uids: Vec<String>,
|
pub uid: Option<String>,
|
||||||
pub remainder: Vec<String>,
|
pub remainder: Vec<String>,
|
||||||
pub is_notmuch: bool,
|
pub is_notmuch: bool,
|
||||||
pub is_newsreader: bool,
|
pub is_newsreader: bool,
|
||||||
pub is_tantivy: bool,
|
|
||||||
pub is_snoozed: bool,
|
|
||||||
pub corpus: Option<Corpus>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Query {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
|
|
||||||
if self.unread_only {
|
|
||||||
write!(f, "is:unread ")?;
|
|
||||||
}
|
|
||||||
for tag in &self.tags {
|
|
||||||
write!(f, "tag:{tag} ")?;
|
|
||||||
}
|
|
||||||
for uid in &self.uids {
|
|
||||||
write!(f, "id:{uid} ")?;
|
|
||||||
}
|
|
||||||
if self.is_notmuch {
|
|
||||||
write!(f, "is:mail ")?;
|
|
||||||
}
|
|
||||||
if self.is_newsreader {
|
|
||||||
write!(f, "is:newsreader ")?;
|
|
||||||
}
|
|
||||||
if self.is_newsreader {
|
|
||||||
write!(f, "is:news ")?;
|
|
||||||
}
|
|
||||||
if self.is_snoozed {
|
|
||||||
write!(f, "is:snoozed ")?;
|
|
||||||
}
|
|
||||||
match self.corpus {
|
|
||||||
Some(c) => write!(f, "corpus:{c:?}")?,
|
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
for rem in &self.remainder {
|
|
||||||
write!(f, "{rem} ")?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Query {
|
impl Query {
|
||||||
@ -804,25 +620,13 @@ impl Query {
|
|||||||
if self.unread_only {
|
if self.unread_only {
|
||||||
parts.push("is:unread".to_string());
|
parts.push("is:unread".to_string());
|
||||||
}
|
}
|
||||||
for tag in &self.tags {
|
if let Some(site) = &self.tag {
|
||||||
parts.push(format!("tag:{tag}"));
|
parts.push(format!("tag:{site}"));
|
||||||
}
|
}
|
||||||
for uid in &self.uids {
|
if let Some(uid) = &self.uid {
|
||||||
parts.push(uid.clone());
|
parts.push(uid.clone());
|
||||||
}
|
}
|
||||||
for r in &self.remainder {
|
parts.extend(self.remainder.clone());
|
||||||
// Rewrite "to:" to include ExtraTo:. ExtraTo: is configured in
|
|
||||||
// notmuch-config to index Delivered-To and X-Original-To headers.
|
|
||||||
if r.starts_with("to:") {
|
|
||||||
parts.push("(".to_string());
|
|
||||||
parts.push(r.to_string());
|
|
||||||
parts.push("OR".to_string());
|
|
||||||
parts.push(r.replace("to:", "ExtraTo:"));
|
|
||||||
parts.push(")".to_string());
|
|
||||||
} else {
|
|
||||||
parts.push(r.to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
parts.join(" ")
|
parts.join(" ")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -831,137 +635,44 @@ impl FromStr for Query {
|
|||||||
type Err = Infallible;
|
type Err = Infallible;
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
let mut unread_only = false;
|
let mut unread_only = false;
|
||||||
let mut tags = Vec::new();
|
let mut tag = None;
|
||||||
let mut uids = Vec::new();
|
let mut uid = None;
|
||||||
let mut remainder = Vec::new();
|
let mut remainder = Vec::new();
|
||||||
let mut is_notmuch = false;
|
let mut is_notmuch = false;
|
||||||
let mut is_newsreader = false;
|
let mut is_newsreader = false;
|
||||||
let mut is_tantivy = false;
|
|
||||||
let mut is_snoozed = false;
|
|
||||||
let mut corpus = None;
|
|
||||||
for word in s.split_whitespace() {
|
for word in s.split_whitespace() {
|
||||||
if word == "is:unread" {
|
if word == "is:unread" {
|
||||||
unread_only = true
|
unread_only = true
|
||||||
} else if word.starts_with("tag:") {
|
} else if word.starts_with("tag:") {
|
||||||
let t = &word["tag:".len()..];
|
tag = Some(word["tag:".len()..].to_string())
|
||||||
// Per-address emails are faked as `tag:@<domain>/<username>`, rewrite to `to:` form
|
|
||||||
if t.starts_with('@') && t.contains('.') {
|
|
||||||
let t = match t.split_once('/') {
|
|
||||||
None => format!("to:{t}"),
|
|
||||||
Some((domain, user)) => format!("to:{user}{domain}"),
|
|
||||||
};
|
|
||||||
remainder.push(t);
|
|
||||||
} else {
|
|
||||||
tags.push(t.to_string());
|
|
||||||
};
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
} else if word.starts_with("tag:") {
|
} else if word.starts_with("tag:") {
|
||||||
// Any tag that doesn't match site_prefix should explicitly set the site to something not in the
|
// Any tag that doesn't match site_prefix should explicitly set the site to something not in the
|
||||||
// database
|
// database
|
||||||
site = Some(NON_EXISTENT_SITE_NAME.to_string());
|
site = Some(NON_EXISTENT_SITE_NAME.to_string());
|
||||||
*/
|
*/
|
||||||
} else if word.starts_with("corpus:") {
|
|
||||||
let c = word["corpus:".len()..].to_string();
|
|
||||||
corpus = c.parse::<Corpus>().map(|c| Some(c)).unwrap_or_else(|e| {
|
|
||||||
warn!("Error parsing corpus '{c}': {e:?}");
|
|
||||||
None
|
|
||||||
});
|
|
||||||
} else if is_newsreader_thread(word) {
|
} else if is_newsreader_thread(word) {
|
||||||
uids.push(word.to_string());
|
uid = Some(extract_thread_id(word).to_string())
|
||||||
} else if is_notmuch_thread_or_id(word) {
|
|
||||||
uids.push(word.to_string());
|
|
||||||
} else if word == "is:mail" || word == "is:email" || word == "is:notmuch" {
|
} else if word == "is:mail" || word == "is:email" || word == "is:notmuch" {
|
||||||
is_notmuch = true;
|
is_notmuch = true;
|
||||||
} else if word == "is:news" {
|
} else if word == "is:news" || word == "is:newsreader" {
|
||||||
is_newsreader = true;
|
is_newsreader = true;
|
||||||
} else if word == "is:newsreader" {
|
|
||||||
is_newsreader = true;
|
|
||||||
} else if word == "is:snoozed" {
|
|
||||||
is_snoozed = true;
|
|
||||||
} else {
|
} else {
|
||||||
remainder.push(word.to_string());
|
remainder.push(word.to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// If we don't see any explicit filters for a corpus, flip them all on
|
// If we don't see any explicit filters for a corpus, flip them all on
|
||||||
if corpus.is_none() && !(is_notmuch || is_tantivy || is_newsreader) {
|
if !(is_notmuch || is_newsreader) {
|
||||||
is_notmuch = true;
|
|
||||||
is_newsreader = true;
|
is_newsreader = true;
|
||||||
is_tantivy = true;
|
is_notmuch = true;
|
||||||
}
|
}
|
||||||
Ok(Query {
|
Ok(Query {
|
||||||
unread_only,
|
unread_only,
|
||||||
tags,
|
tag,
|
||||||
uids,
|
uid,
|
||||||
remainder,
|
remainder,
|
||||||
is_notmuch,
|
is_notmuch,
|
||||||
is_newsreader,
|
is_newsreader,
|
||||||
is_tantivy,
|
|
||||||
is_snoozed,
|
|
||||||
corpus,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub struct ThreadSummaryRecord {
|
|
||||||
pub site: Option<String>,
|
|
||||||
pub date: Option<NaiveDateTime>,
|
|
||||||
pub is_read: Option<bool>,
|
|
||||||
pub title: Option<String>,
|
|
||||||
pub uid: String,
|
|
||||||
pub name: Option<String>,
|
|
||||||
pub corpus: Corpus,
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn thread_summary_from_row(r: ThreadSummaryRecord) -> ThreadSummary {
|
|
||||||
let site = r.site.unwrap_or("UNKOWN TAG".to_string());
|
|
||||||
let mut tags = vec![format!("{NEWSREADER_TAG_PREFIX}{site}")];
|
|
||||||
if !r.is_read.unwrap_or(true) {
|
|
||||||
tags.push("unread".to_string());
|
|
||||||
};
|
|
||||||
let mut title = r.title.unwrap_or("NO TITLE".to_string());
|
|
||||||
title = clean_title(&title).await.expect("failed to clean title");
|
|
||||||
ThreadSummary {
|
|
||||||
thread: format!("{NEWSREADER_THREAD_PREFIX}{}", r.uid),
|
|
||||||
timestamp: r.date.expect("post missing date").and_utc().timestamp() as isize,
|
|
||||||
date_relative: format!("{:?}", r.date),
|
|
||||||
//date_relative: "TODO date_relative".to_string(),
|
|
||||||
matched: 0,
|
|
||||||
total: 1,
|
|
||||||
authors: r.name.unwrap_or_else(|| site.clone()),
|
|
||||||
subject: title,
|
|
||||||
tags,
|
|
||||||
corpus: r.corpus,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
async fn clean_title(title: &str) -> Result<String, ServerError> {
|
|
||||||
// Make title HTML so html parsers work
|
|
||||||
let mut title = format!("<html>{title}</html>");
|
|
||||||
let title_tranformers: Vec<Box<dyn Transformer>> =
|
|
||||||
vec![Box::new(EscapeHtml), Box::new(StripHtml)];
|
|
||||||
// Make title HTML so html parsers work
|
|
||||||
title = format!("<html>{title}</html>");
|
|
||||||
for t in title_tranformers.iter() {
|
|
||||||
if t.should_run(&None, &title) {
|
|
||||||
title = t.transform(&None, &title).await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(title)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::{SanitizeHtml, Transformer};
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn strip_sizes() -> Result<(), Box<dyn std::error::Error>> {
|
|
||||||
let ss = SanitizeHtml {
|
|
||||||
cid_prefix: "",
|
|
||||||
base_url: &None,
|
|
||||||
};
|
|
||||||
let input = r#"<p width=16 height=16 style="color:blue;width:16px;height:16px;">This el has width and height attributes and inline styles</p>"#;
|
|
||||||
let want = r#"<p style="color:blue;">This el has width and height attributes and inline styles</p>"#;
|
|
||||||
let got = ss.transform(&None, input).await?;
|
|
||||||
assert_eq!(got, want);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@ -1,82 +1,59 @@
|
|||||||
use std::collections::HashMap;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use cacher::FilesystemCacher;
|
use cacher::FilesystemCacher;
|
||||||
use futures::{stream::FuturesUnordered, StreamExt};
|
use log::info;
|
||||||
use letterbox_shared::compute_color;
|
|
||||||
use maplit::hashmap;
|
use maplit::hashmap;
|
||||||
use scraper::Selector;
|
use scraper::Selector;
|
||||||
|
use shared::compute_color;
|
||||||
use sqlx::postgres::PgPool;
|
use sqlx::postgres::PgPool;
|
||||||
use tracing::{error, info, instrument, warn};
|
use tokio::sync::Mutex;
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
clean_title, compute_offset_limit,
|
compute_offset_limit,
|
||||||
|
config::Config,
|
||||||
error::ServerError,
|
error::ServerError,
|
||||||
graphql::{Corpus, NewsPost, Tag, Thread, ThreadSummary},
|
graphql::{NewsPost, Tag, Thread, ThreadSummary},
|
||||||
thread_summary_from_row, AddOutlink, FrameImages, Query, SanitizeHtml, SlurpContents,
|
AddOutlink, EscapeHtml, FrameImages, InlineStyle, Query, SanitizeHtml, SlurpContents,
|
||||||
StripHtml, ThreadSummaryRecord, Transformer, NEWSREADER_TAG_PREFIX, NEWSREADER_THREAD_PREFIX,
|
StripHtml, Transformer,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn is_newsreader_query(query: &Query) -> bool {
|
const TAG_PREFIX: &'static str = "News/";
|
||||||
query.is_newsreader || query.corpus == Some(Corpus::Newsreader)
|
const THREAD_PREFIX: &'static str = "news:";
|
||||||
|
|
||||||
|
pub fn is_newsreader_search(query: &str) -> bool {
|
||||||
|
query.contains(TAG_PREFIX)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_newsreader_thread(query: &str) -> bool {
|
pub fn is_newsreader_thread(query: &str) -> bool {
|
||||||
query.starts_with(NEWSREADER_THREAD_PREFIX)
|
query.starts_with(THREAD_PREFIX)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn extract_thread_id(query: &str) -> &str {
|
pub fn extract_thread_id(query: &str) -> &str {
|
||||||
if query.starts_with(NEWSREADER_THREAD_PREFIX) {
|
&query[THREAD_PREFIX.len()..]
|
||||||
&query[NEWSREADER_THREAD_PREFIX.len()..]
|
|
||||||
} else {
|
|
||||||
query
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn extract_site(tag: &str) -> &str {
|
pub fn extract_site(tag: &str) -> &str {
|
||||||
&tag[NEWSREADER_TAG_PREFIX.len()..]
|
&tag[TAG_PREFIX.len()..]
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn make_news_tag(tag: &str) -> String {
|
pub fn make_news_tag(tag: &str) -> String {
|
||||||
format!("tag:{NEWSREADER_TAG_PREFIX}{tag}")
|
format!("tag:{TAG_PREFIX}{tag}")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn site_from_tags(tags: &[String]) -> Option<String> {
|
|
||||||
for t in tags {
|
|
||||||
if t.starts_with(NEWSREADER_TAG_PREFIX) {
|
|
||||||
return Some(extract_site(t).to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(name = "newsreader::count", skip_all, fields(query=%query))]
|
|
||||||
pub async fn count(pool: &PgPool, query: &Query) -> Result<usize, ServerError> {
|
pub async fn count(pool: &PgPool, query: &Query) -> Result<usize, ServerError> {
|
||||||
if !is_newsreader_query(query) {
|
if !query.remainder.is_empty() {
|
||||||
return Ok(0);
|
// TODO: handle full text search against all sites, for now, early return if search words
|
||||||
}
|
// are specified.
|
||||||
let site = site_from_tags(&query.tags);
|
|
||||||
if !query.tags.is_empty() && site.is_none() {
|
|
||||||
// Newsreader can only handle all sites read/unread queries, anything with a non-site tag
|
|
||||||
// isn't supported
|
|
||||||
return Ok(0);
|
return Ok(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
let search_term = query.remainder.join(" ");
|
let row = sqlx::query_file!("sql/count.sql", query.tag, query.unread_only)
|
||||||
let search_term = search_term.trim();
|
|
||||||
let search_term = if search_term.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(search_term)
|
|
||||||
};
|
|
||||||
// TODO: add support for looking for search_term in title and site
|
|
||||||
let row = sqlx::query_file!("sql/count.sql", site, query.unread_only, search_term)
|
|
||||||
.fetch_one(pool)
|
.fetch_one(pool)
|
||||||
.await?;
|
.await?;
|
||||||
Ok(row.count.unwrap_or(0).try_into().unwrap_or(0))
|
Ok(row.count.unwrap_or(0).try_into().unwrap_or(0))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(name = "newsreader::search", skip_all, fields(query=%query))]
|
|
||||||
pub async fn search(
|
pub async fn search(
|
||||||
pool: &PgPool,
|
pool: &PgPool,
|
||||||
after: Option<i32>,
|
after: Option<i32>,
|
||||||
@ -86,17 +63,9 @@ pub async fn search(
|
|||||||
query: &Query,
|
query: &Query,
|
||||||
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
|
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
|
||||||
info!("search({after:?} {before:?} {first:?} {last:?} {query:?}");
|
info!("search({after:?} {before:?} {first:?} {last:?} {query:?}");
|
||||||
if query.is_snoozed {
|
if !query.remainder.is_empty() {
|
||||||
warn!("TODO implement snooze for newsreader::search");
|
// TODO: handle full text search against all sites, for now, early return if search words
|
||||||
return Ok(Vec::new());
|
// are specified.
|
||||||
}
|
|
||||||
if !is_newsreader_query(query) {
|
|
||||||
return Ok(Vec::new());
|
|
||||||
}
|
|
||||||
let site = site_from_tags(&query.tags);
|
|
||||||
if !query.tags.is_empty() && site.is_none() {
|
|
||||||
// Newsreader can only handle all sites read/unread queries, anything with a non-site tag
|
|
||||||
// isn't supported
|
|
||||||
return Ok(Vec::new());
|
return Ok(Vec::new());
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -108,48 +77,53 @@ pub async fn search(
|
|||||||
limit = limit + 1;
|
limit = limit + 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let site = query.tag.as_ref().map(|t| extract_site(&t).to_string());
|
||||||
info!(
|
info!(
|
||||||
"search offset {offset} limit {limit} site {site:?} unread_only {}",
|
"search offset {offset} limit {limit} site {site:?} unread_only {}",
|
||||||
query.unread_only
|
query.unread_only
|
||||||
);
|
);
|
||||||
let search_term = query.remainder.join(" ");
|
|
||||||
let search_term = search_term.trim();
|
|
||||||
let search_term = if search_term.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(search_term)
|
|
||||||
};
|
|
||||||
|
|
||||||
// TODO: add support for looking for search_term in title and site
|
// TODO: further limit results to include query.remainder if set
|
||||||
let rows = sqlx::query_file!(
|
let rows = sqlx::query_file!(
|
||||||
"sql/threads.sql",
|
"sql/threads.sql",
|
||||||
site,
|
site,
|
||||||
query.unread_only,
|
query.unread_only,
|
||||||
offset as i64,
|
offset as i64,
|
||||||
limit as i64,
|
limit as i64
|
||||||
search_term
|
|
||||||
)
|
)
|
||||||
.fetch_all(pool)
|
.fetch_all(pool)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let mut res = Vec::new();
|
let mut res = Vec::new();
|
||||||
for (i, r) in rows.into_iter().enumerate() {
|
for (i, r) in rows.into_iter().enumerate() {
|
||||||
|
let site = r.site.unwrap_or("UNKOWN TAG".to_string());
|
||||||
|
let mut tags = vec![format!("{TAG_PREFIX}{site}")];
|
||||||
|
if !r.is_read.unwrap_or(true) {
|
||||||
|
tags.push("unread".to_string());
|
||||||
|
};
|
||||||
|
let mut title = r.title.unwrap_or("NO TITLE".to_string());
|
||||||
|
title = clean_title(&title).await.expect("failed to clean title");
|
||||||
res.push((
|
res.push((
|
||||||
i as i32 + offset,
|
i as i32 + offset,
|
||||||
thread_summary_from_row(ThreadSummaryRecord {
|
ThreadSummary {
|
||||||
site: r.site,
|
thread: format!("{THREAD_PREFIX}{}", r.uid),
|
||||||
date: r.date,
|
timestamp: r
|
||||||
is_read: r.is_read,
|
.date
|
||||||
title: r.title,
|
.expect("post missing date")
|
||||||
uid: r.uid,
|
.assume_utc()
|
||||||
name: r.name,
|
.unix_timestamp() as isize,
|
||||||
corpus: Corpus::Newsreader,
|
date_relative: "TODO date_relative".to_string(),
|
||||||
})
|
matched: 0,
|
||||||
.await,
|
total: 1,
|
||||||
|
authors: r.name.unwrap_or_else(|| site.clone()),
|
||||||
|
subject: title,
|
||||||
|
tags,
|
||||||
|
},
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
#[instrument(name = "newsreader::tags", skip_all, fields(needs_unread=%_needs_unread))]
|
|
||||||
pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
|
pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
|
||||||
// TODO: optimize query by using needs_unread
|
// TODO: optimize query by using needs_unread
|
||||||
let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?;
|
let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?;
|
||||||
@ -157,10 +131,7 @@ pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, Server
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|tag| {
|
.map(|tag| {
|
||||||
let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
|
let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
|
||||||
let name = format!(
|
let name = format!("{TAG_PREFIX}{}", tag.site.expect("tag must have site"));
|
||||||
"{NEWSREADER_TAG_PREFIX}{}",
|
|
||||||
tag.site.expect("tag must have site")
|
|
||||||
);
|
|
||||||
let hex = compute_color(&name);
|
let hex = compute_color(&name);
|
||||||
Tag {
|
Tag {
|
||||||
name,
|
name,
|
||||||
@ -173,15 +144,14 @@ pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, Server
|
|||||||
Ok(tags)
|
Ok(tags)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(name = "newsreader::thread", skip_all, fields(thread_id=%thread_id))]
|
|
||||||
pub async fn thread(
|
pub async fn thread(
|
||||||
cacher: &FilesystemCacher,
|
config: &Config,
|
||||||
pool: &PgPool,
|
pool: &PgPool,
|
||||||
thread_id: String,
|
thread_id: String,
|
||||||
) -> Result<Thread, ServerError> {
|
) -> Result<Thread, ServerError> {
|
||||||
let id = thread_id
|
let id = thread_id
|
||||||
.strip_prefix(NEWSREADER_THREAD_PREFIX)
|
.strip_prefix(THREAD_PREFIX)
|
||||||
.expect("news thread doesn't start with '{NEWSREADER_THREAD_PREFIX}'")
|
.expect("news thread doesn't start with '{THREAD_PREFIX}'")
|
||||||
.to_string();
|
.to_string();
|
||||||
|
|
||||||
let r = sqlx::query_file!("sql/thread.sql", id)
|
let r = sqlx::query_file!("sql/thread.sql", id)
|
||||||
@ -190,32 +160,50 @@ pub async fn thread(
|
|||||||
|
|
||||||
let slug = r.site.unwrap_or("no-slug".to_string());
|
let slug = r.site.unwrap_or("no-slug".to_string());
|
||||||
let site = r.name.unwrap_or("NO SITE".to_string());
|
let site = r.name.unwrap_or("NO SITE".to_string());
|
||||||
// TODO: remove the various places that have this as an Option
|
let default_homepage = "http://no-homepage";
|
||||||
let link = Some(Url::parse(&r.link)?);
|
let link = &r
|
||||||
let mut body = r.clean_summary.unwrap_or("NO SUMMARY".to_string());
|
.link
|
||||||
let body_transformers: Vec<Box<dyn Transformer>> = vec![
|
.as_ref()
|
||||||
|
.map(|h| {
|
||||||
|
if h.is_empty() {
|
||||||
|
default_homepage.to_string()
|
||||||
|
} else {
|
||||||
|
h.to_string()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.map(|h| Url::parse(&h).ok())
|
||||||
|
.flatten();
|
||||||
|
let mut body = r.summary.unwrap_or("NO SUMMARY".to_string());
|
||||||
|
// TODO: add site specific cleanups. For example:
|
||||||
|
// * Grafana does <div class="image-wrapp"><img class="lazyload>"<img src="/media/...>"</img></div>
|
||||||
|
// * Some sites appear to be HTML encoded, unencode them, i.e. imperialviolent
|
||||||
|
let cacher = Arc::new(Mutex::new(FilesystemCacher::new(&config.slurp_cache_path)?));
|
||||||
|
let body_tranformers: Vec<Box<dyn Transformer>> = vec![
|
||||||
Box::new(SlurpContents {
|
Box::new(SlurpContents {
|
||||||
cacher,
|
cacher,
|
||||||
inline_css: true,
|
site_selectors: &config.slurp_site_selectors,
|
||||||
site_selectors: slurp_contents_selectors(),
|
|
||||||
}),
|
}),
|
||||||
Box::new(FrameImages),
|
Box::new(FrameImages),
|
||||||
Box::new(AddOutlink),
|
Box::new(AddOutlink),
|
||||||
// TODO: causes doubling of images in cloudflare blogs
|
Box::new(EscapeHtml),
|
||||||
//Box::new(EscapeHtml),
|
|
||||||
Box::new(SanitizeHtml {
|
Box::new(SanitizeHtml {
|
||||||
cid_prefix: "",
|
cid_prefix: "",
|
||||||
base_url: &link,
|
base_url: &link,
|
||||||
}),
|
}),
|
||||||
|
Box::new(InlineStyle),
|
||||||
];
|
];
|
||||||
for t in body_transformers.iter() {
|
for t in body_tranformers.iter() {
|
||||||
if t.should_run(&link, &body) {
|
if t.should_run(&link, &body) {
|
||||||
body = t.transform(&link, &body).await?;
|
body = t.transform(&link, &body).await?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
|
let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
|
||||||
let is_read = r.is_read.unwrap_or(false);
|
let is_read = r.is_read.unwrap_or(false);
|
||||||
let timestamp = r.date.expect("post missing date").and_utc().timestamp();
|
let timestamp = r
|
||||||
|
.date
|
||||||
|
.expect("post missing date")
|
||||||
|
.assume_utc()
|
||||||
|
.unix_timestamp();
|
||||||
Ok(Thread::News(NewsPost {
|
Ok(Thread::News(NewsPost {
|
||||||
thread_id,
|
thread_id,
|
||||||
is_read,
|
is_read,
|
||||||
@ -230,157 +218,28 @@ pub async fn thread(
|
|||||||
timestamp,
|
timestamp,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
#[instrument(name = "newsreader::set_read_status", skip_all, fields(query=%query,unread=%unread))]
|
|
||||||
pub async fn set_read_status<'ctx>(
|
pub async fn set_read_status<'ctx>(
|
||||||
pool: &PgPool,
|
pool: &PgPool,
|
||||||
query: &Query,
|
query: &str,
|
||||||
unread: bool,
|
unread: bool,
|
||||||
) -> Result<bool, ServerError> {
|
) -> Result<bool, ServerError> {
|
||||||
// TODO: make single query when query.uids.len() > 1
|
let query: Query = query.parse()?;
|
||||||
let uids: Vec<_> = query
|
sqlx::query_file!("sql/set_unread.sql", !unread, query.uid)
|
||||||
.uids
|
|
||||||
.iter()
|
|
||||||
.filter(|uid| is_newsreader_thread(uid))
|
|
||||||
.map(
|
|
||||||
|uid| extract_thread_id(uid), // TODO strip prefix
|
|
||||||
)
|
|
||||||
.collect();
|
|
||||||
for uid in uids {
|
|
||||||
sqlx::query_file!("sql/set_unread.sql", !unread, uid)
|
|
||||||
.execute(pool)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
#[instrument(name = "newsreader::refresh", skip_all)]
|
|
||||||
pub async fn refresh<'ctx>(pool: &PgPool, cacher: &FilesystemCacher) -> Result<bool, ServerError> {
|
|
||||||
async fn update_search_summary(
|
|
||||||
pool: &PgPool,
|
|
||||||
cacher: &FilesystemCacher,
|
|
||||||
link: String,
|
|
||||||
body: String,
|
|
||||||
id: i32,
|
|
||||||
) -> Result<(), ServerError> {
|
|
||||||
let slurp_contents = SlurpContents {
|
|
||||||
cacher,
|
|
||||||
inline_css: true,
|
|
||||||
site_selectors: slurp_contents_selectors(),
|
|
||||||
};
|
|
||||||
let strip_html = StripHtml;
|
|
||||||
|
|
||||||
info!("adding {link} to search index");
|
|
||||||
let mut body = body;
|
|
||||||
if let Ok(link) = Url::parse(&link) {
|
|
||||||
let link = Some(link);
|
|
||||||
if slurp_contents.should_run(&link, &body) {
|
|
||||||
body = slurp_contents.transform(&link, &body).await?;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
error!("failed to parse link: {}", link);
|
|
||||||
}
|
|
||||||
body = strip_html.transform(&None, &body).await?;
|
|
||||||
sqlx::query!(
|
|
||||||
"UPDATE post SET search_summary = $1 WHERE id = $2",
|
|
||||||
body,
|
|
||||||
id
|
|
||||||
)
|
|
||||||
.execute(pool)
|
.execute(pool)
|
||||||
.await?;
|
.await?;
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut unordered: FuturesUnordered<_> = sqlx::query_file!("sql/need-search-summary.sql",)
|
|
||||||
.fetch_all(pool)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|r| {
|
|
||||||
let Some(body) = r.clean_summary else {
|
|
||||||
error!("clean_summary missing for {}", r.link);
|
|
||||||
return None;
|
|
||||||
};
|
|
||||||
let id = r.id;
|
|
||||||
Some(update_search_summary(pool, cacher, r.link, body, id))
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
while let Some(res) = unordered.next().await {
|
|
||||||
//let res = res;
|
|
||||||
match res {
|
|
||||||
Ok(()) => {}
|
|
||||||
Err(err) => {
|
|
||||||
info!("failed refresh {err:?}");
|
|
||||||
// TODO:
|
|
||||||
//fd.error = Some(err);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
Ok(true)
|
Ok(true)
|
||||||
}
|
}
|
||||||
|
async fn clean_title(title: &str) -> Result<String, ServerError> {
|
||||||
fn slurp_contents_selectors() -> HashMap<String, Vec<Selector>> {
|
// Make title HTML so html parsers work
|
||||||
hashmap![
|
let mut title = format!("<html>{title}</html>");
|
||||||
"atmeta.com".to_string() => vec![
|
let title_tranformers: Vec<Box<dyn Transformer>> =
|
||||||
Selector::parse("div.entry-content").unwrap(),
|
vec![Box::new(EscapeHtml), Box::new(StripHtml)];
|
||||||
],
|
// Make title HTML so html parsers work
|
||||||
"blog.prusa3d.com".to_string() => vec![
|
title = format!("<html>{title}</html>");
|
||||||
Selector::parse("article.content .post-block").unwrap(),
|
for t in title_tranformers.iter() {
|
||||||
],
|
if t.should_run(&None, &title) {
|
||||||
"blog.cloudflare.com".to_string() => vec![
|
title = t.transform(&None, &title).await?;
|
||||||
Selector::parse(".author-lists .author-name-tooltip").unwrap(),
|
}
|
||||||
Selector::parse(".post-full-content").unwrap()
|
}
|
||||||
],
|
Ok(title)
|
||||||
"blog.zsa.io".to_string() => vec![
|
|
||||||
Selector::parse("section.blog-article").unwrap(),
|
|
||||||
],
|
|
||||||
"engineering.fb.com".to_string() => vec![
|
|
||||||
Selector::parse("article").unwrap(),
|
|
||||||
],
|
|
||||||
"grafana.com".to_string() => vec![
|
|
||||||
Selector::parse(".blog-content").unwrap(),
|
|
||||||
],
|
|
||||||
"hackaday.com".to_string() => vec![
|
|
||||||
Selector::parse("div.entry-featured-image").unwrap(),
|
|
||||||
Selector::parse("div.entry-content").unwrap()
|
|
||||||
],
|
|
||||||
"ingowald.blog".to_string() => vec![
|
|
||||||
Selector::parse("article").unwrap(),
|
|
||||||
],
|
|
||||||
"jvns.ca".to_string() => vec![
|
|
||||||
Selector::parse("article").unwrap(),
|
|
||||||
],
|
|
||||||
"mitchellh.com".to_string() => vec![Selector::parse("div.w-full").unwrap()],
|
|
||||||
"natwelch.com".to_string() => vec![
|
|
||||||
Selector::parse("article div.prose").unwrap(),
|
|
||||||
],
|
|
||||||
"seiya.me".to_string() => vec![
|
|
||||||
Selector::parse("header + div").unwrap(),
|
|
||||||
],
|
|
||||||
"rustacean-station.org".to_string() => vec![
|
|
||||||
Selector::parse("article").unwrap(),
|
|
||||||
],
|
|
||||||
"slashdot.org".to_string() => vec![
|
|
||||||
Selector::parse("span.story-byline").unwrap(),
|
|
||||||
Selector::parse("div.p").unwrap(),
|
|
||||||
],
|
|
||||||
"theonion.com".to_string() => vec![
|
|
||||||
// Single image joke w/ title
|
|
||||||
Selector::parse("article > section > div > figure").unwrap(),
|
|
||||||
// Single cartoon
|
|
||||||
Selector::parse("article > div > div > figure").unwrap(),
|
|
||||||
// Image at top of article
|
|
||||||
Selector::parse("article > header > div > div > figure").unwrap(),
|
|
||||||
// Article body
|
|
||||||
Selector::parse("article .entry-content > *").unwrap(),
|
|
||||||
],
|
|
||||||
"trofi.github.io".to_string() => vec![
|
|
||||||
Selector::parse("#content").unwrap(),
|
|
||||||
],
|
|
||||||
"www.redox-os.org".to_string() => vec![
|
|
||||||
Selector::parse("div.content").unwrap(),
|
|
||||||
],
|
|
||||||
"www.smbc-comics.com".to_string() => vec![
|
|
||||||
Selector::parse("img#cc-comic").unwrap(),
|
|
||||||
Selector::parse("div#aftercomic img").unwrap(),
|
|
||||||
],
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
|
|||||||
981
server/src/nm.rs
981
server/src/nm.rs
File diff suppressed because it is too large
Load Diff
@ -1,353 +0,0 @@
|
|||||||
use std::collections::HashSet;
|
|
||||||
|
|
||||||
use log::{debug, error, info, warn};
|
|
||||||
use sqlx::{postgres::PgPool, types::time::PrimitiveDateTime};
|
|
||||||
use tantivy::{
|
|
||||||
collector::{DocSetCollector, TopDocs},
|
|
||||||
doc, query,
|
|
||||||
query::{AllQuery, BooleanQuery, Occur, QueryParser, TermQuery},
|
|
||||||
schema::{Facet, IndexRecordOption, Value},
|
|
||||||
DocAddress, Index, IndexReader, Searcher, TantivyDocument, TantivyError, Term,
|
|
||||||
};
|
|
||||||
use tracing::{info_span, instrument, Instrument};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
compute_offset_limit,
|
|
||||||
error::ServerError,
|
|
||||||
graphql::{Corpus, ThreadSummary},
|
|
||||||
newsreader::{extract_thread_id, is_newsreader_thread},
|
|
||||||
thread_summary_from_row, Query, ThreadSummaryRecord,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub fn is_tantivy_query(query: &Query) -> bool {
|
|
||||||
query.is_tantivy || query.corpus == Some(Corpus::Tantivy)
|
|
||||||
}
|
|
||||||
pub struct TantivyConnection {
|
|
||||||
db_path: String,
|
|
||||||
index: Index,
|
|
||||||
reader: IndexReader,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_index(db_path: &str) -> Result<Index, TantivyError> {
|
|
||||||
Ok(match Index::open_in_dir(db_path) {
|
|
||||||
Ok(idx) => idx,
|
|
||||||
Err(err) => {
|
|
||||||
warn!("Failed to open {db_path}: {err}");
|
|
||||||
create_news_db(db_path)?;
|
|
||||||
Index::open_in_dir(db_path)?
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TantivyConnection {
|
|
||||||
pub fn new(tantivy_db_path: &str) -> Result<TantivyConnection, TantivyError> {
|
|
||||||
let index = get_index(tantivy_db_path)?;
|
|
||||||
let reader = index.reader()?;
|
|
||||||
|
|
||||||
Ok(TantivyConnection {
|
|
||||||
db_path: tantivy_db_path.to_string(),
|
|
||||||
index,
|
|
||||||
reader,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
#[instrument(name = "tantivy::refresh", skip_all)]
|
|
||||||
pub async fn refresh(&self, pool: &PgPool) -> Result<(), ServerError> {
|
|
||||||
let start_time = std::time::Instant::now();
|
|
||||||
let p_uids: Vec<_> = sqlx::query_file!("sql/all-uids.sql")
|
|
||||||
.fetch_all(pool)
|
|
||||||
.instrument(info_span!("postgres query"))
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(|r| r.uid)
|
|
||||||
.collect();
|
|
||||||
info!(
|
|
||||||
"refresh from postgres got {} uids in {}",
|
|
||||||
p_uids.len(),
|
|
||||||
start_time.elapsed().as_secs_f32()
|
|
||||||
);
|
|
||||||
|
|
||||||
let t_span = info_span!("tantivy query");
|
|
||||||
let _enter = t_span.enter();
|
|
||||||
let start_time = std::time::Instant::now();
|
|
||||||
let (searcher, _query) = self.searcher_and_query(&Query::default())?;
|
|
||||||
let docs = searcher.search(&AllQuery, &DocSetCollector)?;
|
|
||||||
let uid = self.index.schema().get_field("uid")?;
|
|
||||||
let t_uids: Vec<_> = docs
|
|
||||||
.into_iter()
|
|
||||||
.map(|doc_address| {
|
|
||||||
searcher
|
|
||||||
.doc(doc_address)
|
|
||||||
.map(|doc: TantivyDocument| {
|
|
||||||
debug!("doc: {doc:#?}");
|
|
||||||
doc.get_first(uid)
|
|
||||||
.expect("uid")
|
|
||||||
.as_str()
|
|
||||||
.expect("as_str")
|
|
||||||
.to_string()
|
|
||||||
})
|
|
||||||
.expect("searcher.doc")
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
drop(_enter);
|
|
||||||
|
|
||||||
info!(
|
|
||||||
"refresh tantivy got {} uids in {}",
|
|
||||||
t_uids.len(),
|
|
||||||
start_time.elapsed().as_secs_f32()
|
|
||||||
);
|
|
||||||
let t_set: HashSet<_> = t_uids.into_iter().collect();
|
|
||||||
let need: Vec<_> = p_uids
|
|
||||||
.into_iter()
|
|
||||||
.filter(|uid| !t_set.contains(uid.as_str()))
|
|
||||||
.collect();
|
|
||||||
if !need.is_empty() {
|
|
||||||
info!(
|
|
||||||
"need to reindex {} uids: {:?}...",
|
|
||||||
need.len(),
|
|
||||||
&need[..need.len().min(10)]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
let batch_size = 1000;
|
|
||||||
let uids: Vec<_> = need[..need.len().min(batch_size)]
|
|
||||||
.into_iter()
|
|
||||||
.cloned()
|
|
||||||
.collect();
|
|
||||||
self.reindex_uids(pool, &uids).await
|
|
||||||
}
|
|
||||||
#[instrument(skip(self, pool))]
|
|
||||||
async fn reindex_uids(&self, pool: &PgPool, uids: &[String]) -> Result<(), ServerError> {
|
|
||||||
if uids.is_empty() {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
// TODO: add SlurpContents and convert HTML to text
|
|
||||||
|
|
||||||
let pool: &PgPool = pool;
|
|
||||||
|
|
||||||
let mut index_writer = self.index.writer(50_000_000)?;
|
|
||||||
let schema = self.index.schema();
|
|
||||||
let site = schema.get_field("site")?;
|
|
||||||
let title = schema.get_field("title")?;
|
|
||||||
let summary = schema.get_field("summary")?;
|
|
||||||
let link = schema.get_field("link")?;
|
|
||||||
let date = schema.get_field("date")?;
|
|
||||||
let is_read = schema.get_field("is_read")?;
|
|
||||||
let uid = schema.get_field("uid")?;
|
|
||||||
let id = schema.get_field("id")?;
|
|
||||||
let tag = schema.get_field("tag")?;
|
|
||||||
|
|
||||||
info!("reindexing {} posts", uids.len());
|
|
||||||
let rows = sqlx::query_file_as!(PostgresDoc, "sql/posts-from-uids.sql", uids)
|
|
||||||
.fetch_all(pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if uids.len() != rows.len() {
|
|
||||||
error!(
|
|
||||||
"Had {} uids and only got {} rows: uids {uids:?}",
|
|
||||||
uids.len(),
|
|
||||||
rows.len()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
for r in rows {
|
|
||||||
let id_term = Term::from_field_text(uid, &r.uid);
|
|
||||||
index_writer.delete_term(id_term);
|
|
||||||
let slug = r.site;
|
|
||||||
let tag_facet = Facet::from(&format!("/News/{slug}"));
|
|
||||||
index_writer.add_document(doc!(
|
|
||||||
site => slug.clone(),
|
|
||||||
title => r.title,
|
|
||||||
// TODO: clean and extract text from HTML
|
|
||||||
summary => r.summary,
|
|
||||||
link => r.link,
|
|
||||||
date => tantivy::DateTime::from_primitive(r.date),
|
|
||||||
is_read => r.is_read,
|
|
||||||
uid => r.uid,
|
|
||||||
id => r.id as u64,
|
|
||||||
tag => tag_facet,
|
|
||||||
))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
info_span!("IndexWriter.commit").in_scope(|| index_writer.commit())?;
|
|
||||||
info_span!("IndexReader.reload").in_scope(|| self.reader.reload())?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
#[instrument(name = "tantivy::reindex_thread", skip_all, fields(query=%query))]
|
|
||||||
pub async fn reindex_thread(&self, pool: &PgPool, query: &Query) -> Result<(), ServerError> {
|
|
||||||
let uids: Vec<_> = query
|
|
||||||
.uids
|
|
||||||
.iter()
|
|
||||||
.filter(|uid| is_newsreader_thread(uid))
|
|
||||||
.map(|uid| extract_thread_id(uid).to_string())
|
|
||||||
.collect();
|
|
||||||
Ok(self.reindex_uids(pool, &uids).await?)
|
|
||||||
}
|
|
||||||
#[instrument(name = "tantivy::reindex_all", skip_all)]
|
|
||||||
pub async fn reindex_all(&self, pool: &PgPool) -> Result<(), ServerError> {
|
|
||||||
let rows = sqlx::query_file!("sql/all-posts.sql")
|
|
||||||
.fetch_all(pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let uids: Vec<String> = rows.into_iter().map(|r| r.uid).collect();
|
|
||||||
self.reindex_uids(pool, &uids).await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
fn searcher_and_query(
|
|
||||||
&self,
|
|
||||||
query: &Query,
|
|
||||||
) -> Result<(Searcher, Box<dyn query::Query>), ServerError> {
|
|
||||||
// TODO: only create one reader
|
|
||||||
// From https://tantivy-search.github.io/examples/basic_search.html
|
|
||||||
// "For a search server you will typically create one reader for the entire lifetime of
|
|
||||||
// your program, and acquire a new searcher for every single request."
|
|
||||||
//
|
|
||||||
// I think there's some challenge in making the reader work if we reindex, so reader my
|
|
||||||
// need to be stored indirectly, and be recreated on reindex
|
|
||||||
// I think creating a reader takes 200-300 ms.
|
|
||||||
let schema = self.index.schema();
|
|
||||||
let searcher = self.reader.searcher();
|
|
||||||
let title = schema.get_field("title")?;
|
|
||||||
let summary = schema.get_field("summary")?;
|
|
||||||
let query_parser = QueryParser::for_index(&self.index, vec![title, summary]);
|
|
||||||
// Tantivy uses '*' to match all docs, not empty string
|
|
||||||
let term = &query.remainder.join(" ");
|
|
||||||
let term = if term.is_empty() { "*" } else { term };
|
|
||||||
info!("query_parser('{term}')");
|
|
||||||
|
|
||||||
let tantivy_query = query_parser.parse_query(&term)?;
|
|
||||||
|
|
||||||
let tag = schema.get_field("tag")?;
|
|
||||||
let is_read = schema.get_field("is_read")?;
|
|
||||||
let mut terms = vec![(Occur::Must, tantivy_query)];
|
|
||||||
for t in &query.tags {
|
|
||||||
let facet = Facet::from(&format!("/{t}"));
|
|
||||||
let facet_term = Term::from_facet(tag, &facet);
|
|
||||||
let facet_term_query = Box::new(TermQuery::new(facet_term, IndexRecordOption::Basic));
|
|
||||||
terms.push((Occur::Must, facet_term_query));
|
|
||||||
}
|
|
||||||
if query.unread_only {
|
|
||||||
info!("searching for unread only");
|
|
||||||
let term = Term::from_field_bool(is_read, false);
|
|
||||||
terms.push((
|
|
||||||
Occur::Must,
|
|
||||||
Box::new(TermQuery::new(term, IndexRecordOption::Basic)),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
let search_query = BooleanQuery::new(terms);
|
|
||||||
Ok((searcher, Box::new(search_query)))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(name="tantivy::count", skip_all, fields(query=%query))]
|
|
||||||
pub async fn count(&self, query: &Query) -> Result<usize, ServerError> {
|
|
||||||
if !is_tantivy_query(query) {
|
|
||||||
return Ok(0);
|
|
||||||
}
|
|
||||||
info!("tantivy::count {query:?}");
|
|
||||||
use tantivy::collector::Count;
|
|
||||||
let (searcher, query) = self.searcher_and_query(&query)?;
|
|
||||||
Ok(searcher.search(&query, &Count)?)
|
|
||||||
}
|
|
||||||
#[instrument(name="tantivy::search", skip_all, fields(query=%query))]
|
|
||||||
pub async fn search(
|
|
||||||
&self,
|
|
||||||
pool: &PgPool,
|
|
||||||
after: Option<i32>,
|
|
||||||
before: Option<i32>,
|
|
||||||
first: Option<i32>,
|
|
||||||
last: Option<i32>,
|
|
||||||
query: &Query,
|
|
||||||
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
|
|
||||||
if !is_tantivy_query(query) {
|
|
||||||
return Ok(Vec::new());
|
|
||||||
}
|
|
||||||
let (offset, mut limit) = compute_offset_limit(after, before, first, last);
|
|
||||||
if before.is_none() {
|
|
||||||
// When searching forward, the +1 is to see if there are more pages of data available.
|
|
||||||
// Searching backwards implies there's more pages forward, because the value represented by
|
|
||||||
// `before` is on the next page.
|
|
||||||
limit = limit + 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
let (searcher, search_query) = self.searcher_and_query(&query)?;
|
|
||||||
info!("Tantivy::search(query '{query:?}', off {offset}, lim {limit}, search_query {search_query:?})");
|
|
||||||
let top_docs = searcher.search(
|
|
||||||
&search_query,
|
|
||||||
&TopDocs::with_limit(limit as usize)
|
|
||||||
.and_offset(offset as usize)
|
|
||||||
.order_by_u64_field("date", tantivy::index::Order::Desc),
|
|
||||||
)?;
|
|
||||||
info!("search found {} docs", top_docs.len());
|
|
||||||
let uid = self.index.schema().get_field("uid")?;
|
|
||||||
let uids = top_docs
|
|
||||||
.into_iter()
|
|
||||||
.map(|(_, doc_address): (u64, DocAddress)| {
|
|
||||||
searcher.doc(doc_address).map(|doc: TantivyDocument| {
|
|
||||||
debug!("doc: {doc:#?}");
|
|
||||||
doc.get_first(uid)
|
|
||||||
.expect("doc missing uid")
|
|
||||||
.as_str()
|
|
||||||
.expect("doc str missing")
|
|
||||||
.to_string()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<String>, TantivyError>>()?;
|
|
||||||
|
|
||||||
//let uids = format!("'{}'", uids.join("','"));
|
|
||||||
info!("uids {uids:?}");
|
|
||||||
let rows = sqlx::query_file!("sql/threads-from-uid.sql", &uids as &[String])
|
|
||||||
.fetch_all(pool)
|
|
||||||
.await?;
|
|
||||||
let mut res = Vec::new();
|
|
||||||
info!("found {} hits joining w/ tantivy", rows.len());
|
|
||||||
for (i, r) in rows.into_iter().enumerate() {
|
|
||||||
res.push((
|
|
||||||
i as i32 + offset,
|
|
||||||
thread_summary_from_row(ThreadSummaryRecord {
|
|
||||||
site: r.site,
|
|
||||||
date: r.date,
|
|
||||||
is_read: r.is_read,
|
|
||||||
title: r.title,
|
|
||||||
uid: r.uid,
|
|
||||||
name: r.name,
|
|
||||||
corpus: Corpus::Tantivy,
|
|
||||||
})
|
|
||||||
.await,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
Ok(res)
|
|
||||||
}
|
|
||||||
/// Deletes the on-disk tantivy index at `self.db_path` and recreates an
/// empty index with the news schema (see `create_news_db`).
pub fn drop_and_load_index(&self) -> Result<(), TantivyError> {
    create_news_db(&self.db_path)
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create_news_db(tantivy_db_path: &str) -> Result<(), TantivyError> {
|
|
||||||
info!("create_news_db");
|
|
||||||
// Don't care if directory didn't exist
|
|
||||||
let _ = std::fs::remove_dir_all(tantivy_db_path);
|
|
||||||
std::fs::create_dir_all(tantivy_db_path)?;
|
|
||||||
use tantivy::schema::*;
|
|
||||||
let mut schema_builder = Schema::builder();
|
|
||||||
schema_builder.add_text_field("site", STRING | STORED);
|
|
||||||
schema_builder.add_text_field("title", TEXT | STORED);
|
|
||||||
schema_builder.add_text_field("summary", TEXT);
|
|
||||||
schema_builder.add_text_field("link", STRING | STORED);
|
|
||||||
schema_builder.add_date_field("date", FAST | INDEXED | STORED);
|
|
||||||
schema_builder.add_bool_field("is_read", FAST | INDEXED | STORED);
|
|
||||||
schema_builder.add_text_field("uid", STRING | STORED);
|
|
||||||
schema_builder.add_u64_field("id", FAST);
|
|
||||||
schema_builder.add_facet_field("tag", FacetOptions::default());
|
|
||||||
|
|
||||||
let schema = schema_builder.build();
|
|
||||||
Index::create_in_dir(tantivy_db_path, schema)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Row shape pulled from Postgres for indexing into tantivy.
/// Field names mirror the tantivy schema built in `create_news_db`.
struct PostgresDoc {
    site: String,            // feed/site identifier
    title: String,           // article title (full-text indexed and stored)
    summary: String,         // article body/summary (full-text indexed, not stored)
    link: String,            // article URL
    date: PrimitiveDateTime, // document date; search results are ordered by this field
    is_read: bool,           // whether the item has been read
    uid: String,             // unique id used to join tantivy hits back to Postgres
    id: i32,                 // numeric id — presumably the Postgres row id; confirm
}
|
|
||||||
@ -1,35 +0,0 @@
|
|||||||
use std::{collections::HashMap, net::SocketAddr};
|
|
||||||
|
|
||||||
use axum::extract::ws::{Message, WebSocket};
|
|
||||||
use letterbox_shared::WebsocketMessage;
|
|
||||||
use tracing::{info, warn};
|
|
||||||
|
|
||||||
/// Tracks live websocket connections, keyed by the peer's socket address.
#[derive(Default)]
pub struct ConnectionTracker {
    // One entry per connected client; broadcasts iterate this map and evict
    // peers whose sockets fail on send.
    peers: HashMap<SocketAddr, WebSocket>,
}
|
|
||||||
|
|
||||||
impl ConnectionTracker {
|
|
||||||
pub async fn add_peer(&mut self, socket: WebSocket, who: SocketAddr) {
|
|
||||||
warn!("adding {who:?} to connection tracker");
|
|
||||||
self.peers.insert(who, socket);
|
|
||||||
self.send_message_all(WebsocketMessage::RefreshMessages)
|
|
||||||
.await;
|
|
||||||
}
|
|
||||||
pub async fn send_message_all(&mut self, msg: WebsocketMessage) {
|
|
||||||
info!("send_message_all {msg}");
|
|
||||||
let m = serde_json::to_string(&msg).expect("failed to json encode WebsocketMessage");
|
|
||||||
let mut bad_peers = Vec::new();
|
|
||||||
for (who, socket) in &mut self.peers.iter_mut() {
|
|
||||||
if let Err(e) = socket.send(Message::Text(m.clone().into())).await {
|
|
||||||
warn!("{:?} is bad, scheduling for removal: {e}", who);
|
|
||||||
bad_peers.push(who.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for b in bad_peers {
|
|
||||||
info!("removing bad peer {b:?}");
|
|
||||||
self.peers.remove(&b);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,59 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
|
|
||||||
<head>
|
|
||||||
<meta charset=utf-8 />
|
|
||||||
<meta name="viewport" content="user-scalable=no, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, minimal-ui">
|
|
||||||
<title>GraphQL Playground</title>
|
|
||||||
<link rel="stylesheet" href="//cdn.jsdelivr.net/npm/graphql-playground-react/build/static/css/index.css" />
|
|
||||||
<link rel="shortcut icon" href="//cdn.jsdelivr.net/npm/graphql-playground-react/build/favicon.png" />
|
|
||||||
<script src="//cdn.jsdelivr.net/npm/graphql-playground-react/build/static/js/middleware.js"></script>
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body>
|
|
||||||
<div id="root">
|
|
||||||
<style>
|
|
||||||
body {
|
|
||||||
background-color: rgb(23, 42, 58);
|
|
||||||
font-family: Open Sans, sans-serif;
|
|
||||||
height: 90vh;
|
|
||||||
}
|
|
||||||
|
|
||||||
#root {
|
|
||||||
height: 100%;
|
|
||||||
width: 100%;
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.loading {
|
|
||||||
font-size: 32px;
|
|
||||||
font-weight: 200;
|
|
||||||
color: rgba(255, 255, 255, .6);
|
|
||||||
margin-left: 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
img {
|
|
||||||
width: 78px;
|
|
||||||
height: 78px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.title {
|
|
||||||
font-weight: 400;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<img src='//cdn.jsdelivr.net/npm/graphql-playground-react/build/logo.png' alt=''>
|
|
||||||
<div class="loading"> Loading
|
|
||||||
<span class="title">GraphQL Playground</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<script>window.addEventListener('load', function (event) {
|
|
||||||
GraphQLPlayground.init(document.getElementById('root'), {
|
|
||||||
// options as 'endpoint' belong here
|
|
||||||
endpoint: "/api/graphql",
|
|
||||||
})
|
|
||||||
})</script>
|
|
||||||
</body>
|
|
||||||
|
|
||||||
</html>
|
|
||||||
@ -1,42 +0,0 @@
|
|||||||
:root {
|
|
||||||
--active-brightness: 0.85;
|
|
||||||
--border-radius: 5px;
|
|
||||||
--box-shadow: 2px 2px 10px;
|
|
||||||
--color-accent: #118bee15;
|
|
||||||
--color-bg: #fff;
|
|
||||||
--color-bg-secondary: #e9e9e9;
|
|
||||||
--color-link: #118bee;
|
|
||||||
--color-secondary: #920de9;
|
|
||||||
--color-secondary-accent: #920de90b;
|
|
||||||
--color-shadow: #f4f4f4;
|
|
||||||
--color-table: #118bee;
|
|
||||||
--color-text: #000;
|
|
||||||
--color-text-secondary: #999;
|
|
||||||
--color-scrollbar: #cacae8;
|
|
||||||
--font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
|
|
||||||
--hover-brightness: 1.2;
|
|
||||||
--justify-important: center;
|
|
||||||
--justify-normal: left;
|
|
||||||
--line-height: 1.5;
|
|
||||||
/*
|
|
||||||
--width-card: 285px;
|
|
||||||
--width-card-medium: 460px;
|
|
||||||
--width-card-wide: 800px;
|
|
||||||
*/
|
|
||||||
--width-content: 1080px;
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (prefers-color-scheme: dark) {
|
|
||||||
:root[color-mode="user"] {
|
|
||||||
--color-accent: #0097fc4f;
|
|
||||||
--color-bg: #333;
|
|
||||||
--color-bg-secondary: #555;
|
|
||||||
--color-link: #0097fc;
|
|
||||||
--color-secondary: #e20de9;
|
|
||||||
--color-secondary-accent: #e20de94f;
|
|
||||||
--color-shadow: #bbbbbb20;
|
|
||||||
--color-table: #0097fc;
|
|
||||||
--color-text: #f7f7f7;
|
|
||||||
--color-text-secondary: #aaa;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,99 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<title>DMARC Report</title>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
{% if report.report_metadata.is_some() %}
|
|
||||||
{% let meta = report.report_metadata.as_ref().unwrap() %}
|
|
||||||
<b>Reporter:</b> {{ meta.org_name }}<br>
|
|
||||||
<b>Contact:</b> {{ meta.email }}<br>
|
|
||||||
<b>Report ID:</b> {{ meta.report_id }}<br>
|
|
||||||
{% if meta.date_range.is_some() %}
|
|
||||||
{% let dr = meta.date_range.as_ref().unwrap() %}
|
|
||||||
<b>Date range:</b>
|
|
||||||
{{ dr.begin }}
|
|
||||||
to
|
|
||||||
{{ dr.end }}
|
|
||||||
<br>
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if report.policy_published.is_some() %}
|
|
||||||
{% let pol = report.policy_published.as_ref().unwrap() %}
|
|
||||||
<b>Policy Published:</b>
|
|
||||||
<ul>
|
|
||||||
<li>Domain: {{ pol.domain }}</li>
|
|
||||||
<li>ADKIM: {{ pol.adkim }}</li>
|
|
||||||
<li>ASPF: {{ pol.aspf }}</li>
|
|
||||||
<li>Policy: {{ pol.p }}</li>
|
|
||||||
<li>Subdomain Policy: {{ pol.sp }}</li>
|
|
||||||
<li>Percent: {{ pol.pct }}</li>
|
|
||||||
</ul>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if report.record.is_some() %}
|
|
||||||
<b>Records:</b>
|
|
||||||
<table style="border-collapse:collapse;width:100%;font-size:0.95em;">
|
|
||||||
<thead>
|
|
||||||
<tr style="background:#f0f0f0;">
|
|
||||||
<th style="border:1px solid #bbb;padding:4px 8px;">Source IP</th>
|
|
||||||
<th style="border:1px solid #bbb;padding:4px 8px;">Count</th>
|
|
||||||
<th style="border:1px solid #bbb;padding:4px 8px;">Header From</th>
|
|
||||||
{% if report.has_envelope_to %}
|
|
||||||
<th style="border:1px solid #bbb;padding:4px 8px;">Envelope To</th>
|
|
||||||
{% endif %}
|
|
||||||
<th style="border:1px solid #bbb;padding:4px 8px;">Disposition</th>
|
|
||||||
<th style="border:1px solid #bbb;padding:4px 8px;">DKIM</th>
|
|
||||||
<th style="border:1px solid #bbb;padding:4px 8px;">SPF</th>
|
|
||||||
<th style="border:1px solid #bbb;padding:4px 8px;">Auth Results</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
{% for rec in report.record.as_ref().unwrap() %}
|
|
||||||
<tr>
|
|
||||||
<td style="border:1px solid #bbb;padding:4px 8px;">{{ rec.source_ip }}</td>
|
|
||||||
<td style="border:1px solid #bbb;padding:4px 8px;">{{ rec.count }}</td>
|
|
||||||
<td style="border:1px solid #bbb;padding:4px 8px;">{{ rec.header_from }}</td>
|
|
||||||
{% if report.has_envelope_to %}
|
|
||||||
<td style="border:1px solid #bbb;padding:4px 8px;">{{ rec.envelope_to }}</td>
|
|
||||||
{% endif %}
|
|
||||||
<td style="border:1px solid #bbb;padding:4px 8px;">{{ rec.disposition }}</td>
|
|
||||||
<td style="border:1px solid #bbb;padding:4px 8px;">{{ rec.dkim }}</td>
|
|
||||||
<td style="border:1px solid #bbb;padding:4px 8px;">{{ rec.spf }}</td>
|
|
||||||
<td style="border:1px solid #bbb;padding:4px 8px;">
|
|
||||||
{% if rec.auth_results.is_some() %}
|
|
||||||
{% let auth = rec.auth_results.as_ref().unwrap() %}
|
|
||||||
{% for dkimres in auth.dkim %}
|
|
||||||
<span style="white-space:nowrap;">
|
|
||||||
DKIM: domain=<b>{{ dkimres.domain }}</b>
|
|
||||||
selector=<b>{{ dkimres.selector }}</b>
|
|
||||||
result=<b>{{ dkimres.result }}</b>
|
|
||||||
</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
{% for spfres in auth.spf %}
|
|
||||||
<span style="white-space:nowrap;">
|
|
||||||
SPF: domain=<b>{{ spfres.domain }}</b>
|
|
||||||
scope=<b>{{ spfres.scope }}</b>
|
|
||||||
result=<b>{{ spfres.result }}</b>
|
|
||||||
</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
{% for reason in rec.reason %}
|
|
||||||
<span style="white-space:nowrap;">Reason: {{ reason }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
{% endif %}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if report.report_metadata.is_none() && report.policy_published.is_none() && report.record.is_none() %}
|
|
||||||
<p>No DMARC summary found.</p>
|
|
||||||
{% endif %}
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
@ -1,109 +0,0 @@
|
|||||||
<style>
|
|
||||||
.ical-flex {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: row;
|
|
||||||
flex-wrap: wrap;
|
|
||||||
align-items: stretch;
|
|
||||||
gap: 0.5em;
|
|
||||||
max-width: 700px;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.ical-flex .summary-block {
|
|
||||||
flex: 1 1 0%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.ical-flex .calendar-block {
|
|
||||||
flex: none;
|
|
||||||
margin-left: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (max-width: 599px) {
|
|
||||||
.ical-flex {
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
.ical-flex>div.summary-block {
|
|
||||||
margin-bottom: 0.5em;
|
|
||||||
margin-left: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.ical-flex>div.calendar-block {
|
|
||||||
margin-left: 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<div class="ical-flex">
|
|
||||||
<div class="summary-block"
|
|
||||||
style="background:#f7f7f7; border-radius:8px; box-shadow:0 2px 8px #bbb; padding:16px 18px; margin:0 0 8px 0; min-width:220px; max-width:700px; font-size:15px; color:#222;">
|
|
||||||
<div
|
|
||||||
style="display: flex; flex-direction: row; flex-wrap: wrap; align-items: flex-start; gap: 0.5em; width: 100%;">
|
|
||||||
<div style="flex: 1 1 220px; min-width: 180px;">
|
|
||||||
<div style="font-size:17px; font-weight:bold; margin-bottom:8px; color:#333;"><b>Summary:</b> {{ summary
|
|
||||||
}}</div>
|
|
||||||
<div style="margin-bottom:4px;"><b>Start:</b> {{ local_fmt_start }}</div>
|
|
||||||
<div style="margin-bottom:4px;"><b>End:</b> {{ local_fmt_end }}</div>
|
|
||||||
{% if !recurrence_display.is_empty() %}
|
|
||||||
<div style="margin-bottom:4px;">
|
|
||||||
<b>Repeats:</b> {{ recurrence_display }}
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
{% if !organizer_cn.is_empty() %}
|
|
||||||
<div style="margin-bottom:4px;"><b>Organizer:</b> {{ organizer_cn }}</div>
|
|
||||||
{% elif !organizer.is_empty() %}
|
|
||||||
<div style="margin-bottom:4px;"><b>Organizer:</b> {{ organizer }}</div>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
{% if all_days.len() > 0 %}
|
|
||||||
<div class="calendar-block" style="flex: none; margin-left: auto; min-width: 180px;">
|
|
||||||
<table class="ical-month"
|
|
||||||
style="border-collapse:collapse; min-width:220px; background:#fff; box-shadow:0 2px 8px #bbb; font-size:14px; margin:0;">
|
|
||||||
<caption
|
|
||||||
style="caption-side:top; text-align:center; font-weight:bold; font-size:16px; padding-bottom:8px 0;">
|
|
||||||
{{ caption }}</caption>
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
{% for wd in ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"] %}
|
|
||||||
<th
|
|
||||||
style="padding:4px 6px; border-bottom:1px solid #ccc; color:#666; font-weight:600; background:#f7f7f7">
|
|
||||||
{{ wd }}</th>
|
|
||||||
{% endfor %}
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
{% for week in all_days|batch(7) %}
|
|
||||||
<tr>
|
|
||||||
{% for day in week %}
|
|
||||||
{% if event_days.contains(day) %}
|
|
||||||
<td
|
|
||||||
data-event-day="{{ day.format("%Y-%m-%d") }}"
|
|
||||||
style="background:#ffd700; color:#222; font-weight:bold; border:1px solid #aaa; border-radius:4px; text-align:center;">
|
|
||||||
{{ day.day() }}
|
|
||||||
</td>
|
|
||||||
{% elif today.is_some() && today.unwrap() == day %}
|
|
||||||
<td
|
|
||||||
style="border:2px solid #2196f3; border-radius:4px; text-align:center; background:#e3f2fd; color:#222; box-shadow:0 0 0 2px #2196f3;">
|
|
||||||
{{ day.day() }}
|
|
||||||
</td>
|
|
||||||
{% else %}
|
|
||||||
<td style="border:1px solid #eee; text-align:center;background:#f7f7f7;color:#bbb;">
|
|
||||||
{{ day.day() }}
|
|
||||||
</td>
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% if !description_paragraphs.is_empty() %}
|
|
||||||
<div style="max-width:700px; width:100%;">
|
|
||||||
{% for p in description_paragraphs %}
|
|
||||||
<p style="margin: 0 0 8px 0; color:#444;">{{ p }}</p>
|
|
||||||
{% endfor %}
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
@ -1,48 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
|
|
||||||
<head>
|
|
||||||
<title>TLS Report</title>
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body>
|
|
||||||
<h3>TLS Report Summary:</h3>
|
|
||||||
<p>Organization: {{ report.organization_name }}</p>
|
|
||||||
<p>Date Range: {{ report.date_range.start_datetime }} to {{ report.date_range.end_datetime }}</p>
|
|
||||||
<p>Contact: {{ report.contact_info }}</p>
|
|
||||||
<p>Report ID: {{ report.report_id }}</p>
|
|
||||||
|
|
||||||
<h4><b>Policies:</b></h4>
|
|
||||||
{% for policy in report.policies %}
|
|
||||||
<h5><b>Policy Domain:</b> {{ policy.policy.policy_domain }}</h5>
|
|
||||||
<ul>
|
|
||||||
<li><b>Policy Type:</b> {{ policy.policy.policy_type }}</li>
|
|
||||||
<li><b>Policy String:</b> {{ policy.policy.policy_string | join(", ") }}</li>
|
|
||||||
<li><b>Successful Sessions:</b> {{ policy.summary.total_successful_session_count }}</li>
|
|
||||||
<li><b>Failed Sessions:</b> {{ policy.summary.total_failure_session_count }}</li>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
<ul>
|
|
||||||
{% for mx_host in policy.policy.mx_host %}
|
|
||||||
<li><b>Hostname:</b> {{ mx_host.hostname }}, <b>Failures:</b> {{ mx_host.failure_count }}, <b>Result:</b> {{
|
|
||||||
mx_host.result_type }}</li>
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
<ul>
|
|
||||||
{% for detail in policy.failure_details %}
|
|
||||||
<li><b>Result:</b> {{ detail.result_type }}, <b>Sending IP:</b> {{ detail.sending_mta_ip }}, <b>Failed
|
|
||||||
Sessions:</b> {{ detail.failed_session_count }}
|
|
||||||
{% if detail.failure_reason_code != "" %}
|
|
||||||
(<b>Reason:</b> {{ detail.failure_reason_code }})
|
|
||||||
{% endif %}
|
|
||||||
</li>
|
|
||||||
(<b>Receiving IP:</b> {{ detail.receiving_ip }})
|
|
||||||
(<b>Receiving MX:</b> {{ detail.receiving_mx_hostname }})
|
|
||||||
(<b>Additional Info:</b> {{ detail.additional_info }})
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
{% endfor %}
|
|
||||||
</body>
|
|
||||||
|
|
||||||
</html>
|
|
||||||
48
server/testdata/dmarc-example-no-envelope-to.xml
vendored
48
server/testdata/dmarc-example-no-envelope-to.xml
vendored
@ -1,48 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8" ?>
|
|
||||||
<feedback>
|
|
||||||
<version>1.0</version>
|
|
||||||
<report_metadata>
|
|
||||||
<org_name>google.com</org_name>
|
|
||||||
<email>noreply-dmarc-support@google.com</email>
|
|
||||||
<extra_contact_info>https://support.google.com/a/answer/2466580</extra_contact_info>
|
|
||||||
<report_id>5142106658860834914</report_id>
|
|
||||||
<date_range>
|
|
||||||
<begin>1755302400</begin>
|
|
||||||
<end>1755388799</end>
|
|
||||||
</date_range>
|
|
||||||
</report_metadata>
|
|
||||||
<policy_published>
|
|
||||||
<domain>xinu.tv</domain>
|
|
||||||
<adkim>s</adkim>
|
|
||||||
<aspf>s</aspf>
|
|
||||||
<p>quarantine</p>
|
|
||||||
<sp>reject</sp>
|
|
||||||
<pct>100</pct>
|
|
||||||
<np>reject</np>
|
|
||||||
</policy_published>
|
|
||||||
<record>
|
|
||||||
<row>
|
|
||||||
<source_ip>74.207.253.222</source_ip>
|
|
||||||
<count>1</count>
|
|
||||||
<policy_evaluated>
|
|
||||||
<disposition>none</disposition>
|
|
||||||
<dkim>pass</dkim>
|
|
||||||
<spf>pass</spf>
|
|
||||||
</policy_evaluated>
|
|
||||||
</row>
|
|
||||||
<identifiers>
|
|
||||||
<header_from>xinu.tv</header_from>
|
|
||||||
</identifiers>
|
|
||||||
<auth_results>
|
|
||||||
<dkim>
|
|
||||||
<domain>xinu.tv</domain>
|
|
||||||
<result>pass</result>
|
|
||||||
<selector>mail</selector>
|
|
||||||
</dkim>
|
|
||||||
<spf>
|
|
||||||
<domain>xinu.tv</domain>
|
|
||||||
<result>pass</result>
|
|
||||||
</spf>
|
|
||||||
</auth_results>
|
|
||||||
</record>
|
|
||||||
</feedback>
|
|
||||||
78
server/testdata/dmarc-example.xml
vendored
78
server/testdata/dmarc-example.xml
vendored
@ -1,78 +0,0 @@
|
|||||||
<?xml version="1.0"?>
|
|
||||||
<feedback xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
|
||||||
<version>1.0</version>
|
|
||||||
<report_metadata>
|
|
||||||
<org_name>Outlook.com</org_name>
|
|
||||||
<email>dmarcreport@microsoft.com</email>
|
|
||||||
<report_id>e6c5a2ce6e074d7d8cd041a0d6f32a3d</report_id>
|
|
||||||
<date_range>
|
|
||||||
<begin>1755302400</begin>
|
|
||||||
<end>1755388800</end>
|
|
||||||
</date_range>
|
|
||||||
</report_metadata>
|
|
||||||
<policy_published>
|
|
||||||
<domain>xinu.tv</domain>
|
|
||||||
<adkim>s</adkim>
|
|
||||||
<aspf>s</aspf>
|
|
||||||
<p>quarantine</p>
|
|
||||||
<sp>reject</sp>
|
|
||||||
<pct>100</pct>
|
|
||||||
<fo>1</fo>
|
|
||||||
</policy_published>
|
|
||||||
<record>
|
|
||||||
<row>
|
|
||||||
<source_ip>74.207.253.222</source_ip>
|
|
||||||
<count>1</count>
|
|
||||||
<policy_evaluated>
|
|
||||||
<disposition>none</disposition>
|
|
||||||
<dkim>pass</dkim>
|
|
||||||
<spf>pass</spf>
|
|
||||||
</policy_evaluated>
|
|
||||||
</row>
|
|
||||||
<identifiers>
|
|
||||||
<envelope_to>msn.com</envelope_to>
|
|
||||||
<envelope_from>xinu.tv</envelope_from>
|
|
||||||
<header_from>xinu.tv</header_from>
|
|
||||||
</identifiers>
|
|
||||||
<auth_results>
|
|
||||||
<dkim>
|
|
||||||
<domain>xinu.tv</domain>
|
|
||||||
<selector>mail</selector>
|
|
||||||
<result>pass</result>
|
|
||||||
</dkim>
|
|
||||||
<spf>
|
|
||||||
<domain>xinu.tv</domain>
|
|
||||||
<scope>mfrom</scope>
|
|
||||||
<result>pass</result>
|
|
||||||
</spf>
|
|
||||||
</auth_results>
|
|
||||||
</record>
|
|
||||||
<record>
|
|
||||||
<row>
|
|
||||||
<source_ip>74.207.253.222</source_ip>
|
|
||||||
<count>1</count>
|
|
||||||
<policy_evaluated>
|
|
||||||
<disposition>none</disposition>
|
|
||||||
<dkim>pass</dkim>
|
|
||||||
<spf>pass</spf>
|
|
||||||
</policy_evaluated>
|
|
||||||
</row>
|
|
||||||
<identifiers>
|
|
||||||
<envelope_to>hotmail.com</envelope_to>
|
|
||||||
<envelope_from>xinu.tv</envelope_from>
|
|
||||||
<header_from>xinu.tv</header_from>
|
|
||||||
</identifiers>
|
|
||||||
<auth_results>
|
|
||||||
<dkim>
|
|
||||||
<domain>xinu.tv</domain>
|
|
||||||
<selector>mail</selector>
|
|
||||||
<result>pass</result>
|
|
||||||
</dkim>
|
|
||||||
<spf>
|
|
||||||
<domain>xinu.tv</domain>
|
|
||||||
<scope>mfrom</scope>
|
|
||||||
<result>pass</result>
|
|
||||||
</spf>
|
|
||||||
</auth_results>
|
|
||||||
</record>
|
|
||||||
</feedback>
|
|
||||||
167
server/testdata/google-calendar-example-2.eml
vendored
167
server/testdata/google-calendar-example-2.eml
vendored
@ -1,167 +0,0 @@
|
|||||||
Return-Path: <couchmoney+caf_=gmail=xinu.tv@gmail.com>
|
|
||||||
Delivered-To: bill@xinu.tv
|
|
||||||
Received: from phx.xinu.tv [74.207.253.222]
|
|
||||||
by nixos-01.h.xinu.tv with IMAP (fetchmail-6.5.1)
|
|
||||||
for <wathiede@localhost> (single-drop); Mon, 25 Aug 2025 14:29:47 -0700 (PDT)
|
|
||||||
Received: from phx.xinu.tv
|
|
||||||
by phx.xinu.tv with LMTP
|
|
||||||
id TPD3E8vVrGjawyMAJR8clQ
|
|
||||||
(envelope-from <couchmoney+caf_=gmail=xinu.tv@gmail.com>)
|
|
||||||
for <bill@xinu.tv>; Mon, 25 Aug 2025 14:29:47 -0700
|
|
||||||
X-Original-To: gmail@xinu.tv
|
|
||||||
Received-SPF: Pass (mailfrom) identity=mailfrom; client-ip=2a00:1450:4864:20::12e; helo=mail-lf1-x12e.google.com; envelope-from=couchmoney+caf_=gmail=xinu.tv@gmail.com; receiver=xinu.tv
|
|
||||||
Authentication-Results: phx.xinu.tv;
|
|
||||||
dkim=pass (2048-bit key; unprotected) header.d=google.com header.i=@google.com header.a=rsa-sha256 header.s=20230601 header.b=4sz9KOqm
|
|
||||||
Received: from mail-lf1-x12e.google.com (mail-lf1-x12e.google.com [IPv6:2a00:1450:4864:20::12e])
|
|
||||||
by phx.xinu.tv (Postfix) with ESMTPS id 2F9058B007
|
|
||||||
for <gmail@xinu.tv>; Mon, 25 Aug 2025 14:29:45 -0700 (PDT)
|
|
||||||
Received: by mail-lf1-x12e.google.com with SMTP id 2adb3069b0e04-55f4969c95aso994593e87.0
|
|
||||||
for <gmail@xinu.tv>; Mon, 25 Aug 2025 14:29:45 -0700 (PDT)
|
|
||||||
ARC-Seal: i=2; a=rsa-sha256; t=1756157384; cv=pass;
|
|
||||||
d=google.com; s=arc-20240605;
|
|
||||||
b=Y2CP7y9twLnWB5v8iyzZCw0vp33wQBS0qzltdtzX2NIWFhHu6MEp2XH8cONssaGrEN
|
|
||||||
kyjXajT7uaEpn6G8H6/NB9v9Vo2yk5Lq2f+RhODMYoocYs9YY9NJI4ZxMph0UeMO6RkQ
|
|
||||||
m+HH0iIeC2Mzgj1Bzq4qFEwb397YIijoxx+1RxyA2D3cwSuZtERSvFOEkHqv9ziWxBcD
|
|
||||||
u3tvySEuzjyQFU6bxfkax6sZljSRGzfj0iZJAl/Fw5tUgrhndQ55O5RDe4NfPNj0cw/3
|
|
||||||
XDELzsnepBgnW8Jpqpnh7iK6XMFSf4sPQmyiMCMDNVYtmm6hYFNo3/dOpgaPn/ImRr8j
|
|
||||||
d9lw==
|
|
||||||
ARC-Message-Signature: i=2; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:reply-to
|
|
||||||
:mime-version:dkim-signature:delivered-to;
|
|
||||||
bh=RJDaNO07yMMdVMfY1VnSbfmQtoKb6bs6XzWwF6+91ZY=;
|
|
||||||
fh=xB02AmI2fnPF5rMnM90IwqQ6Il76V+xMgSnSW+E42fE=;
|
|
||||||
b=H7Ze4a8zoCYB77xcnUnFTogJ/utYS/USzTL/7eS3nA6OPbD+zWRiiVmbSfQcNK7d25
|
|
||||||
LapXyYnRJKgc8sqqQ6XO26STA8xx/9G620pdTytChIzKsmm/T5cdlf1M8DJ+NlwkzzSG
|
|
||||||
6Xe5I0MuXSKzBDMmcBcMlY9+mp61eZNo/cGT34MfZvLDS7JCs5uQYy2gRyajCKzRddEP
|
|
||||||
NBfMgnP1Ag9B5KkpJr4QfA2IWoNlj/qom/bRcdcdjwQ3gwDeiG8rdrEwBt9juwqk8d95
|
|
||||||
C0LnVKfrXAZgolmJpljyIFb1IMMyBUIQhK+7cXFhV1AD6Laz0df9gmPWp5mGZz9qlYaY
|
|
||||||
BqJA==;
|
|
||||||
darn=xinu.tv
|
|
||||||
ARC-Authentication-Results: i=2; mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=4sz9KOqm;
|
|
||||||
spf=pass (google.com: domain of 3odssaaoscuanoeqnnkpiuugcvvnguejqqnu.qtieqwejoqpgaiockn.eqo@calendar-server.bounces.google.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=3OdSsaAoSCuANOEQNNKPIUUGCVVNGUEJQQNU.QTIEQWEJOQPGaIOCKN.EQO@calendar-server.bounces.google.com;
|
|
||||||
dmarc=pass (p=REJECT sp=REJECT dis=NONE) header.from=google.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=1e100.net; s=20230601; t=1756157384; x=1756762184;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:reply-to
|
|
||||||
:mime-version:dkim-signature:delivered-to:x-forwarded-for
|
|
||||||
:x-forwarded-to:x-gm-message-state:from:to:cc:subject:date
|
|
||||||
:message-id:reply-to;
|
|
||||||
bh=RJDaNO07yMMdVMfY1VnSbfmQtoKb6bs6XzWwF6+91ZY=;
|
|
||||||
b=m95okwnmqNvW4GhCfY8yZvCu5NxuhHCL2+A54SlIrRudednXK05YGzjZ5LOuCAaY1g
|
|
||||||
htpRv2cGHBj2mEnHh+3GIX5vQCmXw2ptzOGzfYe9TwavuKPkkKPiSD5wA1fk8quqHDOD
|
|
||||||
4XDM7dsn3xewJ+6GQyc6NPBQq53hmpAojbLXnmNtAIyfAvuxtHP1G+GSO+ZIApgg56K6
|
|
||||||
TaYrwqnRx66P8B2Ze111LCdnmOOLzweJ1muYyavPdCtTG5BbJgqzaI67bQhuUNZDhVbP
|
|
||||||
FdtT4Q7WzNt30JHCVIAkkHejD9Fh/mYSmETXpD+ISvZJ47DNnLP4RXjmmAWcHJkKsh+q
|
|
||||||
v3QQ==
|
|
||||||
X-Forwarded-Encrypted: i=2; AJvYcCUeIjyIxPoWuMqg9l5aomQv7Z9wLYkwDIS1FYz7bNmHs1Cs0CSHG8Y5B0iU/nlo9xRenTW/Xw==@xinu.tv
|
|
||||||
X-Gm-Message-State: AOJu0Yznjr5TC7UpZJk74jrsJzMBwx6/39s9e5ufIA5/FmHZ6I1bEdTc
|
|
||||||
vqpeeLdzSZTI2uZiR7zzKHiwmNJHt/LncR9kDR5f0I6b3MZuXpAgr0aKYdXw7B+b+h7D7uMM3Tm
|
|
||||||
JF9ccf09JxIzRzeRI9Vb52PUs4SIeiIU9J80QY53UqN/Rx8XMF+ncRSX5d4V4pQ==
|
|
||||||
X-Received: by 2002:a05:6512:110e:b0:55f:3bab:f204 with SMTP id 2adb3069b0e04-55f3babf35emr3087055e87.31.1756156987711;
|
|
||||||
Mon, 25 Aug 2025 14:23:07 -0700 (PDT)
|
|
||||||
X-Forwarded-To: gmail@xinu.tv
|
|
||||||
X-Forwarded-For: couchmoney@gmail.com gmail@xinu.tv
|
|
||||||
Delivered-To: couchmoney@gmail.com
|
|
||||||
Received: by 2002:a05:6504:6116:b0:2b8:eb6f:82ec with SMTP id i22csp44357ltt;
|
|
||||||
Mon, 25 Aug 2025 14:23:06 -0700 (PDT)
|
|
||||||
X-Received: by 2002:a05:6e02:164e:b0:3ed:94a6:2edb with SMTP id e9e14a558f8ab-3ed94a63097mr41416195ab.21.1756156986122;
|
|
||||||
Mon, 25 Aug 2025 14:23:06 -0700 (PDT)
|
|
||||||
ARC-Seal: i=1; a=rsa-sha256; t=1756156986; cv=none;
|
|
||||||
d=google.com; s=arc-20240605;
|
|
||||||
b=Nu0W/67J2nYqDAXf27QdfmUyuA6TGJwusKLaHRaE05YdEu/FWLfUk2ATV+g3iUQ19b
|
|
||||||
wh7awaA5kemxwiBqAy5kjjlXqlDrkK0Ow2fANdc6lRKvlRNJRYUnojMkP8w/v4Nv8YQj
|
|
||||||
Wci0HMhL4ni/yeqXeoaj1yKtwJU5MvRMxZZC7TinlCHKF5+MqgD8VNax8OTDOqxYvSDi
|
|
||||||
aIlyUBTial0AiP/K+3bsoIWEc2RoyBBBNIe88C4s1fcv17GCGn5RkN3lYtr+nwvp5wNE
|
|
||||||
fKxPCYMtXkNyv8jgjmgxKLcYBDK0B4Zo+ghMWXZneDWo3qotDVkr0GBC3J2N7BcZpjCA
|
|
||||||
XEDA==
|
|
||||||
ARC-Message-Signature: i=1; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:reply-to
|
|
||||||
:mime-version:dkim-signature;
|
|
||||||
bh=RJDaNO07yMMdVMfY1VnSbfmQtoKb6bs6XzWwF6+91ZY=;
|
|
||||||
fh=mbzrMIWIgWMC0ni1xEx+ViW4J0RLAdLdPT2cX81nTlk=;
|
|
||||||
b=NvhrlkKGEVx63UMsx510U8ePUo7OgRQBWxZ4BIpQWg6Fk0jJPaZgRoEpUdZ747et1P
|
|
||||||
rWTx/yVaEUHBqWtt0I4ktiD8Hr4cVqAwKvtiN32JpkGCsVBjYBWqxEalWIOg6abn8xLE
|
|
||||||
7x9j4GqD/cQhd3DiS6UtADsJ67MjjzLpGkskvxo67vKRGCfSLCKdbna2LO5TtoZ7fKO7
|
|
||||||
i+dhDol6IIgA2Sg+PZlzq6gbZTaFbglUNI7uOwz0fNWjhHH4ZfmPEycYxJ9bTuPISrqS
|
|
||||||
BkXxGQFkvlg42NHWt5L8aPzrx8OMoYfTniIqU19GeEFEVUbmzYCg/twZ0f5nxugHWDbD
|
|
||||||
PMvQ==;
|
|
||||||
dara=google.com
|
|
||||||
ARC-Authentication-Results: i=1; mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=4sz9KOqm;
|
|
||||||
spf=pass (google.com: domain of 3odssaaoscuanoeqnnkpiuugcvvnguejqqnu.qtieqwejoqpgaiockn.eqo@calendar-server.bounces.google.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=3OdSsaAoSCuANOEQNNKPIUUGCVVNGUEJQQNU.QTIEQWEJOQPGaIOCKN.EQO@calendar-server.bounces.google.com;
|
|
||||||
dmarc=pass (p=REJECT sp=REJECT dis=NONE) header.from=google.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
Received: from mail-sor-f73.google.com (mail-sor-f73.google.com. [209.85.220.73])
|
|
||||||
by mx.google.com with SMTPS id ca18e2360f4ac-886c8fc41ebsor461233039f.7.2025.08.25.14.23.05
|
|
||||||
for <couchmoney@gmail.com>
|
|
||||||
(Google Transport Security);
|
|
||||||
Mon, 25 Aug 2025 14:23:06 -0700 (PDT)
|
|
||||||
Received-SPF: pass (google.com: domain of 3odssaaoscuanoeqnnkpiuugcvvnguejqqnu.qtieqwejoqpgaiockn.eqo@calendar-server.bounces.google.com designates 209.85.220.73 as permitted sender) client-ip=209.85.220.73;
|
|
||||||
Authentication-Results: mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=4sz9KOqm;
|
|
||||||
spf=pass (google.com: domain of 3odssaaoscuanoeqnnkpiuugcvvnguejqqnu.qtieqwejoqpgaiockn.eqo@calendar-server.bounces.google.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=3OdSsaAoSCuANOEQNNKPIUUGCVVNGUEJQQNU.QTIEQWEJOQPGaIOCKN.EQO@calendar-server.bounces.google.com;
|
|
||||||
dmarc=pass (p=REJECT sp=REJECT dis=NONE) header.from=google.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=google.com; s=20230601; t=1756156985; x=1756761785; dara=google.com;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:reply-to
|
|
||||||
:mime-version:from:to:cc:subject:date:message-id:reply-to;
|
|
||||||
bh=RJDaNO07yMMdVMfY1VnSbfmQtoKb6bs6XzWwF6+91ZY=;
|
|
||||||
b=4sz9KOqmGGwObcaR0iSSMVeeMvZHqMzvY4cw++RddJd0V48WoyPPI5q1oMeGiVZ6fm
|
|
||||||
eEWVr8xH9/T1JUqUZXJHY6CPixN9nTpLvZlpikG1KOFv5+I5DNVX/O5i6M5C/yIPRVGv
|
|
||||||
ja0ygA7WTL48IkHV7+PTPwHmhF8zv1/BeNdko4BSywfql64J6NMM5RnOAejTIf5AR/IL
|
|
||||||
CW7H2IcmiOGBHfgMApQljg3wB+WgUel7RXZfMnHCbSlmynJ6bDJ4tq7uU16GLpnI6qAe
|
|
||||||
s9w8cOpFPiQk8uKEqdc682XxKlwqYdh07RWO/EdlZ8WeSoxMfU6YZL7c1s6xxK2c9sT7
|
|
||||||
8Xxg==
|
|
||||||
X-Google-Smtp-Source: AGHT+IFJwttd47Uo06h0EKkogFtVf4poWcHfmodh4dZqSviwYROSgnnyI2ZJSibXGnOUHiLIfAwFn6KP9CzXMoyncWSb
|
|
||||||
MIME-Version: 1.0
|
|
||||||
X-Received: by 2002:a05:6602:14c9:b0:884:47f0:b89f with SMTP id
|
|
||||||
ca18e2360f4ac-886bd0f2960mr1726062839f.3.1756156985586; Mon, 25 Aug 2025
|
|
||||||
14:23:05 -0700 (PDT)
|
|
||||||
Reply-To: tconvertino@gmail.com
|
|
||||||
Auto-Submitted: auto-generated
|
|
||||||
Message-ID: <calendar-43033c42-cc1e-4014-a5e8-c4552d41247e@google.com>
|
|
||||||
Date: Mon, 25 Aug 2025 21:23:05 +0000
|
|
||||||
Subject: New event: McClure BLT @ Monthly from 7:30am to 8:30am on the second
|
|
||||||
Thursday from Thu Sep 11 to Fri Jan 30, 2026 (PDT) (tconvertino@gmail.com)
|
|
||||||
From: "lmcollings@seattleschools.org (Google Calendar)" <calendar-notification@google.com>
|
|
||||||
To: couchmoney@gmail.com
|
|
||||||
Content-Type: multipart/alternative; boundary="0000000000004bc1be063d372904"
|
|
||||||
|
|
||||||
--0000000000004bc1be063d372904
|
|
||||||
Content-Type: text/plain; charset="UTF-8"; format=flowed; delsp=yes
|
|
||||||
Content-Transfer-Encoding: base64
|
|
||||||
|
|
||||||
TWNDbHVyZSBCTFQNCk1vbnRobHkgZnJvbSA3OjMwYW0gdG8gODozMGFtIG9uIHRoZSBzZWNvbmQg
|
|
||||||
VGh1cnNkYXkgZnJvbSBUaHVyc2RheSBTZXAgMTEgIA0KdG8gRnJpZGF5IEphbiAzMCwgMjAyNg0K
|
|
||||||
UGFjaWZpYyBUaW1lIC0gTG9zIEFuZ2VsZXMNCg0KTG9jYXRpb24NCk1jQ2x1cmUgTGlicmFyeQkN
|
|
||||||
Cmh0dHBzOi8vd3d3Lmdvb2dsZS5jb20vbWFwcy9zZWFyY2gvTWNDbHVyZStMaWJyYXJ5P2hsPWVu
|
|
||||||
DQoNCg0KDQpCTFQgd2lsbCBtZWV0IG9uIHRoZSAybmQgVGh1cnNkYXkgb2YgZXZlcnkgbW9udGgg
|
|
||||||
dW50aWwgSmFudWFyeSB3aGVuIHdlICANCmJlZ2luIGxvb2tpbmcgYXQgYnVkZ2V0LiBBZGRpdGlv
|
|
||||||
bmFsIG1lZXRpbmdzIG1heSBhbHNvIGJlIHNjaGVkdWxlZCBlYXJsaWVyICANCmlmIG5lZWRlZC4N
|
|
||||||
ClRoYW5rcywNCk1jQ2x1cmUgQkxUDQoNCg0KDQpPcmdhbml6ZXINCmxtY29sbGluZ3NAc2VhdHRs
|
|
||||||
ZXNjaG9vbHMub3JnDQpsbWNvbGxpbmdzQHNlYXR0bGVzY2hvb2xzLm9yZw0KDQpHdWVzdHMNCmxt
|
|
||||||
Y29sbGluZ3NAc2VhdHRsZXNjaG9vbHMub3JnIC0gb3JnYW5pemVyDQp0Y29udmVydGlub0BnbWFp
|
|
||||||
bC5jb20gLSBjcmVhdG9yDQptYW5kcy5hbmRydXNAZ21haWwuY29tDQphbXNjaHVtZXJAc2VhdHRs
|
|
||||||
ZXNjaG9vbHMub3JnDQphcGplbm5pbmdzQHNlYXR0bGVzY2hvbHMub3JnDQpsbWJsYXVAc2VhdHRs
|
|
||||||
ZXNjaG9vbHMub3JnDQptbmxhbmRpc0BzZWF0dGxlc2Nob29scy5vcmcNCnRtYnVyY2hhcmR0QHNl
|
|
||||||
YXR0bGVzY2hvb2xzLm9yZw0KbWNjbHVyZWFsbHN0YWZmQHNlYXR0bGVzY2hvbHMub3JnIC0gb3B0
|
|
||||||
aW9uYWwNClZpZXcgYWxsIGd1ZXN0IGluZm8gIA0KaHR0cHM6Ly9jYWxlbmRhci5nb29nbGUuY29t
|
|
||||||
L2NhbGVuZGFyL3I/ZWlkPVh6WXdjVE13WXpGbk5qQnZNekJsTVdrMk1HODBZV016WnpZd2NtbzRa
|
|
||||||
M0JzT0RoeWFqSmpNV2c0TkhNelpHZzVae1l3Y3pNd1l6Rm5OakJ2TXpCak1XYzNORG96T0dkb2Fq
|
|
||||||
WXhNR3RoWjNFeE5qUnhhemhuY0djMk5HOHpNR014WnpZd2J6TXdZekZuTmpCdk16QmpNV2MyTUc4
|
|
||||||
ek1tTXhaell3YnpNd1l6Rm5PR2R4TTJGalNXODNOSUF6YVdReGJUY3hNbXBqWkRGck5qVXhNamhq
|
|
||||||
TVcwM01USnFNbWRvYnpnMGN6TTJaSEJwTmprek1DQjBZMjl1ZG1WeWRHbHViMEJ0JmVzPTENCg0K
|
|
||||||
fn4vL35+DQpJbnZpdGF0aW9uIGZyb20gR29vZ2xlIENhbGVuZGFyOiBodHRwczovL2NhbGVuZGFy
|
|
||||||
Lmdvb2dsZS5jb20vY2FsZW5kYXIvDQoNCllvdSBhcmUgcmVjZWl2aW5nIHRoaXMgZW1haWwgYmVj
|
|
||||||
YXVzZSB5b3UgYXJlIHN1YnNjcmliZWQgdG8gY2FsZW5kYXIgIA0Kbm90aWZpY2F0aW9ucy4gVG8g
|
|
||||||
c3RvcCByZWNlaXZpbmcgdGhlc2UgZW1haWxzLCBnbyB0byAgDQpodHRwczovL2NhbGVuZGFyLmdv
|
|
||||||
b2dsZS5jb20vY2FsZW5kYXIvci9zZXR0aW5ncywgc2VsZWN0IHRoaXMgY2FsZW5kYXIsIGFuZCANCmNoYW5nZSAiT3RoZXIgbm90aWZpY2F0aW9ucyIuDQoNCkZvcndhcmRpbmcgdGhpcyBpbnZp
|
|
||||||
dGF0aW9uIGNvdWxkIGFsbG93IGFueSByZWNpcGllbnQgdG8gc2VuZCBhIHJlc3BvbnNlIHRvICAN
|
|
||||||
CnRoZSBvcmdhbml6ZXIsIGJlIGFkZGVkIHRvIHRoZSBndWVzdCBsaXN0LCBpbnZpdGUgb3RoZXJz
|
|
||||||
IHJlZ2FyZGxlc3Mgb2YgIA0KdGhlaXIgb3duIGludml0YXRpb24gc3RhdHVzLCBvciBtb2RpZnkg
|
|
||||||
eW91ciBSU1ZQLg0KDQpMZWFybiBtb3JlIGh0dHBzOi8vc3VwcG9ydC5nb29nbGUuY29tL2NhbGVu
|
|
||||||
ZGFyL2Fuc3dlci8zNzEzNSNmb3J3YXJkaW5nDQo=
|
|
||||||
--0000000000004bc1be063d372904--
|
|
||||||
206
server/testdata/google-calendar-example-3.eml
vendored
206
server/testdata/google-calendar-example-3.eml
vendored
@ -1,206 +0,0 @@
|
|||||||
Return-Path: <couchmoney+caf_=gmail=xinu.tv@gmail.com>
|
|
||||||
Delivered-To: bill@xinu.tv
|
|
||||||
Received: from phx.xinu.tv [74.207.253.222]
|
|
||||||
by nixos-01.h.xinu.tv with IMAP (fetchmail-6.5.1)
|
|
||||||
for <wathiede@localhost> (single-drop); Thu, 28 Aug 2025 12:11:15 -0700 (PDT)
|
|
||||||
Received: from phx.xinu.tv
|
|
||||||
by phx.xinu.tv with LMTP
|
|
||||||
id 1gVrANOpsGg9TSQAJR8clQ
|
|
||||||
(envelope-from <couchmoney+caf_=gmail=xinu.tv@gmail.com>)
|
|
||||||
for <bill@xinu.tv>; Thu, 28 Aug 2025 12:11:15 -0700
|
|
||||||
X-Original-To: gmail@xinu.tv
|
|
||||||
Received-SPF: Pass (mailfrom) identity=mailfrom; client-ip=2a00:1450:4864:20::230; helo=mail-lj1-x230.google.com; envelope-from=couchmoney+caf_=gmail=xinu.tv@gmail.com; receiver=xinu.tv
|
|
||||||
Authentication-Results: phx.xinu.tv;
|
|
||||||
dkim=pass (2048-bit key; unprotected) header.d=google.com header.i=@google.com header.a=rsa-sha256 header.s=20230601 header.b=RjBRlfFL;
|
|
||||||
dkim=pass (2048-bit key; unprotected) header.d=gmail.com header.i=@gmail.com header.a=rsa-sha256 header.s=20230601 header.b=HaiL0lRL
|
|
||||||
Received: from mail-lj1-x230.google.com (mail-lj1-x230.google.com [IPv6:2a00:1450:4864:20::230])
|
|
||||||
by phx.xinu.tv (Postfix) with ESMTPS id B4E848B007
|
|
||||||
for <gmail@xinu.tv>; Thu, 28 Aug 2025 12:11:13 -0700 (PDT)
|
|
||||||
Received: by mail-lj1-x230.google.com with SMTP id 38308e7fff4ca-336a85b8fc5so8142611fa.3
|
|
||||||
for <gmail@xinu.tv>; Thu, 28 Aug 2025 12:11:13 -0700 (PDT)
|
|
||||||
ARC-Seal: i=2; a=rsa-sha256; t=1756408272; cv=pass;
|
|
||||||
d=google.com; s=arc-20240605;
|
|
||||||
b=Nq93fJSEgPuxWsaf3dc6cCKbOP/bXMQJfmuZJBvrid99GipahJY/Ka4SGoLc8HBMH2
|
|
||||||
Ip9YDLG2Lblqz/N1KOud9gnAmQ6Zg4hfPZGvhUfCGaXbCi2lOhRlfx6QM0lM1B8rAXaA
|
|
||||||
S3Lt2qFFXrVBlvaJePwI+wVpc1wPbvd5PblaaUTYUVJeYSfdPtgNAy0Aehty9TF0Jo2h
|
|
||||||
9yrzCWMJ6kMTpsDw7sfDSnv7s43Q3jOPzXDjHdJfrK8aUXGQenwT+1acJkIw78wBFt3R
|
|
||||||
IG5CBLIKmwDpjquJzRPkEjHiNDRxhaKaCShTCVLTjmrYgbHXPM/gUewaKLfeIuTzOVuA
|
|
||||||
mnkw==
|
|
||||||
ARC-Message-Signature: i=2; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature:delivered-to;
|
|
||||||
bh=lgr/fFBrye/qM438Us9TAp1/DYWNuYxn2NUL4vzX/SU=;
|
|
||||||
fh=twOWSYT+4sbeBuT1oeA5xzauBIj0SLZH5qI1YanOQio=;
|
|
||||||
b=FBstDUezbqJRRRxTwlKY4UXNSJ4z9aZdvb9KOlxXfFLCzUh3r5w+9P4+a/uH1Uw65g
|
|
||||||
xbxzPRgMduPWgKDAweqXk9SGX3mjqF0oyd5yhGTiU/jpHg6ZLXf//g45zJqRjfMnRi8I
|
|
||||||
vbEEAxUKyhPfbrQ8/byfq/isJHFiR0Vjr2U0HOqcctRgCTfrZr1b14jRVopjVqhk37ef
|
|
||||||
KapCbmTbBLznJLQH6jfi4LvKpSlJDW6l7R/CC4WtAzgcmHyA9nfjM4+egLg15giMpn3a
|
|
||||||
549c+jYBFgsjblhmyFw05dGSpUvP+jJeKTcFnlZe6yU7Qjnqhs6TlV/Jm8HAkPH1zdS5
|
|
||||||
XDAw==;
|
|
||||||
darn=xinu.tv
|
|
||||||
ARC-Authentication-Results: i=2; mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=RjBRlfFL;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=HaiL0lRL;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=1e100.net; s=20230601; t=1756408272; x=1757013072;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature:delivered-to
|
|
||||||
:x-forwarded-for:x-forwarded-to:x-gm-message-state:from:to:cc
|
|
||||||
:subject:date:message-id:reply-to;
|
|
||||||
bh=lgr/fFBrye/qM438Us9TAp1/DYWNuYxn2NUL4vzX/SU=;
|
|
||||||
b=VJaqGIpPE1gxGhbAl1Np3yZR/0QPEs/C6KtFdnsaH9ubxFrDOeF4uIygqAUN9YFmll
|
|
||||||
YZsN4G0iexB097atKRIXLrreE3pH3cOY56ym94fWRZGythS0MRZlw40QoHLLf3joTC6D
|
|
||||||
WHtaNcea0hO3V6l/6gKlOffJ/cv2GnyPi0Sv7neOC5v18VTxZwZn+Wp+pTPpWFcmvQ4J
|
|
||||||
IMSV0vNgIRrYJaItUt1d59B9Ah+0bcyd7jJ0TDRVvN97S8iSlSIw6NMwxjZMuyJSWO7X
|
|
||||||
5zm8xA+H+L8+pLMmGKfdBYxhNo/ibdwda+w/ECKIjdnFtbreGbYLsUnkLdPeumQ6LXs/
|
|
||||||
Q2mg==
|
|
||||||
X-Forwarded-Encrypted: i=2; AJvYcCXpJ2X9EF2q2d4efhhe9B8o7LcuPPe25tZZwgkhfxerDzSbY0obB8Eik41xltO5i7k4ANaJKQ==@xinu.tv
|
|
||||||
X-Gm-Message-State: AOJu0Yz5+coY8ftW9IS5OD7ZbkwXnD43Mcp5BZjn5I2cv4v+u+ilxOi+
|
|
||||||
0DKABW1HVFh3MqQ/Z9nU+svpDl4kHa5lTr5siCXHTf0Wpo4LT3UsILyLUvwua0tsx9da14Gl6Fb
|
|
||||||
R1xVSmax6VR4PgZzrnOKZZx1x1re2RaTFGMAaA0Ei5ua3bZpn8axccwggYc94Jw==
|
|
||||||
X-Received: by 2002:a2e:a984:0:b0:336:7b24:2af7 with SMTP id 38308e7fff4ca-3367b242dd2mr36540291fa.17.1756408271464;
|
|
||||||
Thu, 28 Aug 2025 12:11:11 -0700 (PDT)
|
|
||||||
X-Forwarded-To: gmail@xinu.tv
|
|
||||||
X-Forwarded-For: couchmoney@gmail.com gmail@xinu.tv
|
|
||||||
Delivered-To: couchmoney@gmail.com
|
|
||||||
Received: by 2002:a05:6504:955:b0:2b8:eb6f:82ec with SMTP id k21csp1133490lts;
|
|
||||||
Thu, 28 Aug 2025 12:11:10 -0700 (PDT)
|
|
||||||
X-Received: by 2002:a05:6602:3c3:b0:86d:9ec7:267e with SMTP id ca18e2360f4ac-886bd155520mr3955796839f.4.1756408269941;
|
|
||||||
Thu, 28 Aug 2025 12:11:09 -0700 (PDT)
|
|
||||||
ARC-Seal: i=1; a=rsa-sha256; t=1756408269; cv=none;
|
|
||||||
d=google.com; s=arc-20240605;
|
|
||||||
b=Gvk+jquchLt+hySEph55datOhigiuAMXW4mgi5vTVp51rzJ7PB+rH7vx23tj1QAB+0
|
|
||||||
RIOZTaB67H8yFXwAUNZWd1GMnpocZR+tI4bMxbKzDYd7zgaTzSSa2InDROhqOhHqBpX8
|
|
||||||
eWD23F+xRon/qEYQd0YEjZVt20WvKzpvjbpvCyWpq7Z4y376KoJArxsspsKZlALrCfKq
|
|
||||||
cyt9B/EKr3ZmAzRiswiH7KY/iHd1qYgtYy0tYGNtjU0nZ+5fK/tVlw+lJuLtt+aA+ZCy
|
|
||||||
o5y8Y5/thdSJsT159u+bV5eICZWC5kGnztNsXg0Nr2H22XzUC1epWZvJkZW2j+SXQm5k
|
|
||||||
Wdew==
|
|
||||||
ARC-Message-Signature: i=1; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature;
|
|
||||||
bh=lgr/fFBrye/qM438Us9TAp1/DYWNuYxn2NUL4vzX/SU=;
|
|
||||||
fh=mbzrMIWIgWMC0ni1xEx+ViW4J0RLAdLdPT2cX81nTlk=;
|
|
||||||
b=hra/E01IWuIFrWtk3uTcoj04apbHeQcQBSINqYDpr3cO7rXknIvpeXoWLvk0EIJI5y
|
|
||||||
syt60ekwVnsX/qb2F1HbN896dm97QrEGIwAiJyN2oTFauLoYObpcuhPS317hU4+YubO+
|
|
||||||
RLUntXsPK2qiifmPCOMPD6wACQB9YXpOPHrrl5x/yZlria1Tfg3XQcZIYsWcU/Qil94x
|
|
||||||
GtK+i82uzPXEQ0fVieEgJaZtmrW7OFEpPjd1KGp6sYtGvOxUfxVKl5MhLrCqfcLN9fd7
|
|
||||||
Xren0S32b/IsZA8ASdFca3CNjaAL2Ajlatb39XN17txnKrpQje/ReiVkm9wwo194NwCp
|
|
||||||
3dfQ==;
|
|
||||||
dara=google.com
|
|
||||||
ARC-Authentication-Results: i=1; mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=RjBRlfFL;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=HaiL0lRL;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
Received: from mail-sor-f73.google.com (mail-sor-f73.google.com. [209.85.220.73])
|
|
||||||
by mx.google.com with SMTPS id ca18e2360f4ac-88711b2248fsor90547939f.5.2025.08.28.12.11.09
|
|
||||||
for <couchmoney@gmail.com>
|
|
||||||
(Google Transport Security);
|
|
||||||
Thu, 28 Aug 2025 12:11:09 -0700 (PDT)
|
|
||||||
Received-SPF: pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) client-ip=209.85.220.73;
|
|
||||||
Authentication-Results: mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=RjBRlfFL;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=HaiL0lRL;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=google.com; s=20230601; t=1756408269; x=1757013069; dara=google.com;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:from:to:cc:subject:date:message-id:reply-to;
|
|
||||||
bh=lgr/fFBrye/qM438Us9TAp1/DYWNuYxn2NUL4vzX/SU=;
|
|
||||||
b=RjBRlfFLVsAeeTCwo5Z3c1Y5G+pvz4XSTyHiVKUHmxClmpM30ZeHTVLl36njuM/7rx
|
|
||||||
mFwbzGk80zXgGpZyc7qnhSIVxXeMv4iex2UIc1D7Rcw3CF4q/HPlulcD9uVnsxRvng5Z
|
|
||||||
6PVcBQH3qGn0zvDDb0QHEcuDed4sNd/4wkYMOchxlp1TfdrbMZdCI+EXwTyvGgbVjd+/
|
|
||||||
erPyF5JZL/UJx7+gWoXSE7yJkPQrKYiv4LApu0STV4iSOEL8XsTQ4nZiZHSLeeKr0y7w
|
|
||||||
TUWhjfOCgD/YTZW5PTuFBW+lI03Ny19iGHbQNwKrLLcGwW7TJ2PYBR90vsIfaJtG5RM6
|
|
||||||
MP1w==
|
|
||||||
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=gmail.com; s=20230601; t=1756408269; x=1757013069; dara=google.com;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:from:to:cc:subject:date:message-id:reply-to;
|
|
||||||
bh=lgr/fFBrye/qM438Us9TAp1/DYWNuYxn2NUL4vzX/SU=;
|
|
||||||
b=HaiL0lRLeUjb1Rw8g5U5npEElUjhuKY2dPzOaldvum7ZqfY26X35u8SQTxCXWcSsGp
|
|
||||||
RKrlHykB6fjjPSjSGBB+uKe98anrorlvgkhUluES0LzmAZ6STVlPUfPHb/RreJQ7Ol1r
|
|
||||||
N7oNIEg5EnGia1g6rWliSMHY7Fb4sQzMaS2P+qhtq0OFzB6F57atJAwTUWaspDHycfdh
|
|
||||||
S8ji+q7DEiLq1LfXIxj+WwenT/iRFIJsfmvXsHgQiKMoYGdENfAGZPdo7W0sTEK3TkWz
|
|
||||||
xFOny/4bQmx/49F4C1HnLsHoBi0j6sezIQsc+U83vvChFXXrELQrK5PiJL+UOCLZo48R
|
|
||||||
RJDQ==
|
|
||||||
X-Google-Smtp-Source: AGHT+IG3ta6ofCYBa0SfJ7K3lq1EjsCnjr+BZDRz/SVLQfyo54CcUFgE5iTTB5E+h//QXT9iTojhKpMp6QZ4QB+5HAcs
|
|
||||||
MIME-Version: 1.0
|
|
||||||
X-Received: by 2002:a05:6602:1544:b0:887:6a2:6054 with SMTP id
|
|
||||||
ca18e2360f4ac-88706a263famr584022039f.9.1756408269509; Thu, 28 Aug 2025
|
|
||||||
12:11:09 -0700 (PDT)
|
|
||||||
Reply-To: tconvertino@gmail.com
|
|
||||||
Sender: Google Calendar <calendar-notification@google.com>
|
|
||||||
Auto-Submitted: auto-generated
|
|
||||||
Message-ID: <calendar-8ecdd8ef-29ed-4f61-857d-1215ab585aba@google.com>
|
|
||||||
Date: Thu, 28 Aug 2025 19:11:09 +0000
|
|
||||||
Subject: New event: Dentist appt @ Tue Sep 23, 2025 3pm - 4pm (PDT) (tconvertino@gmail.com)
|
|
||||||
From: tconvertino@gmail.com
|
|
||||||
To: couchmoney@gmail.com
|
|
||||||
Content-Type: multipart/alternative; boundary="000000000000fc1bff063d71aa4b"
|
|
||||||
X-Spamd-Result: default: False [-0.80 / 15.00];
|
|
||||||
ARC_ALLOW(-1.00)[google.com:s=arc-20240605:i=2];
|
|
||||||
URI_COUNT_ODD(1.00)[1];
|
|
||||||
DMARC_POLICY_ALLOW(-0.50)[gmail.com,none];
|
|
||||||
R_DKIM_ALLOW(-0.20)[google.com:s=20230601,gmail.com:s=20230601];
|
|
||||||
R_SPF_ALLOW(-0.20)[+ip6:2a00:1450:4000::/36];
|
|
||||||
MIME_BASE64_TEXT(0.10)[];
|
|
||||||
MANY_INVISIBLE_PARTS(0.10)[2];
|
|
||||||
MIME_GOOD(-0.10)[multipart/alternative,text/plain];
|
|
||||||
FREEMAIL_TO(0.00)[gmail.com];
|
|
||||||
RCVD_COUNT_THREE(0.00)[3];
|
|
||||||
FORGED_SENDER(0.00)[tconvertino@gmail.com,couchmoney@gmail.com];
|
|
||||||
FROM_NEQ_ENVFROM(0.00)[tconvertino@gmail.com,couchmoney@gmail.com];
|
|
||||||
MIME_TRACE(0.00)[0:+,1:+,2:~];
|
|
||||||
FREEMAIL_ENVFROM(0.00)[gmail.com];
|
|
||||||
RCPT_COUNT_ONE(0.00)[1];
|
|
||||||
FREEMAIL_REPLYTO(0.00)[gmail.com];
|
|
||||||
FREEMAIL_FROM(0.00)[gmail.com];
|
|
||||||
URIBL_BLOCKED(0.00)[mail-lj1-x230.google.com:rdns,mail-lj1-x230.google.com:helo];
|
|
||||||
TAGGED_FROM(0.00)[caf_=gmail=xinutv];
|
|
||||||
HAS_REPLYTO(0.00)[tconvertino@gmail.com];
|
|
||||||
NEURAL_HAM(-0.00)[-0.995];
|
|
||||||
FWD_GOOGLE(0.00)[couchmoney@gmail.com];
|
|
||||||
TO_DN_NONE(0.00)[];
|
|
||||||
FORGED_SENDER_FORWARDING(0.00)[];
|
|
||||||
RCVD_TLS_LAST(0.00)[];
|
|
||||||
TO_DOM_EQ_FROM_DOM(0.00)[];
|
|
||||||
FROM_NO_DN(0.00)[];
|
|
||||||
ASN(0.00)[asn:15169, ipnet:2a00:1450::/32, country:US];
|
|
||||||
DKIM_TRACE(0.00)[google.com:+,gmail.com:+];
|
|
||||||
MISSING_XM_UA(0.00)[];
|
|
||||||
REPLYTO_EQ_FROM(0.00)[]
|
|
||||||
X-Rspamd-Server: phx
|
|
||||||
X-Rspamd-Action: no action
|
|
||||||
X-Rspamd-Queue-Id: B4E848B007
|
|
||||||
X-TUID: eMNiZ49uiDPB
|
|
||||||
|
|
||||||
--000000000000fc1bff063d71aa4b
|
|
||||||
Content-Type: text/plain; charset="UTF-8"; format=flowed; delsp=yes
|
|
||||||
Content-Transfer-Encoding: base64
|
|
||||||
|
|
||||||
RGVudGlzdCBhcHB0DQpUdWVzZGF5IFNlcCAyMywgMjAyNSDii4UgM3BtIOKAkyA0cG0NClBhY2lm
|
|
||||||
aWMgVGltZSAtIExvcyBBbmdlbGVzDQoNCg0KDQpPcmdhbml6ZXINCnRjb252ZXJ0aW5vQGdtYWls
|
|
||||||
LmNvbQ0KdGNvbnZlcnRpbm9AZ21haWwuY29tDQoNCn5+Ly9+fg0KSW52aXRhdGlvbiBmcm9tIEdv
|
|
||||||
b2dsZSBDYWxlbmRhcjogaHR0cHM6Ly9jYWxlbmRhci5nb29nbGUuY29tL2NhbGVuZGFyLw0KDQpZ
|
|
||||||
b3UgYXJlIHJlY2VpdmluZyB0aGlzIGVtYWlsIGJlY2F1c2UgeW91IGFyZSBzdWJzY3JpYmVkIHRv
|
|
||||||
IGNhbGVuZGFyICANCm5vdGlmaWNhdGlvbnMuIFRvIHN0b3AgcmVjZWl2aW5nIHRoZXNlIGVtYWls
|
|
||||||
cywgZ28gdG8gIA0KaHR0cHM6Ly9jYWxlbmRhci5nb29nbGUuY29tL2NhbGVuZGFyL3Ivc2V0dGlu
|
|
||||||
Z3MsIHNlbGVjdCB0aGlzIGNhbGVuZGFyLCBhbmQgIA0KY2hhbmdlICJPdGhlciBub3RpZmljYXRp
|
|
||||||
b25zIi4NCg0KRm9yd2FyZGluZyB0aGlzIGludml0YXRpb24gY291bGQgYWxsb3cgYW55IHJlY2lw
|
|
||||||
aWVudCB0byBzZW5kIGEgcmVzcG9uc2UgdG8gIA0KdGhlIG9yZ2FuaXplciwgYmUgYWRkZWQgdG8g
|
|
||||||
dGhlIGd1ZXN0IGxpc3QsIGludml0ZSBvdGhlcnMgcmVnYXJkbGVzcyBvZiAgDQp0aGVpciBvd24g
|
|
||||||
aW52aXRhdGlvbiBzdGF0dXMsIG9yIG1vZGlmeSB5b3VyIFJTVlAuDQoNCkxlYXJuIG1vcmUgaHR0
|
|
||||||
cHM6Ly9zdXBwb3J0Lmdvb2dsZS5jb20vY2FsZW5kYXIvYW5zd2VyLzM3MTM1I2ZvcndhcmRpbmcN
|
|
||||||
Cg==
|
|
||||||
--000000000000fc1bff063d71aa4b
|
|
||||||
Content-Type: text/html; charset="UTF-8"
|
|
||||||
Content-Transfer-Encoding: quoted-printable
|
|
||||||
|
|
||||||
<!doctype html><html xmlns=3D"http://www.w3.org/1999/xhtml" xmlns:v=3D"urn:="...truncated for brevity...
|
|
||||||
175
server/testdata/google-calendar-example-thursday.eml
vendored
175
server/testdata/google-calendar-example-thursday.eml
vendored
@ -1,175 +0,0 @@
|
|||||||
Return-Path: <couchmoney+caf_=gmail=xinu.tv@gmail.com>
|
|
||||||
Delivered-To: bill@xinu.tv
|
|
||||||
Received: from phx.xinu.tv [74.207.253.222]
|
|
||||||
by nixos-01.h.xinu.tv with IMAP (fetchmail-6.5.1)
|
|
||||||
for <wathiede@localhost> (single-drop); Thu, 11 Sep 2025 12:27:35 -0700 (PDT)
|
|
||||||
Received: from phx.xinu.tv
|
|
||||||
by phx.xinu.tv with LMTP
|
|
||||||
id CqRrBqciw2hiKicAJR8clQ
|
|
||||||
(envelope-from <couchmoney+caf_=gmail=xinu.tv@gmail.com>)
|
|
||||||
for <bill@xinu.tv>; Thu, 11 Sep 2025 12:27:35 -0700
|
|
||||||
X-Original-To: gmail@xinu.tv
|
|
||||||
Received-SPF: Pass (mailfrom) identity=mailfrom; client-ip=2a00:1450:4864:20::130; helo=mail-lf1-x130.google.com; envelope-from=couchmoney+caf_=gmail=xinu.tv@gmail.com; receiver=xinu.tv
|
|
||||||
Authentication-Results: phx.xinu.tv;
|
|
||||||
dkim=pass (2048-bit key; unprotected) header.d=google.com header.i=@google.com header.a=rsa-sha256 header.s=20230601 header.b=dc+iKaXd;
|
|
||||||
dkim=pass (2048-bit key; unprotected) header.d=gmail.com header.i=@gmail.com header.a=rsa-sha256 header.s=20230601 header.b=kf8o8wAd
|
|
||||||
Received: from mail-lf1-x130.google.com (mail-lf1-x130.google.com [IPv6:2a00:1450:4864:20::130])
|
|
||||||
by phx.xinu.tv (Postfix) with ESMTPS id D7E2D80037
|
|
||||||
for <gmail@xinu.tv>; Thu, 11 Sep 2025 12:27:33 -0700 (PDT)
|
|
||||||
Received: by mail-lf1-x130.google.com with SMTP id 2adb3069b0e04-55f716e25d9so1141446e87.1
|
|
||||||
for <gmail@xinu.tv>; Thu, 11 Sep 2025 12:27:33 -0700 (PDT)
|
|
||||||
ARC-Seal: i=2; a=rsa-sha256; t=1757618852; cv=pass;
|
|
||||||
d=google.com; s=arc-20240605;
|
|
||||||
b=MZ+1JfQuPR9luCCxiZNUeqSEpjt1vLuM3bTRCaal/W0NBxkCH0y5v9WfPR0KJ2BPb1
|
|
||||||
Rtnt/5ayDtmsLf8l6yTTVsBlFYW70ehqXWMD10MMcDEMvnib4KKDAacGaSmijAK4cYGq
|
|
||||||
FOU9CGNY986OMXMk54TD9NF3fkKDIKcAoh81D6at5/DE3Puuxofq0vZmtmVqQBNKG169
|
|
||||||
REkhcDpkXTMs/4rJpmZwXp2HbjD84avusBwSlYIQUWsBgO4g7THHjoR4Uk56cek9aEds
|
|
||||||
ip8IkTO6KRFe6u8FebQsZ/Q9sSAK3pheMExWFVMha9Y0XhACVOZiV600zRCPS9MNHhYw
|
|
||||||
XEaA==
|
|
||||||
ARC-Message-Signature: i=2; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature:delivered-to;
|
|
||||||
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
|
|
||||||
fh=WnbwIlqFRbBot/H7TyqablNBDgXRuegsgjC3piothTI=;
|
|
||||||
b=aYMo5f7VI2b4CiAvLELRJ9zM3dF7ZH8FEqmoAtCcfPHrT9kLLCnriuyXG1R6sC3eoR
|
|
||||||
++boT29xoScVroIlfcI77Ty7N5X1fawOABkVDWWt7z5w4WhiesT0klxw5nINj9hnLBiK
|
|
||||||
22nrMevpRpFtmuDO7cle78lSAFZoZuyv+aXCK9RnLKvIm2JuXRrvU8LivxbbpNB4gNl0
|
|
||||||
hE1jsGuZm1SOJ54SRLwwa4HpSiOJV2x2txTtPCzmvE/LZvNESPjfi3Y2u7gaR87OzkNs
|
|
||||||
gNi5Xoc+D908zBsmcYKpUYiQcPL79s3DfNwYFIs/rR8Z2xgaHbFD/YmqRUmCEeNLv7o2
|
|
||||||
RR8g==;
|
|
||||||
darn=xinu.tv
|
|
||||||
ARC-Authentication-Results: i=2; mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=dc+iKaXd;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=kf8o8wAd;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=1e100.net; s=20230601; t=1757618852; x=1758223652;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature:delivered-to
|
|
||||||
:x-forwarded-for:x-forwarded-to:x-gm-message-state:from:to:cc
|
|
||||||
:subject:date:message-id:reply-to;
|
|
||||||
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
|
|
||||||
b=GKJkb+LmE79XIMEhHRvoCodKS+GBTOCShzMe06Q+zKxUZFHi6XMg8GqteuXQO9LVbw
|
|
||||||
nPUVN4QO2Hvqch0xzjbc0ryyMOD0u7HqpDUAEZCzamFXIfsX6hZXKLhFqy4YomtsG3os
|
|
||||||
TCOWBGLqwu7KalfOVg2p+csOR68i0mGyBII1sKcL9vUv9kIQJZxQKHGkuIc48cf6tbUB
|
|
||||||
L+mkVbMwXLSbpuTJszPmIVZV5o0K52KN+2QoLcmXGfw0mUOnjNI0oSovdbPg4SSDZ3cw
|
|
||||||
iIsC9vjvtCSFS3pf+Fp807s+Zjh5P6xeSxGU57qhC+HT9kTzIioh5EqKnGqcskDTqrI1
|
|
||||||
uCiQ==
|
|
||||||
X-Forwarded-Encrypted: i=2; AJvYcCUfSSA2sT31daRt2+W7dAD9YPx1gqa4JFpVuqCtxVtjqbKfKhOX/EcDQiECQ4BEWjmAP+IqTQ==@xinu.tv
|
|
||||||
X-Gm-Message-State: AOJu0Ywn7D0BjTaGiM/UFG0WhGuyYGfpLijg+ouhrOaGZzSREyTcRa37
|
|
||||||
XA3bzQ/LKTpzWhhh01GMwnigmELbWdIVr/BeRLVCuJdh+m+JBMgnAjBTIDs9RF3/xfR7rpG7VOB
|
|
||||||
6k+ugF+8QRKB4BcL2t8MvfJD03CkrzuhhvUtFTRHopcSZrkqzh8GOJayq42VveQ==
|
|
||||||
X-Received: by 2002:a05:6512:3b24:b0:55f:6580:818c with SMTP id 2adb3069b0e04-57050fe2fa3mr165340e87.46.1757618851553;
|
|
||||||
Thu, 11 Sep 2025 12:27:31 -0700 (PDT)
|
|
||||||
X-Forwarded-To: gmail@xinu.tv
|
|
||||||
X-Forwarded-For: couchmoney@gmail.com gmail@xinu.tv
|
|
||||||
Delivered-To: couchmoney@gmail.com
|
|
||||||
Received: by 2002:a05:6504:d09:b0:2c3:f6c4:ad72 with SMTP id c9csp3388833lty;
|
|
||||||
Thu, 11 Sep 2025 12:27:29 -0700 (PDT)
|
|
||||||
X-Received: by 2002:a05:6602:36ce:b0:889:b536:779b with SMTP id ca18e2360f4ac-8903378d714mr78653239f.7.1757618849269;
|
|
||||||
Thu, 11 Sep 2025 12:27:29 -0700 (PDT)
|
|
||||||
ARC-Seal: i=1; a=rsa-sha256; t=1757618849; cv=none;
|
|
||||||
d=google.com; s=arc-20240605;
|
|
||||||
b=Ln2bufZfSNhR/NmMPrG2QFdtvupjJtLDQnFvsL8HTPn+Dlrt5ff+6k6Wpupab/5mS7
|
|
||||||
hXjtVD0jnryGUiM5h+SNjxwzNPM3PBoueTpAzzBkjHQqMxJVpspgsGJUVOWAVRBWtWo
|
|
||||||
39qFyoP0vhzGRWDAuAFV+4VDhsvH7GL8lTrZCSMzrngTadmEdJ5haUIQOa50KFUn5HrK
|
|
||||||
1r12gayb+TaGaWfQfDo0Me689T8MQnS0ITUuzgvFxfgHZBz3h+IPnC0hrlhdziGovETo
|
|
||||||
GvHzgCCtiVzu6rop6VMLjLuAYmmT9+jZ3GjSRb+078C9cJR17YpguOC14Cyv4od1Tf7y
|
|
||||||
RFiQ==;
|
|
||||||
dara=google.com
|
|
||||||
ARC-Message-Signature: i=1; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature;
|
|
||||||
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
|
|
||||||
fh=mbzrMIWIgWMC0ni1xEx+ViW4J0RLAdLdPT2cX81nTlk=;
|
|
||||||
b=JRkHr3CKSkCrafdLzBRtaBOGNl3/0ZSTtgubaNXtvhAiIqRqiQYocfLnVM6N/9sH7O
|
|
||||||
byTXYaRoaRLw/35WM+QTFGP3zUGRkM3eO4UVS/utVIss1IVLDjfmZHalqLYl8RokW5br
|
|
||||||
89Z/xYIyjTE7WUdy6uMSrExCNm5VWjO/qcMKsE5s5oDbXdSLaUYxLTurICM3LQksGkCY
|
|
||||||
wiAWaDDqK14+uhEhW5AyEnebDSYhL9U8UadIv+eK6Ng9q1kwOUzxICRQXEyUtnKhaDKJ
|
|
||||||
eZ1Qe1mp1CjCulr+I15fz3VwUJ6W1cv6cytcxPbu4p5GPn2gb2hS1eR81HVTL6V1Sp5G
|
|
||||||
NdDQ==;
|
|
||||||
dara=google.com
|
|
||||||
ARC-Authentication-Results: i=1; mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=dc+iKaXd;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=kf8o8wAd;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
Received: from mail-sor-f73.google.com (mail-sor-f73.google.com. [209.85.220.73])
|
|
||||||
by mx.google.com with SMTPS id ca18e2360f4ac-88f2ea1122asor117632339f.3.2025.09.11.12.27.29
|
|
||||||
for <couchmoney@gmail.com>
|
|
||||||
(Google Transport Security);
|
|
||||||
Thu, 11 Sep 2025 12:27:29 -0700 (PDT)
|
|
||||||
Received-SPF: pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) client-ip=209.85.220.73;
|
|
||||||
Authentication-Results: mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=dc+iKaXd;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=kf8o8wAd;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=google.com; s=20230601; t=1757618849; x=1758223649; dara=google.com;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:from:to:cc:subject:date:message-id:reply-to;
|
|
||||||
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
|
|
||||||
b=dc+iKaXdFyqu6K0MIgk848QuwpQXvwzwlEVkxmjuCWvn9DzanMbYn5QJRyRTKilRna
|
|
||||||
BZ7gJSPriHUHcJd4fVKgGuCaQg0TxenCwm+0R64oB1xcDLfonayo/nCrFqEcCLHNmi7x
|
|
||||||
lTyWGJ0rLw6nKazxtcCdIbDhVgiE7/fXNI89w6XFp6pcKLl48yFIoCG1f6uY4iQ7QqNU
|
|
||||||
hLHzjmlzjTi58xFLao7SizZ0lr7E5cHXKHp1Ls/hkDzzcY0Y+O5+3r+NQw4MtpHTcY6/
|
|
||||||
kQlg6OhyMx8PTu4cuepQKXLHV4aFaNJbDQTp8wew4xPIgi7pm2p6hb6C3GgwY6ptOvLd
|
|
||||||
wuag==
|
|
||||||
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=gmail.com; s=20230601; t=1757618849; x=1758223649; dara=google.com;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:from:to:cc:subject:date:message-id:reply-to;
|
|
||||||
bh=mVNsDGUAhSGrAIoTy8PIfvCBxBB4yaBy/VZH8i3gPl4=;
|
|
||||||
b=kf8o8wAd5DSU/NC7SDiuIoohCu+/7wTjWyQqDYbBjUFGaBaYdj6aD5JWNQ1KEA2W8o
|
|
||||||
E+Qy2ymyrzodKa1eOsQX2UDAYKOKpdxMWvx1u19+SC3Dp8DP4puRMrL2ObiSEMLCuOvz
|
|
||||||
Mxmkd+ZUP72EhVuQwK1iSm04/cjQaMsSiPhvSBaxXMaaarwlKeOoCoIo+qC/Z9emiBBv
|
|
||||||
Gk0sQcLA+CByvsxuvD9GInSA0rdoZ0ijhSb0Y475Hieam1QQqy/fhe8lgujzhXNFoIbR
|
|
||||||
5EA9GE0VV9PDoNanaT+u954YeOFBL2YZ5gm2gHltw8tBI98LKnC42Pa3qyMznBa2dI2Q
|
|
||||||
A0RQ==
|
|
||||||
X-Google-Smtp-Source: AGHT+IGmC5/03nTVMeYJBoq1R/BiA19iH0DFaZyyImB3W8mtgjdn+XqIFK1fC8aTwWRXQmsr71Xo0cmkgx6hjPvicQ/d
|
|
||||||
MIME-Version: 1.0
|
|
||||||
X-Received: by 2002:a05:6602:380d:b0:887:4c93:f12c with SMTP id
|
|
||||||
ca18e2360f4ac-8903596aca3mr58994639f.17.1757618848817; Thu, 11 Sep 2025
|
|
||||||
12:27:28 -0700 (PDT)
|
|
||||||
Reply-To: tconvertino@gmail.com
|
|
||||||
Sender: Google Calendar <calendar-notification@google.com>
|
|
||||||
Auto-Submitted: auto-generated
|
|
||||||
Message-ID: <calendar-01d5e8a0-fad7-450b-9758-a16472bf2aa8@google.com>
|
|
||||||
Date: Thu, 11 Sep 2025 19:27:28 +0000
|
|
||||||
Subject: Canceled event: Scout Babysits @ Thu Sep 11, 2025 6pm - 9pm (PDT) (Family)
|
|
||||||
From: tconvertino@gmail.com
|
|
||||||
To: couchmoney@gmail.com
|
|
||||||
Content-Type: multipart/mixed; boundary="000000000000226b77063e8b878d"
|
|
||||||
|
|
||||||
--000000000000226b77063e8b878d
|
|
||||||
Content-Type: text/calendar; charset="UTF-8"; method=CANCEL
|
|
||||||
Content-Transfer-Encoding: 7bit
|
|
||||||
|
|
||||||
BEGIN:VCALENDAR
|
|
||||||
PRODID:-//Google Inc//Google Calendar 70.9054//EN
|
|
||||||
VERSION:2.0
|
|
||||||
CALSCALE:GREGORIAN
|
|
||||||
METHOD:CANCEL
|
|
||||||
X-GOOGLE-CALID:g66m0feuqsao8l1c767pvvcg4k@group.calendar.google.com
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20250912T010000Z
|
|
||||||
DTEND:20250912T040000Z
|
|
||||||
DTSTAMP:20250911T192728Z
|
|
||||||
UID:4ang6172d1t7782sn2hmi30fgi@google.com
|
|
||||||
CREATED:20250901T224707Z
|
|
||||||
DESCRIPTION:
|
|
||||||
LAST-MODIFIED:20250911T192728Z
|
|
||||||
LOCATION:
|
|
||||||
SEQUENCE:1
|
|
||||||
STATUS:CANCELLED
|
|
||||||
SUMMARY:Scout Babysits
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
|
|
||||||
--000000000000226b77063e8b878d--
|
|
||||||
169
server/testdata/google-calendar-example.eml
vendored
169
server/testdata/google-calendar-example.eml
vendored
@ -1,169 +0,0 @@
|
|||||||
Return-Path: <couchmoney+caf_=gmail=xinu.tv@gmail.com>
|
|
||||||
Delivered-To: bill@xinu.tv
|
|
||||||
Received: from phx.xinu.tv [74.207.253.222]
|
|
||||||
by nixos-01.h.xinu.tv with IMAP (fetchmail-6.4.39)
|
|
||||||
for <wathiede@localhost> (single-drop); Mon, 02 Jun 2025 07:06:34 -0700 (PDT)
|
|
||||||
Received: from phx.xinu.tv
|
|
||||||
by phx.xinu.tv with LMTP
|
|
||||||
id qDo+FuqvPWh51xIAJR8clQ
|
|
||||||
(envelope-from <couchmoney+caf_=gmail=xinu.tv@gmail.com>)
|
|
||||||
for <bill@xinu.tv>; Mon, 02 Jun 2025 07:06:34 -0700
|
|
||||||
X-Original-To: gmail@xinu.tv
|
|
||||||
Received-SPF: Pass (mailfrom) identity=mailfrom; client-ip=2a00:1450:4864:20::130; helo=mail-lf1-x130.google.com; envelope-from=couchmoney+caf_=gmail=xinu.tv@gmail.com; receiver=xinu.tv
|
|
||||||
Authentication-Results: phx.xinu.tv;
|
|
||||||
dkim=pass (2048-bit key; unprotected) header.d=google.com header.i=@google.com header.a=rsa-sha256 header.s=20230601 header.b=zT2yUtVH;
|
|
||||||
dkim=pass (2048-bit key; unprotected) header.d=gmail.com header.i=@gmail.com header.a=rsa-sha256 header.s=20230601 header.b=nmJW8N67
|
|
||||||
Received: from mail-lf1-x130.google.com (mail-lf1-x130.google.com [IPv6:2a00:1450:4864:20::130])
|
|
||||||
by phx.xinu.tv (Postfix) with ESMTPS id 912AC80034
|
|
||||||
for <gmail@xinu.tv>; Mon, 02 Jun 2025 07:06:32 -0700 (PDT)
|
|
||||||
Received: by mail-lf1-x130.google.com with SMTP id 2adb3069b0e04-54e7967cf67so5267078e87.0
|
|
||||||
for <gmail@xinu.tv>; Mon, 02 Jun 2025 07:06:32 -0700 (PDT)
|
|
||||||
ARC-Seal: i=2; a=rsa-sha256; t=1748873190; cv=pass;
|
|
||||||
d=google.com; s=arc-20240605;
|
|
||||||
b=W3s0wT+CV1W21AldY9lfxPlKRbc7XMoorEnilNq5iGjlw18vDM6eFPb+btqaGAPOPe
|
|
||||||
CMyGeinsFPuql+S7u6HgjZcf9ZFH71sKoFoQytm30hAXB76GO06qi1jRW6o0miuGt/j/
|
|
||||||
bb8qWAiAsGr34mHIbE5fBdkNOGcqW85oI78GolLqpROgn/42boEYxiGAQjybPtO4L84J
|
|
||||||
wP2RBkHiQQGXUjL6b02tozCji1w2XdfYqtW8RteUs1pqYdXl4GUilMLt5C0d2bhSGksS
|
|
||||||
3tMTFjuycbaj+F6QFCkQfEsHx/I7GjuD4mToLcYpzrNnmZZUidAoKuh+uin0cEVvnQ1j
|
|
||||||
V8aA==
|
|
||||||
ARC-Message-Signature: i=2; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature:delivered-to;
|
|
||||||
bh=dgRmOj3aABlB3SNw+xxlI8L9ugJFZ1WMJrtLw/W8tnA=;
|
|
||||||
fh=5zy5Gi9ngAea7dC9ZKKPh/BZlFmotJq74g9KHrEIwaE=;
|
|
||||||
b=QTAjqit0gYnuGa1lbO9RUXOVpyutliNo+tG6irWFsjGhnvMkis2KdLb6saYPnLCG7F
|
|
||||||
rSRXvw0HwuaJfXAV3XvIT0pxTg3PXYnc8kt/F8OtG+LiakJbMV1soj8OJ+5lZPKFmvna
|
|
||||||
i2T5mJjEknZsc9qWYmaAEVqIg71jhPH5CjJyehNhsIJ1/O9CH4VF8L0yv9KUMAA4tzog
|
|
||||||
LfI+SpOE2z/wYuMDxi2Ld3FgaVCQgkMM2Tlys8P0DjCaewWeaZFmZKIEEZUbKWbrivTa
|
|
||||||
RSO+Us+9yrt8hDdJuvtf9eXsGvuZtdj/2APRts/0cd7SFAQqRd0DnhGIHoXR74YVHaqi
|
|
||||||
U7IQ==;
|
|
||||||
darn=xinu.tv
|
|
||||||
ARC-Authentication-Results: i=2; mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=zT2yUtVH;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=nmJW8N67;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=1e100.net; s=20230601; t=1748873190; x=1749477990;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature:delivered-to
|
|
||||||
:x-forwarded-for:x-forwarded-to:x-gm-message-state:from:to:cc
|
|
||||||
:subject:date:message-id:reply-to;
|
|
||||||
bh=dgRmOj3aABlB3SNw+xxlI8L9ugJFZ1WMJrtLw/W8tnA=;
|
|
||||||
b=dBjp6JdmFUj0jKPDo9r2/xvfVSvxKaF15UYwYU7itdM18qpCnrgQdHMP2ST7EQBxou
|
|
||||||
58yZfVjrx84gg9phedpVSg4SaBaPIhXsLuUeVQZtPd7J3WYiH4+OGcecjV+cD0dG0TUi
|
|
||||||
o/FbZULNl3REysvoAj+AwUL/ny2FnNU4PIhkeSq+d6iNztkexIKLS8qWqHosenPlVX+E
|
|
||||||
Z7OGQZpK6m1LB5UbCsaODQq5wbNIxlOxqTP1rCHe/hHk53ljiNegzaOS31mVvp1n8/g1
|
|
||||||
pWIZltyZORs0zi6U9+mNd9ZbaeQjHqBrcb2bsTxCD+u0DBuF2RjLguS/feaB25TG8LAg
|
|
||||||
szYg==
|
|
||||||
X-Forwarded-Encrypted: i=2; AJvYcCXfGRAIDqrPsT1vzTMSiuMrlTj/DbRrr+8w7X+iLRH2XK/n8MZhV3UaT0Zia6c6jMrf3s3eHA==@xinu.tv
|
|
||||||
X-Gm-Message-State: AOJu0YxOQEmNiUg4NKf4NM1BgQMqTJaFM6txPnL6u74ff1dZvoSgTC4d
|
|
||||||
TtJJqfdHsajxloSGDsSPqIQ/M/Se/sfymEExFQxDXYA/XasA6+sdye/Ihl9QekGJK9jet1VtQ3r
|
|
||||||
dcg89xnFcxezg3ji6xH8jnSULlp350K9K7LR0LfTQqg6e/BEKEF8XDaNgmJC+RQ==
|
|
||||||
X-Received: by 2002:a05:6512:2246:b0:553:35bb:f7b7 with SMTP id 2adb3069b0e04-55342f92776mr2472199e87.32.1748873190333;
|
|
||||||
Mon, 02 Jun 2025 07:06:30 -0700 (PDT)
|
|
||||||
X-Forwarded-To: gmail@xinu.tv
|
|
||||||
X-Forwarded-For: couchmoney@gmail.com gmail@xinu.tv
|
|
||||||
Delivered-To: couchmoney@gmail.com
|
|
||||||
Received: by 2002:ab3:7457:0:b0:2b1:14e:dc2b with SMTP id g23csp2818972lti;
|
|
||||||
Mon, 2 Jun 2025 07:06:29 -0700 (PDT)
|
|
||||||
X-Received: by 2002:a05:6602:6a8b:b0:86c:f898:74b8 with SMTP id ca18e2360f4ac-86d0521552emr1082401939f.10.1748873188734;
|
|
||||||
Mon, 02 Jun 2025 07:06:28 -0700 (PDT)
|
|
||||||
ARC-Seal: i=1; a=rsa-sha256; t=1748873188; cv=none;
|
|
||||||
d=google.com; s=arc-20240605;
|
|
||||||
b=d2PNXrTE3VYjml3FmbC5rBW6XnsyuyVO3lPyM6VoVKFcvZ7a8tDRB+sh1ibo0D5Nvg
|
|
||||||
3i/Qon0RV401WFb9NQf5P048wpj19G8bOGPZUKMioBZcSxkr1RwH/GW6GBvGS+d+iqbW
|
|
||||||
43KWc6Px7RGOEeYfp8D88CuJ/5kMcsLMfDV1FRHo6T+chVY6c9fQkHjRreSGQcFXglt5
|
|
||||||
yaCpFKkAODO7rSHl2OW2kQ6eGgR0tUjb95+jdZXoU0GS3119CBYK9n9UhNaeXHIk/Zyy
|
|
||||||
f08r4Ce/m3Y6ISr4ovXxDeYNpeeUN1HT3XVyCVQJHjfWrHypKTiOt4q6yBhCgOgZTXJq
|
|
||||||
pL5A==;
|
|
||||||
ARC-Message-Signature: i=1; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20240605;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:dkim-signature:dkim-signature;
|
|
||||||
bh=dgRmOj3aABlB3SNw+xxlI8L9ugJFZ1WMJrtLw/W8tnA=;
|
|
||||||
fh=mbzrMIWIgWMC0ni1xEx+ViW4J0RLAdLdPT2cX81nTlk=;
|
|
||||||
b=YiMakYeE05UctWy9sW90/a3l1Hk1pAPv0+fpk5vmWrADcMwwI8cHVqBp+Nxds5psWa
|
|
||||||
a/zrw9UlxV4HgjLUP+ella/pK8XxK+sitKg0IhPOntwKbq1KfTNheufh4HtWj5yWedHE
|
|
||||||
sO/dVs6z/EW/gWrfBK/3JMgsnz3HrHmaoJ6caCaGI6t5jHxEXI+eJc5zILY+n0MdivkX
|
|
||||||
tJOo0L1s/k6MAdyLr4/IVqpxdhXbUPq44twCBNheHd8T5w1DC9ZXcr54X79fW8Vzbm8/
|
|
||||||
A++H3gnZRGtOayRySYQl04LFLk4YsisdhsKuaJV+WKYCW58wQqJT04mrVkx+m96qr1q0
|
|
||||||
BQtw==;
|
|
||||||
dara=google.com
|
|
||||||
ARC-Authentication-Results: i=1; mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=zT2yUtVH;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=nmJW8N67;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
Received: from mail-sor-f73.google.com (mail-sor-f73.google.com. [209.85.220.73])
|
|
||||||
by mx.google.com with SMTPS id ca18e2360f4ac-86d0213d491sor465078439f.8.2025.06.02.07.06.28
|
|
||||||
for <couchmoney@gmail.com>
|
|
||||||
(Google Transport Security);
|
|
||||||
Mon, 02 Jun 2025 07:06:28 -0700 (PDT)
|
|
||||||
Received-SPF: pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) client-ip=209.85.220.73;
|
|
||||||
Authentication-Results: mx.google.com;
|
|
||||||
dkim=pass header.i=@google.com header.s=20230601 header.b=zT2yUtVH;
|
|
||||||
dkim=pass header.i=@gmail.com header.s=20230601 header.b=nmJW8N67;
|
|
||||||
spf=pass (google.com: domain of tconvertino@gmail.com designates 209.85.220.73 as permitted sender) smtp.mailfrom=tconvertino@gmail.com;
|
|
||||||
dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com;
|
|
||||||
dara=pass header.i=@gmail.com
|
|
||||||
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=google.com; s=20230601; t=1748873188; x=1749477988; dara=google.com;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:from:to:cc:subject:date:message-id:reply-to;
|
|
||||||
bh=dgRmOj3aABlB3SNw+xxlI8L9ugJFZ1WMJrtLw/W8tnA=;
|
|
||||||
b=zT2yUtVHhNy5fFiy6YKzfYCQPlCnufAEoWmbvjvj7mFNYUlLJHZ5FUeNnDs06Z1icR
|
|
||||||
bSVtejKixrz4hjFh9KeKvV9EQNGU7UFgySwqdy6szm+sHZQj+iJAXy85A1QaL6+0Swup
|
|
||||||
2y8QsjVJ96uugM0SaAYZqe+lmLBk6zFWqkg0U37vgwOupAcNsNBd7tos7cxO5eK6Aops
|
|
||||||
FJjr9JAD+ddX03ngH9zfnvlNV/+qbmiP6Hs8OmaJtZof2GLucpHgqUpIdolCh7F72v4p
|
|
||||||
DibO4RShI/IQCw9ejZxhRPBPWQwIdOYLjD/sDunX63M4NCS/63jZfhwqsAVgtmN/cUGq
|
|
||||||
spHQ==
|
|
||||||
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
|
|
||||||
d=gmail.com; s=20230601; t=1748873188; x=1749477988; dara=google.com;
|
|
||||||
h=to:from:subject:date:message-id:auto-submitted:sender:reply-to
|
|
||||||
:mime-version:from:to:cc:subject:date:message-id:reply-to;
|
|
||||||
bh=dgRmOj3aABlB3SNw+xxlI8L9ugJFZ1WMJrtLw/W8tnA=;
|
|
||||||
b=nmJW8N67IylgMNprzzf/IC7V2r7xeY0+8Bl0KcAak6Xly+IhVv3nyccvgdKsp+8Ccd
|
|
||||||
NcikfVOtCsE3gTqviReUbTAKy7PyClAbBTEHC0Ne71549BN+v8zX64RpGDFJGX5pJMG5
|
|
||||||
r0Ak88nxzjWkvDLhlnHmWdt/NggdQEI6T7oP4VZo0f0/Ym7g1WJhSItfdIhSRDNzK3ed
|
|
||||||
WPRXUIb1sW3+N0My4Os6L4IA9kdRk5z0qpQxtsIL9N0dzv4q18q6eH3KfTzVPr59PsYT
|
|
||||||
uSgkWoLQZdfA70MMlIRU5CnGbVDRH4TO/ib433vIblOmtLTkQ4EaOTzncbs0tovVes4z
|
|
||||||
evsQ==
|
|
||||||
X-Google-Smtp-Source: AGHT+IETNpLvkLm7t8VAdDcEcVtxFCttPh/uVZhoQCRlhUNlx9bmg67olJiD9EOND8g0z43NnM8iK4FxezZondExIawx
|
|
||||||
MIME-Version: 1.0
|
|
||||||
X-Received: by 2002:a05:6602:4183:b0:864:4a1b:dfc5 with SMTP id
|
|
||||||
ca18e2360f4ac-86d052154eamr1431889339f.9.1748873188195; Mon, 02 Jun 2025
|
|
||||||
07:06:28 -0700 (PDT)
|
|
||||||
Reply-To: tconvertino@gmail.com
|
|
||||||
Sender: Google Calendar <calendar-notification@google.com>
|
|
||||||
Auto-Submitted: auto-generated
|
|
||||||
Message-ID: <calendar-093be1c9-5d94-4994-8bc5-7daa1cfae47b@google.com>
|
|
||||||
Date: Mon, 02 Jun 2025 14:06:28 +0000
|
|
||||||
Subject: New event: Tamara and Scout in Alaska @ Tue Jun 24 - Mon Jun 30, 2025 (tconvertino@gmail.com)
|
|
||||||
From: tconvertino@gmail.com
|
|
||||||
To: couchmoney@gmail.com
|
|
||||||
Content-Type: multipart/alternative; boundary="00000000000023c70606369745e9"
|
|
||||||
|
|
||||||
--00000000000023c70606369745e9
|
|
||||||
Content-Type: text/plain; charset="UTF-8"; format=flowed; delsp=yes
|
|
||||||
Content-Transfer-Encoding: base64
|
|
||||||
|
|
||||||
VGFtYXJhIGFuZCBTY291dCBpbiBBbGFza2ENClR1ZXNkYXkgSnVuIDI0IOKAkyBNb25kYXkgSnVu
|
|
||||||
IDMwLCAyMDI1DQoNCg0KDQpPcmdhbml6ZXINCnRjb252ZXJ0aW5vQGdtYWlsLmNvbQ0KdGNvbnZl
|
|
||||||
cnRpbm9AZ21haWwuY29tDQoNCn5+Ly9+fg0KSW52aXRhdGlvbiBmcm9tIEdvb2dsZSBDYWxlbmRh
|
|
||||||
cjogaHR0cHM6Ly9jYWxlbmRhci5nb29nbGUuY29tL2NhbGVuZGFyLw0KDQpZb3UgYXJlIHJlY2Vp
|
|
||||||
dmluZyB0aGlzIGVtYWlsIGJlY2F1c2UgeW91IGFyZSBzdWJzY3JpYmVkIHRvIGNhbGVuZGFyICAN
|
|
||||||
Cm5vdGlmaWNhdGlvbnMuIFRvIHN0b3AgcmVjZWl2aW5nIHRoZXNlIGVtYWlscywgZ28gdG8gIA0K
|
|
||||||
aHR0cHM6Ly9jYWxlbmRhci5nb29nbGUuY29tL2NhbGVuZGFyL3Ivc2V0dGluZ3MsIHNlbGVjdCB0
|
|
||||||
aGlzIGNhbGVuZGFyLCBhbmQgIA0KY2hhbmdlICJPdGhlciBub3RpZmljYXRpb25zIi4NCg0KRm9y
|
|
||||||
d2FyZGluZyB0aGlzIGludml0YXRpb24gY291bGQgYWxsb3cgYW55IHJlY2lwaWVudCB0byBzZW5k
|
|
||||||
IGEgcmVzcG9uc2UgdG8gIA0KdGhlIG9yZ2FuaXplciwgYmUgYWRkZWQgdG8gdGhlIGd1ZXN0IGxp
|
|
||||||
c3QsIGludml0ZSBvdGhlcnMgcmVnYXJkbGVzcyBvZiAgDQp0aGVpciBvd24gaW52aXRhdGlvbiBz
|
|
||||||
dGF0dXMsIG9yIG1vZGlmeSB5b3VyIFJTVlAuDQoNCkxlYXJuIG1vcmUgaHR0cHM6Ly9zdXBwb3J0
|
|
||||||
Lmdvb2dsZS5jb20vY2FsZW5kYXIvYW5zd2VyLzM3MTM1I2ZvcndhcmRpbmcNCg==
|
|
||||||
--00000000000023c70606369745e9
|
|
||||||
Content-Type: text/html; charset="UTF-8"
|
|
||||||
Content-Transfer-Encoding: quoted-printable
|
|
||||||
|
|
||||||
<!doctype html><html xmlns=3D"http://www.w3.org/1999/xhtml" xmlns:v=3D"urn:="...truncated for brevity...
|
|
||||||
57
server/testdata/ical-example-1.ics
vendored
57
server/testdata/ical-example-1.ics
vendored
@ -1,57 +0,0 @@
|
|||||||
BEGIN:VCALENDAR
|
|
||||||
METHOD:REQUEST
|
|
||||||
PRODID:Microsoft Exchange Server 2010
|
|
||||||
VERSION:2.0
|
|
||||||
BEGIN:VTIMEZONE
|
|
||||||
TZID:Pacific Standard Time
|
|
||||||
BEGIN:STANDARD
|
|
||||||
DTSTART:16010101T020000
|
|
||||||
TZOFFSETFROM:-0700
|
|
||||||
TZOFFSETTO:-0800
|
|
||||||
RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=1SU;BYMONTH=11
|
|
||||||
END:STANDARD
|
|
||||||
BEGIN:DAYLIGHT
|
|
||||||
DTSTART:16010101T020000
|
|
||||||
TZOFFSETFROM:-0800
|
|
||||||
TZOFFSETTO:-0700
|
|
||||||
RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=2SU;BYMONTH=3
|
|
||||||
END:DAYLIGHT
|
|
||||||
END:VTIMEZONE
|
|
||||||
BEGIN:VEVENT
|
|
||||||
ORGANIZER;CN=Bill Thiede:mailto:wthiede@nvidia.com
|
|
||||||
ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN=Bill:mailt
|
|
||||||
o:couchmoney@gmail.com
|
|
||||||
DESCRIPTION;LANGUAGE=en-US:\n
|
|
||||||
UID:040000008200E00074C5B7101A82E00800000000A1458AEA8E4DDB01000000000000000
|
|
||||||
010000000988BC323BE65A8458B718B5EF8FE8152
|
|
||||||
SUMMARY;LANGUAGE=en-US:dentist night guard
|
|
||||||
DTSTART;TZID=Pacific Standard Time:20250108T080000
|
|
||||||
DTEND;TZID=Pacific Standard Time:20250108T090000
|
|
||||||
CLASS:PUBLIC
|
|
||||||
PRIORITY:5
|
|
||||||
DTSTAMP:20241213T184408Z
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SEQUENCE:0
|
|
||||||
LOCATION;LANGUAGE=en-US:
|
|
||||||
X-MICROSOFT-CDO-APPT-SEQUENCE:0
|
|
||||||
X-MICROSOFT-CDO-OWNERAPPTID:2123132523
|
|
||||||
X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE
|
|
||||||
X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY
|
|
||||||
X-MICROSOFT-CDO-ALLDAYEVENT:FALSE
|
|
||||||
X-MICROSOFT-CDO-IMPORTANCE:1
|
|
||||||
X-MICROSOFT-CDO-INSTTYPE:0
|
|
||||||
X-MICROSOFT-ONLINEMEETINGEXTERNALLINK:
|
|
||||||
X-MICROSOFT-ONLINEMEETINGCONFLINK:
|
|
||||||
X-MICROSOFT-DONOTFORWARDMEETING:FALSE
|
|
||||||
X-MICROSOFT-DISALLOW-COUNTER:FALSE
|
|
||||||
X-MICROSOFT-REQUESTEDATTENDANCEMODE:DEFAULT
|
|
||||||
X-MICROSOFT-ISRESPONSEREQUESTED:TRUE
|
|
||||||
X-MICROSOFT-LOCATIONS:[]
|
|
||||||
BEGIN:VALARM
|
|
||||||
DESCRIPTION:REMINDER
|
|
||||||
TRIGGER;RELATED=START:-PT5M
|
|
||||||
ACTION:DISPLAY
|
|
||||||
END:VALARM
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
30
server/testdata/ical-example-2.ics
vendored
30
server/testdata/ical-example-2.ics
vendored
@ -1,30 +0,0 @@
|
|||||||
BEGIN:VCALENDAR
|
|
||||||
PRODID:-//Google Inc//Google Calendar 70.9054//EN
|
|
||||||
VERSION:2.0
|
|
||||||
CALSCALE:GREGORIAN
|
|
||||||
METHOD:REPLY
|
|
||||||
X-GOOGLE-CALID:g66m0feuqsao8l1c767pvvcg4k@group.calendar.google.com
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20250813T010000Z
|
|
||||||
DTEND:20250813T030000Z
|
|
||||||
DTSTAMP:20250801T022550Z
|
|
||||||
ORGANIZER;CN=Family:mailto:g66m0feuqsao8l1c767pvvcg4k@group.calendar.google
|
|
||||||
.com
|
|
||||||
UID:6os3ap346th6ab9nckp30b9kc8sm2bb160q3gb9l6lgm6or160rjee1mco@google.com
|
|
||||||
ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;CN=superm
|
|
||||||
atute@gmail.com;X-NUM-GUESTS=0:mailto:supermatute@gmail.com
|
|
||||||
X-GOOGLE-CONFERENCE:https://meet.google.com/dcu-hykx-vym
|
|
||||||
CREATED:20250801T015712Z
|
|
||||||
DESCRIPTION:-::~:~::~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~
|
|
||||||
:~:~:~:~:~:~:~:~::~:~::-\nJoin with Google Meet: https://meet.google.com/dc
|
|
||||||
u-hykx-vym\n\nLearn more about Meet at: https://support.google.com/a/users/
|
|
||||||
answer/9282720\n\nPlease do not edit this section.\n-::~:~::~:~:~:~:~:~:~:~
|
|
||||||
:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~:~::~:~::-
|
|
||||||
LAST-MODIFIED:20250801T022549Z
|
|
||||||
LOCATION:
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:[tenative] dinner w/ amatute
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
9
server/testdata/ical-multiday.ics
vendored
9
server/testdata/ical-multiday.ics
vendored
@ -1,9 +0,0 @@
|
|||||||
BEGIN:VCALENDAR
|
|
||||||
VERSION:2.0
|
|
||||||
BEGIN:VEVENT
|
|
||||||
SUMMARY:Multi-day Event
|
|
||||||
DTSTART;VALUE=DATE:20250828
|
|
||||||
DTEND;VALUE=DATE:20250831
|
|
||||||
DESCRIPTION:This event spans multiple days.
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
36
server/testdata/ical-straddle-real.ics
vendored
36
server/testdata/ical-straddle-real.ics
vendored
@ -1,36 +0,0 @@
|
|||||||
BEGIN:VCALENDAR
|
|
||||||
PRODID:-//Google Inc//Google Calendar 70.9054//EN
|
|
||||||
VERSION:2.0
|
|
||||||
CALSCALE:GREGORIAN
|
|
||||||
METHOD:REQUEST
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART;VALUE=DATE:20250830
|
|
||||||
DTEND;VALUE=DATE:20250902
|
|
||||||
DTSTAMP:20250819T183713Z
|
|
||||||
ORGANIZER;CN=Bill Thiede:mailto:couchmoney@gmail.com
|
|
||||||
UID:37kplskaimjnhdnt8r5ui9pv7f@google.com
|
|
||||||
ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP=
|
|
||||||
TRUE;CN=bill@xinu.tv;X-NUM-GUESTS=0:mailto:bill@xinu.tv
|
|
||||||
ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE
|
|
||||||
;CN=Bill Thiede;X-NUM-GUESTS=0:mailto:couchmoney@gmail.com
|
|
||||||
X-MICROSOFT-CDO-OWNERAPPTID:1427505964
|
|
||||||
CREATED:20250819T183709Z
|
|
||||||
DESCRIPTION:
|
|
||||||
LAST-MODIFIED:20250819T183709Z
|
|
||||||
LOCATION:
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Test Straddle Month
|
|
||||||
TRANSP:TRANSPARENT
|
|
||||||
BEGIN:VALARM
|
|
||||||
ACTION:DISPLAY
|
|
||||||
DESCRIPTION:This is an event reminder
|
|
||||||
TRIGGER:-P0DT0H30M0S
|
|
||||||
END:VALARM
|
|
||||||
BEGIN:VALARM
|
|
||||||
ACTION:DISPLAY
|
|
||||||
DESCRIPTION:This is an event reminder
|
|
||||||
TRIGGER:-P0DT7H30M0S
|
|
||||||
END:VALARM
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
13
server/testdata/ical-straddle.ics
vendored
13
server/testdata/ical-straddle.ics
vendored
@ -1,13 +0,0 @@
|
|||||||
|
|
||||||
BEGIN:VCALENDAR
|
|
||||||
VERSION:2.0
|
|
||||||
PRODID:-//Test Recurring Event//EN
|
|
||||||
BEGIN:VEVENT
|
|
||||||
UID:recurring-test-1@example.com
|
|
||||||
DTSTART;VALUE=DATE:20250804
|
|
||||||
DTEND;VALUE=DATE:20250805
|
|
||||||
RRULE:FREQ=WEEKLY;BYDAY=MO,WE,FR;UNTIL=20250825T000000Z
|
|
||||||
SUMMARY:Test Recurring Event (Mon, Wed, Fri)
|
|
||||||
DESCRIPTION:This event recurs every Monday, Wednesday, and Friday in August 2025.
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
@ -1,20 +1,11 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "letterbox-shared"
|
name = "shared"
|
||||||
description = "Shared module for letterbox"
|
version = "0.0.29"
|
||||||
authors.workspace = true
|
edition = "2021"
|
||||||
edition.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
publish.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
version.workspace = true
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
build-info = "0.0.42"
|
build-info = "0.0.38"
|
||||||
letterbox-notmuch = { path = "../notmuch", version = "0.17", registry = "xinu" }
|
notmuch = { path = "../notmuch" }
|
||||||
regex = "1.11.1"
|
serde = { version = "1.0.147", features = ["derive"] }
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
|
||||||
sqlx = "0.8.5"
|
|
||||||
strum_macros = "0.27.1"
|
|
||||||
tracing = "0.1.41"
|
|
||||||
|
|||||||
@ -1,14 +1,8 @@
|
|||||||
use std::{
|
use std::hash::{DefaultHasher, Hash, Hasher};
|
||||||
convert::Infallible,
|
|
||||||
hash::{DefaultHasher, Hash, Hasher},
|
|
||||||
str::FromStr,
|
|
||||||
};
|
|
||||||
|
|
||||||
use build_info::{BuildInfo, VersionControl};
|
use build_info::{BuildInfo, VersionControl};
|
||||||
use letterbox_notmuch::SearchSummary;
|
use notmuch::SearchSummary;
|
||||||
use regex::{RegexBuilder, RegexSetBuilder};
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use tracing::debug;
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
#[derive(Serialize, Deserialize, Debug)]
|
||||||
pub struct SearchResult {
|
pub struct SearchResult {
|
||||||
@ -19,20 +13,11 @@ pub struct SearchResult {
|
|||||||
pub total: usize,
|
pub total: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug, strum_macros::Display)]
|
#[derive(Serialize, Deserialize, Debug)]
|
||||||
pub enum WebsocketMessage {
|
pub struct Message {}
|
||||||
RefreshMessages,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub mod urls {
|
pub mod urls {
|
||||||
pub const MOUNT_POINT: &'static str = "/api";
|
pub const MOUNT_POINT: &'static str = "/api";
|
||||||
pub fn view_original(host: Option<&str>, id: &str) -> String {
|
|
||||||
if let Some(host) = host {
|
|
||||||
format!("//{host}/api/original/{id}")
|
|
||||||
} else {
|
|
||||||
format!("/api/original/{id}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn cid_prefix(host: Option<&str>, cid: &str) -> String {
|
pub fn cid_prefix(host: Option<&str>, cid: &str) -> String {
|
||||||
if let Some(host) = host {
|
if let Some(host) = host {
|
||||||
format!("//{host}/api/cid/{cid}/")
|
format!("//{host}/api/cid/{cid}/")
|
||||||
@ -71,198 +56,3 @@ pub fn compute_color(data: &str) -> String {
|
|||||||
data.hash(&mut hasher);
|
data.hash(&mut hasher);
|
||||||
format!("#{:06x}", hasher.finish() % (1 << 24))
|
format!("#{:06x}", hasher.finish() % (1 << 24))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Copy, Clone, Debug, Default, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize,
|
|
||||||
)]
|
|
||||||
pub enum MatchType {
|
|
||||||
From,
|
|
||||||
Sender,
|
|
||||||
To,
|
|
||||||
Cc,
|
|
||||||
Subject,
|
|
||||||
ListId,
|
|
||||||
DeliveredTo,
|
|
||||||
XForwardedTo,
|
|
||||||
ReplyTo,
|
|
||||||
XOriginalTo,
|
|
||||||
XSpam,
|
|
||||||
Body,
|
|
||||||
#[default]
|
|
||||||
Unknown,
|
|
||||||
}
|
|
||||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
|
||||||
pub struct Match {
|
|
||||||
pub match_type: MatchType,
|
|
||||||
pub needle: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
|
||||||
pub struct Rule {
|
|
||||||
pub stop_on_match: bool,
|
|
||||||
pub matches: Vec<Match>,
|
|
||||||
pub tag: String,
|
|
||||||
}
|
|
||||||
impl Rule {
|
|
||||||
pub fn is_match(&self, header_key: &str, header_value: &str) -> bool {
|
|
||||||
let pats: Vec<_> = self
|
|
||||||
.matches
|
|
||||||
.iter()
|
|
||||||
.filter_map(|m| match m.match_type {
|
|
||||||
MatchType::To => Some("^(to|cc|bcc|x-original-to)$"),
|
|
||||||
MatchType::From => Some("^from$"),
|
|
||||||
MatchType::Sender => Some("^sender$"),
|
|
||||||
MatchType::Subject => Some("^subject$"),
|
|
||||||
MatchType::ListId => Some("^list-id$"),
|
|
||||||
MatchType::XOriginalTo => Some("^x-original-to$"),
|
|
||||||
MatchType::ReplyTo => Some("^reply-to$"),
|
|
||||||
MatchType::XSpam => Some("^x-spam$"),
|
|
||||||
MatchType::Body => None,
|
|
||||||
c => panic!("TODO handle '{c:?}' match type"),
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let set = RegexSetBuilder::new(&pats)
|
|
||||||
.case_insensitive(true)
|
|
||||||
.build()
|
|
||||||
.expect("failed to compile regex for matches");
|
|
||||||
let matches: Vec<_> = set.matches(header_key).into_iter().collect();
|
|
||||||
if !matches.is_empty() {
|
|
||||||
//info!("matched key '{header_key}' '{header_value}'");
|
|
||||||
for m_idx in matches {
|
|
||||||
let needle = regex::escape(&self.matches[m_idx].needle);
|
|
||||||
let pat = RegexBuilder::new(&needle)
|
|
||||||
.case_insensitive(true)
|
|
||||||
.build()
|
|
||||||
.expect("failed to compile regex for needle");
|
|
||||||
if pat.is_match(header_value) {
|
|
||||||
debug!("{header_key} matched {header_value} against {needle}");
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod matches {
|
|
||||||
// From https://linux.die.net/man/5/procmailrc
|
|
||||||
// If the regular expression contains '^TO_' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^-a-zA-Z0-9_.])?)'
|
|
||||||
// If the regular expression contains '^TO' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^a-zA-Z])?)', which should catch all destination specifications containing a specific word.
|
|
||||||
|
|
||||||
pub const TO: &'static str = "TO";
|
|
||||||
pub const CC: &'static str = "Cc";
|
|
||||||
pub const TOCC: &'static str = "(TO|Cc)";
|
|
||||||
pub const FROM: &'static str = "From";
|
|
||||||
pub const SENDER: &'static str = "Sender";
|
|
||||||
pub const SUBJECT: &'static str = "Subject";
|
|
||||||
pub const DELIVERED_TO: &'static str = "Delivered-To";
|
|
||||||
pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
|
|
||||||
pub const REPLY_TO: &'static str = "Reply-To";
|
|
||||||
pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
|
|
||||||
pub const LIST_ID: &'static str = "List-ID";
|
|
||||||
pub const X_SPAM: &'static str = "X-Spam";
|
|
||||||
pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for Match {
|
|
||||||
type Err = Infallible;
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
// Examples:
|
|
||||||
// "* 1^0 ^TOsonyrewards.com@xinu.tv"
|
|
||||||
// "* ^TOsonyrewards.com@xinu.tv"
|
|
||||||
let mut it = s.split_whitespace().skip(1);
|
|
||||||
let mut needle = it.next().unwrap();
|
|
||||||
if needle == "1^0" {
|
|
||||||
needle = it.next().unwrap();
|
|
||||||
}
|
|
||||||
let mut needle = vec![needle];
|
|
||||||
needle.extend(it);
|
|
||||||
let needle = needle.join(" ");
|
|
||||||
let first = needle.chars().nth(0).unwrap_or(' ');
|
|
||||||
use matches::*;
|
|
||||||
if first == '^' {
|
|
||||||
let needle = &needle[1..];
|
|
||||||
if needle.starts_with(TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::To,
|
|
||||||
needle: cleanup_match(TO, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(FROM) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::From,
|
|
||||||
needle: cleanup_match(FROM, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(CC) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::Cc,
|
|
||||||
needle: cleanup_match(CC, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(TOCC) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::To,
|
|
||||||
needle: cleanup_match(TOCC, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(SENDER) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::Sender,
|
|
||||||
needle: cleanup_match(SENDER, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(SUBJECT) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::Subject,
|
|
||||||
needle: cleanup_match(SUBJECT, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(X_ORIGINAL_TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::XOriginalTo,
|
|
||||||
needle: cleanup_match(X_ORIGINAL_TO, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(LIST_ID) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::ListId,
|
|
||||||
needle: cleanup_match(LIST_ID, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(REPLY_TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::ReplyTo,
|
|
||||||
needle: cleanup_match(REPLY_TO, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(X_SPAM_FLAG) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::XSpam,
|
|
||||||
needle: '*'.to_string(),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(X_SPAM) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::XSpam,
|
|
||||||
needle: '*'.to_string(),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(DELIVERED_TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::DeliveredTo,
|
|
||||||
needle: cleanup_match(DELIVERED_TO, needle),
|
|
||||||
});
|
|
||||||
} else if needle.starts_with(X_FORWARDED_TO) {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::XForwardedTo,
|
|
||||||
needle: cleanup_match(X_FORWARDED_TO, needle),
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
unreachable!("needle: '{needle}'")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return Ok(Match {
|
|
||||||
match_type: MatchType::Body,
|
|
||||||
needle: cleanup_match("", &needle),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fn unescape(s: &str) -> String {
|
|
||||||
s.replace('\\', "")
|
|
||||||
}
|
|
||||||
pub fn cleanup_match(prefix: &str, s: &str) -> String {
|
|
||||||
unescape(&s[prefix.len()..]).replace(".*", "")
|
|
||||||
}
|
|
||||||
|
|||||||
@ -1,58 +1,49 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "letterbox-web"
|
version = "0.0.29"
|
||||||
description = "Web frontend for letterbox"
|
name = "letterbox"
|
||||||
authors.workspace = true
|
repository = "https://github.com/seed-rs/seed-quickstart"
|
||||||
edition.workspace = true
|
authors = ["Bill Thiede <git@xinu.tv>"]
|
||||||
license.workspace = true
|
description = "App Description"
|
||||||
publish.workspace = true
|
categories = ["category"]
|
||||||
repository.workspace = true
|
license = "MIT"
|
||||||
version.workspace = true
|
readme = "./README.md"
|
||||||
|
edition = "2018"
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
build-info-build = "0.0.42"
|
build-info-build = "0.0.38"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
#wasm-bindgen-test = "0.3.50"
|
wasm-bindgen-test = "0.3.33"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
console_error_panic_hook = "0.1.7"
|
console_error_panic_hook = "0.1.7"
|
||||||
log = "0.4.27"
|
log = "0.4.17"
|
||||||
seed = { version = "0.10.0", features = ["routing"] }
|
seed = { version = "0.10.0", features = ["routing"] }
|
||||||
#seed = "0.9.2"
|
#seed = "0.9.2"
|
||||||
console_log = { version = "0.1.4", registry = "xinu" }
|
console_log = {git = "http://git-private.h.xinu.tv/wathiede/console_log.git"}
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.147", features = ["derive"] }
|
||||||
itertools = "0.14.0"
|
notmuch = {path = "../notmuch"}
|
||||||
serde_json = { version = "1.0.140", features = ["unbounded_depth"] }
|
shared = {path = "../shared"}
|
||||||
chrono = "0.4.40"
|
itertools = "0.10.5"
|
||||||
graphql_client = "0.15.0"
|
serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
|
||||||
thiserror = "2.0.12"
|
chrono = "0.4.31"
|
||||||
gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
|
graphql_client = "0.13.0"
|
||||||
|
thiserror = "1.0.50"
|
||||||
|
seed_hooks = { git = "https://github.com/wathiede/styles_hooks", package = "seed_hooks", branch = "main" }
|
||||||
|
gloo-net = { version = "0.4.0", features = ["json", "serde_json"] }
|
||||||
human_format = "1.1.0"
|
human_format = "1.1.0"
|
||||||
build-info = "0.0.42"
|
build-info = "0.0.38"
|
||||||
wasm-bindgen = "=0.2.100"
|
|
||||||
uuid = { version = "1.16.0", features = [
|
|
||||||
"js",
|
|
||||||
] } # direct dep to set js feature, prevents Rng issues
|
|
||||||
letterbox-shared = { path = "../shared/", version = "0.17", registry = "xinu" }
|
|
||||||
seed_hooks = { version = "0.4.1", registry = "xinu" }
|
|
||||||
strum_macros = "0.27.1"
|
|
||||||
gloo-console = "0.3.0"
|
|
||||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
|
||||||
wasm-sockets = "1.0.0"
|
|
||||||
|
|
||||||
[package.metadata.wasm-pack.profile.release]
|
[package.metadata.wasm-pack.profile.release]
|
||||||
wasm-opt = ['-Os']
|
wasm-opt = ['-Os']
|
||||||
|
|
||||||
[dependencies.web-sys]
|
[dependencies.web-sys]
|
||||||
version = "0.3.77"
|
version = "0.3.58"
|
||||||
features = [
|
features = [
|
||||||
"Clipboard",
|
"Clipboard",
|
||||||
"DomRect",
|
"DomRect",
|
||||||
"Element",
|
"Element",
|
||||||
"History",
|
|
||||||
"MediaQueryList",
|
"MediaQueryList",
|
||||||
"Navigator",
|
"Navigator",
|
||||||
"Performance",
|
|
||||||
"ScrollRestoration",
|
|
||||||
"Window",
|
"Window",
|
||||||
]
|
]
|
||||||
|
|||||||
@ -1,26 +1,14 @@
|
|||||||
[build]
|
[build]
|
||||||
release = false
|
release = true
|
||||||
|
|
||||||
[serve]
|
[serve]
|
||||||
# The address to serve on.
|
# The address to serve on.
|
||||||
address = "0.0.0.0"
|
address = "0.0.0.0"
|
||||||
port = 6758
|
port = 6758
|
||||||
|
|
||||||
[[proxy]]
|
|
||||||
ws = true
|
|
||||||
backend = "ws://localhost:9345/api/ws"
|
|
||||||
|
|
||||||
[[proxy]]
|
[[proxy]]
|
||||||
backend = "http://localhost:9345/api/"
|
backend = "http://localhost:9345/api/"
|
||||||
|
|
||||||
[[proxy]]
|
|
||||||
backend = "http://localhost:9345/notification/"
|
|
||||||
|
|
||||||
[[hooks]]
|
|
||||||
stage = "pre_build"
|
|
||||||
command = "printf"
|
|
||||||
command_arguments = ["\\033c"]
|
|
||||||
|
|
||||||
#[[hooks]]
|
#[[hooks]]
|
||||||
#stage = "pre_build"
|
#stage = "pre_build"
|
||||||
#command = "cargo"
|
#command = "cargo"
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
fn main() {
|
fn main() {
|
||||||
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
|
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
|
||||||
// and `build_info::format!` in the main program.
|
// and `build_info::format!` in the main program.
|
||||||
build_info_build::build_script();
|
build_info_build::build_script();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,3 +0,0 @@
|
|||||||
query CatchupQuery($query: String!) {
|
|
||||||
catchup(query: $query)
|
|
||||||
}
|
|
||||||
@ -14,7 +14,6 @@ query FrontPageQuery($query: String!, $after: String $before: String, $first: In
|
|||||||
subject
|
subject
|
||||||
authors
|
authors
|
||||||
tags
|
tags
|
||||||
corpus
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
tags {
|
tags {
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user