Compare commits
164 Commits
a33e1f5d3c
...
slurp-toml
| Author | SHA1 | Date | |
|---|---|---|---|
| 86805f38e3 | |||
| 62b17bd6a6 | |||
| c0bac99d5a | |||
| 3b69c5e74b | |||
| 539fd469cc | |||
| 442688c35c | |||
| da27f02237 | |||
| 9460e354b7 | |||
| 6bab128ed9 | |||
| 3856b4ca5a | |||
| bef39eefa5 | |||
| b0366c7b4d | |||
| ca02d84d63 | |||
| 461d5de886 | |||
| f8134dad7a | |||
| 30f510bb03 | |||
| e7cbf9cc45 | |||
| 5108213af5 | |||
| d148f625ac | |||
| a9b8f5a88f | |||
| 539b584d9b | |||
| 2f8d83fc4b | |||
| 86ee1257fa | |||
| 03f1035e0e | |||
| bd578191a8 | |||
| d4fc2e2ef1 | |||
| cde30de81c | |||
| 96be74e3ee | |||
| b78d34b27e | |||
| b4b64c33a6 | |||
| 47b1875022 | |||
| b06cbd1381 | |||
| 9e35f8ca6c | |||
| 8eaefde67d | |||
| d5a3324837 | |||
| f5c90d8770 | |||
| 825a125a62 | |||
| da7cf37dae | |||
| 1985ae1f49 | |||
| 91eb3019f9 | |||
| 66e8e00a9b | |||
| 4b8923d852 | |||
| baba720749 | |||
| 1ec22599cc | |||
| c69017bc36 | |||
| 48bf57fbbe | |||
| 3491856784 | |||
| f887c15b46 | |||
| 7786f850d1 | |||
| cad778734e | |||
| 1210f7038a | |||
| f9ab7284a3 | |||
| 100865c923 | |||
| b8c1710a83 | |||
| 215b8cd41d | |||
| 487d7084c3 | |||
| b1e761b26f | |||
| 3efe90ca21 | |||
| 61649e1e04 | |||
| 13ac352a10 | |||
| 5ca7a25e8d | |||
| 7bb8ef0938 | |||
| 5c55a290ac | |||
| 4e3e1b075d | |||
| a8c5a164ff | |||
| 1f393f1c7f | |||
| fdaff70231 | |||
| 7218c13b9e | |||
| 934cb9d91b | |||
| 4faef5e017 | |||
| 5c813e7350 | |||
| fb754469ce | |||
| 548b5a0ab0 | |||
| f77d0776c4 | |||
| e73f70af8f | |||
| a9e6120f81 | |||
| 090a010a63 | |||
| 85c762a297 | |||
| a8d5617cf2 | |||
| 760cec01a8 | |||
| 446fcfe37f | |||
| 71de3ef8ae | |||
| d98d429b5c | |||
| cf5a6fadfd | |||
| 9a078cd238 | |||
| a81a803cca | |||
| 816587b688 | |||
| 4083c58bbd | |||
| 8769e5acd4 | |||
| 3edf9fdb5d | |||
| ac0ce29c76 | |||
| 5279578c64 | |||
| 632f64261e | |||
| b5e25eef78 | |||
| 8a237bf8e1 | |||
| c5def6c0e3 | |||
| d1cfc77148 | |||
| c314e3c798 | |||
| 7c5ef96ff0 | |||
| 474cf38180 | |||
| e81a452dfb | |||
| e570202ba2 | |||
| a84c9f0eaf | |||
| 530bd8e350 | |||
| 359e798cfa | |||
| d7d257a6b5 | |||
| 9ad9ff6879 | |||
| 56bc1cf7ed | |||
| e0863ac085 | |||
| d5fa89b38c | |||
| 605af13a37 | |||
| 3838cbd6e2 | |||
| c76df0ef90 | |||
| cd77d302df | |||
| 71348d562d | |||
| b6ae46db93 | |||
| 6cb84054ed | |||
| 7b511c1673 | |||
| bfd5e12bea | |||
| ad8fb77857 | |||
| 831466ddda | |||
| 4ee34444ae | |||
| 879ddb112e | |||
| 331fb4f11b | |||
| 4e5275ca0e | |||
| 1106377550 | |||
| b5468bced2 | |||
| 01cbe6c037 | |||
| d0a02c2f61 | |||
| c499672dde | |||
| 3aa0b94db4 | |||
| cdb64ed952 | |||
| 834efc5c94 | |||
| 79db94f67f | |||
| ec41f840d5 | |||
| d9d57c66f8 | |||
| 9746c9912b | |||
| abaaddae3a | |||
| 0bf64004ff | |||
| 6fae9cd018 | |||
| 65fcbd4b77 | |||
| dd09bc3168 | |||
| 0bf865fdef | |||
| 5c0c45b99f | |||
| 221f046664 | |||
| 2a9d5b393e | |||
| 90860e5511 | |||
| 0b1f806276 | |||
| 0482713241 | |||
| bb3e18519f | |||
| 3a4d08facc | |||
| 30064d5904 | |||
| c288b7fd67 | |||
| b4d1528612 | |||
| 5fc272054c | |||
| 714e73aeb1 | |||
| 3dfd2d48b3 | |||
| 3a5a9bd66a | |||
| 55d7aec516 | |||
| 96d3e4a7d6 | |||
| beb96aba14 | |||
| 48f66c7096 | |||
| a96b553b08 | |||
| 31a3ac66b6 |
4
.cargo/config.toml
Normal file
4
.cargo/config.toml
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
|
||||||
|
[build]
|
||||||
|
rustflags = [ "--cfg=web_sys_unstable_apis" ]
|
||||||
|
|
||||||
10
.envrc
Normal file
10
.envrc
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
source_up
|
||||||
|
|
||||||
|
export DATABASE_USER="newsreader";
|
||||||
|
export DATABASE_NAME="newsreader";
|
||||||
|
export DATABASE_HOST="nixos-07.h.xinu.tv";
|
||||||
|
export DATABASE_URL="postgres://${DATABASE_USER}@${DATABASE_HOST}/${DATABASE_NAME}";
|
||||||
|
export PROD_DATABASE_USER="newsreader";
|
||||||
|
export PROD_DATABASE_NAME="newsreader";
|
||||||
|
export PROD_DATABASE_HOST="postgres.h.xinu.tv";
|
||||||
|
export PROD_DATABASE_URL="postgres://${PROD_DATABASE_USER}@${PROD_DATABASE_HOST}/${PROD_DATABASE_NAME}";
|
||||||
3903
Cargo.lock
generated
3903
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,8 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
default-members = [
|
||||||
|
"server"
|
||||||
|
]
|
||||||
members = [
|
members = [
|
||||||
"web",
|
"web",
|
||||||
"server",
|
"server",
|
||||||
|
|||||||
6
Makefile
Normal file
6
Makefile
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
.PHONEY: release
|
||||||
|
release:
|
||||||
|
bash scripts/update-crate-version.sh
|
||||||
|
git push
|
||||||
|
|
||||||
|
all: release
|
||||||
4
dev.sh
4
dev.sh
@@ -1,7 +1,7 @@
|
|||||||
cd -- "$( dirname -- "${BASH_SOURCE[0]}" )"
|
cd -- "$( dirname -- "${BASH_SOURCE[0]}" )"
|
||||||
tmux new-session -d -s letterbox-dev
|
tmux new-session -d -s letterbox-dev
|
||||||
tmux rename-window web
|
tmux rename-window web
|
||||||
tmux send-keys "cd web; trunk serve -w ../shared -w ../notmuch -w ./" C-m
|
tmux send-keys "cd web; trunk serve -w ../.git -w ../shared -w ../notmuch -w ./" C-m
|
||||||
tmux new-window -n server
|
tmux new-window -n server
|
||||||
tmux send-keys "cd server; cargo watch -c -x run -w ../shared -w ../notmuch -w ./" C-m
|
tmux send-keys "cd server; cargo watch -c -x run -w ../.git -w ../shared -w ../notmuch -w ./" C-m
|
||||||
tmux attach -d -t letterbox-dev
|
tmux attach -d -t letterbox-dev
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "notmuch"
|
name = "notmuch"
|
||||||
version = "0.1.0"
|
version = "0.0.29"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|||||||
@@ -518,7 +518,8 @@ impl Notmuch {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn count(&self, query: &str) -> Result<usize, NotmuchError> {
|
pub fn count(&self, query: &str) -> Result<usize, NotmuchError> {
|
||||||
let res = self.run_notmuch(["count", query])?;
|
// TODO: compare speed of notmuch count for * w/ and w/o --output=threads
|
||||||
|
let res = self.run_notmuch(["count", "--output=threads", query])?;
|
||||||
// Strip '\n' from res.
|
// Strip '\n' from res.
|
||||||
let s = std::str::from_utf8(&res[..res.len() - 1])?;
|
let s = std::str::from_utf8(&res[..res.len() - 1])?;
|
||||||
Ok(s.parse()?)
|
Ok(s.parse()?)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "procmail2notmuch"
|
name = "procmail2notmuch"
|
||||||
version = "0.1.0"
|
version = "0.0.29"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|||||||
5
scripts/update-crate-version.sh
Executable file
5
scripts/update-crate-version.sh
Executable file
@@ -0,0 +1,5 @@
|
|||||||
|
#!env bash
|
||||||
|
set -e -x
|
||||||
|
cargo-set-version set-version --bump patch
|
||||||
|
VERSION="$(awk -F\" '/^version/ {print $2}' server/Cargo.toml)"
|
||||||
|
git commit Cargo.lock */Cargo.toml -m "Bumping version to ${VERSION:?}"
|
||||||
64
server/.sqlx/query-113694cd5bf0d2582ff3a635776daa608fe88abe1185958c4215646c92335afb.json
generated
Normal file
64
server/.sqlx/query-113694cd5bf0d2582ff3a635776daa608fe88abe1185958c4215646c92335afb.json
generated
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
{
|
||||||
|
"db_name": "PostgreSQL",
|
||||||
|
"query": "SELECT\n date,\n is_read,\n link,\n site,\n summary,\n title,\n name,\n homepage\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n uid = $1\n",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"ordinal": 0,
|
||||||
|
"name": "date",
|
||||||
|
"type_info": "Timestamp"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 1,
|
||||||
|
"name": "is_read",
|
||||||
|
"type_info": "Bool"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 2,
|
||||||
|
"name": "link",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 3,
|
||||||
|
"name": "site",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 4,
|
||||||
|
"name": "summary",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 5,
|
||||||
|
"name": "title",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 6,
|
||||||
|
"name": "name",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 7,
|
||||||
|
"name": "homepage",
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Left": [
|
||||||
|
"Text"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "113694cd5bf0d2582ff3a635776daa608fe88abe1185958c4215646c92335afb"
|
||||||
|
}
|
||||||
62
server/.sqlx/query-1b2244c9b9b64a1395d8d266f5df5352242bbe5efe481b0852e1c1d4b40584a7.json
generated
Normal file
62
server/.sqlx/query-1b2244c9b9b64a1395d8d266f5df5352242bbe5efe481b0852e1c1d4b40584a7.json
generated
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
{
|
||||||
|
"db_name": "PostgreSQL",
|
||||||
|
"query": "SELECT\n site,\n title,\n summary,\n link,\n date,\n is_read,\n uid,\n id\nFROM post\n",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"ordinal": 0,
|
||||||
|
"name": "site",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 1,
|
||||||
|
"name": "title",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 2,
|
||||||
|
"name": "summary",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 3,
|
||||||
|
"name": "link",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 4,
|
||||||
|
"name": "date",
|
||||||
|
"type_info": "Timestamp"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 5,
|
||||||
|
"name": "is_read",
|
||||||
|
"type_info": "Bool"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 6,
|
||||||
|
"name": "uid",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 7,
|
||||||
|
"name": "id",
|
||||||
|
"type_info": "Int4"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Left": []
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "1b2244c9b9b64a1395d8d266f5df5352242bbe5efe481b0852e1c1d4b40584a7"
|
||||||
|
}
|
||||||
55
server/.sqlx/query-2c1954b6db3cbcabf9b878cd1c8ea01c607f46dc43a85b58e19217e7633cf337.json
generated
Normal file
55
server/.sqlx/query-2c1954b6db3cbcabf9b878cd1c8ea01c607f46dc43a85b58e19217e7633cf337.json
generated
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
{
|
||||||
|
"db_name": "PostgreSQL",
|
||||||
|
"query": "SELECT\n site,\n date,\n is_read,\n title,\n uid,\n name\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n ($1::text IS NULL OR site = $1)\n AND (\n NOT $2\n OR NOT is_read\n )\nORDER BY\n date DESC,\n title OFFSET $3\nLIMIT\n $4\n",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"ordinal": 0,
|
||||||
|
"name": "site",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 1,
|
||||||
|
"name": "date",
|
||||||
|
"type_info": "Timestamp"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 2,
|
||||||
|
"name": "is_read",
|
||||||
|
"type_info": "Bool"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 3,
|
||||||
|
"name": "title",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 4,
|
||||||
|
"name": "uid",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 5,
|
||||||
|
"name": "name",
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Left": [
|
||||||
|
"Text",
|
||||||
|
"Bool",
|
||||||
|
"Int8",
|
||||||
|
"Int8"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
true
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "2c1954b6db3cbcabf9b878cd1c8ea01c607f46dc43a85b58e19217e7633cf337"
|
||||||
|
}
|
||||||
32
server/.sqlx/query-2dcbedef656e1b725c5ba4fb67d31ce7962d8714449b2fb630f49a7ed1acc270.json
generated
Normal file
32
server/.sqlx/query-2dcbedef656e1b725c5ba4fb67d31ce7962d8714449b2fb630f49a7ed1acc270.json
generated
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"db_name": "PostgreSQL",
|
||||||
|
"query": "SELECT\n site,\n name,\n count (\n NOT is_read\n OR NULL\n ) unread\nFROM\n post AS p\n JOIN feed AS f ON p.site = f.slug --\n -- TODO: figure this out to make the query faster when only looking for unread\n --WHERE\n -- (\n -- NOT $1\n -- OR NOT is_read\n -- )\nGROUP BY\n 1,\n 2\nORDER BY\n site\n",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"ordinal": 0,
|
||||||
|
"name": "site",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 1,
|
||||||
|
"name": "name",
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 2,
|
||||||
|
"name": "unread",
|
||||||
|
"type_info": "Int8"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Left": []
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
null
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "2dcbedef656e1b725c5ba4fb67d31ce7962d8714449b2fb630f49a7ed1acc270"
|
||||||
|
}
|
||||||
15
server/.sqlx/query-b39147b9d06171cb742141eda4675688cb702fb284758b1224ed3aa2d7f3b3d9.json
generated
Normal file
15
server/.sqlx/query-b39147b9d06171cb742141eda4675688cb702fb284758b1224ed3aa2d7f3b3d9.json
generated
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"db_name": "PostgreSQL",
|
||||||
|
"query": "UPDATE\n post\nSET\n is_read = $1\nWHERE\n uid = $2\n",
|
||||||
|
"describe": {
|
||||||
|
"columns": [],
|
||||||
|
"parameters": {
|
||||||
|
"Left": [
|
||||||
|
"Bool",
|
||||||
|
"Text"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"nullable": []
|
||||||
|
},
|
||||||
|
"hash": "b39147b9d06171cb742141eda4675688cb702fb284758b1224ed3aa2d7f3b3d9"
|
||||||
|
}
|
||||||
23
server/.sqlx/query-e28b890e308f483aa6bd08617548ae66294ae1e99b1cab49f5f4211e0fd7d419.json
generated
Normal file
23
server/.sqlx/query-e28b890e308f483aa6bd08617548ae66294ae1e99b1cab49f5f4211e0fd7d419.json
generated
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"db_name": "PostgreSQL",
|
||||||
|
"query": "SELECT\n COUNT(*) count\nFROM\n post\nWHERE\n ($1::text IS NULL OR site = $1)\n AND (\n NOT $2\n OR NOT is_read\n )\n",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"ordinal": 0,
|
||||||
|
"name": "count",
|
||||||
|
"type_info": "Int8"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Left": [
|
||||||
|
"Text",
|
||||||
|
"Bool"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
null
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "e28b890e308f483aa6bd08617548ae66294ae1e99b1cab49f5f4211e0fd7d419"
|
||||||
|
}
|
||||||
@@ -1,35 +1,42 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "server"
|
name = "server"
|
||||||
version = "0.1.0"
|
version = "0.0.29"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
default-run = "server"
|
default-run = "server"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
|
ammonia = "3.3.0"
|
||||||
notmuch = { path = "../notmuch" }
|
anyhow = "1.0.79"
|
||||||
shared = { path = "../shared" }
|
|
||||||
serde_json = "1.0.87"
|
|
||||||
thiserror = "1.0.37"
|
|
||||||
serde = { version = "1.0.147", features = ["derive"] }
|
|
||||||
log = "0.4.17"
|
|
||||||
tokio = "1.26.0"
|
|
||||||
glog = "0.1.0"
|
|
||||||
urlencoding = "2.1.3"
|
|
||||||
async-graphql = { version = "6.0.11", features = ["log"] }
|
async-graphql = { version = "6.0.11", features = ["log"] }
|
||||||
async-graphql-rocket = "6.0.11"
|
async-graphql-rocket = "6.0.11"
|
||||||
rocket_cors = "0.6.0"
|
async-trait = "0.1.81"
|
||||||
memmap = "0.7.0"
|
build-info = "0.0.38"
|
||||||
mailparse = "0.14.0"
|
cacher = {git = "http://git-private.h.xinu.tv/wathiede/cacher.git"}
|
||||||
ammonia = "3.3.0"
|
|
||||||
lol_html = "1.2.0"
|
|
||||||
css-inline = "0.13.0"
|
css-inline = "0.13.0"
|
||||||
anyhow = "1.0.79"
|
glog = "0.1.0"
|
||||||
maplit = "1.0.2"
|
html-escape = "0.2.13"
|
||||||
linkify = "0.10.0"
|
linkify = "0.10.0"
|
||||||
|
log = "0.4.17"
|
||||||
|
lol_html = "1.2.0"
|
||||||
|
mailparse = "0.15.0"
|
||||||
|
maplit = "1.0.2"
|
||||||
|
memmap = "0.7.0"
|
||||||
|
notmuch = { path = "../notmuch" }
|
||||||
|
reqwest = { version = "0.12.7", features = ["blocking"] }
|
||||||
|
rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
|
||||||
|
rocket_cors = "0.6.0"
|
||||||
|
scraper = "0.20.0"
|
||||||
|
serde = { version = "1.0.147", features = ["derive"] }
|
||||||
|
serde_json = "1.0.87"
|
||||||
|
shared = { path = "../shared" }
|
||||||
|
sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio", "time"] }
|
||||||
|
tantivy = "0.22.0"
|
||||||
|
thiserror = "1.0.37"
|
||||||
|
tokio = "1.26.0"
|
||||||
|
url = "2.5.2"
|
||||||
|
urlencoding = "2.1.3"
|
||||||
|
|
||||||
[dependencies.rocket_contrib]
|
[build-dependencies]
|
||||||
version = "0.4.11"
|
build-info-build = "0.0.38"
|
||||||
default-features = false
|
|
||||||
features = ["json"]
|
|
||||||
|
|||||||
@@ -1,9 +1,54 @@
|
|||||||
[release]
|
[release]
|
||||||
address = "0.0.0.0"
|
address = "0.0.0.0"
|
||||||
port = 9345
|
port = 9345
|
||||||
|
newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
|
||||||
|
newsreader_tantivy_db_path = "../target/database/newsreader"
|
||||||
|
|
||||||
[debug]
|
[debug]
|
||||||
address = "0.0.0.0"
|
address = "0.0.0.0"
|
||||||
port = 9345
|
port = 9345
|
||||||
# Uncomment to make it production like.
|
# Uncomment to make it production like.
|
||||||
#log_level = "critical"
|
#log_level = "critical"
|
||||||
|
newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
|
||||||
|
newsreader_tantivy_db_path = "../target/database/newsreader"
|
||||||
|
slurp_cache_path = "/net/nasx/x/letterbox/slurp"
|
||||||
|
|
||||||
|
[debug.slurp_site_selectors]
|
||||||
|
"atmeta.com" = [
|
||||||
|
"div.entry-content"
|
||||||
|
]
|
||||||
|
"blog.prusa3d.com" = [
|
||||||
|
"article.content .post-block"
|
||||||
|
]
|
||||||
|
"blog.cloudflare.com" = [
|
||||||
|
".author-lists .author-name-tooltip",
|
||||||
|
".post-full-content"
|
||||||
|
]
|
||||||
|
"blog.zsa.io" = [
|
||||||
|
"section.blog-article"
|
||||||
|
]
|
||||||
|
"engineering.fb.com" = [
|
||||||
|
"article"
|
||||||
|
]
|
||||||
|
"hackaday.com" = [
|
||||||
|
"div.entry-featured-image",
|
||||||
|
"div.entry-content"
|
||||||
|
]
|
||||||
|
"mitchellh.com" = [
|
||||||
|
"div.w-full"
|
||||||
|
]
|
||||||
|
"natwelch.com" = [
|
||||||
|
"article div.prose"
|
||||||
|
]
|
||||||
|
"slashdot.org" = [
|
||||||
|
"span.story-byline",
|
||||||
|
"div.p"
|
||||||
|
]
|
||||||
|
"www.redox-os.org" = [
|
||||||
|
"div.content"
|
||||||
|
]
|
||||||
|
"www.smbc-comics.com" = [
|
||||||
|
"img#cc-comic",
|
||||||
|
"div#aftercomic img"
|
||||||
|
]
|
||||||
|
|
||||||
|
|||||||
5
server/build.rs
Normal file
5
server/build.rs
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
fn main() {
|
||||||
|
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
|
||||||
|
// and `build_info::format!` in the main program.
|
||||||
|
build_info_build::build_script();
|
||||||
|
}
|
||||||
10
server/sql/all-posts.sql
Normal file
10
server/sql/all-posts.sql
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
SELECT
|
||||||
|
site,
|
||||||
|
title,
|
||||||
|
summary,
|
||||||
|
link,
|
||||||
|
date,
|
||||||
|
is_read,
|
||||||
|
uid,
|
||||||
|
id
|
||||||
|
FROM post
|
||||||
10
server/sql/count.sql
Normal file
10
server/sql/count.sql
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
SELECT
|
||||||
|
COUNT(*) count
|
||||||
|
FROM
|
||||||
|
post
|
||||||
|
WHERE
|
||||||
|
($1::text IS NULL OR site = $1)
|
||||||
|
AND (
|
||||||
|
NOT $2
|
||||||
|
OR NOT is_read
|
||||||
|
)
|
||||||
6
server/sql/set_unread.sql
Normal file
6
server/sql/set_unread.sql
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
UPDATE
|
||||||
|
post
|
||||||
|
SET
|
||||||
|
is_read = $1
|
||||||
|
WHERE
|
||||||
|
uid = $2
|
||||||
21
server/sql/tags.sql
Normal file
21
server/sql/tags.sql
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
SELECT
|
||||||
|
site,
|
||||||
|
name,
|
||||||
|
count (
|
||||||
|
NOT is_read
|
||||||
|
OR NULL
|
||||||
|
) unread
|
||||||
|
FROM
|
||||||
|
post AS p
|
||||||
|
JOIN feed AS f ON p.site = f.slug --
|
||||||
|
-- TODO: figure this out to make the query faster when only looking for unread
|
||||||
|
--WHERE
|
||||||
|
-- (
|
||||||
|
-- NOT $1
|
||||||
|
-- OR NOT is_read
|
||||||
|
-- )
|
||||||
|
GROUP BY
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
ORDER BY
|
||||||
|
site
|
||||||
14
server/sql/thread.sql
Normal file
14
server/sql/thread.sql
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
SELECT
|
||||||
|
date,
|
||||||
|
is_read,
|
||||||
|
link,
|
||||||
|
site,
|
||||||
|
summary,
|
||||||
|
title,
|
||||||
|
name,
|
||||||
|
homepage
|
||||||
|
FROM
|
||||||
|
post p
|
||||||
|
JOIN feed f ON p.site = f.slug
|
||||||
|
WHERE
|
||||||
|
uid = $1
|
||||||
21
server/sql/threads.sql
Normal file
21
server/sql/threads.sql
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
SELECT
|
||||||
|
site,
|
||||||
|
date,
|
||||||
|
is_read,
|
||||||
|
title,
|
||||||
|
uid,
|
||||||
|
name
|
||||||
|
FROM
|
||||||
|
post p
|
||||||
|
JOIN feed f ON p.site = f.slug
|
||||||
|
WHERE
|
||||||
|
($1::text IS NULL OR site = $1)
|
||||||
|
AND (
|
||||||
|
NOT $2
|
||||||
|
OR NOT is_read
|
||||||
|
)
|
||||||
|
ORDER BY
|
||||||
|
date DESC,
|
||||||
|
title OFFSET $3
|
||||||
|
LIMIT
|
||||||
|
$4
|
||||||
@@ -9,7 +9,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
println!("Sanitizing {src} into {dst}");
|
println!("Sanitizing {src} into {dst}");
|
||||||
let bytes = fs::read(src)?;
|
let bytes = fs::read(src)?;
|
||||||
let html = String::from_utf8_lossy(&bytes);
|
let html = String::from_utf8_lossy(&bytes);
|
||||||
let html = sanitize_html(&html)?;
|
let html = sanitize_html(&html, "", &None)?;
|
||||||
fs::write(dst, html)?;
|
fs::write(dst, html)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|||||||
@@ -1,3 +1,6 @@
|
|||||||
|
// Rocket generates a lot of warnings for handlers
|
||||||
|
// TODO: figure out why
|
||||||
|
#![allow(unreachable_patterns)]
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate rocket;
|
extern crate rocket;
|
||||||
use std::{error::Error, io::Cursor, str::FromStr};
|
use std::{error::Error, io::Cursor, str::FromStr};
|
||||||
@@ -7,6 +10,7 @@ use async_graphql_rocket::{GraphQLQuery, GraphQLRequest, GraphQLResponse};
|
|||||||
use glog::Flags;
|
use glog::Flags;
|
||||||
use notmuch::{Notmuch, NotmuchError, ThreadSet};
|
use notmuch::{Notmuch, NotmuchError, ThreadSet};
|
||||||
use rocket::{
|
use rocket::{
|
||||||
|
fairing::AdHoc,
|
||||||
http::{ContentType, Header},
|
http::{ContentType, Header},
|
||||||
request::Request,
|
request::Request,
|
||||||
response::{content, Debug, Responder},
|
response::{content, Debug, Responder},
|
||||||
@@ -15,43 +19,19 @@ use rocket::{
|
|||||||
};
|
};
|
||||||
use rocket_cors::{AllowedHeaders, AllowedOrigins};
|
use rocket_cors::{AllowedHeaders, AllowedOrigins};
|
||||||
use server::{
|
use server::{
|
||||||
|
config::Config,
|
||||||
error::ServerError,
|
error::ServerError,
|
||||||
graphql::{attachment_bytes, Attachment, GraphqlSchema, Mutation, QueryRoot},
|
graphql::{Attachment, GraphqlSchema, Mutation, QueryRoot},
|
||||||
|
nm::{attachment_bytes, cid_attachment_bytes},
|
||||||
};
|
};
|
||||||
|
use sqlx::postgres::PgPool;
|
||||||
|
use tantivy::{Index, IndexWriter};
|
||||||
|
|
||||||
#[get("/refresh")]
|
#[get("/refresh")]
|
||||||
async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
|
async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
|
||||||
Ok(Json(String::from_utf8_lossy(&nm.new()?).to_string()))
|
Ok(Json(String::from_utf8_lossy(&nm.new()?).to_string()))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("/search")]
|
|
||||||
async fn search_all(
|
|
||||||
nm: &State<Notmuch>,
|
|
||||||
) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
|
|
||||||
search(nm, "*", None, None).await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[get("/search/<query>?<page>&<results_per_page>")]
|
|
||||||
async fn search(
|
|
||||||
nm: &State<Notmuch>,
|
|
||||||
query: &str,
|
|
||||||
page: Option<usize>,
|
|
||||||
results_per_page: Option<usize>,
|
|
||||||
) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
|
|
||||||
let page = page.unwrap_or(0);
|
|
||||||
let results_per_page = results_per_page.unwrap_or(20);
|
|
||||||
let query = urlencoding::decode(query).map_err(NotmuchError::from)?;
|
|
||||||
info!(" search '{query}'");
|
|
||||||
let res = shared::SearchResult {
|
|
||||||
summary: nm.search(&query, page * results_per_page, results_per_page)?,
|
|
||||||
query: query.to_string(),
|
|
||||||
page,
|
|
||||||
results_per_page,
|
|
||||||
total: nm.count(&query)?,
|
|
||||||
};
|
|
||||||
Ok(Json(res))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[get("/show/<query>/pretty")]
|
#[get("/show/<query>/pretty")]
|
||||||
async fn show_pretty(
|
async fn show_pretty(
|
||||||
nm: &State<Notmuch>,
|
nm: &State<Notmuch>,
|
||||||
@@ -111,6 +91,22 @@ impl<'r, 'o: 'r> Responder<'r, 'o> for DownloadAttachmentResponder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[get("/cid/<id>/<cid>")]
|
||||||
|
async fn view_cid(
|
||||||
|
nm: &State<Notmuch>,
|
||||||
|
id: &str,
|
||||||
|
cid: &str,
|
||||||
|
) -> Result<InlineAttachmentResponder, Debug<ServerError>> {
|
||||||
|
let mid = if id.starts_with("id:") {
|
||||||
|
id.to_string()
|
||||||
|
} else {
|
||||||
|
format!("id:{}", id)
|
||||||
|
};
|
||||||
|
info!("view cid attachment {mid} {cid}");
|
||||||
|
let attachment = cid_attachment_bytes(nm, &mid, &cid)?;
|
||||||
|
Ok(InlineAttachmentResponder(attachment))
|
||||||
|
}
|
||||||
|
|
||||||
#[get("/view/attachment/<id>/<idx>/<_>")]
|
#[get("/view/attachment/<id>/<idx>/<_>")]
|
||||||
async fn view_attachment(
|
async fn view_attachment(
|
||||||
nm: &State<Notmuch>,
|
nm: &State<Notmuch>,
|
||||||
@@ -167,7 +163,127 @@ async fn original(
|
|||||||
|
|
||||||
#[rocket::get("/")]
|
#[rocket::get("/")]
|
||||||
fn graphiql() -> content::RawHtml<String> {
|
fn graphiql() -> content::RawHtml<String> {
|
||||||
content::RawHtml(GraphiQLSource::build().endpoint("/graphql").finish())
|
content::RawHtml(GraphiQLSource::build().endpoint("/api/graphql").finish())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::post("/create-news-db")]
|
||||||
|
fn create_news_db(config: &State<Config>) -> Result<String, Debug<ServerError>> {
|
||||||
|
create_news_db_impl(config)?;
|
||||||
|
Ok(format!(
|
||||||
|
"DB created in {}\n",
|
||||||
|
config.newsreader_tantivy_db_path
|
||||||
|
))
|
||||||
|
}
|
||||||
|
fn create_news_db_impl(config: &Config) -> Result<(), ServerError> {
|
||||||
|
std::fs::remove_dir_all(&config.newsreader_tantivy_db_path).map_err(ServerError::from)?;
|
||||||
|
std::fs::create_dir_all(&config.newsreader_tantivy_db_path).map_err(ServerError::from)?;
|
||||||
|
use tantivy::schema::*;
|
||||||
|
let mut schema_builder = Schema::builder();
|
||||||
|
schema_builder.add_text_field("site", STRING | STORED);
|
||||||
|
schema_builder.add_text_field("title", TEXT | STORED);
|
||||||
|
schema_builder.add_text_field("summary", TEXT);
|
||||||
|
schema_builder.add_text_field("link", STRING | STORED);
|
||||||
|
schema_builder.add_date_field("date", FAST);
|
||||||
|
schema_builder.add_bool_field("is_read", FAST);
|
||||||
|
schema_builder.add_text_field("uid", STRING | STORED);
|
||||||
|
schema_builder.add_i64_field("id", FAST);
|
||||||
|
|
||||||
|
let schema = schema_builder.build();
|
||||||
|
Index::create_in_dir(&config.newsreader_tantivy_db_path, schema).map_err(ServerError::from)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::post("/reindex-news-db")]
|
||||||
|
async fn reindex_news_db(
|
||||||
|
pool: &State<PgPool>,
|
||||||
|
config: &State<Config>,
|
||||||
|
) -> Result<String, Debug<ServerError>> {
|
||||||
|
use tantivy::{doc, Term};
|
||||||
|
|
||||||
|
let start_time = std::time::Instant::now();
|
||||||
|
let pool: &PgPool = pool;
|
||||||
|
|
||||||
|
let index =
|
||||||
|
Index::open_in_dir(&config.newsreader_tantivy_db_path).map_err(ServerError::from)?;
|
||||||
|
let mut index_writer = index.writer(50_000_000).map_err(ServerError::from)?;
|
||||||
|
let schema = index.schema();
|
||||||
|
let site = schema.get_field("site").map_err(ServerError::from)?;
|
||||||
|
let title = schema.get_field("title").map_err(ServerError::from)?;
|
||||||
|
let summary = schema.get_field("summary").map_err(ServerError::from)?;
|
||||||
|
let link = schema.get_field("link").map_err(ServerError::from)?;
|
||||||
|
let date = schema.get_field("date").map_err(ServerError::from)?;
|
||||||
|
let is_read = schema.get_field("is_read").map_err(ServerError::from)?;
|
||||||
|
let uid = schema.get_field("uid").map_err(ServerError::from)?;
|
||||||
|
let id = schema.get_field("id").map_err(ServerError::from)?;
|
||||||
|
|
||||||
|
let rows = sqlx::query_file!("sql/all-posts.sql")
|
||||||
|
.fetch_all(pool)
|
||||||
|
.await
|
||||||
|
.map_err(ServerError::from)?;
|
||||||
|
|
||||||
|
let total = rows.len();
|
||||||
|
for (i, r) in rows.into_iter().enumerate() {
|
||||||
|
if i % 10_000 == 0 {
|
||||||
|
info!(
|
||||||
|
"{i}/{total} processed, elapsed {:.2}s",
|
||||||
|
start_time.elapsed().as_secs_f32()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let id_term = Term::from_field_text(uid, &r.uid);
|
||||||
|
index_writer.delete_term(id_term);
|
||||||
|
index_writer
|
||||||
|
.add_document(doc!(
|
||||||
|
site => r.site.expect("UNKOWN_SITE"),
|
||||||
|
title => r.title.expect("UNKOWN_TITLE"),
|
||||||
|
// TODO: clean and extract text from HTML
|
||||||
|
summary => r.summary.expect("UNKNOWN_SUMMARY"),
|
||||||
|
link => r.link.expect("link"),
|
||||||
|
date => tantivy::DateTime::from_primitive(r.date.expect("date")),
|
||||||
|
is_read => r.is_read.expect("is_read"),
|
||||||
|
uid => r.uid,
|
||||||
|
id => r.id as i64,
|
||||||
|
))
|
||||||
|
.map_err(ServerError::from)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
index_writer.commit().map_err(ServerError::from)?;
|
||||||
|
|
||||||
|
info!("took {:.2}s to reindex", start_time.elapsed().as_secs_f32());
|
||||||
|
Ok(format!(
|
||||||
|
"DB openned in {}\n",
|
||||||
|
config.newsreader_tantivy_db_path
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rocket::get("/search-news-db")]
|
||||||
|
fn search_news_db(
|
||||||
|
index: &State<tantivy::Index>,
|
||||||
|
reader: &State<tantivy::IndexReader>,
|
||||||
|
) -> Result<String, Debug<ServerError>> {
|
||||||
|
use tantivy::{collector::TopDocs, query::QueryParser, Document, TantivyDocument};
|
||||||
|
|
||||||
|
let searcher = reader.searcher();
|
||||||
|
let schema = index.schema();
|
||||||
|
let site = schema.get_field("site").map_err(ServerError::from)?;
|
||||||
|
let title = schema.get_field("title").map_err(ServerError::from)?;
|
||||||
|
let summary = schema.get_field("summary").map_err(ServerError::from)?;
|
||||||
|
let query_parser = QueryParser::for_index(&index, vec![site, title, summary]);
|
||||||
|
|
||||||
|
let query = query_parser
|
||||||
|
.parse_query("grapheme")
|
||||||
|
.map_err(ServerError::from)?;
|
||||||
|
let top_docs = searcher
|
||||||
|
.search(&query, &TopDocs::with_limit(10))
|
||||||
|
.map_err(ServerError::from)?;
|
||||||
|
let mut results = vec![];
|
||||||
|
info!("search found {} docs", top_docs.len());
|
||||||
|
for (_score, doc_address) in top_docs {
|
||||||
|
let retrieved_doc: TantivyDocument =
|
||||||
|
searcher.doc(doc_address).map_err(ServerError::from)?;
|
||||||
|
results.push(format!("{}", retrieved_doc.to_json(&schema)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(format!("{}", results.join(" ")))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rocket::get("/graphql?<query..>")]
|
#[rocket::get("/graphql?<query..>")]
|
||||||
@@ -193,6 +309,8 @@ async fn main() -> Result<(), Box<dyn Error>> {
|
|||||||
..Default::default()
|
..Default::default()
|
||||||
})
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
build_info::build_info!(fn bi);
|
||||||
|
info!("Build Info: {}", shared::build_version(bi));
|
||||||
let allowed_origins = AllowedOrigins::all();
|
let allowed_origins = AllowedOrigins::all();
|
||||||
let cors = rocket_cors::CorsOptions {
|
let cors = rocket_cors::CorsOptions {
|
||||||
allowed_origins,
|
allowed_origins,
|
||||||
@@ -206,34 +324,58 @@ async fn main() -> Result<(), Box<dyn Error>> {
|
|||||||
}
|
}
|
||||||
.to_cors()?;
|
.to_cors()?;
|
||||||
|
|
||||||
let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
|
let rkt = rocket::build()
|
||||||
.data(Notmuch::default())
|
|
||||||
.extension(async_graphql::extensions::Logger)
|
|
||||||
.finish();
|
|
||||||
|
|
||||||
let _ = rocket::build()
|
|
||||||
.mount(
|
.mount(
|
||||||
"/",
|
shared::urls::MOUNT_POINT,
|
||||||
routes![
|
routes![
|
||||||
|
create_news_db,
|
||||||
|
reindex_news_db,
|
||||||
|
search_news_db,
|
||||||
original,
|
original,
|
||||||
refresh,
|
refresh,
|
||||||
search_all,
|
|
||||||
search,
|
|
||||||
show_pretty,
|
show_pretty,
|
||||||
show,
|
show,
|
||||||
graphql_query,
|
graphql_query,
|
||||||
graphql_request,
|
graphql_request,
|
||||||
graphiql,
|
graphiql,
|
||||||
|
view_cid,
|
||||||
view_attachment,
|
view_attachment,
|
||||||
download_attachment,
|
download_attachment,
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
.attach(cors)
|
.attach(cors)
|
||||||
.manage(schema)
|
.attach(AdHoc::config::<Config>());
|
||||||
.manage(Notmuch::default())
|
|
||||||
//.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))
|
|
||||||
.launch()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
|
let config: Config = rkt.figment().extract()?;
|
||||||
|
info!("Config:\n{config:#?}");
|
||||||
|
if !std::fs::exists(&config.slurp_cache_path)? {
|
||||||
|
info!("Creating slurp cache @ '{}'", &config.slurp_cache_path);
|
||||||
|
std::fs::create_dir_all(&config.slurp_cache_path)?;
|
||||||
|
}
|
||||||
|
let pool = PgPool::connect(&config.newsreader_database_url).await?;
|
||||||
|
let tantivy_newsreader_index = match Index::open_in_dir(&config.newsreader_tantivy_db_path) {
|
||||||
|
Ok(idx) => idx,
|
||||||
|
Err(_) => {
|
||||||
|
create_news_db_impl(&config)?;
|
||||||
|
Index::open_in_dir(&config.newsreader_tantivy_db_path)?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let tantivy_newsreader_reader = tantivy_newsreader_index.reader()?;
|
||||||
|
let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
|
||||||
|
.data(Notmuch::default())
|
||||||
|
.data(config)
|
||||||
|
.data(pool.clone())
|
||||||
|
.extension(async_graphql::extensions::Logger)
|
||||||
|
.finish();
|
||||||
|
|
||||||
|
let rkt = rkt
|
||||||
|
.manage(schema)
|
||||||
|
.manage(pool)
|
||||||
|
.manage(Notmuch::default())
|
||||||
|
.manage(tantivy_newsreader_index)
|
||||||
|
.manage(tantivy_newsreader_reader);
|
||||||
|
//.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))
|
||||||
|
|
||||||
|
rkt.launch().await?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
1454
server/src/chrome-default.css
Normal file
1454
server/src/chrome-default.css
Normal file
File diff suppressed because it is too large
Load Diff
23
server/src/config.rs
Normal file
23
server/src/config.rs
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
use std::{collections::HashMap, fmt::Display, str::FromStr};
|
||||||
|
|
||||||
|
use scraper::Selector;
|
||||||
|
use serde::{de, Deserialize, Deserializer};
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct DeSelector(pub Selector);
|
||||||
|
impl<'de> Deserialize<'de> for DeSelector {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||||
|
where
|
||||||
|
D: Deserializer<'de>,
|
||||||
|
{
|
||||||
|
let s = String::deserialize(deserializer)?;
|
||||||
|
Ok(DeSelector(Selector::parse(&s).map_err(de::Error::custom)?))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
pub struct Config {
|
||||||
|
pub newsreader_database_url: String,
|
||||||
|
pub newsreader_tantivy_db_path: String,
|
||||||
|
pub slurp_cache_path: String,
|
||||||
|
pub slurp_site_selectors: HashMap<String, Vec<DeSelector>>,
|
||||||
|
}
|
||||||
8
server/src/custom.css
Normal file
8
server/src/custom.css
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
pre {
|
||||||
|
background-color: var(--color-bg);
|
||||||
|
color: var(--color-text);
|
||||||
|
}
|
||||||
|
|
||||||
|
code {
|
||||||
|
background-color: var(--color-bg-secondary);
|
||||||
|
}
|
||||||
@@ -1,16 +1,40 @@
|
|||||||
|
use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error};
|
||||||
|
|
||||||
use mailparse::MailParseError;
|
use mailparse::MailParseError;
|
||||||
|
use tantivy::TantivyError;
|
||||||
|
use tantivy::query::QueryParserError;
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
|
use crate::TransformError;
|
||||||
|
|
||||||
#[derive(Error, Debug)]
|
#[derive(Error, Debug)]
|
||||||
pub enum ServerError {
|
pub enum ServerError {
|
||||||
#[error("notmuch")]
|
#[error("notmuch: {0}")]
|
||||||
NotmuchError(#[from] notmuch::NotmuchError),
|
NotmuchError(#[from] notmuch::NotmuchError),
|
||||||
#[error("flatten")]
|
#[error("flatten")]
|
||||||
FlattenError,
|
FlattenError,
|
||||||
#[error("mail parse error")]
|
#[error("mail parse error: {0}")]
|
||||||
MailParseError(#[from] MailParseError),
|
MailParseError(#[from] MailParseError),
|
||||||
#[error("IO error")]
|
#[error("IO error: {0}")]
|
||||||
IoError(#[from] std::io::Error),
|
IoError(#[from] std::io::Error),
|
||||||
#[error("attachement not found")]
|
#[error("attachement not found")]
|
||||||
PartNotFound,
|
PartNotFound,
|
||||||
|
#[error("sqlx error: {0}")]
|
||||||
|
SQLXError(#[from] sqlx::Error),
|
||||||
|
#[error("html transform error: {0}")]
|
||||||
|
TransformError(#[from] TransformError),
|
||||||
|
#[error("UTF8 error: {0}")]
|
||||||
|
Utf8Error(#[from] Utf8Error),
|
||||||
|
#[error("FromUTF8 error: {0}")]
|
||||||
|
FromUtf8Error(#[from] FromUtf8Error),
|
||||||
|
#[error("error: {0}")]
|
||||||
|
StringError(String),
|
||||||
|
#[error("invalid url: {0}")]
|
||||||
|
UrlParseError(#[from] url::ParseError),
|
||||||
|
#[error("tantivy error: {0}")]
|
||||||
|
TantivyError(#[from] TantivyError),
|
||||||
|
#[error("tantivy query parse error: {0}")]
|
||||||
|
QueryParseError(#[from] QueryParserError),
|
||||||
|
#[error("impossible: {0}")]
|
||||||
|
InfaillibleError(#[from] Infallible),
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,20 +1,275 @@
|
|||||||
|
pub mod config;
|
||||||
pub mod error;
|
pub mod error;
|
||||||
pub mod graphql;
|
pub mod graphql;
|
||||||
|
pub mod newsreader;
|
||||||
pub mod nm;
|
pub mod nm;
|
||||||
|
|
||||||
|
use std::{collections::HashMap, convert::Infallible, str::FromStr, sync::Arc};
|
||||||
|
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use cacher::{Cacher, FilesystemCacher};
|
||||||
use css_inline::{CSSInliner, InlineError, InlineOptions};
|
use css_inline::{CSSInliner, InlineError, InlineOptions};
|
||||||
use linkify::{LinkFinder, LinkKind};
|
use linkify::{LinkFinder, LinkKind};
|
||||||
use log::error;
|
use log::{error, info, warn};
|
||||||
use lol_html::{element, errors::RewritingError, rewrite_str, RewriteStrSettings};
|
use lol_html::{
|
||||||
|
element, errors::RewritingError, html_content::ContentType, rewrite_str, text,
|
||||||
|
RewriteStrSettings,
|
||||||
|
};
|
||||||
use maplit::{hashmap, hashset};
|
use maplit::{hashmap, hashset};
|
||||||
|
use scraper::{Html, Selector};
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
config::DeSelector,
|
||||||
|
newsreader::{extract_thread_id, is_newsreader_thread},
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO: figure out how to use Cow
|
||||||
|
#[async_trait]
|
||||||
|
trait Transformer: Send + Sync {
|
||||||
|
fn should_run(&self, _addr: &Option<Url>, _html: &str) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
// TODO: should html be something like `html_escape` uses:
|
||||||
|
// <S: ?Sized + AsRef<str>>(text: &S) -> Cow<str>
|
||||||
|
async fn transform(&self, addr: &Option<Url>, html: &str) -> Result<String, TransformError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: how would we make this more generic to allow good implementations of Transformer outside
|
||||||
|
// of this module?
|
||||||
#[derive(Error, Debug)]
|
#[derive(Error, Debug)]
|
||||||
pub enum SanitizeError {
|
pub enum TransformError {
|
||||||
#[error("lol-html rewrite error")]
|
#[error("lol-html rewrite error: {0}")]
|
||||||
RewritingError(#[from] RewritingError),
|
RewritingError(#[from] RewritingError),
|
||||||
#[error("css inline error")]
|
#[error("css inline error: {0}")]
|
||||||
InlineError(#[from] InlineError),
|
InlineError(#[from] InlineError),
|
||||||
|
#[error("failed to fetch url error: {0}")]
|
||||||
|
ReqwestError(#[from] reqwest::Error),
|
||||||
|
#[error("failed to parse HTML: {0}")]
|
||||||
|
HtmlParsingError(String),
|
||||||
|
}
|
||||||
|
|
||||||
|
struct SanitizeHtml<'a> {
|
||||||
|
cid_prefix: &'a str,
|
||||||
|
base_url: &'a Option<Url>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<'a> Transformer for SanitizeHtml<'a> {
|
||||||
|
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
|
Ok(sanitize_html(html, self.cid_prefix, self.base_url)?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct EscapeHtml;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl Transformer for EscapeHtml {
|
||||||
|
fn should_run(&self, _: &Option<Url>, html: &str) -> bool {
|
||||||
|
html.contains("&")
|
||||||
|
}
|
||||||
|
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
|
Ok(html_escape::decode_html_entities(html).to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct StripHtml;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl Transformer for StripHtml {
|
||||||
|
fn should_run(&self, _: &Option<Url>, html: &str) -> bool {
|
||||||
|
// Lame test
|
||||||
|
html.contains("<")
|
||||||
|
}
|
||||||
|
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
|
let mut text = String::new();
|
||||||
|
let element_content_handlers = vec![text!("*", |t| {
|
||||||
|
text += t.as_str();
|
||||||
|
Ok(())
|
||||||
|
})];
|
||||||
|
let _ = rewrite_str(
|
||||||
|
html,
|
||||||
|
RewriteStrSettings {
|
||||||
|
element_content_handlers,
|
||||||
|
..RewriteStrSettings::default()
|
||||||
|
},
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(text)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct InlineStyle;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl Transformer for InlineStyle {
|
||||||
|
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
|
let css = concat!(
|
||||||
|
"/* chrome-default.css */\n",
|
||||||
|
include_str!("chrome-default.css"),
|
||||||
|
"\n/* mvp.css */\n",
|
||||||
|
include_str!("mvp.css"),
|
||||||
|
"\n/* Xinu Specific overrides */\n",
|
||||||
|
include_str!("custom.css"),
|
||||||
|
);
|
||||||
|
let inline_opts = InlineOptions {
|
||||||
|
inline_style_tags: true,
|
||||||
|
keep_style_tags: false,
|
||||||
|
keep_link_tags: true,
|
||||||
|
base_url: None,
|
||||||
|
load_remote_stylesheets: true,
|
||||||
|
extra_css: Some(css.into()),
|
||||||
|
preallocate_node_capacity: 32,
|
||||||
|
..InlineOptions::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
//info!("HTML:\n{html}");
|
||||||
|
Ok(match CSSInliner::new(inline_opts).inline(&html) {
|
||||||
|
Ok(inlined_html) => inlined_html,
|
||||||
|
Err(err) => {
|
||||||
|
error!("failed to inline CSS: {err}");
|
||||||
|
html.to_string()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Process images will extract any alt or title tags on images and place them as labels below said
|
||||||
|
/// image. It also handles data-src and data-cfsrc attributes
|
||||||
|
struct FrameImages;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl Transformer for FrameImages {
|
||||||
|
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
|
Ok(rewrite_str(
|
||||||
|
html,
|
||||||
|
RewriteStrSettings {
|
||||||
|
element_content_handlers: vec![
|
||||||
|
element!("img[data-src]", |el| {
|
||||||
|
let src = el
|
||||||
|
.get_attribute("data-src")
|
||||||
|
.unwrap_or("https://placehold.co/600x400".to_string());
|
||||||
|
el.set_attribute("src", &src)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}),
|
||||||
|
element!("img[data-cfsrc]", |el| {
|
||||||
|
let src = el
|
||||||
|
.get_attribute("data-cfsrc")
|
||||||
|
.unwrap_or("https://placehold.co/600x400".to_string());
|
||||||
|
el.set_attribute("src", &src)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}),
|
||||||
|
element!("img[alt], img[title]", |el| {
|
||||||
|
let src = el
|
||||||
|
.get_attribute("src")
|
||||||
|
.unwrap_or("https://placehold.co/600x400".to_string());
|
||||||
|
let alt = el.get_attribute("alt");
|
||||||
|
let title = el.get_attribute("title");
|
||||||
|
let mut frags =
|
||||||
|
vec!["<figure>".to_string(), format!(r#"<img src="{src}">"#)];
|
||||||
|
alt.map(|t| {
|
||||||
|
if !t.is_empty() {
|
||||||
|
frags.push(format!("<figcaption>Alt: {t}</figcaption>"))
|
||||||
|
}
|
||||||
|
});
|
||||||
|
title.map(|t| {
|
||||||
|
if !t.is_empty() {
|
||||||
|
frags.push(format!("<figcaption>Title: {t}</figcaption>"))
|
||||||
|
}
|
||||||
|
});
|
||||||
|
frags.push("</figure>".to_string());
|
||||||
|
el.replace(&frags.join("\n"), ContentType::Html);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
..RewriteStrSettings::default()
|
||||||
|
},
|
||||||
|
)?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
struct AddOutlink;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl Transformer for AddOutlink {
|
||||||
|
fn should_run(&self, link: &Option<Url>, html: &str) -> bool {
|
||||||
|
if let Some(link) = link {
|
||||||
|
link.scheme().starts_with("http") && !html.contains(link.as_str())
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
|
if let Some(link) = link {
|
||||||
|
Ok(format!(
|
||||||
|
r#"
|
||||||
|
{html}
|
||||||
|
<div><a href="{}">View on site</a></div>
|
||||||
|
"#,
|
||||||
|
link
|
||||||
|
))
|
||||||
|
} else {
|
||||||
|
Ok(html.to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct SlurpContents<'h> {
|
||||||
|
cacher: Arc<Mutex<FilesystemCacher>>,
|
||||||
|
site_selectors: &'h HashMap<String, Vec<DeSelector>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'h> SlurpContents<'h> {
|
||||||
|
fn get_selectors(&self, link: &Url) -> Option<&[DeSelector]> {
|
||||||
|
for (host, selector) in self.site_selectors.iter() {
|
||||||
|
if link.host_str().map(|h| h.contains(host)).unwrap_or(false) {
|
||||||
|
return Some(&selector);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<'h> Transformer for SlurpContents<'h> {
|
||||||
|
fn should_run(&self, link: &Option<Url>, _: &str) -> bool {
|
||||||
|
if let Some(link) = link {
|
||||||
|
return self.get_selectors(link).is_some();
|
||||||
|
}
|
||||||
|
false
|
||||||
|
}
|
||||||
|
async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
|
||||||
|
let Some(link) = link else {
|
||||||
|
return Ok(html.to_string());
|
||||||
|
};
|
||||||
|
let Some(selectors) = self.get_selectors(&link) else {
|
||||||
|
return Ok(html.to_string());
|
||||||
|
};
|
||||||
|
let mut cacher = self.cacher.lock().await;
|
||||||
|
let body = if let Some(body) = cacher.get(link.as_str()) {
|
||||||
|
info!("cache hit for {link}");
|
||||||
|
String::from_utf8_lossy(&body).to_string()
|
||||||
|
} else {
|
||||||
|
let body = reqwest::get(link.as_str()).await?.text().await?;
|
||||||
|
cacher.set(link.as_str(), body.as_bytes());
|
||||||
|
body
|
||||||
|
};
|
||||||
|
let doc = Html::parse_document(&body);
|
||||||
|
|
||||||
|
let mut results = Vec::new();
|
||||||
|
for selector in selectors {
|
||||||
|
for frag in doc.select(&selector.0) {
|
||||||
|
results.push(frag.html())
|
||||||
|
// TODO: figure out how to warn if there were no hits
|
||||||
|
//warn!("couldn't find '{:?}' in {}", selector, link);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(results.join(""))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn linkify_html(text: &str) -> String {
|
pub fn linkify_html(text: &str) -> String {
|
||||||
@@ -43,14 +298,49 @@ pub fn linkify_html(text: &str) -> String {
|
|||||||
parts.join("")
|
parts.join("")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
|
// html contains the content to be cleaned, and cid_prefix is used to resolve mixed part image
|
||||||
let element_content_handlers = vec![
|
// referrences
|
||||||
|
pub fn sanitize_html(
|
||||||
|
html: &str,
|
||||||
|
cid_prefix: &str,
|
||||||
|
base_url: &Option<Url>,
|
||||||
|
) -> Result<String, TransformError> {
|
||||||
|
let inline_opts = InlineOptions {
|
||||||
|
inline_style_tags: true,
|
||||||
|
keep_style_tags: true,
|
||||||
|
keep_link_tags: false,
|
||||||
|
base_url: None,
|
||||||
|
load_remote_stylesheets: false,
|
||||||
|
extra_css: None,
|
||||||
|
preallocate_node_capacity: 32,
|
||||||
|
..InlineOptions::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let html = match CSSInliner::new(inline_opts).inline(&html) {
|
||||||
|
Ok(inlined_html) => inlined_html,
|
||||||
|
Err(err) => {
|
||||||
|
error!("failed to inline CSS: {err}");
|
||||||
|
html.to_string()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let mut element_content_handlers = vec![
|
||||||
// Open links in new tab
|
// Open links in new tab
|
||||||
element!("a[href]", |el| {
|
element!("a[href]", |el| {
|
||||||
el.set_attribute("target", "_blank").unwrap();
|
el.set_attribute("target", "_blank").unwrap();
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}),
|
}),
|
||||||
|
// Replace mixed part CID images with URL
|
||||||
|
element!("img[src]", |el| {
|
||||||
|
let src = el
|
||||||
|
.get_attribute("src")
|
||||||
|
.expect("src was required")
|
||||||
|
.replace("cid:", cid_prefix);
|
||||||
|
|
||||||
|
el.set_attribute("src", &src)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}),
|
||||||
// Only secure image URLs
|
// Only secure image URLs
|
||||||
element!("img[src]", |el| {
|
element!("img[src]", |el| {
|
||||||
let src = el
|
let src = el
|
||||||
@@ -60,28 +350,60 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
|
|||||||
|
|
||||||
el.set_attribute("src", &src)?;
|
el.set_attribute("src", &src)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}),
|
||||||
|
// Add https to href with //<domain name>
|
||||||
|
element!("link[href]", |el| {
|
||||||
|
info!("found link[href] {el:?}");
|
||||||
|
let mut href = el.get_attribute("href").expect("href was required");
|
||||||
|
if href.starts_with("//") {
|
||||||
|
warn!("adding https to {href}");
|
||||||
|
href.insert_str(0, "https:");
|
||||||
|
}
|
||||||
|
|
||||||
|
el.set_attribute("href", &href)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}),
|
||||||
|
// Add https to src with //<domain name>
|
||||||
|
element!("style[src]", |el| {
|
||||||
|
let mut src = el.get_attribute("src").expect("src was required");
|
||||||
|
if src.starts_with("//") {
|
||||||
|
src.insert_str(0, "https:");
|
||||||
|
}
|
||||||
|
|
||||||
|
el.set_attribute("src", &src)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}),
|
}),
|
||||||
];
|
];
|
||||||
|
if let Some(base_url) = base_url {
|
||||||
|
element_content_handlers.extend(vec![
|
||||||
|
// Make links with relative URLs absolute
|
||||||
|
element!("a[href]", |el| {
|
||||||
|
if let Some(Ok(href)) = el.get_attribute("href").map(|href| base_url.join(&href)) {
|
||||||
|
el.set_attribute("href", &href.as_str()).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
let inline_opts = InlineOptions {
|
Ok(())
|
||||||
inline_style_tags: true,
|
}),
|
||||||
keep_style_tags: false,
|
// Make images with relative srcs absolute
|
||||||
keep_link_tags: false,
|
element!("img[src]", |el| {
|
||||||
base_url: None,
|
if let Some(Ok(src)) = el.get_attribute("src").map(|src| base_url.join(&src)) {
|
||||||
load_remote_stylesheets: false,
|
el.set_attribute("src", &src.as_str()).unwrap();
|
||||||
extra_css: None,
|
}
|
||||||
preallocate_node_capacity: 32,
|
|
||||||
..InlineOptions::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
let inlined_html = match CSSInliner::new(inline_opts).inline(&html) {
|
Ok(())
|
||||||
Ok(inlined_html) => inlined_html,
|
}),
|
||||||
Err(err) => {
|
]);
|
||||||
error!("failed to inline CSS: {err}");
|
}
|
||||||
html.to_string()
|
let html = rewrite_str(
|
||||||
}
|
&html,
|
||||||
};
|
RewriteStrSettings {
|
||||||
|
element_content_handlers,
|
||||||
|
..RewriteStrSettings::default()
|
||||||
|
},
|
||||||
|
)?;
|
||||||
// Default's don't allow style, but we want to preserve that.
|
// Default's don't allow style, but we want to preserve that.
|
||||||
// TODO: remove 'class' if rendering mails moves to a two phase process where abstract message
|
// TODO: remove 'class' if rendering mails moves to a two phase process where abstract message
|
||||||
// types are collected, santized, and then grouped together as one big HTML doc
|
// types are collected, santized, and then grouped together as one big HTML doc
|
||||||
@@ -129,6 +451,7 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
|
|||||||
"hgroup",
|
"hgroup",
|
||||||
"hr",
|
"hr",
|
||||||
"i",
|
"i",
|
||||||
|
"iframe", // wathiede
|
||||||
"img",
|
"img",
|
||||||
"ins",
|
"ins",
|
||||||
"kbd",
|
"kbd",
|
||||||
@@ -137,6 +460,7 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
|
|||||||
"map",
|
"map",
|
||||||
"mark",
|
"mark",
|
||||||
"nav",
|
"nav",
|
||||||
|
"noscript", // wathiede
|
||||||
"ol",
|
"ol",
|
||||||
"p",
|
"p",
|
||||||
"pre",
|
"pre",
|
||||||
@@ -170,7 +494,7 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
|
|||||||
];
|
];
|
||||||
let tag_attributes = hashmap![
|
let tag_attributes = hashmap![
|
||||||
"a" => hashset![
|
"a" => hashset![
|
||||||
"href", "hreflang"
|
"href", "hreflang", "target",
|
||||||
],
|
],
|
||||||
"bdo" => hashset![
|
"bdo" => hashset![
|
||||||
"dir"
|
"dir"
|
||||||
@@ -190,6 +514,9 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
|
|||||||
"hr" => hashset![
|
"hr" => hashset![
|
||||||
"align", "size", "width"
|
"align", "size", "width"
|
||||||
],
|
],
|
||||||
|
"iframe" => hashset![
|
||||||
|
"src", "allow", "allowfullscreen"
|
||||||
|
],
|
||||||
"img" => hashset![
|
"img" => hashset![
|
||||||
"align", "alt", "height", "src", "width"
|
"align", "alt", "height", "src", "width"
|
||||||
],
|
],
|
||||||
@@ -225,19 +552,127 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
|
|||||||
],
|
],
|
||||||
];
|
];
|
||||||
|
|
||||||
let clean_html = ammonia::Builder::default()
|
let html = ammonia::Builder::default()
|
||||||
.tags(tags)
|
.tags(tags)
|
||||||
.tag_attributes(tag_attributes)
|
.tag_attributes(tag_attributes)
|
||||||
.generic_attributes(attributes)
|
.generic_attributes(attributes)
|
||||||
.clean(&inlined_html)
|
.clean(&html)
|
||||||
.to_string();
|
.to_string();
|
||||||
//let clean_html = inlined_html;
|
|
||||||
|
|
||||||
Ok(rewrite_str(
|
Ok(html)
|
||||||
&clean_html,
|
}
|
||||||
RewriteStrSettings {
|
|
||||||
element_content_handlers,
|
fn compute_offset_limit(
|
||||||
..RewriteStrSettings::default()
|
after: Option<i32>,
|
||||||
},
|
before: Option<i32>,
|
||||||
)?)
|
first: Option<i32>,
|
||||||
|
last: Option<i32>,
|
||||||
|
) -> (i32, i32) {
|
||||||
|
let default_page_size = 100;
|
||||||
|
match (after, before, first, last) {
|
||||||
|
// Reasonable defaults
|
||||||
|
(None, None, None, None) => (0, default_page_size),
|
||||||
|
(None, None, Some(first), None) => (0, first),
|
||||||
|
(Some(after), None, None, None) => (after + 1, default_page_size),
|
||||||
|
(Some(after), None, Some(first), None) => (after + 1, first),
|
||||||
|
(None, Some(before), None, None) => (0.max(before - default_page_size), default_page_size),
|
||||||
|
(None, Some(before), None, Some(last)) => (0.max(before - last), last),
|
||||||
|
(None, None, None, Some(_)) => {
|
||||||
|
panic!("specifying last and no before doesn't make sense")
|
||||||
|
}
|
||||||
|
(None, None, Some(_), Some(_)) => {
|
||||||
|
panic!("specifying first and last doesn't make sense")
|
||||||
|
}
|
||||||
|
(None, Some(_), Some(_), _) => {
|
||||||
|
panic!("specifying before and first doesn't make sense")
|
||||||
|
}
|
||||||
|
(Some(_), Some(_), _, _) => {
|
||||||
|
panic!("specifying after and before doesn't make sense")
|
||||||
|
}
|
||||||
|
(Some(_), None, None, Some(_)) => {
|
||||||
|
panic!("specifying after and last doesn't make sense")
|
||||||
|
}
|
||||||
|
(Some(_), None, Some(_), Some(_)) => {
|
||||||
|
panic!("specifying after, first and last doesn't make sense")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Query {
|
||||||
|
pub unread_only: bool,
|
||||||
|
pub tag: Option<String>,
|
||||||
|
pub uid: Option<String>,
|
||||||
|
pub remainder: Vec<String>,
|
||||||
|
pub is_notmuch: bool,
|
||||||
|
pub is_newsreader: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Query {
|
||||||
|
// Converts the internal state of Query to something suitable for notmuch queries. Removes and
|
||||||
|
// letterbox specific '<key>:<value' tags
|
||||||
|
fn to_notmuch(&self) -> String {
|
||||||
|
let mut parts = Vec::new();
|
||||||
|
if !self.is_notmuch {
|
||||||
|
return String::new();
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.unread_only {
|
||||||
|
parts.push("is:unread".to_string());
|
||||||
|
}
|
||||||
|
if let Some(site) = &self.tag {
|
||||||
|
parts.push(format!("tag:{site}"));
|
||||||
|
}
|
||||||
|
if let Some(uid) = &self.uid {
|
||||||
|
parts.push(uid.clone());
|
||||||
|
}
|
||||||
|
parts.extend(self.remainder.clone());
|
||||||
|
parts.join(" ")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for Query {
|
||||||
|
type Err = Infallible;
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
let mut unread_only = false;
|
||||||
|
let mut tag = None;
|
||||||
|
let mut uid = None;
|
||||||
|
let mut remainder = Vec::new();
|
||||||
|
let mut is_notmuch = false;
|
||||||
|
let mut is_newsreader = false;
|
||||||
|
for word in s.split_whitespace() {
|
||||||
|
if word == "is:unread" {
|
||||||
|
unread_only = true
|
||||||
|
} else if word.starts_with("tag:") {
|
||||||
|
tag = Some(word["tag:".len()..].to_string())
|
||||||
|
/*
|
||||||
|
} else if word.starts_with("tag:") {
|
||||||
|
// Any tag that doesn't match site_prefix should explicitly set the site to something not in the
|
||||||
|
// database
|
||||||
|
site = Some(NON_EXISTENT_SITE_NAME.to_string());
|
||||||
|
*/
|
||||||
|
} else if is_newsreader_thread(word) {
|
||||||
|
uid = Some(extract_thread_id(word).to_string())
|
||||||
|
} else if word == "is:mail" || word == "is:email" || word == "is:notmuch" {
|
||||||
|
is_notmuch = true;
|
||||||
|
} else if word == "is:news" || word == "is:newsreader" {
|
||||||
|
is_newsreader = true;
|
||||||
|
} else {
|
||||||
|
remainder.push(word.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If we don't see any explicit filters for a corpus, flip them all on
|
||||||
|
if !(is_notmuch || is_newsreader) {
|
||||||
|
is_newsreader = true;
|
||||||
|
is_notmuch = true;
|
||||||
|
}
|
||||||
|
Ok(Query {
|
||||||
|
unread_only,
|
||||||
|
tag,
|
||||||
|
uid,
|
||||||
|
remainder,
|
||||||
|
is_notmuch,
|
||||||
|
is_newsreader,
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
498
server/src/mvp.css
Normal file
498
server/src/mvp.css
Normal file
@@ -0,0 +1,498 @@
|
|||||||
|
/* MVP.css v1.15 - https://github.com/andybrewer/mvp */
|
||||||
|
|
||||||
|
/* :root content stored in client side index.html */
|
||||||
|
|
||||||
|
html {
|
||||||
|
scroll-behavior: smooth;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (prefers-reduced-motion: reduce) {
|
||||||
|
html {
|
||||||
|
scroll-behavior: auto;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Layout */
|
||||||
|
article aside {
|
||||||
|
background: var(--color-secondary-accent);
|
||||||
|
border-left: 4px solid var(--color-secondary);
|
||||||
|
padding: 0.01rem 0.8rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
background: var(--color-bg);
|
||||||
|
color: var(--color-text);
|
||||||
|
font-family: var(--font-family);
|
||||||
|
line-height: var(--line-height);
|
||||||
|
margin: 0;
|
||||||
|
overflow-x: hidden;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
footer,
|
||||||
|
header,
|
||||||
|
main {
|
||||||
|
margin: 0 auto;
|
||||||
|
max-width: var(--width-content);
|
||||||
|
padding: 3rem 1rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
hr {
|
||||||
|
background-color: var(--color-bg-secondary);
|
||||||
|
border: none;
|
||||||
|
height: 1px;
|
||||||
|
margin: 4rem 0;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
section {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
justify-content: var(--justify-important);
|
||||||
|
}
|
||||||
|
|
||||||
|
section img,
|
||||||
|
article img {
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
section pre {
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
section aside {
|
||||||
|
border: 1px solid var(--color-bg-secondary);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
box-shadow: var(--box-shadow) var(--color-shadow);
|
||||||
|
margin: 1rem;
|
||||||
|
padding: 1.25rem;
|
||||||
|
width: var(--width-card);
|
||||||
|
}
|
||||||
|
|
||||||
|
section aside:hover {
|
||||||
|
box-shadow: var(--box-shadow) var(--color-bg-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
[hidden] {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Headers */
|
||||||
|
article header,
|
||||||
|
div header,
|
||||||
|
main header {
|
||||||
|
padding-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
header {
|
||||||
|
text-align: var(--justify-important);
|
||||||
|
}
|
||||||
|
|
||||||
|
header a b,
|
||||||
|
header a em,
|
||||||
|
header a i,
|
||||||
|
header a strong {
|
||||||
|
margin-left: 0.5rem;
|
||||||
|
margin-right: 0.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
header nav img {
|
||||||
|
margin: 1rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
section header {
|
||||||
|
padding-top: 0;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Nav */
|
||||||
|
nav {
|
||||||
|
align-items: center;
|
||||||
|
display: flex;
|
||||||
|
font-weight: bold;
|
||||||
|
justify-content: space-between;
|
||||||
|
margin-bottom: 7rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav ul {
|
||||||
|
list-style: none;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav ul li {
|
||||||
|
display: inline-block;
|
||||||
|
margin: 0 0.5rem;
|
||||||
|
position: relative;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Nav Dropdown */
|
||||||
|
nav ul li:hover ul {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav ul li ul {
|
||||||
|
background: var(--color-bg);
|
||||||
|
border: 1px solid var(--color-bg-secondary);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
box-shadow: var(--box-shadow) var(--color-shadow);
|
||||||
|
display: none;
|
||||||
|
height: auto;
|
||||||
|
left: -2px;
|
||||||
|
padding: .5rem 1rem;
|
||||||
|
position: absolute;
|
||||||
|
top: 1.7rem;
|
||||||
|
white-space: nowrap;
|
||||||
|
width: auto;
|
||||||
|
z-index: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav ul li ul::before {
|
||||||
|
/* fill gap above to make mousing over them easier */
|
||||||
|
content: "";
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
top: -0.5rem;
|
||||||
|
height: 0.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav ul li ul li,
|
||||||
|
nav ul li ul li a {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Typography */
|
||||||
|
code,
|
||||||
|
samp {
|
||||||
|
background-color: var(--color-accent);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
color: var(--color-text);
|
||||||
|
display: inline-block;
|
||||||
|
margin: 0 0.1rem;
|
||||||
|
padding: 0 0.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
details {
|
||||||
|
margin: 1.3rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
details summary {
|
||||||
|
font-weight: bold;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1,
|
||||||
|
h2,
|
||||||
|
h3,
|
||||||
|
h4,
|
||||||
|
h5,
|
||||||
|
h6 {
|
||||||
|
line-height: var(--line-height);
|
||||||
|
text-wrap: balance;
|
||||||
|
}
|
||||||
|
|
||||||
|
mark {
|
||||||
|
padding: 0.1rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol li,
|
||||||
|
ul li {
|
||||||
|
padding: 0.2rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
p {
|
||||||
|
margin: 0.75rem 0;
|
||||||
|
padding: 0;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre {
|
||||||
|
margin: 1rem 0;
|
||||||
|
max-width: var(--width-card-wide);
|
||||||
|
padding: 1rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre code,
|
||||||
|
pre samp {
|
||||||
|
display: block;
|
||||||
|
max-width: var(--width-card-wide);
|
||||||
|
padding: 0.5rem 2rem;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
small {
|
||||||
|
color: var(--color-text-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
sup {
|
||||||
|
background-color: var(--color-secondary);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
color: var(--color-bg);
|
||||||
|
font-size: xx-small;
|
||||||
|
font-weight: bold;
|
||||||
|
margin: 0.2rem;
|
||||||
|
padding: 0.2rem 0.3rem;
|
||||||
|
position: relative;
|
||||||
|
top: -2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Links */
|
||||||
|
a {
|
||||||
|
color: var(--color-link);
|
||||||
|
display: inline-block;
|
||||||
|
font-weight: bold;
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover {
|
||||||
|
filter: brightness(var(--hover-brightness));
|
||||||
|
}
|
||||||
|
|
||||||
|
a:active {
|
||||||
|
filter: brightness(var(--active-brightness));
|
||||||
|
}
|
||||||
|
|
||||||
|
a b,
|
||||||
|
a em,
|
||||||
|
a i,
|
||||||
|
a strong,
|
||||||
|
button,
|
||||||
|
input[type="submit"] {
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
display: inline-block;
|
||||||
|
font-size: medium;
|
||||||
|
font-weight: bold;
|
||||||
|
line-height: var(--line-height);
|
||||||
|
margin: 0.5rem 0;
|
||||||
|
padding: 1rem 2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
button,
|
||||||
|
input[type="submit"] {
|
||||||
|
font-family: var(--font-family);
|
||||||
|
}
|
||||||
|
|
||||||
|
button:hover,
|
||||||
|
input[type="submit"]:hover {
|
||||||
|
cursor: pointer;
|
||||||
|
filter: brightness(var(--hover-brightness));
|
||||||
|
}
|
||||||
|
|
||||||
|
button:active,
|
||||||
|
input[type="submit"]:active {
|
||||||
|
filter: brightness(var(--active-brightness));
|
||||||
|
}
|
||||||
|
|
||||||
|
a b,
|
||||||
|
a strong,
|
||||||
|
button,
|
||||||
|
input[type="submit"] {
|
||||||
|
background-color: var(--color-link);
|
||||||
|
border: 2px solid var(--color-link);
|
||||||
|
color: var(--color-bg);
|
||||||
|
}
|
||||||
|
|
||||||
|
a em,
|
||||||
|
a i {
|
||||||
|
border: 2px solid var(--color-link);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
color: var(--color-link);
|
||||||
|
display: inline-block;
|
||||||
|
padding: 1rem 2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
article aside a {
|
||||||
|
color: var(--color-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Images */
|
||||||
|
figure {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
figure img {
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
figure figcaption {
|
||||||
|
color: var(--color-text-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Forms */
|
||||||
|
button:disabled,
|
||||||
|
input:disabled {
|
||||||
|
background: var(--color-bg-secondary);
|
||||||
|
border-color: var(--color-bg-secondary);
|
||||||
|
color: var(--color-text-secondary);
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
button[disabled]:hover,
|
||||||
|
input[type="submit"][disabled]:hover {
|
||||||
|
filter: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
form {
|
||||||
|
border: 1px solid var(--color-bg-secondary);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
box-shadow: var(--box-shadow) var(--color-shadow);
|
||||||
|
display: block;
|
||||||
|
max-width: var(--width-card-wide);
|
||||||
|
min-width: var(--width-card);
|
||||||
|
padding: 1.5rem;
|
||||||
|
text-align: var(--justify-normal);
|
||||||
|
}
|
||||||
|
|
||||||
|
form header {
|
||||||
|
margin: 1.5rem 0;
|
||||||
|
padding: 1.5rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
input,
|
||||||
|
label,
|
||||||
|
select,
|
||||||
|
textarea {
|
||||||
|
display: block;
|
||||||
|
font-size: inherit;
|
||||||
|
max-width: var(--width-card-wide);
|
||||||
|
}
|
||||||
|
|
||||||
|
input[type="checkbox"],
|
||||||
|
input[type="radio"] {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
input[type="checkbox"]+label,
|
||||||
|
input[type="radio"]+label {
|
||||||
|
display: inline-block;
|
||||||
|
font-weight: normal;
|
||||||
|
position: relative;
|
||||||
|
top: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
input[type="range"] {
|
||||||
|
padding: 0.4rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
input,
|
||||||
|
select,
|
||||||
|
textarea {
|
||||||
|
border: 1px solid var(--color-bg-secondary);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
margin-bottom: 1rem;
|
||||||
|
padding: 0.4rem 0.8rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Full-width text entry controls. The original selector
   `input[type="password"] textarea` was a descendant combinator (a missing
   comma), which can never match — inputs have no children — so password
   fields and textareas silently missed this rule. The calc() subtracts the
   0.8rem horizontal padding on each side. */
input[type="text"],
input[type="password"],
textarea {
  width: calc(100% - 1.6rem);
}
|
||||||
|
|
||||||
|
input[readonly],
|
||||||
|
textarea[readonly] {
|
||||||
|
background-color: var(--color-bg-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
label {
|
||||||
|
font-weight: bold;
|
||||||
|
margin-bottom: 0.2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Popups */
|
||||||
|
dialog {
|
||||||
|
border: 1px solid var(--color-bg-secondary);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
box-shadow: var(--box-shadow) var(--color-shadow);
|
||||||
|
position: fixed;
|
||||||
|
top: 50%;
|
||||||
|
left: 50%;
|
||||||
|
transform: translate(-50%, -50%);
|
||||||
|
width: 50%;
|
||||||
|
z-index: 999;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Tables */
|
||||||
|
table {
|
||||||
|
border: 1px solid var(--color-bg-secondary);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
border-spacing: 0;
|
||||||
|
display: inline-block;
|
||||||
|
max-width: 100%;
|
||||||
|
overflow-x: auto;
|
||||||
|
padding: 0;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
table td,
|
||||||
|
table th,
|
||||||
|
table tr {
|
||||||
|
padding: 0.4rem 0.8rem;
|
||||||
|
text-align: var(--justify-important);
|
||||||
|
}
|
||||||
|
|
||||||
|
table thead {
|
||||||
|
background-color: var(--color-table);
|
||||||
|
border-collapse: collapse;
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
color: var(--color-bg);
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table thead tr:first-child th:first-child {
|
||||||
|
border-top-left-radius: var(--border-radius);
|
||||||
|
}
|
||||||
|
|
||||||
|
table thead tr:first-child th:last-child {
|
||||||
|
border-top-right-radius: var(--border-radius);
|
||||||
|
}
|
||||||
|
|
||||||
|
table thead th:first-child,
|
||||||
|
table tr td:first-child {
|
||||||
|
text-align: var(--justify-normal);
|
||||||
|
}
|
||||||
|
|
||||||
|
table tr:nth-child(even) {
|
||||||
|
background-color: var(--color-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Quotes */
|
||||||
|
blockquote {
|
||||||
|
display: block;
|
||||||
|
font-size: x-large;
|
||||||
|
line-height: var(--line-height);
|
||||||
|
margin: 1rem auto;
|
||||||
|
max-width: var(--width-card-medium);
|
||||||
|
padding: 1.5rem 1rem;
|
||||||
|
text-align: var(--justify-important);
|
||||||
|
}
|
||||||
|
|
||||||
|
blockquote footer {
|
||||||
|
color: var(--color-text-secondary);
|
||||||
|
display: block;
|
||||||
|
font-size: small;
|
||||||
|
line-height: var(--line-height);
|
||||||
|
padding: 1.5rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Scrollbars */
|
||||||
|
* {
|
||||||
|
scrollbar-width: thin;
|
||||||
|
scrollbar-color: var(--color-scrollbar) transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
*::-webkit-scrollbar {
|
||||||
|
width: 5px;
|
||||||
|
height: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
*::-webkit-scrollbar-track {
|
||||||
|
background: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
*::-webkit-scrollbar-thumb {
|
||||||
|
background-color: var(--color-scrollbar);
|
||||||
|
border-radius: 10px;
|
||||||
|
}
|
||||||
245
server/src/newsreader.rs
Normal file
245
server/src/newsreader.rs
Normal file
@@ -0,0 +1,245 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use cacher::FilesystemCacher;
|
||||||
|
use log::info;
|
||||||
|
use maplit::hashmap;
|
||||||
|
use scraper::Selector;
|
||||||
|
use shared::compute_color;
|
||||||
|
use sqlx::postgres::PgPool;
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
compute_offset_limit,
|
||||||
|
config::Config,
|
||||||
|
error::ServerError,
|
||||||
|
graphql::{NewsPost, Tag, Thread, ThreadSummary},
|
||||||
|
AddOutlink, EscapeHtml, FrameImages, InlineStyle, Query, SanitizeHtml, SlurpContents,
|
||||||
|
StripHtml, Transformer,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Tag prefix marking a tag as belonging to the newsreader corpus.
// `&'static` was redundant on a const (clippy::redundant_static_lifetimes).
const TAG_PREFIX: &str = "News/";
/// Thread-id prefix marking a thread as a newsreader post.
const THREAD_PREFIX: &str = "news:";

/// Returns true if `query` mentions any newsreader tag.
pub fn is_newsreader_search(query: &str) -> bool {
    query.contains(TAG_PREFIX)
}

/// Returns true if `query` names a newsreader thread (starts with `news:`).
pub fn is_newsreader_thread(query: &str) -> bool {
    query.starts_with(THREAD_PREFIX)
}

/// Strips the `news:` prefix, returning the bare thread id.
///
/// # Panics
/// Panics (via slicing) if `query` is shorter than the prefix; callers are
/// expected to check [`is_newsreader_thread`] first.
pub fn extract_thread_id(query: &str) -> &str {
    &query[THREAD_PREFIX.len()..]
}

/// Strips the `News/` prefix from a tag, returning the site name.
///
/// # Panics
/// Panics (via slicing) if `tag` is shorter than the prefix.
pub fn extract_site(tag: &str) -> &str {
    &tag[TAG_PREFIX.len()..]
}

/// Builds the full search term (`tag:News/<tag>`) for a site tag.
pub fn make_news_tag(tag: &str) -> String {
    format!("tag:{TAG_PREFIX}{tag}")
}
|
||||||
|
|
||||||
|
/// Count newsreader posts matching `query`.
///
/// Full-text terms are not yet supported for news: if the query carries any
/// free-form words we return 0 rather than an incorrect count.
pub async fn count(pool: &PgPool, query: &Query) -> Result<usize, ServerError> {
    if !query.remainder.is_empty() {
        // TODO: handle full text search against all sites, for now, early return if search words
        // are specified.
        return Ok(0);
    }

    let row = sqlx::query_file!("sql/count.sql", query.tag, query.unread_only)
        .fetch_one(pool)
        .await?;
    // The SQL count comes back as an Option; clamp NULL or a non-fitting
    // value to 0 instead of failing.
    Ok(row.count.unwrap_or(0).try_into().unwrap_or(0))
}
|
||||||
|
|
||||||
|
pub async fn search(
|
||||||
|
pool: &PgPool,
|
||||||
|
after: Option<i32>,
|
||||||
|
before: Option<i32>,
|
||||||
|
first: Option<i32>,
|
||||||
|
last: Option<i32>,
|
||||||
|
query: &Query,
|
||||||
|
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
|
||||||
|
info!("search({after:?} {before:?} {first:?} {last:?} {query:?}");
|
||||||
|
if !query.remainder.is_empty() {
|
||||||
|
// TODO: handle full text search against all sites, for now, early return if search words
|
||||||
|
// are specified.
|
||||||
|
return Ok(Vec::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
let (offset, mut limit) = compute_offset_limit(after, before, first, last);
|
||||||
|
if before.is_none() {
|
||||||
|
// When searching forward, the +1 is to see if there are more pages of data available.
|
||||||
|
// Searching backwards implies there's more pages forward, because the value represented by
|
||||||
|
// `before` is on the next page.
|
||||||
|
limit = limit + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
let site = query.tag.as_ref().map(|t| extract_site(&t).to_string());
|
||||||
|
info!(
|
||||||
|
"search offset {offset} limit {limit} site {site:?} unread_only {}",
|
||||||
|
query.unread_only
|
||||||
|
);
|
||||||
|
|
||||||
|
// TODO: further limit results to include query.remainder if set
|
||||||
|
let rows = sqlx::query_file!(
|
||||||
|
"sql/threads.sql",
|
||||||
|
site,
|
||||||
|
query.unread_only,
|
||||||
|
offset as i64,
|
||||||
|
limit as i64
|
||||||
|
)
|
||||||
|
.fetch_all(pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut res = Vec::new();
|
||||||
|
for (i, r) in rows.into_iter().enumerate() {
|
||||||
|
let site = r.site.unwrap_or("UNKOWN TAG".to_string());
|
||||||
|
let mut tags = vec![format!("{TAG_PREFIX}{site}")];
|
||||||
|
if !r.is_read.unwrap_or(true) {
|
||||||
|
tags.push("unread".to_string());
|
||||||
|
};
|
||||||
|
let mut title = r.title.unwrap_or("NO TITLE".to_string());
|
||||||
|
title = clean_title(&title).await.expect("failed to clean title");
|
||||||
|
res.push((
|
||||||
|
i as i32 + offset,
|
||||||
|
ThreadSummary {
|
||||||
|
thread: format!("{THREAD_PREFIX}{}", r.uid),
|
||||||
|
timestamp: r
|
||||||
|
.date
|
||||||
|
.expect("post missing date")
|
||||||
|
.assume_utc()
|
||||||
|
.unix_timestamp() as isize,
|
||||||
|
date_relative: "TODO date_relative".to_string(),
|
||||||
|
matched: 0,
|
||||||
|
total: 1,
|
||||||
|
authors: r.name.unwrap_or_else(|| site.clone()),
|
||||||
|
subject: title,
|
||||||
|
tags,
|
||||||
|
},
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List newsreader tags (one per site), each with a deterministic color and
/// its unread count as reported by the database.
pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
    // TODO: optimize query by using needs_unread
    let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?;
    let tags = tags
        .into_iter()
        .map(|tag| {
            // NULL / non-fitting unread counts clamp to 0.
            let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
            let name = format!("{TAG_PREFIX}{}", tag.site.expect("tag must have site"));
            // Color is a pure function of the tag name, so it is stable
            // across requests.
            let hex = compute_color(&name);
            Tag {
                name,
                fg_color: "white".to_string(),
                bg_color: hex,
                unread,
            }
        })
        .collect();
    Ok(tags)
}
|
||||||
|
|
||||||
|
pub async fn thread(
|
||||||
|
config: &Config,
|
||||||
|
pool: &PgPool,
|
||||||
|
thread_id: String,
|
||||||
|
) -> Result<Thread, ServerError> {
|
||||||
|
let id = thread_id
|
||||||
|
.strip_prefix(THREAD_PREFIX)
|
||||||
|
.expect("news thread doesn't start with '{THREAD_PREFIX}'")
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
let r = sqlx::query_file!("sql/thread.sql", id)
|
||||||
|
.fetch_one(pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let slug = r.site.unwrap_or("no-slug".to_string());
|
||||||
|
let site = r.name.unwrap_or("NO SITE".to_string());
|
||||||
|
let default_homepage = "http://no-homepage";
|
||||||
|
let link = &r
|
||||||
|
.link
|
||||||
|
.as_ref()
|
||||||
|
.map(|h| {
|
||||||
|
if h.is_empty() {
|
||||||
|
default_homepage.to_string()
|
||||||
|
} else {
|
||||||
|
h.to_string()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.map(|h| Url::parse(&h).ok())
|
||||||
|
.flatten();
|
||||||
|
let mut body = r.summary.unwrap_or("NO SUMMARY".to_string());
|
||||||
|
// TODO: add site specific cleanups. For example:
|
||||||
|
// * Grafana does <div class="image-wrapp"><img class="lazyload>"<img src="/media/...>"</img></div>
|
||||||
|
// * Some sites appear to be HTML encoded, unencode them, i.e. imperialviolent
|
||||||
|
let cacher = Arc::new(Mutex::new(FilesystemCacher::new(&config.slurp_cache_path)?));
|
||||||
|
let body_tranformers: Vec<Box<dyn Transformer>> = vec![
|
||||||
|
Box::new(SlurpContents {
|
||||||
|
cacher,
|
||||||
|
site_selectors: &config.slurp_site_selectors,
|
||||||
|
}),
|
||||||
|
Box::new(FrameImages),
|
||||||
|
Box::new(AddOutlink),
|
||||||
|
Box::new(EscapeHtml),
|
||||||
|
Box::new(SanitizeHtml {
|
||||||
|
cid_prefix: "",
|
||||||
|
base_url: &link,
|
||||||
|
}),
|
||||||
|
Box::new(InlineStyle),
|
||||||
|
];
|
||||||
|
for t in body_tranformers.iter() {
|
||||||
|
if t.should_run(&link, &body) {
|
||||||
|
body = t.transform(&link, &body).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
|
||||||
|
let is_read = r.is_read.unwrap_or(false);
|
||||||
|
let timestamp = r
|
||||||
|
.date
|
||||||
|
.expect("post missing date")
|
||||||
|
.assume_utc()
|
||||||
|
.unix_timestamp();
|
||||||
|
Ok(Thread::News(NewsPost {
|
||||||
|
thread_id,
|
||||||
|
is_read,
|
||||||
|
slug,
|
||||||
|
site,
|
||||||
|
title,
|
||||||
|
body,
|
||||||
|
url: link
|
||||||
|
.as_ref()
|
||||||
|
.map(|url| url.to_string())
|
||||||
|
.unwrap_or("NO URL".to_string()),
|
||||||
|
timestamp,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
pub async fn set_read_status<'ctx>(
|
||||||
|
pool: &PgPool,
|
||||||
|
query: &str,
|
||||||
|
unread: bool,
|
||||||
|
) -> Result<bool, ServerError> {
|
||||||
|
let query: Query = query.parse()?;
|
||||||
|
sqlx::query_file!("sql/set_unread.sql", !unread, query.uid)
|
||||||
|
.execute(pool)
|
||||||
|
.await?;
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
async fn clean_title(title: &str) -> Result<String, ServerError> {
|
||||||
|
// Make title HTML so html parsers work
|
||||||
|
let mut title = format!("<html>{title}</html>");
|
||||||
|
let title_tranformers: Vec<Box<dyn Transformer>> =
|
||||||
|
vec![Box::new(EscapeHtml), Box::new(StripHtml)];
|
||||||
|
// Make title HTML so html parsers work
|
||||||
|
title = format!("<html>{title}</html>");
|
||||||
|
for t in title_tranformers.iter() {
|
||||||
|
if t.should_run(&None, &title) {
|
||||||
|
title = t.transform(&None, &title).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(title)
|
||||||
|
}
|
||||||
773
server/src/nm.rs
773
server/src/nm.rs
@@ -1,13 +1,776 @@
|
|||||||
use shared::Message;
|
use std::{
|
||||||
|
collections::HashMap,
|
||||||
|
fs::File,
|
||||||
|
hash::{DefaultHasher, Hash, Hasher},
|
||||||
|
time::Instant,
|
||||||
|
};
|
||||||
|
|
||||||
use crate::error;
|
use log::{error, info, warn};
|
||||||
|
use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
|
||||||
|
use memmap::MmapOptions;
|
||||||
|
use notmuch::Notmuch;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
compute_offset_limit,
|
||||||
|
error::ServerError,
|
||||||
|
graphql::{
|
||||||
|
Attachment, Body, DispositionType, Email, EmailThread, Header, Html, Message, PlainText,
|
||||||
|
Tag, Thread, ThreadSummary, UnhandledContentType,
|
||||||
|
},
|
||||||
|
linkify_html, InlineStyle, SanitizeHtml, Transformer,
|
||||||
|
};
|
||||||
|
|
||||||
|
const TEXT_PLAIN: &'static str = "text/plain";
|
||||||
|
const TEXT_HTML: &'static str = "text/html";
|
||||||
|
const IMAGE_JPEG: &'static str = "image/jpeg";
|
||||||
|
const IMAGE_PNG: &'static str = "image/png";
|
||||||
|
const MULTIPART_ALTERNATIVE: &'static str = "multipart/alternative";
|
||||||
|
const MULTIPART_MIXED: &'static str = "multipart/mixed";
|
||||||
|
const MULTIPART_RELATED: &'static str = "multipart/related";
|
||||||
|
|
||||||
|
const MAX_RAW_MESSAGE_SIZE: usize = 100_000;
|
||||||
|
|
||||||
// TODO(wathiede): decide good error type
|
// TODO(wathiede): decide good error type
|
||||||
pub fn threadset_to_messages(
|
pub fn threadset_to_messages(thread_set: notmuch::ThreadSet) -> Result<Vec<Message>, ServerError> {
|
||||||
thread_set: notmuch::ThreadSet,
|
|
||||||
) -> Result<Vec<Message>, error::ServerError> {
|
|
||||||
for t in thread_set.0 {
|
for t in thread_set.0 {
|
||||||
for _tn in t.0 {}
|
for _tn in t.0 {}
|
||||||
}
|
}
|
||||||
Ok(Vec::new())
|
Ok(Vec::new())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn count(nm: &Notmuch, query: &str) -> Result<usize, ServerError> {
|
||||||
|
Ok(nm.count(query)?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Page through notmuch search results.
///
/// `after`/`before`/`first`/`last` are cursor-style pagination arguments,
/// reduced to an (offset, limit) window by `compute_offset_limit`. Each
/// summary is paired with its absolute index in the full result set.
pub async fn search(
    nm: &Notmuch,
    after: Option<i32>,
    before: Option<i32>,
    first: Option<i32>,
    last: Option<i32>,
    query: String,
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
    let (offset, mut limit) = compute_offset_limit(after, before, first, last);
    if before.is_none() {
        // When searching forward, the +1 is to see if there are more pages of data available.
        // Searching backwards implies there's more pages forward, because the value represented by
        // `before` is on the next page.
        limit = limit + 1;
    }
    Ok(nm
        .search(&query, offset as usize, limit as usize)?
        .0
        .into_iter()
        .enumerate()
        .map(|(i, ts)| {
            (
                // Absolute position in the result set, not just this page.
                offset + i as i32,
                ThreadSummary {
                    // "thread:" prefix distinguishes mail threads from
                    // newsreader ("news:") threads elsewhere in the app.
                    thread: format!("thread:{}", ts.thread),
                    timestamp: ts.timestamp,
                    date_relative: ts.date_relative,
                    matched: ts.matched,
                    total: ts.total,
                    authors: ts.authors,
                    subject: ts.subject,
                    tags: ts.tags,
                },
            )
        })
        .collect())
}
|
||||||
|
|
||||||
|
/// List all notmuch tags with a deterministic per-tag color and, when
/// `needs_unread` is set, each tag's unread-message count.
pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
    let now = Instant::now();
    // Map of tag name -> number of unread messages carrying that tag.
    let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
        // 10000 is an arbitrary number, if there's more than 10k unread messages, we'll
        // get an inaccurate count.
        nm.search("is:unread", 0, 10000)?
            .0
            .iter()
            .fold(HashMap::new(), |mut m, ts| {
                ts.tags.iter().for_each(|t| {
                    m.entry(t.clone()).and_modify(|c| *c += 1).or_insert(1);
                });
                m
            })
    } else {
        HashMap::new()
    };
    let tags = nm
        .tags()?
        .into_iter()
        .map(|tag| {
            // Background color is the tag name's hash masked to 24 bits,
            // formatted as an RGB hex string — stable across requests.
            let mut hasher = DefaultHasher::new();
            tag.hash(&mut hasher);
            let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
            let unread = if needs_unread {
                *unread_msg_cnt.get(&tag).unwrap_or(&0)
            } else {
                0
            };
            Tag {
                name: tag,
                fg_color: "white".to_string(),
                bg_color: hex,
                unread,
            }
        })
        .collect();
    info!("Fetching tags took {} seconds", now.elapsed().as_secs_f32());
    Ok(tags)
}
|
||||||
|
|
||||||
|
/// Load every message in a notmuch thread, parse each from disk, transform
/// its body to sanitized HTML, and assemble an [`EmailThread`].
///
/// When `debug_content_tree` is set, each message's content tree is replaced
/// by a rendered dump of the MIME structure for debugging.
pub async fn thread(
    nm: &Notmuch,
    thread_id: String,
    debug_content_tree: bool,
) -> Result<Thread, ServerError> {
    // TODO(wathiede): normalize all email addresses through an address book with preferred
    // display names (that default to the most commonly seen name).
    let mut messages = Vec::new();
    // NOTE(review): assumes files() and message_ids() return parallel,
    // same-length lists for the thread — confirm against the notmuch wrapper.
    for (path, id) in std::iter::zip(nm.files(&thread_id)?, nm.message_ids(&thread_id)?) {
        let tags = nm.tags_for_query(&format!("id:{id}"))?;
        let file = File::open(&path)?;
        // SAFETY: maps the message file read-only for zero-copy parsing; the
        // mapping must not outlive `file`, and the underlying maildir file is
        // assumed not to be truncated while mapped.
        let mmap = unsafe { MmapOptions::new().map(&file)? };
        let m = parse_mail(&mmap)?;
        let from = email_addresses(&path, &m, "from")?;
        // Collapse multiple From addresses down to the first, with a warning.
        let from = match from.len() {
            0 => None,
            1 => from.into_iter().next(),
            _ => {
                warn!(
                    "Got {} from addresses in message, truncating: {:?}",
                    from.len(),
                    from
                );
                from.into_iter().next()
            }
        };
        let to = email_addresses(&path, &m, "to")?;
        let cc = email_addresses(&path, &m, "cc")?;
        let subject = m.headers.get_first_value("subject");
        // Unparsable Date headers yield None rather than an error.
        let timestamp = m
            .headers
            .get_first_value("date")
            .and_then(|d| mailparse::dateparse(&d).ok());
        let cid_prefix = shared::urls::cid_prefix(None, &id);
        let base_url = None;
        let body = match extract_body(&m, &id)? {
            // Plain text is linkified and wrapped into HTML for display.
            Body::PlainText(PlainText { text, content_tree }) => {
                // Cap very large plain-text bodies before any HTML work.
                let text = if text.len() > MAX_RAW_MESSAGE_SIZE {
                    format!(
                        "{}...\n\nMESSAGE WAS TRUNCATED @ {} bytes",
                        &text[..MAX_RAW_MESSAGE_SIZE],
                        MAX_RAW_MESSAGE_SIZE
                    )
                } else {
                    text
                };

                Body::Html(Html {
                    html: {
                        let body_tranformers: Vec<Box<dyn Transformer>> = vec![
                            Box::new(InlineStyle),
                            Box::new(SanitizeHtml {
                                cid_prefix: &cid_prefix,
                                base_url: &base_url,
                            }),
                        ];
                        let mut html = linkify_html(&text.trim_matches('\n'));
                        for t in body_tranformers.iter() {
                            if t.should_run(&None, &html) {
                                html = t.transform(&None, &html).await?;
                            }
                        }

                        format!(
                            r#"<p class="view-part-text-plain">{}</p>"#,
                            // Trim newlines to prevent excessive white space at the beginning/end of
                            // presenation. Leave tabs and spaces incase plain text attempts to center a
                            // header on the first line.
                            html
                        )
                    },
                    content_tree: if debug_content_tree {
                        render_content_type_tree(&m)
                    } else {
                        content_tree
                    },
                })
            }
            // HTML bodies are sanitized in place.
            Body::Html(Html {
                mut html,
                content_tree,
            }) => Body::Html(Html {
                html: {
                    let body_tranformers: Vec<Box<dyn Transformer>> = vec![
                        // TODO: this breaks things like emails from calendar
                        //Box::new(InlineStyle),
                        Box::new(SanitizeHtml {
                            cid_prefix: &cid_prefix,
                            base_url: &base_url,
                        }),
                    ];
                    for t in body_tranformers.iter() {
                        if t.should_run(&None, &html) {
                            html = t.transform(&None, &html).await?;
                        }
                    }
                    html
                },

                content_tree: if debug_content_tree {
                    render_content_type_tree(&m)
                } else {
                    content_tree
                },
            }),

            // Unknown content types: show the raw message body after the
            // first blank line (header/body separator).
            Body::UnhandledContentType(UnhandledContentType { content_tree, .. }) => {
                // Only scan the first 20k bytes for the header/body boundary.
                let body_start = mmap
                    .windows(2)
                    .take(20_000)
                    .position(|w| w == b"\n\n")
                    .unwrap_or(0);
                let body = mmap[body_start + 2..].to_vec();
                Body::UnhandledContentType(UnhandledContentType {
                    text: String::from_utf8(body)?,
                    content_tree: if debug_content_tree {
                        render_content_type_tree(&m)
                    } else {
                        content_tree
                    },
                })
            }
        };
        let headers = m
            .headers
            .iter()
            .map(|h| Header {
                key: h.get_key(),
                value: h.get_value(),
            })
            .collect();
        // TODO(wathiede): parse message and fill out attachments
        let attachments = extract_attachments(&m, &id)?;
        messages.push(Message {
            id: format!("id:{id}"),
            from,
            to,
            cc,
            subject,
            tags,
            timestamp,
            headers,
            body,
            path,
            attachments,
        });
    }
    messages.reverse();
    // Find the first subject that's set. After reversing the vec, this should be the oldest
    // message.
    let subject: String = messages
        .iter()
        .skip_while(|m| m.subject.is_none())
        .next()
        .and_then(|m| m.subject.clone())
        .unwrap_or("(NO SUBJECT)".to_string());
    Ok(Thread::Email(EmailThread {
        thread_id,
        subject,
        messages,
    }))
}
|
||||||
|
|
||||||
|
/// Extract the email addresses carried by one header (`from`, `to`, `cc`, …).
///
/// Primary path is `mailparse::addrparse`; when that rejects a header value,
/// a hand-rolled fallback salvages the common malformed `Name <addr@host>`
/// shape, or keeps the raw value as a display name only. `path` is used only
/// for diagnostics.
fn email_addresses(
    path: &str,
    m: &ParsedMail,
    header_name: &str,
) -> Result<Vec<Email>, ServerError> {
    let mut addrs = Vec::new();
    for header_value in m.headers.get_all_values(header_name) {
        match mailparse::addrparse(&header_value) {
            Ok(mal) => {
                for ma in mal.into_inner() {
                    match ma {
                        // RFC group syntax: log it unless it is the common
                        // "undisclosed-recipients" placeholder (matched
                        // case-insensitively on the first letter).
                        mailparse::MailAddr::Group(gi) => {
                            if !gi.group_name.contains("ndisclosed") {
                                println!("[{path}][{header_name}] Group: {gi}");
                            }
                        }
                        mailparse::MailAddr::Single(s) => addrs.push(Email {
                            name: s.display_name,
                            addr: Some(s.addr),
                        }), //println!("Single: {s}"),
                    }
                }
            }
            Err(_) => {
                // addrparse failed; attempt to salvage by hand.
                let v = header_value;
                if v.matches('@').count() == 1 {
                    // Exactly one "<...>": treat the bracketed part as the
                    // address and everything before '<' as the display name.
                    if v.matches('<').count() == 1 && v.ends_with('>') {
                        let idx = v.find('<').unwrap();
                        let addr = &v[idx + 1..v.len() - 1].trim();
                        let name = &v[..idx].trim();
                        addrs.push(Email {
                            name: Some(name.to_string()),
                            addr: Some(addr.to_string()),
                        });
                    }
                } else {
                    // No single '@' — not an address; keep the raw text as a
                    // display name with no address.
                    addrs.push(Email {
                        name: Some(v),
                        addr: None,
                    });
                }
            }
        }
    }
    Ok(addrs)
}
|
||||||
|
|
||||||
|
pub fn cid_attachment_bytes(nm: &Notmuch, id: &str, cid: &str) -> Result<Attachment, ServerError> {
|
||||||
|
let files = nm.files(id)?;
|
||||||
|
let Some(path) = files.first() else {
|
||||||
|
warn!("failed to find files for message {id}");
|
||||||
|
return Err(ServerError::PartNotFound);
|
||||||
|
};
|
||||||
|
let file = File::open(&path)?;
|
||||||
|
let mmap = unsafe { MmapOptions::new().map(&file)? };
|
||||||
|
let m = parse_mail(&mmap)?;
|
||||||
|
if let Some(attachment) = walk_attachments(&m, |sp, _cur_idx| {
|
||||||
|
info!("{cid} {:?}", get_content_id(&sp.headers));
|
||||||
|
if let Some(h_cid) = get_content_id(&sp.headers) {
|
||||||
|
let h_cid = &h_cid[1..h_cid.len() - 1];
|
||||||
|
if h_cid == cid {
|
||||||
|
let attachment = extract_attachment(&sp, id, &[]).unwrap_or(Attachment {
|
||||||
|
..Attachment::default()
|
||||||
|
});
|
||||||
|
return Some(attachment);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}) {
|
||||||
|
return Ok(attachment);
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(ServerError::PartNotFound)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn attachment_bytes(nm: &Notmuch, id: &str, idx: &[usize]) -> Result<Attachment, ServerError> {
|
||||||
|
let files = nm.files(id)?;
|
||||||
|
let Some(path) = files.first() else {
|
||||||
|
warn!("failed to find files for message {id}");
|
||||||
|
return Err(ServerError::PartNotFound);
|
||||||
|
};
|
||||||
|
let file = File::open(&path)?;
|
||||||
|
let mmap = unsafe { MmapOptions::new().map(&file)? };
|
||||||
|
let m = parse_mail(&mmap)?;
|
||||||
|
if let Some(attachment) = walk_attachments(&m, |sp, cur_idx| {
|
||||||
|
if cur_idx == idx {
|
||||||
|
let attachment = extract_attachment(&sp, id, idx).unwrap_or(Attachment {
|
||||||
|
..Attachment::default()
|
||||||
|
});
|
||||||
|
return Some(attachment);
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}) {
|
||||||
|
return Ok(attachment);
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(ServerError::PartNotFound)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_body(m: &ParsedMail, id: &str) -> Result<Body, ServerError> {
|
||||||
|
let mut part_addr = Vec::new();
|
||||||
|
part_addr.push(id.to_string());
|
||||||
|
let body = m.get_body()?;
|
||||||
|
let ret = match m.ctype.mimetype.as_str() {
|
||||||
|
TEXT_PLAIN => return Ok(Body::text(body)),
|
||||||
|
TEXT_HTML => return Ok(Body::html(body)),
|
||||||
|
MULTIPART_MIXED => extract_mixed(m, &mut part_addr),
|
||||||
|
MULTIPART_ALTERNATIVE => extract_alternative(m, &mut part_addr),
|
||||||
|
MULTIPART_RELATED => extract_related(m, &mut part_addr),
|
||||||
|
_ => extract_unhandled(m),
|
||||||
|
};
|
||||||
|
if let Err(err) = ret {
|
||||||
|
error!("Failed to extract body: {err:?}");
|
||||||
|
return Ok(extract_unhandled(m)?);
|
||||||
|
}
|
||||||
|
ret
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_unhandled(m: &ParsedMail) -> Result<Body, ServerError> {
|
||||||
|
let msg = format!(
|
||||||
|
"Unhandled body content type:\n{}\n{}",
|
||||||
|
render_content_type_tree(m),
|
||||||
|
m.get_body()?,
|
||||||
|
);
|
||||||
|
Ok(Body::UnhandledContentType(UnhandledContentType {
|
||||||
|
text: msg,
|
||||||
|
content_tree: render_content_type_tree(m),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
// multipart/alternative defines multiple representations of the same message, and clients should
|
||||||
|
// show the fanciest they can display. For this program, the priority is text/html, text/plain,
|
||||||
|
// then give up.
|
||||||
|
fn extract_alternative(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
|
||||||
|
let handled_types = vec![
|
||||||
|
MULTIPART_ALTERNATIVE,
|
||||||
|
MULTIPART_MIXED,
|
||||||
|
MULTIPART_RELATED,
|
||||||
|
TEXT_HTML,
|
||||||
|
TEXT_PLAIN,
|
||||||
|
];
|
||||||
|
for sp in &m.subparts {
|
||||||
|
if sp.ctype.mimetype.as_str() == MULTIPART_ALTERNATIVE {
|
||||||
|
return extract_alternative(sp, part_addr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for sp in &m.subparts {
|
||||||
|
if sp.ctype.mimetype.as_str() == MULTIPART_MIXED {
|
||||||
|
return extract_related(sp, part_addr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for sp in &m.subparts {
|
||||||
|
if sp.ctype.mimetype.as_str() == MULTIPART_RELATED {
|
||||||
|
return extract_related(sp, part_addr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for sp in &m.subparts {
|
||||||
|
if sp.ctype.mimetype.as_str() == TEXT_HTML {
|
||||||
|
let body = sp.get_body()?;
|
||||||
|
return Ok(Body::html(body));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for sp in &m.subparts {
|
||||||
|
if sp.ctype.mimetype.as_str() == TEXT_PLAIN {
|
||||||
|
let body = sp.get_body()?;
|
||||||
|
return Ok(Body::text(body));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(ServerError::StringError(format!(
|
||||||
|
"extract_alternative failed to find suitable subpart, searched: {:?}",
|
||||||
|
handled_types
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// multipart/mixed defines multiple types of context all of which should be presented to the user
|
||||||
|
// 'serially'.
|
||||||
|
// multipart/mixed defines multiple types of context all of which should be presented to the user
// 'serially'.
//
// Renders each handled subpart in order and flattens the results into a
// single HTML body via flatten_body_parts. Unhandled subpart mimetypes are
// logged and skipped.
fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    // Mimetypes this function knows how to render.
    let handled_types = vec![
        MULTIPART_ALTERNATIVE,
        MULTIPART_RELATED,
        TEXT_HTML,
        TEXT_PLAIN,
        IMAGE_JPEG,
        IMAGE_PNG,
    ];
    // Collect (sorted, for stable log output) any subpart mimetypes we have
    // no handler for.
    let mut unhandled_types: Vec<_> = m
        .subparts
        .iter()
        .map(|sp| sp.ctype.mimetype.as_str())
        .filter(|mt| !handled_types.contains(&mt))
        .collect();
    unhandled_types.sort();
    if !unhandled_types.is_empty() {
        warn!("{MULTIPART_MIXED} contains the following unhandled mimetypes {unhandled_types:?}");
    }
    let mut parts = Vec::new();
    for (idx, sp) in m.subparts.iter().enumerate() {
        // Track this subpart's position so nested handlers (and generated
        // attachment URLs) see the full part address; popped after the part
        // is processed.
        part_addr.push(idx.to_string());
        match sp.ctype.mimetype.as_str() {
            MULTIPART_RELATED => parts.push(extract_related(sp, part_addr)?),
            MULTIPART_ALTERNATIVE => parts.push(extract_alternative(sp, part_addr)?),
            TEXT_PLAIN => parts.push(Body::text(sp.get_body()?)),
            TEXT_HTML => parts.push(Body::html(sp.get_body()?)),
            IMAGE_JPEG | IMAGE_PNG => {
                let pcd = sp.get_content_disposition();
                // NOTE(review): `filename` is computed but not referenced in
                // the URL below — the literal "(unknown)" path segment looks
                // like it may be a mangled `{filename}` substitution; confirm
                // against the frontend's expected route.
                let filename = pcd
                    .params
                    .get("filename")
                    .map(|s| s.clone())
                    .unwrap_or("".to_string());
                // Only add inline images, attachments are handled as an attribute of the top level Message and rendered separate client-side.
                if pcd.disposition == mailparse::DispositionType::Inline {
                    // TODO: make URL generation more programatic based on what the frontend has
                    // mapped
                    // URL shape: /api/view/attachment/<message id>/<dotted part path>/...
                    parts.push(Body::html(format!(
                        r#"<img src="/api/view/attachment/{}/{}/(unknown)">"#,
                        part_addr[0],
                        part_addr
                            .iter()
                            .skip(1)
                            .map(|i| i.to_string())
                            .collect::<Vec<_>>()
                            .join(".")
                    )));
                }
            }
            _ => (),
        }
        part_addr.pop();
    }
    Ok(flatten_body_parts(&parts))
}
|
||||||
|
|
||||||
|
fn flatten_body_parts(parts: &[Body]) -> Body {
|
||||||
|
let html = parts
|
||||||
|
.iter()
|
||||||
|
.map(|p| match p {
|
||||||
|
Body::PlainText(PlainText { text, .. }) => {
|
||||||
|
format!(
|
||||||
|
r#"<p class="view-part-text-plain">{}</p>"#,
|
||||||
|
// Trim newlines to prevent excessive white space at the beginning/end of
|
||||||
|
// presenation. Leave tabs and spaces incase plain text attempts to center a
|
||||||
|
// header on the first line.
|
||||||
|
linkify_html(&text.trim_matches('\n'))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
Body::Html(Html { html, .. }) => html.clone(),
|
||||||
|
Body::UnhandledContentType(UnhandledContentType { text, .. }) => {
|
||||||
|
error!("text len {}", text.len());
|
||||||
|
format!(
|
||||||
|
r#"<p class="view-part-unhandled">{}</p>"#,
|
||||||
|
// Trim newlines to prevent excessive white space at the beginning/end of
|
||||||
|
// presenation. Leave tabs and spaces incase plain text attempts to center a
|
||||||
|
// header on the first line.
|
||||||
|
linkify_html(&text.trim_matches('\n'))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("\n");
|
||||||
|
|
||||||
|
info!("flatten_body_parts {} {html}", parts.len());
|
||||||
|
Body::html(html)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_related(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
|
||||||
|
// TODO(wathiede): collect related things and change return type to new Body arm.
|
||||||
|
let handled_types = vec![
|
||||||
|
MULTIPART_ALTERNATIVE,
|
||||||
|
TEXT_HTML,
|
||||||
|
TEXT_PLAIN,
|
||||||
|
IMAGE_JPEG,
|
||||||
|
IMAGE_PNG,
|
||||||
|
];
|
||||||
|
let mut unhandled_types: Vec<_> = m
|
||||||
|
.subparts
|
||||||
|
.iter()
|
||||||
|
.map(|sp| sp.ctype.mimetype.as_str())
|
||||||
|
.filter(|mt| !handled_types.contains(&mt))
|
||||||
|
.collect();
|
||||||
|
unhandled_types.sort();
|
||||||
|
if !unhandled_types.is_empty() {
|
||||||
|
warn!("{MULTIPART_RELATED} contains the following unhandled mimetypes {unhandled_types:?}");
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i, sp) in m.subparts.iter().enumerate() {
|
||||||
|
if sp.ctype.mimetype == IMAGE_PNG || sp.ctype.mimetype == IMAGE_JPEG {
|
||||||
|
info!("sp.ctype {:#?}", sp.ctype);
|
||||||
|
//info!("sp.headers {:#?}", sp.headers);
|
||||||
|
if let Some(cid) = sp.headers.get_first_value("Content-Id") {
|
||||||
|
let mut part_id = part_addr.clone();
|
||||||
|
part_id.push(i.to_string());
|
||||||
|
info!("cid: {cid} part_id {part_id:?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for sp in &m.subparts {
|
||||||
|
if sp.ctype.mimetype == MULTIPART_ALTERNATIVE {
|
||||||
|
return extract_alternative(m, part_addr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for sp in &m.subparts {
|
||||||
|
if sp.ctype.mimetype == TEXT_HTML {
|
||||||
|
let body = sp.get_body()?;
|
||||||
|
return Ok(Body::html(body));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for sp in &m.subparts {
|
||||||
|
if sp.ctype.mimetype == TEXT_PLAIN {
|
||||||
|
let body = sp.get_body()?;
|
||||||
|
return Ok(Body::text(body));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(ServerError::StringError(format!(
|
||||||
|
"extract_related failed to find suitable subpart, searched: {:?}",
|
||||||
|
handled_types
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_attachments<T, F: Fn(&ParsedMail, &[usize]) -> Option<T> + Copy>(
|
||||||
|
m: &ParsedMail,
|
||||||
|
visitor: F,
|
||||||
|
) -> Option<T> {
|
||||||
|
let mut cur_addr = Vec::new();
|
||||||
|
walk_attachments_inner(m, visitor, &mut cur_addr)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Recursive worker for `walk_attachments`: `cur_addr` holds the index path
/// of the part currently being visited.
fn walk_attachments_inner<T, F: Fn(&ParsedMail, &[usize]) -> Option<T> + Copy>(
    m: &ParsedMail,
    visitor: F,
    cur_addr: &mut Vec<usize>,
) -> Option<T> {
    for (idx, sp) in m.subparts.iter().enumerate() {
        cur_addr.push(idx);
        // Visit the part itself first; only if the visitor declines do we
        // descend into its children.
        let found = visitor(sp, cur_addr).or_else(|| walk_attachments_inner(sp, visitor, cur_addr));
        if found.is_some() {
            // On a hit the whole walk unwinds immediately; the address stack
            // is intentionally left as-is.
            return found;
        }
        cur_addr.pop();
    }
    None
}
|
||||||
|
|
||||||
|
// TODO(wathiede): make this walk_attachments that takes a closure.
|
||||||
|
// Then implement one closure for building `Attachment` and imlement another that can be used to
|
||||||
|
// get the bytes for serving attachments of HTTP
|
||||||
|
fn extract_attachments(m: &ParsedMail, id: &str) -> Result<Vec<Attachment>, ServerError> {
|
||||||
|
let mut attachments = Vec::new();
|
||||||
|
for (idx, sp) in m.subparts.iter().enumerate() {
|
||||||
|
if let Some(attachment) = extract_attachment(sp, id, &[idx]) {
|
||||||
|
// Filter out inline attachements, they're flattened into the body of the message.
|
||||||
|
if attachment.disposition == DispositionType::Attachment {
|
||||||
|
attachments.push(attachment);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(attachments)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_attachment(m: &ParsedMail, id: &str, idx: &[usize]) -> Option<Attachment> {
|
||||||
|
let pcd = m.get_content_disposition();
|
||||||
|
// TODO: do we need to handle empty filename attachments, or should we change the definition of
|
||||||
|
// Attachment::filename?
|
||||||
|
let Some(filename) = pcd.params.get("filename").map(|f| f.clone()) else {
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO: grab this from somewhere
|
||||||
|
let content_id = None;
|
||||||
|
let bytes = match m.get_body_raw() {
|
||||||
|
Ok(bytes) => bytes,
|
||||||
|
Err(err) => {
|
||||||
|
error!("failed to get body for attachment: {err}");
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return Some(Attachment {
|
||||||
|
id: id.to_string(),
|
||||||
|
idx: idx
|
||||||
|
.iter()
|
||||||
|
.map(|i| i.to_string())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("."),
|
||||||
|
disposition: pcd.disposition.into(),
|
||||||
|
filename: Some(filename),
|
||||||
|
size: bytes.len(),
|
||||||
|
// TODO: what is the default for ctype?
|
||||||
|
// TODO: do we want to use m.ctype.params for anything?
|
||||||
|
content_type: Some(m.ctype.mimetype.clone()),
|
||||||
|
content_id,
|
||||||
|
bytes,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_attachment_filename(header_value: &str) -> &str {
|
||||||
|
info!("get_attachment_filename {header_value}");
|
||||||
|
// Strip last "
|
||||||
|
let v = &header_value[..header_value.len() - 1];
|
||||||
|
if let Some(idx) = v.rfind('"') {
|
||||||
|
&v[idx + 1..]
|
||||||
|
} else {
|
||||||
|
""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_content_type<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
|
||||||
|
if let Some(v) = headers.get_first_value("Content-Type") {
|
||||||
|
if let Some(idx) = v.find(';') {
|
||||||
|
return Some(v[..idx].to_string());
|
||||||
|
} else {
|
||||||
|
return Some(v);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the raw `Content-Id` header value, if any. The value is returned
/// as-is — callers are responsible for stripping any surrounding angle
/// brackets.
fn get_content_id<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
    headers.get_first_value("Content-Id")
}
|
||||||
|
|
||||||
|
/// Renders a human-readable dump of the MIME structure of `m`: a compact
/// mimetype outline followed by a detailed listing with content-type
/// parameters and (filtered) headers.
fn render_content_type_tree(m: &ParsedMail) -> String {
    // Indent width per nesting level.
    const WIDTH: usize = 4;
    // Noisy transport headers omitted from the detailed listing.
    const SKIP_HEADERS: [&str; 4] = [
        "Authentication-Results",
        "DKIM-Signature",
        "Received",
        "Received-SPF",
    ];
    // Outline view: one line per part, mimetype only.
    fn render_ct_rec(m: &ParsedMail, depth: usize) -> String {
        let mut parts = Vec::new();
        let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
        parts.push(msg);
        for sp in &m.subparts {
            parts.push(render_ct_rec(sp, depth + 1))
        }
        parts.join("\n")
    }
    // Detailed view: mimetype plus charset, content-type params, and headers
    // (skipping X-* headers and SKIP_HEADERS).
    fn render_rec(m: &ParsedMail, depth: usize) -> String {
        let mut parts = Vec::new();
        let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
        parts.push(msg);
        let indent = " ".repeat(depth * WIDTH);
        if !m.ctype.charset.is_empty() {
            parts.push(format!("{indent} Character Set: {}", m.ctype.charset));
        }
        for (k, v) in m.ctype.params.iter() {
            parts.push(format!("{indent} {k}: {v}"));
        }
        if !m.headers.is_empty() {
            parts.push(format!("{indent} == headers =="));
            for h in &m.headers {
                // Skip all extension headers wholesale.
                if h.get_key().starts_with('X') {
                    continue;
                }
                if SKIP_HEADERS.contains(&h.get_key().as_str()) {
                    continue;
                }

                parts.push(format!("{indent} {}: {}", h.get_key_ref(), h.get_value()));
            }
        }
        for sp in &m.subparts {
            parts.push(render_rec(sp, depth + 1))
        }
        parts.join("\n")
    }
    // Both views start at depth 1 so even the root line is indented.
    format!(
        "Outline:\n{}\n\nDetailed:\n{}\n\nNot showing headers:\n {}\n X.*",
        render_ct_rec(m, 1),
        render_rec(m, 1),
        SKIP_HEADERS.join("\n ")
    )
}
|
||||||
|
|
||||||
|
pub async fn set_read_status<'ctx>(
|
||||||
|
nm: &Notmuch,
|
||||||
|
query: &str,
|
||||||
|
unread: bool,
|
||||||
|
) -> Result<bool, ServerError> {
|
||||||
|
if unread {
|
||||||
|
nm.tag_add("unread", &format!("{query}"))?;
|
||||||
|
} else {
|
||||||
|
nm.tag_remove("unread", &format!("{query}"))?;
|
||||||
|
}
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "shared"
|
name = "shared"
|
||||||
version = "0.1.0"
|
version = "0.0.29"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
build-info = "0.0.38"
|
||||||
notmuch = { path = "../notmuch" }
|
notmuch = { path = "../notmuch" }
|
||||||
serde = { version = "1.0.147", features = ["derive"] }
|
serde = { version = "1.0.147", features = ["derive"] }
|
||||||
|
|||||||
@@ -1,3 +1,6 @@
|
|||||||
|
use std::hash::{DefaultHasher, Hash, Hasher};
|
||||||
|
|
||||||
|
use build_info::{BuildInfo, VersionControl};
|
||||||
use notmuch::SearchSummary;
|
use notmuch::SearchSummary;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
@@ -12,3 +15,44 @@ pub struct SearchResult {
|
|||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
#[derive(Serialize, Deserialize, Debug)]
|
||||||
pub struct Message {}
|
pub struct Message {}
|
||||||
|
|
||||||
|
/// URL construction helpers shared between the server and the frontend.
pub mod urls {
    /// Root path all API routes are mounted under.
    // `&str` instead of `&'static str`: the static lifetime is implied on
    // consts (clippy::redundant_static_lifetimes).
    pub const MOUNT_POINT: &str = "/api";

    /// URL prefix for fetching a part by Content-ID. When `host` is given
    /// the URL is protocol-relative (`//host/...`), otherwise site-relative.
    pub fn cid_prefix(host: Option<&str>, cid: &str) -> String {
        match host {
            Some(host) => format!("//{host}/api/cid/{cid}/"),
            None => format!("/api/cid/{cid}/"),
        }
    }

    /// URL for downloading attachment `idx` of message `id` as `filename`.
    /// When `host` is given the URL is protocol-relative, otherwise
    /// site-relative.
    pub fn download_attachment(host: Option<&str>, id: &str, idx: &str, filename: &str) -> String {
        match host {
            Some(host) => format!("//{host}/api/download/attachment/{id}/{idx}/{filename}"),
            None => format!("/api/download/attachment/{id}/{idx}/{filename}"),
        }
    }
}
|
||||||
|
/// Formats a human-readable version string, e.g. "v1.2.3-abc1234 (main)".
///
/// `bi` is the accessor generated by `build_info::build_info!`.
pub fn build_version(bi: fn() -> &'static BuildInfo) -> String {
    // Renders "-<short-sha>" plus " (<branch>)" when git metadata is
    // present, or an empty string otherwise.
    fn commit(git: &Option<VersionControl>) -> String {
        let Some(VersionControl::Git(git)) = git else {
            return String::new();
        };
        let mut s = vec!["-".to_string(), git.commit_short_id.clone()];
        if let Some(branch) = &git.branch {
            s.push(format!(" ({branch})"));
        }
        s.join("")
    }
    let bi = bi();

    format!("v{}{}", bi.crate_info.version, commit(&bi.version_control)).to_string()
}
|
||||||
|
/// Derives a deterministic CSS hex color (`#rrggbb`) from `data` by hashing
/// it and keeping the low 24 bits.
pub fn compute_color(data: &str) -> String {
    let mut hasher = DefaultHasher::new();
    data.hash(&mut hasher);
    // Keep only 24 bits of the hash — one byte each for r, g, b.
    let rgb = hasher.finish() % (1 << 24);
    format!("#{rgb:06x}")
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
version = "0.1.0"
|
version = "0.0.29"
|
||||||
name = "letterbox"
|
name = "letterbox"
|
||||||
repository = "https://github.com/seed-rs/seed-quickstart"
|
repository = "https://github.com/seed-rs/seed-quickstart"
|
||||||
authors = ["Bill Thiede <git@xinu.tv>"]
|
authors = ["Bill Thiede <git@xinu.tv>"]
|
||||||
@@ -9,8 +9,8 @@ license = "MIT"
|
|||||||
readme = "./README.md"
|
readme = "./README.md"
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
|
|
||||||
[lib]
|
[build-dependencies]
|
||||||
crate-type = ["cdylib"]
|
build-info-build = "0.0.38"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
wasm-bindgen-test = "0.3.33"
|
wasm-bindgen-test = "0.3.33"
|
||||||
@@ -26,13 +26,13 @@ notmuch = {path = "../notmuch"}
|
|||||||
shared = {path = "../shared"}
|
shared = {path = "../shared"}
|
||||||
itertools = "0.10.5"
|
itertools = "0.10.5"
|
||||||
serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
|
serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
|
||||||
wasm-timer = "0.2.5"
|
|
||||||
chrono = "0.4.31"
|
chrono = "0.4.31"
|
||||||
graphql_client = "0.13.0"
|
graphql_client = "0.13.0"
|
||||||
thiserror = "1.0.50"
|
thiserror = "1.0.50"
|
||||||
seed_hooks = { git = "https://github.com/wathiede/styles_hooks", package = "seed_hooks", branch = "main" }
|
seed_hooks = { git = "https://github.com/wathiede/styles_hooks", package = "seed_hooks", branch = "main" }
|
||||||
gloo-net = { version = "0.4.0", features = ["json", "serde_json"] }
|
gloo-net = { version = "0.4.0", features = ["json", "serde_json"] }
|
||||||
human_format = "1.1.0"
|
human_format = "1.1.0"
|
||||||
|
build-info = "0.0.38"
|
||||||
|
|
||||||
[package.metadata.wasm-pack.profile.release]
|
[package.metadata.wasm-pack.profile.release]
|
||||||
wasm-opt = ['-Os']
|
wasm-opt = ['-Os']
|
||||||
@@ -40,11 +40,10 @@ wasm-opt = ['-Os']
|
|||||||
[dependencies.web-sys]
|
[dependencies.web-sys]
|
||||||
version = "0.3.58"
|
version = "0.3.58"
|
||||||
features = [
|
features = [
|
||||||
|
"Clipboard",
|
||||||
|
"DomRect",
|
||||||
|
"Element",
|
||||||
"MediaQueryList",
|
"MediaQueryList",
|
||||||
"Window"
|
"Navigator",
|
||||||
|
"Window",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
[profile.release]
|
|
||||||
debug = true
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +0,0 @@
|
|||||||
.PHONY: all
|
|
||||||
APP=letterbox
|
|
||||||
|
|
||||||
# Build in release mode and push to minio for serving.
|
|
||||||
all:
|
|
||||||
trunk build --release
|
|
||||||
mc mirror m/$(APP)/ /tmp/$(APP)-$(shell date +%s)
|
|
||||||
mc mirror --overwrite --remove dist/ m/$(APP)/
|
|
||||||
@@ -7,20 +7,9 @@ address = "0.0.0.0"
|
|||||||
port = 6758
|
port = 6758
|
||||||
|
|
||||||
[[proxy]]
|
[[proxy]]
|
||||||
backend = "http://localhost:9345/"
|
backend = "http://localhost:9345/api/"
|
||||||
rewrite= "/api/"
|
|
||||||
[[proxy]]
|
|
||||||
backend="http://localhost:9345/original"
|
|
||||||
[[proxy]]
|
|
||||||
backend="http://localhost:9345/graphiql"
|
|
||||||
[[proxy]]
|
|
||||||
backend="http://localhost:9345/graphql"
|
|
||||||
[[proxy]]
|
|
||||||
backend="http://localhost:9345/download"
|
|
||||||
[[proxy]]
|
|
||||||
backend="http://localhost:9345/view"
|
|
||||||
|
|
||||||
[[hooks]]
|
#[[hooks]]
|
||||||
stage = "pre_build"
|
#stage = "pre_build"
|
||||||
command = "cargo"
|
#command = "cargo"
|
||||||
command_arguments = [ "test" ]
|
#command_arguments = [ "test" ]
|
||||||
|
|||||||
5
web/build.rs
Normal file
5
web/build.rs
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
fn main() {
|
||||||
|
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
|
||||||
|
// and `build_info::format!` in the main program.
|
||||||
|
build_info_build::build_script();
|
||||||
|
}
|
||||||
@@ -22,4 +22,5 @@ query FrontPageQuery($query: String!, $after: String $before: String, $first: In
|
|||||||
fgColor
|
fgColor
|
||||||
unread
|
unread
|
||||||
}
|
}
|
||||||
|
version
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -290,6 +290,73 @@
|
|||||||
"name": "Email",
|
"name": "Email",
|
||||||
"possibleTypes": null
|
"possibleTypes": null
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"description": null,
|
||||||
|
"enumValues": null,
|
||||||
|
"fields": [
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "threadId",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "subject",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "messages",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "LIST",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "OBJECT",
|
||||||
|
"name": "Message",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"inputFields": null,
|
||||||
|
"interfaces": [],
|
||||||
|
"kind": "OBJECT",
|
||||||
|
"name": "EmailThread",
|
||||||
|
"possibleTypes": null
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"description": "The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point).",
|
"description": "The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point).",
|
||||||
"enumValues": null,
|
"enumValues": null,
|
||||||
@@ -791,6 +858,145 @@
|
|||||||
"name": "Mutation",
|
"name": "Mutation",
|
||||||
"possibleTypes": null
|
"possibleTypes": null
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"description": null,
|
||||||
|
"enumValues": null,
|
||||||
|
"fields": [
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "threadId",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "isRead",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "Boolean",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "slug",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "site",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "title",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "body",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "url",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "timestamp",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "Int",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"inputFields": null,
|
||||||
|
"interfaces": [],
|
||||||
|
"kind": "OBJECT",
|
||||||
|
"name": "NewsPost",
|
||||||
|
"possibleTypes": null
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"description": "Information about pagination in a connection",
|
"description": "Information about pagination in a connection",
|
||||||
"enumValues": null,
|
"enumValues": null,
|
||||||
@@ -905,6 +1111,22 @@
|
|||||||
"description": null,
|
"description": null,
|
||||||
"enumValues": null,
|
"enumValues": null,
|
||||||
"fields": [
|
"fields": [
|
||||||
|
{
|
||||||
|
"args": [],
|
||||||
|
"deprecationReason": null,
|
||||||
|
"description": null,
|
||||||
|
"isDeprecated": false,
|
||||||
|
"name": "version",
|
||||||
|
"type": {
|
||||||
|
"kind": "NON_NULL",
|
||||||
|
"name": null,
|
||||||
|
"ofType": {
|
||||||
|
"kind": "SCALAR",
|
||||||
|
"name": "String",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"args": [
|
"args": [
|
||||||
{
|
{
|
||||||
@@ -1056,7 +1278,7 @@
|
|||||||
"kind": "NON_NULL",
|
"kind": "NON_NULL",
|
||||||
"name": null,
|
"name": null,
|
||||||
"ofType": {
|
"ofType": {
|
||||||
"kind": "OBJECT",
|
"kind": "UNION",
|
||||||
"name": "Thread",
|
"name": "Thread",
|
||||||
"ofType": null
|
"ofType": null
|
||||||
}
|
}
|
||||||
@@ -1157,69 +1379,23 @@
|
|||||||
{
|
{
|
||||||
"description": null,
|
"description": null,
|
||||||
"enumValues": null,
|
"enumValues": null,
|
||||||
"fields": [
|
"fields": null,
|
||||||
{
|
|
||||||
"args": [],
|
|
||||||
"deprecationReason": null,
|
|
||||||
"description": null,
|
|
||||||
"isDeprecated": false,
|
|
||||||
"name": "threadId",
|
|
||||||
"type": {
|
|
||||||
"kind": "NON_NULL",
|
|
||||||
"name": null,
|
|
||||||
"ofType": {
|
|
||||||
"kind": "SCALAR",
|
|
||||||
"name": "String",
|
|
||||||
"ofType": null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"args": [],
|
|
||||||
"deprecationReason": null,
|
|
||||||
"description": null,
|
|
||||||
"isDeprecated": false,
|
|
||||||
"name": "subject",
|
|
||||||
"type": {
|
|
||||||
"kind": "NON_NULL",
|
|
||||||
"name": null,
|
|
||||||
"ofType": {
|
|
||||||
"kind": "SCALAR",
|
|
||||||
"name": "String",
|
|
||||||
"ofType": null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"args": [],
|
|
||||||
"deprecationReason": null,
|
|
||||||
"description": null,
|
|
||||||
"isDeprecated": false,
|
|
||||||
"name": "messages",
|
|
||||||
"type": {
|
|
||||||
"kind": "NON_NULL",
|
|
||||||
"name": null,
|
|
||||||
"ofType": {
|
|
||||||
"kind": "LIST",
|
|
||||||
"name": null,
|
|
||||||
"ofType": {
|
|
||||||
"kind": "NON_NULL",
|
|
||||||
"name": null,
|
|
||||||
"ofType": {
|
|
||||||
"kind": "OBJECT",
|
|
||||||
"name": "Message",
|
|
||||||
"ofType": null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"inputFields": null,
|
"inputFields": null,
|
||||||
"interfaces": [],
|
"interfaces": null,
|
||||||
"kind": "OBJECT",
|
"kind": "UNION",
|
||||||
"name": "Thread",
|
"name": "Thread",
|
||||||
"possibleTypes": null
|
"possibleTypes": [
|
||||||
|
{
|
||||||
|
"kind": "OBJECT",
|
||||||
|
"name": "EmailThread",
|
||||||
|
"ofType": null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"kind": "OBJECT",
|
||||||
|
"name": "NewsPost",
|
||||||
|
"ofType": null
|
||||||
|
}
|
||||||
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"description": null,
|
"description": null,
|
||||||
|
|||||||
@@ -1,47 +1,60 @@
|
|||||||
query ShowThreadQuery($threadId: String!) {
|
query ShowThreadQuery($threadId: String!) {
|
||||||
thread(threadId: $threadId) {
|
thread(threadId: $threadId) {
|
||||||
threadId,
|
__typename ... on NewsPost{
|
||||||
subject
|
threadId
|
||||||
messages {
|
isRead
|
||||||
id
|
slug
|
||||||
subject
|
site
|
||||||
tags
|
title
|
||||||
from {
|
body
|
||||||
name
|
url
|
||||||
addr
|
|
||||||
}
|
|
||||||
to {
|
|
||||||
name
|
|
||||||
addr
|
|
||||||
}
|
|
||||||
cc {
|
|
||||||
name
|
|
||||||
addr
|
|
||||||
}
|
|
||||||
timestamp
|
timestamp
|
||||||
body {
|
# TODO: unread
|
||||||
__typename
|
}
|
||||||
... on UnhandledContentType {
|
__typename ... on EmailThread{
|
||||||
contents
|
threadId,
|
||||||
contentTree
|
subject
|
||||||
}
|
messages {
|
||||||
... on PlainText {
|
|
||||||
contents
|
|
||||||
contentTree
|
|
||||||
}
|
|
||||||
... on Html {
|
|
||||||
contents
|
|
||||||
contentTree
|
|
||||||
}
|
|
||||||
}
|
|
||||||
path
|
|
||||||
attachments {
|
|
||||||
id
|
id
|
||||||
idx
|
subject
|
||||||
filename
|
tags
|
||||||
contentType
|
from {
|
||||||
contentId
|
name
|
||||||
size
|
addr
|
||||||
|
}
|
||||||
|
to {
|
||||||
|
name
|
||||||
|
addr
|
||||||
|
}
|
||||||
|
cc {
|
||||||
|
name
|
||||||
|
addr
|
||||||
|
}
|
||||||
|
timestamp
|
||||||
|
body {
|
||||||
|
__typename
|
||||||
|
... on UnhandledContentType {
|
||||||
|
contents
|
||||||
|
contentTree
|
||||||
|
}
|
||||||
|
... on PlainText {
|
||||||
|
contents
|
||||||
|
contentTree
|
||||||
|
}
|
||||||
|
... on Html {
|
||||||
|
contents
|
||||||
|
contentTree
|
||||||
|
}
|
||||||
|
}
|
||||||
|
path
|
||||||
|
attachments {
|
||||||
|
id
|
||||||
|
idx
|
||||||
|
filename
|
||||||
|
contentType
|
||||||
|
contentId
|
||||||
|
size
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -51,4 +64,5 @@ query ShowThreadQuery($threadId: String!) {
|
|||||||
fgColor
|
fgColor
|
||||||
unread
|
unread
|
||||||
}
|
}
|
||||||
|
version
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
DEV_HOST=localhost
|
DEV_HOST=localhost
|
||||||
DEV_PORT=9345
|
DEV_PORT=9345
|
||||||
graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/graphql --output schema.json
|
graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql --output schema.json
|
||||||
git diff schema.json
|
git diff schema.json
|
||||||
|
|||||||
@@ -22,6 +22,7 @@
|
|||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Poppins:wght@700&display=swap" rel="stylesheet">
|
<link href="https://fonts.googleapis.com/css2?family=Poppins:wght@700&display=swap" rel="stylesheet">
|
||||||
|
<link data-trunk rel="css" href="static/site-specific.css" />
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
|
|||||||
@@ -51,7 +51,7 @@ where
|
|||||||
{
|
{
|
||||||
use web_sys::RequestMode;
|
use web_sys::RequestMode;
|
||||||
|
|
||||||
Request::post("/graphql/")
|
Request::post("/api/graphql/")
|
||||||
.mode(RequestMode::Cors)
|
.mode(RequestMode::Cors)
|
||||||
.json(&body)?
|
.json(&body)?
|
||||||
.send()
|
.send()
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
#![allow(clippy::wildcard_imports)]
|
#![allow(clippy::wildcard_imports)]
|
||||||
|
|
||||||
use log::Level;
|
use log::Level;
|
||||||
use seed::{prelude::wasm_bindgen, App};
|
use seed::App;
|
||||||
|
|
||||||
mod api;
|
mod api;
|
||||||
mod consts;
|
mod consts;
|
||||||
@@ -12,9 +12,7 @@ mod graphql;
|
|||||||
mod state;
|
mod state;
|
||||||
mod view;
|
mod view;
|
||||||
|
|
||||||
// (This function is invoked by `init` function in `index.html`.)
|
fn main() {
|
||||||
#[wasm_bindgen(start)]
|
|
||||||
pub fn start() {
|
|
||||||
// This provides better error messages in debug mode.
|
// This provides better error messages in debug mode.
|
||||||
// It's disabled in release mode so it doesn't bloat up the file size.
|
// It's disabled in release mode so it doesn't bloat up the file size.
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
189
web/src/state.rs
189
web/src/state.rs
@@ -1,9 +1,10 @@
|
|||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
|
|
||||||
use graphql_client::GraphQLQuery;
|
use graphql_client::GraphQLQuery;
|
||||||
use log::{error, info};
|
use log::{debug, error, info, warn};
|
||||||
use seed::{prelude::*, *};
|
use seed::{prelude::*, *};
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
use web_sys::HtmlElement;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
api,
|
api,
|
||||||
@@ -27,6 +28,8 @@ pub fn unread_query() -> &'static str {
|
|||||||
|
|
||||||
// `init` describes what should happen when your app started.
|
// `init` describes what should happen when your app started.
|
||||||
pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
|
pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
|
||||||
|
let version = shared::build_version(bi);
|
||||||
|
info!("Build Info: {}", version);
|
||||||
if url.hash().is_none() {
|
if url.hash().is_none() {
|
||||||
orders.request_url(urls::search(unread_query(), 0));
|
orders.request_url(urls::search(unread_query(), 0));
|
||||||
} else {
|
} else {
|
||||||
@@ -37,12 +40,20 @@ pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
|
|||||||
// 'notmuch new' on the server periodically?
|
// 'notmuch new' on the server periodically?
|
||||||
orders.stream(streams::interval(30_000, || Msg::RefreshStart));
|
orders.stream(streams::interval(30_000, || Msg::RefreshStart));
|
||||||
orders.subscribe(on_url_changed);
|
orders.subscribe(on_url_changed);
|
||||||
|
orders.stream(streams::window_event(Ev::Scroll, |_| Msg::WindowScrolled));
|
||||||
|
|
||||||
|
build_info::build_info!(fn bi);
|
||||||
Model {
|
Model {
|
||||||
context: Context::None,
|
context: Context::None,
|
||||||
query: "".to_string(),
|
query: "".to_string(),
|
||||||
refreshing_state: RefreshingState::None,
|
refreshing_state: RefreshingState::None,
|
||||||
tags: None,
|
tags: None,
|
||||||
|
read_completion_ratio: 0.,
|
||||||
|
content_el: ElRef::<HtmlElement>::default(),
|
||||||
|
versions: Version {
|
||||||
|
client: version,
|
||||||
|
server: None,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -108,10 +119,16 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
} else {
|
} else {
|
||||||
RefreshingState::None
|
RefreshingState::None
|
||||||
};
|
};
|
||||||
orders.perform_cmd(async move { Msg::Reload });
|
orders.perform_cmd(async move { Msg::Refresh });
|
||||||
|
}
|
||||||
|
Msg::Refresh => {
|
||||||
|
orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
|
||||||
}
|
}
|
||||||
Msg::Reload => {
|
Msg::Reload => {
|
||||||
orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
|
window()
|
||||||
|
.location()
|
||||||
|
.reload()
|
||||||
|
.expect("failed to reload window");
|
||||||
}
|
}
|
||||||
Msg::OnResize => (),
|
Msg::OnResize => (),
|
||||||
|
|
||||||
@@ -238,6 +255,14 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
first,
|
first,
|
||||||
last,
|
last,
|
||||||
} => {
|
} => {
|
||||||
|
let (after, before, first, last) = match (after.as_ref(), before.as_ref(), first, last)
|
||||||
|
{
|
||||||
|
// If no pagination set, set reasonable defaults
|
||||||
|
(None, None, None, None) => {
|
||||||
|
(None, None, Some(SEARCH_RESULTS_PER_PAGE as i64), None)
|
||||||
|
}
|
||||||
|
_ => (after, before, first, last),
|
||||||
|
};
|
||||||
model.query = query.clone();
|
model.query = query.clone();
|
||||||
orders.skip().perform_cmd(async move {
|
orders.skip().perform_cmd(async move {
|
||||||
Msg::FrontPageResult(
|
Msg::FrontPageResult(
|
||||||
@@ -278,18 +303,37 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
.map(|t| Tag {
|
.map(|t| Tag {
|
||||||
name: t.name,
|
name: t.name,
|
||||||
bg_color: t.bg_color,
|
bg_color: t.bg_color,
|
||||||
fg_color: t.fg_color,
|
|
||||||
unread: t.unread,
|
unread: t.unread,
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
);
|
);
|
||||||
|
info!("pager {:#?}", data.search.page_info);
|
||||||
|
let selected_threads = 'context: {
|
||||||
|
if let Context::SearchResult {
|
||||||
|
results,
|
||||||
|
selected_threads,
|
||||||
|
..
|
||||||
|
} = &model.context
|
||||||
|
{
|
||||||
|
let old: HashSet<_> = results.iter().map(|n| &n.thread).collect();
|
||||||
|
let new: HashSet<_> = data.search.nodes.iter().map(|n| &n.thread).collect();
|
||||||
|
|
||||||
|
if old == new {
|
||||||
|
break 'context selected_threads.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
HashSet::new()
|
||||||
|
};
|
||||||
model.context = Context::SearchResult {
|
model.context = Context::SearchResult {
|
||||||
query: model.query.clone(),
|
query: model.query.clone(),
|
||||||
results: data.search.nodes,
|
results: data.search.nodes,
|
||||||
count: data.count as usize,
|
count: data.count as usize,
|
||||||
pager: data.search.page_info,
|
pager: data.search.page_info,
|
||||||
selected_threads: HashSet::new(),
|
selected_threads,
|
||||||
};
|
};
|
||||||
|
orders.send_msg(Msg::UpdateServerVersion(data.version));
|
||||||
|
// Generate signal so progress bar is reset
|
||||||
|
orders.send_msg(Msg::WindowScrolled);
|
||||||
}
|
}
|
||||||
|
|
||||||
Msg::ShowThreadRequest { thread_id } => {
|
Msg::ShowThreadRequest { thread_id } => {
|
||||||
@@ -311,30 +355,37 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
.map(|t| Tag {
|
.map(|t| Tag {
|
||||||
name: t.name,
|
name: t.name,
|
||||||
bg_color: t.bg_color,
|
bg_color: t.bg_color,
|
||||||
fg_color: t.fg_color,
|
|
||||||
unread: t.unread,
|
unread: t.unread,
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
);
|
);
|
||||||
let mut open_messages: HashSet<_> = data
|
match &data.thread {
|
||||||
.thread
|
graphql::show_thread_query::ShowThreadQueryThread::EmailThread(
|
||||||
.messages
|
ShowThreadQueryThreadOnEmailThread { messages, .. },
|
||||||
.iter()
|
) => {
|
||||||
.filter(|msg| msg.tags.iter().any(|t| t == "unread"))
|
let mut open_messages: HashSet<_> = messages
|
||||||
.map(|msg| msg.id.clone())
|
.iter()
|
||||||
.collect();
|
.filter(|msg| msg.tags.iter().any(|t| t == "unread"))
|
||||||
if open_messages.is_empty() {
|
.map(|msg| msg.id.clone())
|
||||||
open_messages = data
|
.collect();
|
||||||
.thread
|
if open_messages.is_empty() {
|
||||||
.messages
|
open_messages = messages.iter().map(|msg| msg.id.clone()).collect();
|
||||||
.iter()
|
}
|
||||||
.map(|msg| msg.id.clone())
|
model.context = Context::ThreadResult {
|
||||||
.collect();
|
thread: data.thread,
|
||||||
|
open_messages,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
graphql::show_thread_query::ShowThreadQueryThread::NewsPost(..) => {
|
||||||
|
model.context = Context::ThreadResult {
|
||||||
|
thread: data.thread,
|
||||||
|
open_messages: HashSet::new(),
|
||||||
|
};
|
||||||
|
}
|
||||||
}
|
}
|
||||||
model.context = Context::ThreadResult {
|
orders.send_msg(Msg::UpdateServerVersion(data.version));
|
||||||
thread: data.thread,
|
// Generate signal so progress bar is reset
|
||||||
open_messages,
|
orders.send_msg(Msg::WindowScrolled);
|
||||||
};
|
|
||||||
}
|
}
|
||||||
Msg::ShowThreadResult(bad) => {
|
Msg::ShowThreadResult(bad) => {
|
||||||
error!("show_thread_query error: {bad:#?}");
|
error!("show_thread_query error: {bad:#?}");
|
||||||
@@ -364,7 +415,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
{
|
{
|
||||||
let threads = selected_threads
|
let threads = selected_threads
|
||||||
.iter()
|
.iter()
|
||||||
.map(|tid| format!("thread:{tid}"))
|
.map(|tid| tid.to_string())
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(" ");
|
.join(" ");
|
||||||
orders
|
orders
|
||||||
@@ -379,7 +430,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
{
|
{
|
||||||
let threads = selected_threads
|
let threads = selected_threads
|
||||||
.iter()
|
.iter()
|
||||||
.map(|tid| format!("thread:{tid}"))
|
.map(|tid| tid.to_string())
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(" ");
|
.join(" ");
|
||||||
orders
|
orders
|
||||||
@@ -394,7 +445,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
{
|
{
|
||||||
let threads = selected_threads
|
let threads = selected_threads
|
||||||
.iter()
|
.iter()
|
||||||
.map(|tid| format!("thread:{tid}"))
|
.map(|tid| tid.to_string())
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(" ");
|
.join(" ");
|
||||||
orders
|
orders
|
||||||
@@ -409,7 +460,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
{
|
{
|
||||||
let threads = selected_threads
|
let threads = selected_threads
|
||||||
.iter()
|
.iter()
|
||||||
.map(|tid| format!("thread:{tid}"))
|
.map(|tid| tid.to_string())
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(" ");
|
.join(" ");
|
||||||
orders
|
orders
|
||||||
@@ -443,6 +494,67 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
|||||||
open_messages.insert(id);
|
open_messages.insert(id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Msg::MultiMsg(msgs) => msgs.into_iter().for_each(|msg| update(msg, model, orders)),
|
||||||
|
Msg::CopyToClipboard(text) => {
|
||||||
|
let clipboard = seed::window()
|
||||||
|
.navigator()
|
||||||
|
.clipboard()
|
||||||
|
.expect("couldn't get clipboard");
|
||||||
|
orders.perform_cmd(async move {
|
||||||
|
wasm_bindgen_futures::JsFuture::from(clipboard.write_text(&text))
|
||||||
|
.await
|
||||||
|
.expect("failed to copy to clipboard");
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Msg::WindowScrolled => {
|
||||||
|
if let Some(el) = model.content_el.get() {
|
||||||
|
let ih = window()
|
||||||
|
.inner_height()
|
||||||
|
.expect("window height")
|
||||||
|
.unchecked_into::<js_sys::Number>()
|
||||||
|
.value_of();
|
||||||
|
|
||||||
|
let r = el.get_bounding_client_rect();
|
||||||
|
if r.height() < ih {
|
||||||
|
// The whole content fits in the window, no scrollbar
|
||||||
|
orders.send_msg(Msg::SetProgress(0.));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let end: f64 = r.height() - ih;
|
||||||
|
if end < 0. {
|
||||||
|
orders.send_msg(Msg::SetProgress(0.));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Flip Y, normally it's 0-point when the top of the content hits the top of the
|
||||||
|
// screen and goes negative from there.
|
||||||
|
let y = -r.y();
|
||||||
|
let ratio: f64 = (y / end).max(0.);
|
||||||
|
debug!(
|
||||||
|
"WindowScrolled ih {ih} end {end} ratio {ratio:.02} {}x{} @ {},{}",
|
||||||
|
r.width(),
|
||||||
|
r.height(),
|
||||||
|
r.x(),
|
||||||
|
r.y()
|
||||||
|
);
|
||||||
|
|
||||||
|
orders.send_msg(Msg::SetProgress(ratio));
|
||||||
|
} else {
|
||||||
|
orders.send_msg(Msg::SetProgress(0.));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Msg::SetProgress(ratio) => {
|
||||||
|
model.read_completion_ratio = ratio;
|
||||||
|
}
|
||||||
|
Msg::UpdateServerVersion(version) => {
|
||||||
|
if version != model.versions.client {
|
||||||
|
warn!(
|
||||||
|
"Server ({}) and client ({}) version mismatch, reloading",
|
||||||
|
version, model.versions.client
|
||||||
|
);
|
||||||
|
orders.send_msg(Msg::Reload);
|
||||||
|
}
|
||||||
|
model.versions.server = Some(version);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// `Model` describes our app state.
|
// `Model` describes our app state.
|
||||||
@@ -451,6 +563,15 @@ pub struct Model {
|
|||||||
pub context: Context,
|
pub context: Context,
|
||||||
pub refreshing_state: RefreshingState,
|
pub refreshing_state: RefreshingState,
|
||||||
pub tags: Option<Vec<Tag>>,
|
pub tags: Option<Vec<Tag>>,
|
||||||
|
pub read_completion_ratio: f64,
|
||||||
|
pub content_el: ElRef<HtmlElement>,
|
||||||
|
pub versions: Version,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Version {
|
||||||
|
pub client: String,
|
||||||
|
pub server: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Error, Debug)]
|
#[derive(Error, Debug)]
|
||||||
@@ -484,7 +605,6 @@ pub enum Context {
|
|||||||
pub struct Tag {
|
pub struct Tag {
|
||||||
pub name: String,
|
pub name: String,
|
||||||
pub bg_color: String,
|
pub bg_color: String,
|
||||||
pub fg_color: String,
|
|
||||||
pub unread: i64,
|
pub unread: i64,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -498,6 +618,8 @@ pub enum RefreshingState {
|
|||||||
pub enum Msg {
|
pub enum Msg {
|
||||||
Noop,
|
Noop,
|
||||||
// Tell the client to refresh its state
|
// Tell the client to refresh its state
|
||||||
|
Refresh,
|
||||||
|
// Tell the client to reload whole page from server
|
||||||
Reload,
|
Reload,
|
||||||
// Window has changed size
|
// Window has changed size
|
||||||
OnResize,
|
OnResize,
|
||||||
@@ -533,6 +655,8 @@ pub enum Msg {
|
|||||||
SelectionSetNone,
|
SelectionSetNone,
|
||||||
SelectionSetAll,
|
SelectionSetAll,
|
||||||
SelectionAddTag(String),
|
SelectionAddTag(String),
|
||||||
|
#[allow(dead_code)]
|
||||||
|
// TODO
|
||||||
SelectionRemoveTag(String),
|
SelectionRemoveTag(String),
|
||||||
SelectionMarkAsRead,
|
SelectionMarkAsRead,
|
||||||
SelectionMarkAsUnread,
|
SelectionMarkAsUnread,
|
||||||
@@ -541,4 +665,11 @@ pub enum Msg {
|
|||||||
|
|
||||||
MessageCollapse(String),
|
MessageCollapse(String),
|
||||||
MessageExpand(String),
|
MessageExpand(String),
|
||||||
|
MultiMsg(Vec<Msg>),
|
||||||
|
|
||||||
|
CopyToClipboard(String),
|
||||||
|
|
||||||
|
WindowScrolled,
|
||||||
|
SetProgress(f64),
|
||||||
|
UpdateServerVersion(String),
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,23 +1,26 @@
|
|||||||
use seed::{prelude::*, *};
|
use seed::{prelude::*, *};
|
||||||
use seed_hooks::{state_access::CloneState, topo, use_state};
|
use seed_hooks::topo;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
api::urls,
|
graphql::show_thread_query::*,
|
||||||
state::{Context, Model, Msg, Tag},
|
state::{Context, Model, Msg},
|
||||||
view::{self, view_header, view_search_results},
|
view::{self, reading_progress, view_header, view_search_results},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[topo::nested]
|
#[topo::nested]
|
||||||
pub(super) fn view(model: &Model) -> Node<Msg> {
|
pub(super) fn view(model: &Model) -> Node<Msg> {
|
||||||
log::info!("tablet::view");
|
|
||||||
let show_icon_text = true;
|
let show_icon_text = true;
|
||||||
// Do two queries, one without `unread` so it loads fast, then a second with unread.
|
// Do two queries, one without `unread` so it loads fast, then a second with unread.
|
||||||
let content = match &model.context {
|
let content = match &model.context {
|
||||||
Context::None => div![h1!["Loading"]],
|
Context::None => div![h1!["Loading"]],
|
||||||
Context::ThreadResult {
|
Context::ThreadResult {
|
||||||
thread,
|
thread: ShowThreadQueryThread::EmailThread(thread),
|
||||||
open_messages,
|
open_messages,
|
||||||
} => view::thread(thread, open_messages, show_icon_text),
|
} => view::thread(thread, open_messages, show_icon_text, &model.content_el),
|
||||||
|
Context::ThreadResult {
|
||||||
|
thread: ShowThreadQueryThread::NewsPost(post),
|
||||||
|
..
|
||||||
|
} => view::news_post(post, show_icon_text, &model.content_el),
|
||||||
Context::SearchResult {
|
Context::SearchResult {
|
||||||
query,
|
query,
|
||||||
results,
|
results,
|
||||||
@@ -33,100 +36,10 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
|
|||||||
show_icon_text,
|
show_icon_text,
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
fn view_tag_li(display_name: &str, indent: usize, t: &Tag, search_unread: bool) -> Node<Msg> {
|
|
||||||
let href = if search_unread {
|
|
||||||
urls::search(&format!("is:unread tag:{}", t.name), 0)
|
|
||||||
} else {
|
|
||||||
urls::search(&format!("tag:{}", t.name), 0)
|
|
||||||
};
|
|
||||||
li![a![
|
|
||||||
attrs! {
|
|
||||||
At::Href => href
|
|
||||||
},
|
|
||||||
(0..indent).map(|_| span![C!["tag-indent"], ""]),
|
|
||||||
i![
|
|
||||||
C!["tag-tag", "fa-solid", "fa-tag"],
|
|
||||||
style! {
|
|
||||||
//"--fa-primary-color" => t.fg_color,
|
|
||||||
St::Color => t.bg_color,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
display_name,
|
|
||||||
IF!(t.unread>0 => format!(" ({})", t.unread))
|
|
||||||
]]
|
|
||||||
}
|
|
||||||
fn matches(a: &[&str], b: &[&str]) -> usize {
|
|
||||||
std::iter::zip(a.iter(), b.iter())
|
|
||||||
.take_while(|(a, b)| a == b)
|
|
||||||
.count()
|
|
||||||
}
|
|
||||||
fn view_tag_list<'a>(
|
|
||||||
tags: impl Iterator<Item = &'a Tag>,
|
|
||||||
search_unread: bool,
|
|
||||||
) -> Vec<Node<Msg>> {
|
|
||||||
let mut lis = Vec::new();
|
|
||||||
let mut last = Vec::new();
|
|
||||||
for t in tags {
|
|
||||||
let parts: Vec<_> = t.name.split('/').collect();
|
|
||||||
let mut n = matches(&last, &parts);
|
|
||||||
if n <= parts.len() - 2 && parts.len() > 1 {
|
|
||||||
// Synthesize fake tags for proper indenting.
|
|
||||||
for i in n..parts.len() - 1 {
|
|
||||||
let display_name = parts[n];
|
|
||||||
lis.push(view_tag_li(
|
|
||||||
&display_name,
|
|
||||||
n,
|
|
||||||
&Tag {
|
|
||||||
name: parts[..i + 1].join("/"),
|
|
||||||
bg_color: "#fff".to_string(),
|
|
||||||
fg_color: "#000".to_string(),
|
|
||||||
unread: 0,
|
|
||||||
},
|
|
||||||
search_unread,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
n = parts.len() - 1;
|
|
||||||
}
|
|
||||||
let display_name = parts[n];
|
|
||||||
lis.push(view_tag_li(&display_name, n, t, search_unread));
|
|
||||||
last = parts;
|
|
||||||
}
|
|
||||||
lis
|
|
||||||
}
|
|
||||||
let unread = model
|
|
||||||
.tags
|
|
||||||
.as_ref()
|
|
||||||
.map(|tags| tags.iter().filter(|t| t.unread > 0).collect())
|
|
||||||
.unwrap_or(Vec::new());
|
|
||||||
let tags_open = use_state(|| false);
|
|
||||||
let force_tags_open = unread.is_empty();
|
|
||||||
div![
|
div![
|
||||||
C!["main-content"],
|
C!["main-content"],
|
||||||
aside![
|
reading_progress(model.read_completion_ratio),
|
||||||
C!["tags-menu", "menu"],
|
div![view::tags(model), view::versions(&model.versions)],
|
||||||
IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),
|
|
||||||
IF!(!unread.is_empty() => ul![C!["menu-list"], view_tag_list(unread.into_iter(),true)]),
|
|
||||||
p![
|
|
||||||
C!["menu-label"],
|
|
||||||
IF!(!force_tags_open =>
|
|
||||||
i![C![
|
|
||||||
"fa-solid",
|
|
||||||
if tags_open.get() {
|
|
||||||
"fa-angle-up"
|
|
||||||
} else {
|
|
||||||
"fa-angle-down"
|
|
||||||
}
|
|
||||||
]]),
|
|
||||||
" Tags",
|
|
||||||
ev(Ev::Click, move |_| {
|
|
||||||
tags_open.set(!tags_open.get());
|
|
||||||
})
|
|
||||||
],
|
|
||||||
ul![
|
|
||||||
C!["menu-list"],
|
|
||||||
IF!(force_tags_open||tags_open.get() => model.tags.as_ref().map(|tags| view_tag_list(tags.iter(),false))),
|
|
||||||
]
|
|
||||||
],
|
|
||||||
div![
|
div![
|
||||||
view_header(&model.query, &model.refreshing_state),
|
view_header(&model.query, &model.refreshing_state),
|
||||||
content,
|
content,
|
||||||
|
|||||||
@@ -4,20 +4,26 @@ use seed::{prelude::*, *};
|
|||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
api::urls,
|
api::urls,
|
||||||
graphql::front_page_query::*,
|
graphql::{front_page_query::*, show_thread_query::*},
|
||||||
state::{Context, Model, Msg},
|
state::{Context, Model, Msg},
|
||||||
view::{self, human_age, pretty_authors, search_toolbar, set_title, tags_chiclet, view_header},
|
view::{
|
||||||
|
self, human_age, pretty_authors, reading_progress, search_toolbar, set_title, tags_chiclet,
|
||||||
|
view_header,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
pub(super) fn view(model: &Model) -> Node<Msg> {
|
pub(super) fn view(model: &Model) -> Node<Msg> {
|
||||||
log::info!("tablet::view");
|
|
||||||
let show_icon_text = false;
|
let show_icon_text = false;
|
||||||
let content = match &model.context {
|
let content = match &model.context {
|
||||||
Context::None => div![h1!["Loading"]],
|
Context::None => div![h1!["Loading"]],
|
||||||
Context::ThreadResult {
|
Context::ThreadResult {
|
||||||
thread,
|
thread: ShowThreadQueryThread::EmailThread(thread),
|
||||||
open_messages,
|
open_messages,
|
||||||
} => view::thread(thread, open_messages, show_icon_text),
|
} => view::thread(thread, open_messages, show_icon_text, &model.content_el),
|
||||||
|
Context::ThreadResult {
|
||||||
|
thread: ShowThreadQueryThread::NewsPost(post),
|
||||||
|
..
|
||||||
|
} => view::news_post(post, show_icon_text, &model.content_el),
|
||||||
Context::SearchResult {
|
Context::SearchResult {
|
||||||
query,
|
query,
|
||||||
results,
|
results,
|
||||||
@@ -34,9 +40,11 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
|
|||||||
),
|
),
|
||||||
};
|
};
|
||||||
div![
|
div![
|
||||||
|
reading_progress(model.read_completion_ratio),
|
||||||
view_header(&model.query, &model.refreshing_state),
|
view_header(&model.query, &model.refreshing_state),
|
||||||
content,
|
content,
|
||||||
view_header(&model.query, &model.refreshing_state),
|
view_header(&model.query, &model.refreshing_state),
|
||||||
|
div![view::tags(model), view::versions(&model.versions)]
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,20 +1,18 @@
|
|||||||
use std::{
|
use std::collections::HashSet;
|
||||||
collections::{hash_map::DefaultHasher, HashSet},
|
|
||||||
hash::{Hash, Hasher},
|
|
||||||
};
|
|
||||||
|
|
||||||
use chrono::{DateTime, Datelike, Duration, Local, Utc};
|
use chrono::{DateTime, Datelike, Duration, Local, Utc};
|
||||||
use human_format::{Formatter, Scales};
|
use human_format::{Formatter, Scales};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use log::error;
|
use log::{debug, error, info};
|
||||||
use seed::{prelude::*, *};
|
use seed::{prelude::*, *};
|
||||||
use seed_hooks::{state_access::CloneState, topo, use_state};
|
use seed_hooks::{state_access::CloneState, topo, use_state};
|
||||||
|
use shared::compute_color;
|
||||||
|
use web_sys::HtmlElement;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
api::urls,
|
api::urls,
|
||||||
consts::SEARCH_RESULTS_PER_PAGE,
|
|
||||||
graphql::{front_page_query::*, show_thread_query::*},
|
graphql::{front_page_query::*, show_thread_query::*},
|
||||||
state::{unread_query, Model, Msg, RefreshingState},
|
state::{unread_query, Model, Msg, RefreshingState, Tag},
|
||||||
};
|
};
|
||||||
|
|
||||||
mod desktop;
|
mod desktop;
|
||||||
@@ -29,35 +27,17 @@ fn set_title(title: &str) {
|
|||||||
seed::document().set_title(&format!("lb: {}", title));
|
seed::document().set_title(&format!("lb: {}", title));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compute_color(data: &str) -> String {
|
|
||||||
let mut hasher = DefaultHasher::new();
|
|
||||||
data.hash(&mut hasher);
|
|
||||||
format!("#{:06x}", hasher.finish() % (1 << 24))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn tags_chiclet(tags: &[String], is_mobile: bool) -> impl Iterator<Item = Node<Msg>> + '_ {
|
fn tags_chiclet(tags: &[String], is_mobile: bool) -> impl Iterator<Item = Node<Msg>> + '_ {
|
||||||
tags.iter().map(move |tag| {
|
tags.iter().map(move |tag| {
|
||||||
let hex = compute_color(tag);
|
let hex = compute_color(tag);
|
||||||
let style = style! {St::BackgroundColor=>hex};
|
let style = style! {St::BackgroundColor=>hex};
|
||||||
let classes = C!["tag", IF!(is_mobile => "is-small")];
|
let classes = C!["tag", IF!(is_mobile => "is-small")];
|
||||||
let tag = tag.clone();
|
let tag = tag.clone();
|
||||||
a![
|
a![match tag.as_str() {
|
||||||
attrs! {
|
"attachment" => span![classes, style, "📎"],
|
||||||
At::Href => urls::search(&format!("tag:{tag}"), 0)
|
"replied" => span![classes, style, i![C!["fa-solid", "fa-reply"]]],
|
||||||
},
|
_ => span![classes, style, &tag],
|
||||||
match tag.as_str() {
|
},]
|
||||||
"attachment" => span![classes, style, "📎"],
|
|
||||||
"replied" => span![classes, style, i![C!["fa-solid", "fa-reply"]]],
|
|
||||||
_ => span![classes, style, &tag],
|
|
||||||
},
|
|
||||||
ev(Ev::Click, move |_| Msg::FrontPageRequest {
|
|
||||||
query: format!("tag:{tag}"),
|
|
||||||
after: None,
|
|
||||||
before: None,
|
|
||||||
first: None,
|
|
||||||
last: None,
|
|
||||||
})
|
|
||||||
]
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -74,6 +54,7 @@ fn removable_tags_chiclet<'a>(
|
|||||||
"is-grouped-multiline"
|
"is-grouped-multiline"
|
||||||
],
|
],
|
||||||
tags.iter().map(move |tag| {
|
tags.iter().map(move |tag| {
|
||||||
|
let thread_id = thread_id.to_string();
|
||||||
let hex = compute_color(tag);
|
let hex = compute_color(tag);
|
||||||
let style = style! {St::BackgroundColor=>hex};
|
let style = style! {St::BackgroundColor=>hex};
|
||||||
let classes = C!["tag", IF!(is_mobile => "is-small")];
|
let classes = C!["tag", IF!(is_mobile => "is-small")];
|
||||||
@@ -82,7 +63,6 @@ fn removable_tags_chiclet<'a>(
|
|||||||
};
|
};
|
||||||
let tag = tag.clone();
|
let tag = tag.clone();
|
||||||
let rm_tag = tag.clone();
|
let rm_tag = tag.clone();
|
||||||
let thread_id = format!("thread:{thread_id}");
|
|
||||||
div![
|
div![
|
||||||
C!["control"],
|
C!["control"],
|
||||||
div![
|
div![
|
||||||
@@ -123,14 +103,16 @@ fn pretty_authors(authors: &str) -> impl Iterator<Item = Node<Msg>> + '_ {
|
|||||||
if one_person {
|
if one_person {
|
||||||
return Some(span![
|
return Some(span![
|
||||||
attrs! {
|
attrs! {
|
||||||
At::Title => author.trim()},
|
At::Title => author.trim()
|
||||||
|
},
|
||||||
author
|
author
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
author.split_whitespace().nth(0).map(|first| {
|
author.split_whitespace().nth(0).map(|first| {
|
||||||
span![
|
span![
|
||||||
attrs! {
|
attrs! {
|
||||||
At::Title => author.trim()},
|
At::Title => author.trim()
|
||||||
|
},
|
||||||
first
|
first
|
||||||
]
|
]
|
||||||
})
|
})
|
||||||
@@ -147,7 +129,7 @@ fn human_age(timestamp: i64) -> String {
|
|||||||
.with_timezone(&Local);
|
.with_timezone(&Local);
|
||||||
let age = now - ts;
|
let age = now - ts;
|
||||||
let datetime = if age < Duration::minutes(1) {
|
let datetime = if age < Duration::minutes(1) {
|
||||||
format!("{} min. ago", age.num_seconds())
|
format!("{} secs. ago", age.num_seconds())
|
||||||
} else if age < Duration::hours(1) {
|
} else if age < Duration::hours(1) {
|
||||||
format!("{} min. ago", age.num_minutes())
|
format!("{} min. ago", age.num_minutes())
|
||||||
} else if ts.date_naive() == now.date_naive() {
|
} else if ts.date_naive() == now.date_naive() {
|
||||||
@@ -233,10 +215,10 @@ fn view_search_results(
|
|||||||
],
|
],
|
||||||
td![
|
td![
|
||||||
C!["subject"],
|
C!["subject"],
|
||||||
tags_chiclet(&tags, false),
|
|
||||||
" ",
|
|
||||||
a![
|
a![
|
||||||
C!["has-text-light", "text"],
|
tags_chiclet(&tags, false),
|
||||||
|
" ",
|
||||||
|
C!["has-text-light", "text", "subject-link"],
|
||||||
attrs! {
|
attrs! {
|
||||||
At::Href => urls::thread(&tid)
|
At::Href => urls::thread(&tid)
|
||||||
},
|
},
|
||||||
@@ -304,31 +286,14 @@ fn search_toolbar(
|
|||||||
show_bulk_edit: bool,
|
show_bulk_edit: bool,
|
||||||
show_icon_text: bool,
|
show_icon_text: bool,
|
||||||
) -> Node<Msg> {
|
) -> Node<Msg> {
|
||||||
let start = pager
|
|
||||||
.start_cursor
|
|
||||||
.as_ref()
|
|
||||||
.map(|i| i.parse().unwrap_or(0))
|
|
||||||
.unwrap_or(0);
|
|
||||||
nav![
|
nav![
|
||||||
C!["level", "is-mobile"],
|
C!["level", "is-mobile"],
|
||||||
IF!(show_bulk_edit =>
|
div![
|
||||||
|
C!["level-left"],
|
||||||
|
IF!(show_bulk_edit =>
|
||||||
div![
|
div![
|
||||||
C!["level-left"],
|
C!["level-item"],
|
||||||
div![
|
div![C!["buttons", "has-addons"],
|
||||||
C!["level-item"],
|
|
||||||
div![C!["buttons", "has-addons"],
|
|
||||||
button![
|
|
||||||
C!["button", "spam"],
|
|
||||||
attrs!{At::Title => "Mark as spam"},
|
|
||||||
span![C!["icon", "is-small"], i![C!["far", "fa-hand"]]],
|
|
||||||
IF!(show_icon_text=>span!["Spam"]),
|
|
||||||
ev(Ev::Click, |_| Msg::SelectionAddTag("Spam".to_string()))
|
|
||||||
],
|
|
||||||
],
|
|
||||||
],
|
|
||||||
div![
|
|
||||||
C!["level-item"],
|
|
||||||
div![C!["buttons", "has-addons"],
|
|
||||||
button![
|
button![
|
||||||
C!["button", "mark-read"],
|
C!["button", "mark-read"],
|
||||||
attrs!{At::Title => "Mark as read"},
|
attrs!{At::Title => "Mark as read"},
|
||||||
@@ -344,8 +309,26 @@ fn search_toolbar(
|
|||||||
ev(Ev::Click, |_| Msg::SelectionMarkAsUnread)
|
ev(Ev::Click, |_| Msg::SelectionMarkAsUnread)
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
]
|
]),
|
||||||
]),
|
IF!(show_bulk_edit =>
|
||||||
|
div![
|
||||||
|
C!["level-item"],
|
||||||
|
div![C!["buttons", "has-addons"],
|
||||||
|
button![
|
||||||
|
C!["button", "spam"],
|
||||||
|
attrs!{At::Title => "Mark as spam"},
|
||||||
|
span![C!["icon", "is-small"], i![C!["far", "fa-hand"]]],
|
||||||
|
IF!(show_icon_text=>span!["Spam"]),
|
||||||
|
ev(Ev::Click, |_|
|
||||||
|
Msg::MultiMsg(vec![
|
||||||
|
Msg::SelectionAddTag("Spam".to_string()),
|
||||||
|
Msg::SelectionMarkAsRead
|
||||||
|
])
|
||||||
|
)
|
||||||
|
],
|
||||||
|
],
|
||||||
|
])
|
||||||
|
],
|
||||||
div![
|
div![
|
||||||
C!["level-right"],
|
C!["level-right"],
|
||||||
nav![
|
nav![
|
||||||
@@ -370,15 +353,7 @@ fn search_toolbar(
|
|||||||
">",
|
">",
|
||||||
IF!(pager.has_next_page => ev(Ev::Click, |_| Msg::NextPage))
|
IF!(pager.has_next_page => ev(Ev::Click, |_| Msg::NextPage))
|
||||||
],
|
],
|
||||||
ul![
|
ul![C!["pagination-list"], li![format!("{count} results")],],
|
||||||
C!["pagination-list"],
|
|
||||||
li![format!(
|
|
||||||
"{} - {} of {}",
|
|
||||||
start,
|
|
||||||
count.min(start + SEARCH_RESULTS_PER_PAGE),
|
|
||||||
count
|
|
||||||
)],
|
|
||||||
],
|
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
@@ -412,9 +387,9 @@ macro_rules! implement_email {
|
|||||||
}
|
}
|
||||||
|
|
||||||
implement_email!(
|
implement_email!(
|
||||||
ShowThreadQueryThreadMessagesTo,
|
ShowThreadQueryThreadOnEmailThreadMessagesTo,
|
||||||
ShowThreadQueryThreadMessagesCc,
|
ShowThreadQueryThreadOnEmailThreadMessagesCc,
|
||||||
ShowThreadQueryThreadMessagesFrom
|
ShowThreadQueryThreadOnEmailThreadMessagesFrom
|
||||||
);
|
);
|
||||||
|
|
||||||
fn raw_text_message(contents: &str) -> Node<Msg> {
|
fn raw_text_message(contents: &str) -> Node<Msg> {
|
||||||
@@ -436,8 +411,10 @@ fn has_unread(tags: &[String]) -> bool {
|
|||||||
fn render_avatar(avatar: Option<String>, from: &str) -> Node<Msg> {
|
fn render_avatar(avatar: Option<String>, from: &str) -> Node<Msg> {
|
||||||
let initials: String = from
|
let initials: String = from
|
||||||
.to_lowercase()
|
.to_lowercase()
|
||||||
|
.trim()
|
||||||
.split(" ")
|
.split(" ")
|
||||||
.map(|word| word.chars().next().unwrap())
|
.map(|word| word.chars().next().unwrap())
|
||||||
|
.filter(|c| c.is_alphanumeric())
|
||||||
// Limit to 2 characters because more characters don't fit in the box
|
// Limit to 2 characters because more characters don't fit in the box
|
||||||
.take(2)
|
.take(2)
|
||||||
.collect();
|
.collect();
|
||||||
@@ -483,13 +460,13 @@ fn render_avatar(avatar: Option<String>, from: &str) -> Node<Msg> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
|
fn render_open_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<Msg> {
|
||||||
let (from, from_detail) = match &msg.from {
|
let (from, from_detail) = match &msg.from {
|
||||||
Some(ShowThreadQueryThreadMessagesFrom {
|
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
|
||||||
name: Some(name),
|
name: Some(name),
|
||||||
addr,
|
addr,
|
||||||
}) => (name.to_string(), addr.clone()),
|
}) => (name.to_string(), addr.clone()),
|
||||||
Some(ShowThreadQueryThreadMessagesFrom {
|
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
|
||||||
addr: Some(addr), ..
|
addr: Some(addr), ..
|
||||||
}) => (addr.to_string(), None),
|
}) => (addr.to_string(), None),
|
||||||
_ => (String::from("UNKNOWN"), None),
|
_ => (String::from("UNKNOWN"), None),
|
||||||
@@ -510,7 +487,17 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
|
|||||||
p![
|
p![
|
||||||
strong![from],
|
strong![from],
|
||||||
br![],
|
br![],
|
||||||
small![from_detail],
|
small![
|
||||||
|
&from_detail,
|
||||||
|
" ",
|
||||||
|
from_detail.map(|detail| span![
|
||||||
|
i![C!["far", "fa-clone"]],
|
||||||
|
ev(Ev::Click, move |e| {
|
||||||
|
e.stop_propagation();
|
||||||
|
Msg::CopyToClipboard(detail.to_string())
|
||||||
|
})
|
||||||
|
])
|
||||||
|
],
|
||||||
table![
|
table![
|
||||||
IF!(!msg.to.is_empty() =>
|
IF!(!msg.to.is_empty() =>
|
||||||
tr![
|
tr![
|
||||||
@@ -520,19 +507,31 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
|
|||||||
msg.to.iter().enumerate().map(|(i, to)|
|
msg.to.iter().enumerate().map(|(i, to)|
|
||||||
small![
|
small![
|
||||||
if i>0 { ", " }else { "" },
|
if i>0 { ", " }else { "" },
|
||||||
match to {
|
{
|
||||||
ShowThreadQueryThreadMessagesTo {
|
let to = match to {
|
||||||
name: Some(name),
|
ShowThreadQueryThreadOnEmailThreadMessagesTo {
|
||||||
addr:Some(addr),
|
name: Some(name),
|
||||||
} => format!("{name} <{addr}>"),
|
addr:Some(addr),
|
||||||
ShowThreadQueryThreadMessagesTo {
|
} => format!("{name} <{addr}>"),
|
||||||
name: Some(name),
|
ShowThreadQueryThreadOnEmailThreadMessagesTo {
|
||||||
addr:None
|
name: Some(name),
|
||||||
} => format!("{name}"),
|
addr:None
|
||||||
ShowThreadQueryThreadMessagesTo {
|
} => format!("{name}"),
|
||||||
addr: Some(addr), ..
|
ShowThreadQueryThreadOnEmailThreadMessagesTo {
|
||||||
} => format!("{addr}"),
|
addr: Some(addr), ..
|
||||||
_ => String::from("UNKNOWN"),
|
} => format!("{addr}"),
|
||||||
|
_ => String::from("UNKNOWN"),
|
||||||
|
};
|
||||||
|
span![
|
||||||
|
&to, " ",
|
||||||
|
span![
|
||||||
|
i![C!["far", "fa-clone"]],
|
||||||
|
ev(Ev::Click, move |e| {
|
||||||
|
e.stop_propagation();
|
||||||
|
Msg::CopyToClipboard(to)
|
||||||
|
})
|
||||||
|
]
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
])
|
])
|
||||||
@@ -545,21 +544,32 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
|
|||||||
msg.cc.iter().enumerate().map(|(i, cc)|
|
msg.cc.iter().enumerate().map(|(i, cc)|
|
||||||
small![
|
small![
|
||||||
if i>0 { ", " }else { "" },
|
if i>0 { ", " }else { "" },
|
||||||
match cc {
|
{
|
||||||
ShowThreadQueryThreadMessagesCc {
|
let cc = match cc {
|
||||||
name: Some(name),
|
ShowThreadQueryThreadOnEmailThreadMessagesCc {
|
||||||
addr:Some(addr),
|
name: Some(name),
|
||||||
} => format!("{name} <{addr}>"),
|
addr:Some(addr),
|
||||||
ShowThreadQueryThreadMessagesCc {
|
} => format!("{name} <{addr}>"),
|
||||||
name: Some(name),
|
ShowThreadQueryThreadOnEmailThreadMessagesCc {
|
||||||
addr:None
|
name: Some(name),
|
||||||
} => format!("{name}"),
|
addr:None
|
||||||
ShowThreadQueryThreadMessagesCc {
|
} => format!("{name}"),
|
||||||
addr: Some(addr), ..
|
ShowThreadQueryThreadOnEmailThreadMessagesCc {
|
||||||
} => format!("<{addr}>"),
|
addr: Some(addr), ..
|
||||||
_ => String::from("UNKNOWN"),
|
} => format!("<{addr}>"),
|
||||||
|
_ => String::from("UNKNOWN"),
|
||||||
|
};
|
||||||
|
span![
|
||||||
|
&cc, " ",
|
||||||
|
span![
|
||||||
|
i![C!["far", "fa-clone"]],
|
||||||
|
ev(Ev::Click, move |e| {
|
||||||
|
e.stop_propagation();
|
||||||
|
Msg::CopyToClipboard(cc)
|
||||||
|
})
|
||||||
|
]
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
])
|
])
|
||||||
]
|
]
|
||||||
]),
|
]),
|
||||||
@@ -586,18 +596,18 @@ fn render_open_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
|
|||||||
],
|
],
|
||||||
ev(Ev::Click, move |e| {
|
ev(Ev::Click, move |e| {
|
||||||
e.stop_propagation();
|
e.stop_propagation();
|
||||||
Msg::SetUnread(format!("id:{id}"), !is_unread)
|
Msg::SetUnread(id, !is_unread)
|
||||||
})
|
})
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
fn render_closed_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
|
fn render_closed_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Node<Msg> {
|
||||||
let from: String = match &msg.from {
|
let from: String = match &msg.from {
|
||||||
Some(ShowThreadQueryThreadMessagesFrom {
|
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
|
||||||
name: Some(name), ..
|
name: Some(name), ..
|
||||||
}) => name.to_string(),
|
}) => name.to_string(),
|
||||||
Some(ShowThreadQueryThreadMessagesFrom {
|
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
|
||||||
addr: Some(addr), ..
|
addr: Some(addr), ..
|
||||||
}) => addr.to_string(),
|
}) => addr.to_string(),
|
||||||
_ => String::from("UNKNOWN"),
|
_ => String::from("UNKNOWN"),
|
||||||
@@ -658,7 +668,7 @@ fn render_closed_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
|
|||||||
],
|
],
|
||||||
ev(Ev::Click, move |e| {
|
ev(Ev::Click, move |e| {
|
||||||
e.stop_propagation();
|
e.stop_propagation();
|
||||||
Msg::SetUnread(format!("id:{id}"), !is_unread)
|
Msg::SetUnread(id, !is_unread)
|
||||||
})
|
})
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
@@ -666,7 +676,7 @@ fn render_closed_header(msg: &ShowThreadQueryThreadMessages) -> Node<Msg> {
|
|||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg> {
|
fn message_render(msg: &ShowThreadQueryThreadOnEmailThreadMessages, open: bool) -> Node<Msg> {
|
||||||
let expand_id = msg.id.clone();
|
let expand_id = msg.id.clone();
|
||||||
div![
|
div![
|
||||||
C!["message"],
|
C!["message"],
|
||||||
@@ -688,18 +698,18 @@ fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg>
|
|||||||
],
|
],
|
||||||
IF!(open =>
|
IF!(open =>
|
||||||
div![
|
div![
|
||||||
C!["body"],
|
C!["body", "mail"],
|
||||||
match &msg.body {
|
match &msg.body {
|
||||||
ShowThreadQueryThreadMessagesBody::UnhandledContentType(
|
ShowThreadQueryThreadOnEmailThreadMessagesBody::UnhandledContentType(
|
||||||
ShowThreadQueryThreadMessagesBodyOnUnhandledContentType { contents ,content_tree},
|
ShowThreadQueryThreadOnEmailThreadMessagesBodyOnUnhandledContentType { contents ,content_tree},
|
||||||
) => div![
|
) => div![
|
||||||
raw_text_message(&contents),
|
raw_text_message(&contents),
|
||||||
div![C!["error"],
|
div![C!["error"],
|
||||||
view_content_tree(&content_tree),
|
view_content_tree(&content_tree),
|
||||||
]
|
]
|
||||||
],
|
],
|
||||||
ShowThreadQueryThreadMessagesBody::PlainText(
|
ShowThreadQueryThreadOnEmailThreadMessagesBody::PlainText(
|
||||||
ShowThreadQueryThreadMessagesBodyOnPlainText {
|
ShowThreadQueryThreadOnEmailThreadMessagesBodyOnPlainText {
|
||||||
contents,
|
contents,
|
||||||
content_tree,
|
content_tree,
|
||||||
},
|
},
|
||||||
@@ -707,8 +717,8 @@ fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg>
|
|||||||
raw_text_message(&contents),
|
raw_text_message(&contents),
|
||||||
view_content_tree(&content_tree),
|
view_content_tree(&content_tree),
|
||||||
],
|
],
|
||||||
ShowThreadQueryThreadMessagesBody::Html(
|
ShowThreadQueryThreadOnEmailThreadMessagesBody::Html(
|
||||||
ShowThreadQueryThreadMessagesBodyOnHtml {
|
ShowThreadQueryThreadOnEmailThreadMessagesBodyOnHtml {
|
||||||
contents,
|
contents,
|
||||||
content_tree,
|
content_tree,
|
||||||
},
|
},
|
||||||
@@ -727,7 +737,7 @@ fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg>
|
|||||||
let default = "UNKNOWN_FILE".to_string();
|
let default = "UNKNOWN_FILE".to_string();
|
||||||
let filename = a.filename.as_ref().unwrap_or(&default);
|
let filename = a.filename.as_ref().unwrap_or(&default);
|
||||||
let host = seed::window().location().host().expect("couldn't get host");
|
let host = seed::window().location().host().expect("couldn't get host");
|
||||||
let url = format!("//{host}/download/attachment/{}/{}/{}", a.id,a.idx, filename);
|
let url = shared::urls::download_attachment(Some(&host), &a.id, &a.idx, filename);
|
||||||
let mut fmtr = Formatter::new();
|
let mut fmtr = Formatter::new();
|
||||||
fmtr.with_separator(" ");
|
fmtr.with_separator(" ");
|
||||||
fmtr.with_scales(Scales::Binary());
|
fmtr.with_scales(Scales::Binary());
|
||||||
@@ -775,9 +785,10 @@ fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg>
|
|||||||
|
|
||||||
#[topo::nested]
|
#[topo::nested]
|
||||||
fn thread(
|
fn thread(
|
||||||
thread: &ShowThreadQueryThread,
|
thread: &ShowThreadQueryThreadOnEmailThread,
|
||||||
open_messages: &HashSet<String>,
|
open_messages: &HashSet<String>,
|
||||||
show_icon_text: bool,
|
show_icon_text: bool,
|
||||||
|
content_el: &ElRef<HtmlElement>,
|
||||||
) -> Node<Msg> {
|
) -> Node<Msg> {
|
||||||
// TODO(wathiede): show per-message subject if it changes significantly from top-level subject
|
// TODO(wathiede): show per-message subject if it changes significantly from top-level subject
|
||||||
let subject = if thread.subject.is_empty() {
|
let subject = if thread.subject.is_empty() {
|
||||||
@@ -802,7 +813,8 @@ fn thread(
|
|||||||
});
|
});
|
||||||
let read_thread_id = thread.thread_id.clone();
|
let read_thread_id = thread.thread_id.clone();
|
||||||
let unread_thread_id = thread.thread_id.clone();
|
let unread_thread_id = thread.thread_id.clone();
|
||||||
let spam_thread_id = thread.thread_id.clone();
|
let spam_add_thread_id = thread.thread_id.clone();
|
||||||
|
let spam_unread_thread_id = thread.thread_id.clone();
|
||||||
div![
|
div![
|
||||||
C!["thread"],
|
C!["thread"],
|
||||||
h3![C!["is-size-5"], subject],
|
h3![C!["is-size-5"], subject],
|
||||||
@@ -817,14 +829,18 @@ fn thread(
|
|||||||
div![
|
div![
|
||||||
C!["buttons", "has-addons"],
|
C!["buttons", "has-addons"],
|
||||||
button![
|
button![
|
||||||
C!["button", "spam"],
|
C!["button", "mark-read"],
|
||||||
attrs! {At::Title => "Spam"},
|
attrs! {At::Title => "Mark as read"},
|
||||||
span![C!["icon", "is-small"], i![C!["far", "fa-hand"]]],
|
span![C!["icon", "is-small"], i![C!["far", "fa-envelope-open"]]],
|
||||||
IF!(show_icon_text=>span!["Spam"]),
|
IF!(show_icon_text=>span!["Read"]),
|
||||||
ev(Ev::Click, move |_| Msg::AddTag(
|
ev(Ev::Click, move |_| Msg::SetUnread(read_thread_id, false)),
|
||||||
format!("thread:{spam_thread_id}"),
|
],
|
||||||
"Spam".to_string()
|
button![
|
||||||
)),
|
C!["button", "mark-unread"],
|
||||||
|
attrs! {At::Title => "Mark as unread"},
|
||||||
|
span![C!["icon", "is-small"], i![C!["far", "fa-envelope"]]],
|
||||||
|
IF!(show_icon_text=>span!["Unread"]),
|
||||||
|
ev(Ev::Click, move |_| Msg::SetUnread(unread_thread_id, true)),
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
@@ -833,29 +849,19 @@ fn thread(
|
|||||||
div![
|
div![
|
||||||
C!["buttons", "has-addons"],
|
C!["buttons", "has-addons"],
|
||||||
button![
|
button![
|
||||||
C!["button", "mark-read"],
|
C!["button", "spam"],
|
||||||
attrs! {At::Title => "Mark as read"},
|
attrs! {At::Title => "Spam"},
|
||||||
span![C!["icon", "is-small"], i![C!["far", "fa-envelope-open"]]],
|
span![C!["icon", "is-small"], i![C!["far", "fa-hand"]]],
|
||||||
IF!(show_icon_text=>span!["Read"]),
|
IF!(show_icon_text=>span!["Spam"]),
|
||||||
ev(Ev::Click, move |_| Msg::SetUnread(
|
ev(Ev::Click, move |_| Msg::MultiMsg(vec![
|
||||||
format!("thread:{read_thread_id}"),
|
Msg::AddTag(spam_add_thread_id, "Spam".to_string()),
|
||||||
false
|
Msg::SetUnread(spam_unread_thread_id, false)
|
||||||
)),
|
])),
|
||||||
],
|
|
||||||
button![
|
|
||||||
C!["button", "mark-unread"],
|
|
||||||
attrs! {At::Title => "Mark as unread"},
|
|
||||||
span![C!["icon", "is-small"], i![C!["far", "fa-envelope"]]],
|
|
||||||
IF!(show_icon_text=>span!["Unread"]),
|
|
||||||
ev(Ev::Click, move |_| Msg::SetUnread(
|
|
||||||
format!("thread:{unread_thread_id}"),
|
|
||||||
true
|
|
||||||
)),
|
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
messages,
|
div![el_ref(content_el), messages, click_to_top()],
|
||||||
/* TODO(wathiede): plumb in orignal id
|
/* TODO(wathiede): plumb in orignal id
|
||||||
a![
|
a![
|
||||||
attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
|
attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
|
||||||
@@ -964,3 +970,259 @@ pub fn view(model: &Model) -> Node<Msg> {
|
|||||||
_ => div![C!["desktop"], desktop::view(model)],
|
_ => div![C!["desktop"], desktop::view(model)],
|
||||||
},]
|
},]
|
||||||
}
|
}
|
||||||
|
pub fn tags(model: &Model) -> Node<Msg> {
|
||||||
|
fn view_tag_li(display_name: &str, indent: usize, t: &Tag, search_unread: bool) -> Node<Msg> {
|
||||||
|
let href = if search_unread {
|
||||||
|
urls::search(&format!("is:unread tag:{}", t.name), 0)
|
||||||
|
} else {
|
||||||
|
urls::search(&format!("tag:{}", t.name), 0)
|
||||||
|
};
|
||||||
|
li![a![
|
||||||
|
attrs! {
|
||||||
|
At::Href => href
|
||||||
|
},
|
||||||
|
(0..indent).map(|_| span![C!["tag-indent"], ""]),
|
||||||
|
i![
|
||||||
|
C!["tag-tag", "fa-solid", "fa-tag"],
|
||||||
|
style! {
|
||||||
|
//"--fa-primary-color" => t.fg_color,
|
||||||
|
St::Color => t.bg_color,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
display_name,
|
||||||
|
IF!(t.unread>0 => format!(" ({})", t.unread)),
|
||||||
|
ev(Ev::Click, |_| {
|
||||||
|
// Scroll window to the top when searching for a tag.
|
||||||
|
info!("scrolling to the top because you clicked a tag");
|
||||||
|
web_sys::window().unwrap().scroll_to_with_x_and_y(0., 0.);
|
||||||
|
})
|
||||||
|
]]
|
||||||
|
}
|
||||||
|
fn matches(a: &[&str], b: &[&str]) -> usize {
|
||||||
|
std::iter::zip(a.iter(), b.iter())
|
||||||
|
.take_while(|(a, b)| a == b)
|
||||||
|
.count()
|
||||||
|
}
|
||||||
|
fn view_tag_list<'a>(
|
||||||
|
tags: impl Iterator<Item = &'a Tag>,
|
||||||
|
search_unread: bool,
|
||||||
|
) -> Vec<Node<Msg>> {
|
||||||
|
let mut lis = Vec::new();
|
||||||
|
let mut last = Vec::new();
|
||||||
|
for t in tags {
|
||||||
|
let parts: Vec<_> = t.name.split('/').collect();
|
||||||
|
let mut n = matches(&last, &parts);
|
||||||
|
if n <= parts.len() - 2 && parts.len() > 1 {
|
||||||
|
// Synthesize fake tags for proper indenting.
|
||||||
|
for i in n..parts.len() - 1 {
|
||||||
|
let display_name = parts[n];
|
||||||
|
lis.push(view_tag_li(
|
||||||
|
&display_name,
|
||||||
|
n,
|
||||||
|
&Tag {
|
||||||
|
name: parts[..i + 1].join("/"),
|
||||||
|
bg_color: "#fff".to_string(),
|
||||||
|
unread: 0,
|
||||||
|
},
|
||||||
|
search_unread,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
n = parts.len() - 1;
|
||||||
|
}
|
||||||
|
let display_name = parts[n];
|
||||||
|
lis.push(view_tag_li(&display_name, n, t, search_unread));
|
||||||
|
last = parts;
|
||||||
|
}
|
||||||
|
lis
|
||||||
|
}
|
||||||
|
let unread = model
|
||||||
|
.tags
|
||||||
|
.as_ref()
|
||||||
|
.map(|tags| tags.iter().filter(|t| t.unread > 0).collect())
|
||||||
|
.unwrap_or(Vec::new());
|
||||||
|
let tags_open = use_state(|| false);
|
||||||
|
let force_tags_open = unread.is_empty();
|
||||||
|
aside![
|
||||||
|
C!["tags-menu", "menu"],
|
||||||
|
IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),
|
||||||
|
IF!(!unread.is_empty() => ul![C!["menu-list"], view_tag_list(unread.into_iter(),true)]),
|
||||||
|
p![
|
||||||
|
C!["menu-label"],
|
||||||
|
IF!(!force_tags_open =>
|
||||||
|
i![C![
|
||||||
|
"fa-solid",
|
||||||
|
if tags_open.get() {
|
||||||
|
"fa-angle-up"
|
||||||
|
} else {
|
||||||
|
"fa-angle-down"
|
||||||
|
}
|
||||||
|
]]),
|
||||||
|
" Tags",
|
||||||
|
ev(Ev::Click, move |_| {
|
||||||
|
tags_open.set(!tags_open.get());
|
||||||
|
})
|
||||||
|
],
|
||||||
|
ul![
|
||||||
|
C!["menu-list"],
|
||||||
|
IF!(force_tags_open||tags_open.get() => model.tags.as_ref().map(|tags| view_tag_list(tags.iter(),false))),
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
fn news_post(
|
||||||
|
post: &ShowThreadQueryThreadOnNewsPost,
|
||||||
|
show_icon_text: bool,
|
||||||
|
content_el: &ElRef<HtmlElement>,
|
||||||
|
) -> Node<Msg> {
|
||||||
|
// TODO(wathiede): show per-message subject if it changes significantly from top-level subject
|
||||||
|
let subject = &post.title;
|
||||||
|
set_title(subject);
|
||||||
|
let read_thread_id = post.thread_id.clone();
|
||||||
|
let unread_thread_id = post.thread_id.clone();
|
||||||
|
div![
|
||||||
|
C!["thread"],
|
||||||
|
h3![C!["is-size-5"], subject],
|
||||||
|
div![
|
||||||
|
C!["level", "is-mobile"],
|
||||||
|
div![
|
||||||
|
C!["level-item"],
|
||||||
|
div![
|
||||||
|
C!["buttons", "has-addons"],
|
||||||
|
button![
|
||||||
|
C!["button", "mark-read"],
|
||||||
|
attrs! {At::Title => "Mark as read"},
|
||||||
|
span![C!["icon", "is-small"], i![C!["far", "fa-envelope-open"]]],
|
||||||
|
IF!(show_icon_text=>span!["Read"]),
|
||||||
|
ev(Ev::Click, move |_| Msg::SetUnread(read_thread_id, false)),
|
||||||
|
],
|
||||||
|
button![
|
||||||
|
C!["button", "mark-unread"],
|
||||||
|
attrs! {At::Title => "Mark as unread"},
|
||||||
|
span![C!["icon", "is-small"], i![C!["far", "fa-envelope"]]],
|
||||||
|
IF!(show_icon_text=>span!["Unread"]),
|
||||||
|
ev(Ev::Click, move |_| Msg::SetUnread(unread_thread_id, true)),
|
||||||
|
],
|
||||||
|
],
|
||||||
|
],
|
||||||
|
// This would be the holder for spam buttons on emails, needed to keep layout
|
||||||
|
// consistent
|
||||||
|
div![C!["level-item"], div![]]
|
||||||
|
],
|
||||||
|
div![
|
||||||
|
C!["message"],
|
||||||
|
div![C!["header"], render_news_post_header(&post)],
|
||||||
|
div![
|
||||||
|
C!["body", "news-post", format!("site-{}", post.slug)],
|
||||||
|
el_ref(content_el),
|
||||||
|
raw![&post.body]
|
||||||
|
]
|
||||||
|
],
|
||||||
|
/* TODO(wathiede): plumb in orignal id
|
||||||
|
a![
|
||||||
|
attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
|
||||||
|
"Original"
|
||||||
|
],
|
||||||
|
*/
|
||||||
|
click_to_top(),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg> {
|
||||||
|
let from = &post.site;
|
||||||
|
let from_detail = post.url.clone();
|
||||||
|
let avatar: Option<String> = None;
|
||||||
|
//let avatar: Option<String> = Some(String::from("https://bulma.io/images/placeholders/64x64.png"));
|
||||||
|
let id = post.thread_id.clone();
|
||||||
|
let is_unread = !post.is_read;
|
||||||
|
let img = render_avatar(avatar, &from);
|
||||||
|
article![
|
||||||
|
C!["media"],
|
||||||
|
figure![C!["media-left"], p![C!["image", "is-64x64"], img]],
|
||||||
|
div![
|
||||||
|
C!["media-content"],
|
||||||
|
div![
|
||||||
|
C!["content"],
|
||||||
|
p![
|
||||||
|
strong![from],
|
||||||
|
br![],
|
||||||
|
small![
|
||||||
|
&from_detail,
|
||||||
|
" ",
|
||||||
|
span![
|
||||||
|
i![C!["far", "fa-clone"]],
|
||||||
|
ev(Ev::Click, move |e| {
|
||||||
|
e.stop_propagation();
|
||||||
|
Msg::CopyToClipboard(from_detail.to_string())
|
||||||
|
})
|
||||||
|
]
|
||||||
|
],
|
||||||
|
table![tr![td![
|
||||||
|
attrs! {At::ColSpan=>2},
|
||||||
|
span![C!["header"], human_age(post.timestamp)]
|
||||||
|
]]],
|
||||||
|
],
|
||||||
|
],
|
||||||
|
],
|
||||||
|
div![
|
||||||
|
C!["media-right"],
|
||||||
|
span![
|
||||||
|
C!["read-status"],
|
||||||
|
i![C![
|
||||||
|
"far",
|
||||||
|
if is_unread {
|
||||||
|
"fa-envelope"
|
||||||
|
} else {
|
||||||
|
"fa-envelope-open"
|
||||||
|
},
|
||||||
|
]]
|
||||||
|
],
|
||||||
|
ev(Ev::Click, move |e| {
|
||||||
|
e.stop_propagation();
|
||||||
|
Msg::SetUnread(id, !is_unread)
|
||||||
|
})
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
fn reading_progress(ratio: f64) -> Node<Msg> {
|
||||||
|
let percent = ratio * 100.;
|
||||||
|
progress![
|
||||||
|
C![
|
||||||
|
"read-progress",
|
||||||
|
"progress",
|
||||||
|
"is-success",
|
||||||
|
"is-small",
|
||||||
|
IF!(percent<1. => "is-invisible")
|
||||||
|
],
|
||||||
|
attrs! {
|
||||||
|
At::Value=>percent,
|
||||||
|
At::Max=>"100"
|
||||||
|
},
|
||||||
|
format!("{percent}%")
|
||||||
|
]
|
||||||
|
}
|
||||||
|
pub fn versions(versions: &crate::state::Version) -> Node<Msg> {
|
||||||
|
debug!("versions {versions:?}");
|
||||||
|
aside![
|
||||||
|
C!["tags-menu", "menu"],
|
||||||
|
p![C!["menu-label"], "Versions"],
|
||||||
|
ul![
|
||||||
|
C!["menu-list"],
|
||||||
|
li!["Client"],
|
||||||
|
li![span![C!["tag-indent"], &versions.client]]
|
||||||
|
],
|
||||||
|
versions.server.as_ref().map(|v| ul![
|
||||||
|
C!["menu-list"],
|
||||||
|
li!["Server"],
|
||||||
|
li![span![C!["tag-indent"], v]]
|
||||||
|
])
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn click_to_top() -> Node<Msg> {
|
||||||
|
button![
|
||||||
|
C!["button", "is-danger", "is-small"],
|
||||||
|
span!["Top"],
|
||||||
|
span![C!["icon"], i![C!["fas", "fa-arrow-turn-up"]]],
|
||||||
|
ev(Ev::Click, move |_| web_sys::window()
|
||||||
|
.unwrap()
|
||||||
|
.scroll_to_with_x_and_y(0., 0.))
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,20 +1,24 @@
|
|||||||
use seed::{prelude::*, *};
|
use seed::{prelude::*, *};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
graphql::show_thread_query::*,
|
||||||
state::{Context, Model, Msg},
|
state::{Context, Model, Msg},
|
||||||
view::{self, view_header, view_search_results},
|
view::{self, reading_progress, view_header, view_search_results},
|
||||||
};
|
};
|
||||||
|
|
||||||
pub(super) fn view(model: &Model) -> Node<Msg> {
|
pub(super) fn view(model: &Model) -> Node<Msg> {
|
||||||
log::info!("tablet::view");
|
|
||||||
let show_icon_text = false;
|
let show_icon_text = false;
|
||||||
// Do two queries, one without `unread` so it loads fast, then a second with unread.
|
// Do two queries, one without `unread` so it loads fast, then a second with unread.
|
||||||
let content = match &model.context {
|
let content = match &model.context {
|
||||||
Context::None => div![h1!["Loading"]],
|
Context::None => div![h1!["Loading"]],
|
||||||
Context::ThreadResult {
|
Context::ThreadResult {
|
||||||
thread,
|
thread: ShowThreadQueryThread::EmailThread(thread),
|
||||||
open_messages,
|
open_messages,
|
||||||
} => view::thread(thread, open_messages, show_icon_text),
|
} => view::thread(thread, open_messages, show_icon_text, &model.content_el),
|
||||||
|
Context::ThreadResult {
|
||||||
|
thread: ShowThreadQueryThread::NewsPost(post),
|
||||||
|
..
|
||||||
|
} => view::news_post(post, show_icon_text, &model.content_el),
|
||||||
Context::SearchResult {
|
Context::SearchResult {
|
||||||
query,
|
query,
|
||||||
results,
|
results,
|
||||||
@@ -33,9 +37,12 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
|
|||||||
div![
|
div![
|
||||||
C!["main-content"],
|
C!["main-content"],
|
||||||
div![
|
div![
|
||||||
|
reading_progress(model.read_completion_ratio),
|
||||||
view_header(&model.query, &model.refreshing_state),
|
view_header(&model.query, &model.refreshing_state),
|
||||||
content,
|
content,
|
||||||
view_header(&model.query, &model.refreshing_state),
|
view_header(&model.query, &model.refreshing_state),
|
||||||
|
view::tags(model),
|
||||||
|
view::versions(&model.versions)
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
55
web/static/site-specific.css
Normal file
55
web/static/site-specific.css
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
.body figcaption {
|
||||||
|
color: var(--color-text) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.body.news-post em {
|
||||||
|
border: 0 !important;
|
||||||
|
font-style: italic;
|
||||||
|
margin: inherit !important;
|
||||||
|
padding: inherit !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.body.news-post .number {
|
||||||
|
align-items: inherit;
|
||||||
|
background-color: inherit;
|
||||||
|
border-radius: inherit;
|
||||||
|
display: inherit;
|
||||||
|
font-size: inherit;
|
||||||
|
height: inherit;
|
||||||
|
justify-content: inherit;
|
||||||
|
margin-right: inherit;
|
||||||
|
min-width: inherit;
|
||||||
|
padding: inherit;
|
||||||
|
text-align: inherit;
|
||||||
|
vertical-align: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.body.news-post.site-saturday-morning-breakfast-cereal {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.body.news-post.site-slashdot i {
|
||||||
|
border-left: 2px solid #ddd;
|
||||||
|
display: block;
|
||||||
|
font-style: normal !important;
|
||||||
|
margin-bottom: 1em;
|
||||||
|
margin-top: 1em;
|
||||||
|
padding-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.body.news-post.site-news-on-redox-your-next-gen-os h1,
|
||||||
|
.body.news-post.site-news-on-redox-your-next-gen-os h2,
|
||||||
|
.body.news-post.site-news-on-redox-your-next-gen-os h3,
|
||||||
|
.body.news-post.site-news-on-redox-your-next-gen-os h4,
|
||||||
|
.body.news-post.site-news-on-redox-your-next-gen-os h5 {
|
||||||
|
color: var(--color-text) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.body.mail code,
|
||||||
|
.body.mail pre {
|
||||||
|
color: var(--color-text);
|
||||||
|
background-color: var(--color-bg-secondary);
|
||||||
|
}
|
||||||
@@ -1,3 +1,46 @@
|
|||||||
|
:root {
|
||||||
|
--active-brightness: 0.85;
|
||||||
|
--border-radius: 5px;
|
||||||
|
--box-shadow: 2px 2px 10px;
|
||||||
|
--color-accent: #118bee15;
|
||||||
|
--color-bg: #fff;
|
||||||
|
--color-bg-secondary: #e9e9e9;
|
||||||
|
--color-link: #118bee;
|
||||||
|
--color-secondary: #920de9;
|
||||||
|
--color-secondary-accent: #920de90b;
|
||||||
|
--color-shadow: #f4f4f4;
|
||||||
|
--color-table: #118bee;
|
||||||
|
--color-text: #000;
|
||||||
|
--color-text-secondary: #999;
|
||||||
|
--color-scrollbar: #cacae8;
|
||||||
|
--font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
|
||||||
|
--hover-brightness: 1.2;
|
||||||
|
--justify-important: center;
|
||||||
|
--justify-normal: left;
|
||||||
|
--line-height: 1.5;
|
||||||
|
/*
|
||||||
|
--width-card: 285px;
|
||||||
|
--width-card-medium: 460px;
|
||||||
|
--width-card-wide: 800px;
|
||||||
|
*/
|
||||||
|
--width-content: 1080px;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (prefers-color-scheme: dark) {
|
||||||
|
:root[color-mode="user"] {
|
||||||
|
--color-accent: #0097fc4f;
|
||||||
|
--color-bg: #333;
|
||||||
|
--color-bg-secondary: #555;
|
||||||
|
--color-link: #0097fc;
|
||||||
|
--color-secondary: #e20de9;
|
||||||
|
--color-secondary-accent: #e20de94f;
|
||||||
|
--color-shadow: #bbbbbb20;
|
||||||
|
--color-table: #0097fc;
|
||||||
|
--color-text: #f7f7f7;
|
||||||
|
--color-text-secondary: #aaa;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
.message {
|
.message {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
padding: 0.5em;
|
padding: 0.5em;
|
||||||
@@ -30,7 +73,7 @@
|
|||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
|
|
||||||
.body {
|
.message .body {
|
||||||
background: white;
|
background: white;
|
||||||
color: black;
|
color: black;
|
||||||
margin-top: 0.5em;
|
margin-top: 0.5em;
|
||||||
@@ -48,13 +91,26 @@
|
|||||||
margin: 0.5rem 0;
|
margin: 0.5rem 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.message .body blockquote {
|
||||||
|
padding-left: 1em;
|
||||||
|
border-left: 2px solid #ddd;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message .body ul,
|
||||||
|
.message .body ol,
|
||||||
|
.message .body li {
|
||||||
|
margin: revert;
|
||||||
|
padding: revert;
|
||||||
|
}
|
||||||
|
|
||||||
.error {
|
.error {
|
||||||
background-color: red;
|
background-color: red;
|
||||||
}
|
}
|
||||||
|
|
||||||
.view-part-text-plain {
|
.view-part-text-plain {
|
||||||
padding: 0.5em;
|
font-family: monospace;
|
||||||
overflow-wrap: break-word;
|
overflow-wrap: break-word;
|
||||||
|
padding: 0.5em;
|
||||||
white-space: pre-wrap;
|
white-space: pre-wrap;
|
||||||
word-break: break-word;
|
word-break: break-word;
|
||||||
word-wrap: break-word;
|
word-wrap: break-word;
|
||||||
@@ -126,13 +182,13 @@ iframe {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@keyframes spin {
|
@keyframes spin {
|
||||||
from {
|
from {
|
||||||
transform: rotate(0deg);
|
transform: rotate(0deg);
|
||||||
}
|
}
|
||||||
|
|
||||||
to {
|
to {
|
||||||
transform: rotate(360deg);
|
transform: rotate(360deg);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@media (max-width: 768px) {
|
@media (max-width: 768px) {
|
||||||
@@ -155,6 +211,10 @@ input::placeholder,
|
|||||||
padding: 1em;
|
padding: 1em;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.search-results>nav {
|
||||||
|
margin: 1.25rem;
|
||||||
|
}
|
||||||
|
|
||||||
.tablet .thread h3,
|
.tablet .thread h3,
|
||||||
.mobile .thread h3 {
|
.mobile .thread h3 {
|
||||||
overflow-wrap: break-word;
|
overflow-wrap: break-word;
|
||||||
@@ -177,8 +237,6 @@ input::placeholder,
|
|||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
|
|
||||||
.search-results .row .checkbox {}
|
|
||||||
|
|
||||||
.search-results .row .summary {
|
.search-results .row .summary {
|
||||||
min-width: 0;
|
min-width: 0;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
@@ -190,17 +248,13 @@ input::placeholder,
|
|||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
|
|
||||||
.search-results td.subject {
|
|
||||||
}
|
|
||||||
|
|
||||||
.search-results .subject .tag {}
|
|
||||||
|
|
||||||
.search-results .subject .text {
|
.search-results .subject .text {
|
||||||
padding-left: 0.5rem;
|
display: inline-block;
|
||||||
width: 100%;
|
|
||||||
overflow: hidden;
|
overflow: hidden;
|
||||||
|
padding-left: 0.5rem;
|
||||||
text-overflow: ellipsis;
|
text-overflow: ellipsis;
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
|
width: 100%;
|
||||||
}
|
}
|
||||||
|
|
||||||
.search-results .row .from {
|
.search-results .row .from {
|
||||||
@@ -281,10 +335,22 @@ display: none;
|
|||||||
margin: .25em;
|
margin: .25em;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
|
|
||||||
.attachment .card-content {
|
.attachment .card-content {
|
||||||
padding: 0.5rem 1.5rem;
|
padding: 0.5rem 1.5rem;
|
||||||
}
|
}
|
||||||
|
|
||||||
.button.spam {
|
.button.spam {
|
||||||
color: #f00;
|
color: #f00;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
progress.read-progress {
|
||||||
|
border-radius: 0;
|
||||||
|
position: fixed;
|
||||||
|
top: 0;
|
||||||
|
z-index: 999;
|
||||||
|
}
|
||||||
|
|
||||||
|
progress.read-progress.is-small {
|
||||||
|
height: .25rem;
|
||||||
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user