Compare commits


338 Commits

SHA1 Message Date
6b3567fb1b web: style tag list 2025-01-26 09:42:32 -08:00
c27bcac549 web: switch to debug build and enable minimal optimizations to make wasm work 2025-01-26 09:32:06 -08:00
25d31a6ce7 web: only use one view function, desktop/tablet/mobile handled in CSS 2025-01-26 09:31:44 -08:00
ea280dd366 web: stub out all C![] that need porting to tailwind 2025-01-25 16:56:44 -08:00
9842c8c99c server: add option to inline CSS before slurping contents 2025-01-25 16:09:05 -08:00
906ebd73b2 cargo: don't default to xinu repo, that was misguided 2025-01-25 16:05:05 -08:00
de95781ce7 More lint 2025-01-24 09:38:56 -08:00
c58234fa2e Lint 2025-01-24 09:37:49 -08:00
4099bbe732 Bumping version to 0.0.115 2025-01-19 17:22:37 -08:00
c693d4e78a server: strip html from search index of summaries 2025-01-19 17:22:24 -08:00
f90ff72316 server: fix tantivy/newsreader search bug 2025-01-19 17:22:20 -08:00
bed6ae01f2 Bumping version to 0.0.114 2025-01-19 16:50:50 -08:00
087d6b9a60 Use registry version of formerly git dependencies 2025-01-19 16:50:14 -08:00
b04caa9d5d Bumping version to 0.0.113 2025-01-17 15:51:39 -08:00
17b1125ea3 server: Use crate version of cacher 2025-01-17 15:51:28 -08:00
a8ac79d396 Bumping version to 0.0.112 2025-01-16 16:09:28 -08:00
30cbc260dc web: version bump wasm-bindgen-cli 2025-01-16 16:09:06 -08:00
4601b7e6d3 Bumping version to 0.0.111 2025-01-15 12:27:33 -08:00
28b6f565fd update cacher dependency 2025-01-15 12:27:29 -08:00
48b63b19d5 Bumping version to 0.0.110 2025-01-14 20:55:53 -08:00
184afbb4ee update cacher dependency 2025-01-14 20:55:49 -08:00
f6217810ea Bumping version to 0.0.109 2025-01-14 16:22:24 -08:00
46e2de341b update cacher dependency 2025-01-14 16:22:20 -08:00
9c56fde0b6 Bumping version to 0.0.108 2025-01-14 12:05:38 -08:00
2051e5ebf2 cargo sqlx prepare 2025-01-14 12:05:37 -08:00
5a997e61da web & server: add support for email photos 2025-01-14 12:05:03 -08:00
f27f0deb38 Revert "Remove DB tables that don't seem to work"
This reverts commit 70f437b939.
2025-01-13 21:03:56 -08:00
70f437b939 Remove DB tables that don't seem to work 2025-01-13 20:50:19 -08:00
59648a1b25 Bumping version to 0.0.107 2025-01-12 16:35:17 -08:00
76482c6c15 server: make pagination slightly less bad 2025-01-12 16:35:11 -08:00
de23bae8bd server: add request_id to all graphql logging 2025-01-12 11:40:31 -08:00
e07c0616a2 Bumping version to 0.0.106 2025-01-12 09:26:23 -08:00
13a7de4956 web: refactor mark read logic to be two phases 2025-01-12 09:25:44 -08:00
9ce0aacab0 Bumping version to 0.0.105 2025-01-12 08:34:36 -08:00
ae502a7dfe Bumping version to 0.0.104 2025-01-02 15:19:24 -08:00
947c5970d8 update xtracing dependency 2025-01-02 15:19:17 -08:00
686d163cf6 update xtracing dependency 2025-01-02 15:18:49 -08:00
7c720e66f9 Bumping version to 0.0.103 2024-12-28 15:10:17 -08:00
1029fd7aa2 update cacher dependency 2024-12-28 15:10:12 -08:00
61e59ea315 Bumping version to 0.0.102 2024-12-28 15:09:21 -08:00
5047094bd7 update xtracing dependency 2024-12-28 15:09:16 -08:00
28bd9a9d89 Bumping version to 0.0.101 2024-12-28 15:08:52 -08:00
4b327eeccc update xtracing dependency 2024-12-28 15:08:48 -08:00
d13b5477a5 Bumping version to 0.0.100 2024-12-28 15:08:28 -08:00
8cad404098 update xtracing dependency 2024-12-28 15:08:24 -08:00
23de7186d6 Bumping version to 0.0.99 2024-12-28 15:06:37 -08:00
a26559a07e Bumping version to 0.0.98 2024-12-28 15:04:53 -08:00
1bc7ad9b95 update xtracing dependency 2024-12-28 15:04:48 -08:00
1ac844c08d Bumping version to 0.0.97 2024-12-28 15:04:02 -08:00
d7f7954e59 Bumping version to 0.0.96 2024-12-28 15:02:21 -08:00
ba16e537e6 Bumping version to 0.0.95 2024-12-28 15:00:36 -08:00
60304a23cc update xtracing dependency 2024-12-28 15:00:30 -08:00
ce6aa7d167 Bumping version to 0.0.94 2024-12-28 09:09:41 -08:00
fb55d87876 update xtracing dependency 2024-12-28 09:09:27 -08:00
63374871ac Bumping version to 0.0.93 2024-12-28 08:44:51 -08:00
405dcc5ca6 update cacher dependency 2024-12-28 08:44:47 -08:00
1544405d3a Bumping version to 0.0.92 2024-12-28 08:43:49 -08:00
3b547f6925 update cacher dependency 2024-12-28 08:43:41 -08:00
777f33e212 notmuch: add instrumentation to most public methods 2024-12-26 11:12:47 -08:00
7c7a8c0dcb Bumping version to 0.0.91 2024-12-25 16:22:36 -08:00
6c2722314b server: fix compile problem with new PG schema 2024-12-25 16:22:19 -08:00
7827c24016 Bumping version to 0.0.90 2024-12-25 16:19:16 -08:00
043e46128a cargo sqlx prepare 2024-12-25 16:19:15 -08:00
dad30357ac server: ensure post.link is not null and not empty 2024-12-25 10:12:33 -08:00
4c6b9cde39 Bumping version to 0.0.89 2024-12-25 08:03:13 -08:00
ffb210babb server: ensure uniqueness on post links 2024-12-25 08:02:36 -08:00
145d1c1787 Bumping version to 0.0.88 2024-12-21 16:52:24 -08:00
1708526e33 update xtracing dependency 2024-12-21 16:52:19 -08:00
f8f9b753a6 Bumping version to 0.0.87 2024-12-21 16:23:13 -08:00
7fbb0e0f43 update xtracing dependency 2024-12-21 16:23:09 -08:00
2686670df7 Bumping version to 0.0.86 2024-12-21 16:21:19 -08:00
732fb5054a update xtracing dependency 2024-12-21 16:21:14 -08:00
2abfbda2f0 Bumping version to 0.0.85 2024-12-21 16:19:42 -08:00
cce693174e update xtracing dependency 2024-12-21 16:19:37 -08:00
bec7ee40b4 Bumping version to 0.0.84 2024-12-21 13:18:26 -08:00
79a6245773 update xtracing dependency 2024-12-21 13:18:22 -08:00
3ae1c3fdff Bumping version to 0.0.83 2024-12-21 13:16:50 -08:00
eab96b3f84 update xtracing dependency 2024-12-21 13:16:44 -08:00
07b8db317b cargo sqlx prepare 2024-12-21 13:16:18 -08:00
9debec8daa Bumping version to 0.0.82 2024-12-21 13:10:22 -08:00
b129b99fd9 cargo sqlx prepare 2024-12-21 13:10:21 -08:00
a397bcf190 update xtracing dependency 2024-12-21 13:10:16 -08:00
13c80fe68f update xtracing dependency 2024-12-21 13:08:01 -08:00
438ab0015e update xtracing dependency 2024-12-21 13:07:14 -08:00
93f5145937 update xtracing dependency 2024-12-21 13:06:59 -08:00
36fcc349ec update xtracing dependency 2024-12-21 13:05:31 -08:00
63a1919872 update xtracing dependency 2024-12-21 13:02:59 -08:00
5b6d18bdbc Bumping version to 0.0.81 2024-12-20 09:25:51 -08:00
868d2fb434 xtracing version bump 2024-12-20 09:25:46 -08:00
6ad66a35e7 Bumping version to 0.0.80 2024-12-20 09:18:27 -08:00
cd750e7267 Update xtracing 2024-12-20 09:16:41 -08:00
40be07cb07 Bumping version to 0.0.79 2024-12-20 09:06:45 -08:00
e794a902dd server: clean up some renamed imports 2024-12-20 09:06:35 -08:00
94576e98fc Bumping version to 0.0.78 2024-12-20 09:06:08 -08:00
b7dcb2e875 server: rename crate and binary to letterbox-server 2024-12-20 09:05:35 -08:00
aa9a243894 Bumping version to 0.0.77 2024-12-20 08:43:47 -08:00
1911367aeb cargo update 2024-12-20 08:43:37 -08:00
93bb4a27b9 Bumping version to 0.0.76 2024-12-19 18:44:31 -08:00
0456efeed4 cargo sqlx prepare 2024-12-19 18:44:30 -08:00
3ac2fa290f server: use git version of xtracing 2024-12-19 18:44:13 -08:00
e7feb73f6f lint 2024-12-19 18:38:43 -08:00
5ddb4452ff email2db: stub CLI 2024-12-19 18:35:46 -08:00
760f90762d server: refer to async_graphql extensions through extensions module 2024-12-19 18:35:03 -08:00
51154044cc WIP 2024-12-19 12:56:53 -08:00
06c5cb6cbf Update offline sqlx files on build 2024-12-19 12:50:10 -08:00
0dc1f2cebe Bumping version to 0.0.75 2024-12-19 11:35:18 -08:00
0dec7aaf0e web: pin wasm-bindgen 2024-12-19 11:35:00 -08:00
6fa8d1856a Revert "web: fix breakage due to update in dependency"
This reverts commit 80d23204fe.
2024-12-19 11:34:33 -08:00
95a0279c68 Bumping version to 0.0.74 2024-12-19 11:04:55 -08:00
80d23204fe web: fix breakage due to update in dependency 2024-12-19 11:04:39 -08:00
f45123d6d9 Bumping version to 0.0.73 2024-12-19 10:53:51 -08:00
503913c54a Bumping version to 0.0.72 2024-12-19 10:46:47 -08:00
c4627a13b6 cargo sqlx prepare 2024-12-19 10:46:39 -08:00
e4427fe725 Bumping version to 0.0.71 2024-12-19 10:44:15 -08:00
78f5f00225 cargo update 2024-12-19 10:44:05 -08:00
c6fc34136a Version bump sqlx 2024-12-19 10:44:05 -08:00
1a270997c8 Update xtracing 2024-12-19 10:38:56 -08:00
390fbcceac Bumping version to 0.0.70 2024-12-17 13:57:25 -08:00
d7214f4f29 server: move notmuch refresh out of tantivy cfg block for refresh 2024-12-17 13:57:06 -08:00
b9aaf87dc2 Bumping version to 0.0.69 2024-12-17 09:38:26 -08:00
5ee9d754ba server: actually disable tantivy 2024-12-17 09:38:19 -08:00
dc04d54455 cargo sqlx prepare 2024-12-17 09:34:03 -08:00
9f730e937d Bumping version to 0.0.68 2024-12-17 09:32:13 -08:00
13eaf33b1a server: add postgres based newsreader search and disable tantivy 2024-12-17 09:31:51 -08:00
e36f4f97f9 server: run DB migrations on startup 2024-12-16 19:21:58 -08:00
092d5781ca Bumping version to 0.0.67 2024-12-16 19:21:34 -08:00
0697a5ea41 server: more instrumentation 2024-12-16 19:21:05 -08:00
607e9e2251 Bumping version to 0.0.66 2024-12-16 08:56:24 -08:00
c547170efb server: address lint 2024-12-16 08:56:16 -08:00
0222985f4d server: instrument newsreader impl 2024-12-16 08:56:05 -08:00
94c03a9c7c Bumping version to 0.0.65 2024-12-16 08:34:53 -08:00
4f4e474e66 server: explicitly reload tantivy reader after commit 2024-12-16 08:34:35 -08:00
7a1dec03a3 Bumping version to 0.0.64 2024-12-15 16:26:38 -08:00
f49bc071c2 server: version bump xtracing 2024-12-15 16:26:22 -08:00
8551f0c756 Bumping version to 0.0.63 2024-12-15 15:43:56 -08:00
ac4aaeb0f7 server: warn on failure to open tantivy 2024-12-15 15:43:44 -08:00
4ad963c3be Bumping version to 0.0.62 2024-12-15 15:18:36 -08:00
7c943afc2b server: attempt concurrency with graphql::search and fail 2024-12-15 15:09:41 -08:00
39ea5c5458 Bumping version to 0.0.61 2024-12-15 14:46:53 -08:00
6d8b2de608 server: improve tantivy performance by reusing IndexReader
Also improve a bunch of trace logging
2024-12-15 14:46:10 -08:00
05cdcec244 notmuch: improved error handling and logging 2024-12-15 14:44:02 -08:00
a0eb8dcba6 server: add TODO 2024-12-14 11:56:33 -08:00
9fbfa378bb Bumping version to 0.0.60 2024-12-14 10:09:48 -08:00
872771b02a server: add tracing for graphql handling 2024-12-14 10:09:33 -08:00
416d82042f Bumping version to 0.0.59 2024-12-10 09:13:22 -08:00
a0eb291371 web: make post favicon more cacheable 2024-12-10 09:13:11 -08:00
4c88ee18d3 Bumping version to 0.0.58 2024-12-09 13:17:09 -08:00
410e582b44 web: use favicon for avatar when viewing a post 2024-12-09 13:16:55 -08:00
a3f720a51e Bumping version to 0.0.57 2024-12-08 18:05:12 -08:00
962b3542ce web: show email address on hover of name in message view 2024-12-08 18:03:20 -08:00
a6f0971f0f Bumping version to 0.0.56 2024-11-13 17:43:16 -08:00
21789df60a server: handle attachments with name in content-type not disposition 2024-11-13 17:42:53 -08:00
584ff1504d cargo fmt to catch unformatted code while LSP was misconfigured 2024-11-03 08:33:10 -08:00
caff1a1ed3 web: remove unnecessary move 2024-10-30 20:07:32 -07:00
d7b4411017 web: update cargo edition 2024-10-30 19:59:06 -07:00
66ada655fc Bumping version to 0.0.55 2024-10-29 17:16:58 -07:00
8dea1f1bd6 web: fix styling on news post tags to match email 2024-10-29 17:16:45 -07:00
e7a865204d Bumping version to 0.0.54 2024-10-27 12:27:34 -07:00
3138379e7d web: add tag when viewing news posts 2024-10-27 12:27:16 -07:00
7828fa0ac8 server: add slurper config for rustacean station 2024-10-27 12:15:43 -07:00
b770bb8986 server: add slurp config for grafana 2024-10-27 12:14:15 -07:00
07c0150d3e Bumping version to 0.0.53 2024-10-27 12:03:25 -07:00
f678338822 server: lint, including bug fix 2024-10-27 12:03:16 -07:00
6e15e69254 server: handle forwarded rfc822 messages 2024-10-27 12:02:00 -07:00
2671a3b787 Bumping version to 0.0.52 2024-10-27 10:56:11 -07:00
93073c9602 server: fix pagination counts for tantivy results 2024-10-27 10:55:49 -07:00
88f8a9d537 Bumping version to 0.0.51 2024-10-13 17:40:35 -07:00
b75b298a9d web: match email header styling when viewing post 2024-10-13 17:40:20 -07:00
031b8ce80e Bumping version to 0.0.50 2024-10-03 09:21:48 -07:00
b0ceba3bcf web: consistent html between open/close header, move padding into header code 2024-10-03 09:21:12 -07:00
e5f5b8ff3c Bumping version to 0.0.49 2024-10-03 09:04:03 -07:00
afb1d291ec web: fix right justify of read icon/timestamp on closed message header 2024-10-03 09:03:22 -07:00
55b46ff929 Bumping version to 0.0.48 2024-10-01 17:20:01 -07:00
58acd8018a web: more dense email headers 2024-10-01 17:19:52 -07:00
e0d0ede2ce Bumping version to 0.0.47 2024-10-01 15:12:20 -07:00
ac46b0e4d0 web: change up spacing in email headers. Increase density 2024-10-01 15:12:02 -07:00
e12ea2d7e4 Bumping version to 0.0.46 2024-09-29 19:17:07 -07:00
5f052facdf web: fix styling of envelope on closed headers 2024-09-29 19:16:51 -07:00
4476749203 Bumping version to 0.0.45 2024-09-29 19:05:59 -07:00
0fa860bc71 web: show email address when no name present 2024-09-29 19:05:46 -07:00
b858b23584 Bumping version to 0.0.44 2024-09-29 18:03:05 -07:00
6500e60c40 web: remove dead code 2024-09-29 18:02:45 -07:00
efc991923d Bumping version to 0.0.43 2024-09-29 17:56:39 -07:00
0b5e057fe6 web: fix spacing when there are few To/CC 2024-09-29 17:56:25 -07:00
822e1b0a9c Bumping version to 0.0.42 2024-09-29 17:15:57 -07:00
4f21814be0 web: successfully rewrite some bits in tailwind 2024-09-29 17:15:28 -07:00
17da489229 web: WIP tailwind integration 2024-09-29 16:43:29 -07:00
5b8639b80f Bumping version to 0.0.41 2024-09-29 16:41:36 -07:00
6c9ef912e6 server: don't touch tantivy if no uids reindexed 2024-09-29 16:41:13 -07:00
da636ca1f3 Bumping version to 0.0.40 2024-09-29 16:28:37 -07:00
7880eddccd Bumping version to 0.0.39 2024-09-29 16:28:25 -07:00
3ec1741f10 web & server: using tantivy for news post search 2024-09-29 16:28:05 -07:00
f36d1e0c29 server: continue if db path missing on create_news_db 2024-09-28 12:29:12 -07:00
ebf32a9905 server: WIP tantivy integration 2024-09-28 12:29:12 -07:00
005a457348 Bumping version to 0.0.38 2024-09-28 12:28:53 -07:00
a89a279764 notmuch: use faster, but inaccurate message count 2024-09-28 12:28:41 -07:00
fbc426f218 Bumping version to 0.0.37 2024-09-28 12:23:29 -07:00
27b480e118 web: try alternative for clearing screen on build 2024-09-28 12:22:35 -07:00
dee6ff9ba0 Bumping version to 0.0.36 2024-09-28 12:06:12 -07:00
73bdcd5441 server: add pjpeg support for attachments 2024-09-28 12:06:00 -07:00
64a38e024d Bumping version to 0.0.35 2024-09-28 11:18:39 -07:00
441b40532f Bumping version to 0.0.34 2024-09-28 11:18:37 -07:00
bfb6a6226d Bumping version to 0.0.33 2024-09-28 11:18:37 -07:00
f464585fad web: tweak hr styling 2024-09-28 11:18:37 -07:00
3fe61f8b09 web: clear screen on rebuild 2024-09-28 11:18:37 -07:00
43b3625656 server: join slurped parts with <hr> elements 2024-09-28 11:16:10 -07:00
6505c90f32 Bumping version to 0.0.32 2024-09-26 16:28:02 -07:00
104eb189fe web: shrink <hr> margins 2024-09-26 16:27:50 -07:00
b70e0018d7 Bumping version to 0.0.31 2024-09-25 19:46:15 -07:00
d962d515f5 web: shorten outbound link on news post 2024-09-25 19:45:52 -07:00
3c8d7d4f81 server: move tantivy code to separate mod 2024-09-22 10:26:45 -07:00
d1604f8e70 server: remove done TODO 2024-09-21 18:48:25 -07:00
6f07817c0e Bumping version to 0.0.30 2024-09-21 13:01:27 -07:00
0ac959ab76 server: add slurp config for ingowald 2024-09-21 13:01:17 -07:00
62b17bd6a6 Bumping version to 0.0.29 2024-09-20 08:56:58 -07:00
c0bac99d5a server: add slurp config for zsa blog 2024-09-20 08:56:45 -07:00
3b69c5e74b Bumping version to 0.0.28 2024-09-19 17:06:03 -07:00
539fd469cc server: create index when missing 2024-09-19 17:05:47 -07:00
442688c35c web: lint 2024-09-19 16:54:18 -07:00
da27f02237 Bumping version to 0.0.27 2024-09-19 16:52:35 -07:00
9460e354b7 server: cargo sqlx prepare 2024-09-19 16:52:26 -07:00
6bab128ed9 Bumping version to 0.0.26 2024-09-19 16:33:50 -07:00
3856b4ca5a server: try different cacher url 2024-09-19 16:33:40 -07:00
bef39eefa5 Bumping version to 0.0.25 2024-09-19 16:08:20 -07:00
b0366c7b4d server: try non-https to see if that works 2024-09-19 16:07:59 -07:00
ca02d84d63 Bumping version to 0.0.24 2024-09-19 16:01:55 -07:00
461d5de886 server: change internal git url 2024-09-19 16:01:41 -07:00
f8134dad7a Bumping version to 0.0.23 2024-09-19 15:53:56 -07:00
30f510bb03 server: WIP tantivy, cache slurps, use shared::compute_color, 2024-09-19 15:53:09 -07:00
e7cbf9cc45 shared: remove debug logging 2024-09-19 13:54:47 -07:00
5108213af5 web: use shared compute_color 2024-09-19 13:49:24 -07:00
d148f625ac shared: add compute_color 2024-09-19 13:48:56 -07:00
a9b8f5a88f Bumping version to 0.0.22 2024-09-16 20:00:16 -07:00
539b584d9b web: fix broken build 2024-09-16 20:00:06 -07:00
2f8d83fc4b Bumping version to 0.0.21 2024-09-16 19:52:28 -07:00
86ee1257fa web: better progress bar 2024-09-16 19:52:20 -07:00
03f1035e0e Bumping version to 0.0.20 2024-09-12 22:38:18 -07:00
bd578191a8 web: add scroll to top button and squelch some debug logging 2024-09-12 22:37:58 -07:00
d4fc2e2ef1 Bumping version to 0.0.19 2024-09-12 15:41:01 -07:00
cde30de81c web: explicitly set progress to zero when not in thread/news view 2024-09-12 15:40:42 -07:00
96be74e3ee Bumping version to 0.0.18 2024-09-12 15:32:30 -07:00
b78d34b27e web: disable bulma styling for .number 2024-09-12 15:32:18 -07:00
b4b64c33a6 Bumping version to 0.0.17 2024-09-12 10:07:00 -07:00
47b1875022 server: tweak cloudflare and prusa slurp config 2024-09-12 10:06:46 -07:00
b06cbd1381 Bumping version to 0.0.16 2024-09-12 10:03:26 -07:00
9e35f8ca6c web: fix <em> looking like a button 2024-09-12 10:01:58 -07:00
8eaefde67d Bumping version to 0.0.15 2024-09-12 09:28:14 -07:00
d5a3324837 server: slurp config for prusa blog and squelch some info logging 2024-09-12 09:27:57 -07:00
f5c90d8770 Bumping version to 0.0.14 2024-09-11 11:46:04 -07:00
825a125a62 web: redox specific styling 2024-09-11 11:45:53 -07:00
da7cf37dae Bumping version to 0.0.13 2024-09-11 11:41:27 -07:00
1985ae1f49 server: add slurp configs for facebook and redox 2024-09-11 11:41:09 -07:00
91eb3019f9 Bumping version to 0.0.12 2024-09-09 20:31:07 -07:00
66e8e00a9b web: remove dead code 2024-09-09 20:21:51 -07:00
4b8923d852 web: more accurate reading progress bar 2024-09-09 20:21:13 -07:00
baba720749 Bumping version to 0.0.11 2024-09-02 13:36:18 -07:00
1ec22599cc web: make pre blocks look like code blocks in email 2024-09-02 13:35:58 -07:00
c69017bc36 Bumping version to 0.0.10 2024-09-02 13:19:11 -07:00
48bf57fbbe web: more pleasant color scheme for code blocks in email 2024-09-02 13:18:49 -07:00
3491856784 Bumping version to 0.0.9 2024-09-01 16:17:35 -07:00
f887c15b46 web: address lint 2024-09-01 16:17:27 -07:00
7786f850d1 Bumping version to 0.0.8 2024-09-01 16:16:09 -07:00
cad778734e web: rename Msg::Reload->Refresh and create proper Reload 2024-09-01 16:15:38 -07:00
1210f7038a Bumping version to 0.0.7 2024-09-01 16:09:14 -07:00
f9ab7284a3 web: remove obsolete Makefile 2024-09-01 16:09:04 -07:00
100865c923 server: use same html cleanup idiom in nm as we do in newsreader 2024-09-01 16:08:25 -07:00
b8c1710a83 dev: watch for git commits and rebuild on change 2024-09-01 16:07:22 -07:00
215b8cd41d shared: ignore dirty, if git is present we're developing
When developing, the dirty flag can get out of sync between client and server if you're
only doing development in one.
2024-09-01 15:57:02 -07:00
487d7084c3 Bumping version to 0.0.6 2024-09-01 15:48:41 -07:00
b1e761b26f web: don't show progress bar until 400px have scrolled 2024-09-01 15:48:11 -07:00
3efe90ca21 Update release makefile 2024-09-01 15:40:19 -07:00
61649e1e04 Bumping version to 0.0.5 2024-09-01 15:38:39 -07:00
13ac352a10 Helpers to bump version number 2024-09-01 15:37:00 -07:00
5ca7a25e8d Bumping version to 0.0.4 2024-09-01 15:36:48 -07:00
7bb8ef0938 Bumping version to :?} 2024-09-01 15:36:36 -07:00
5c55a290ac Bumping version to :?} 2024-09-01 15:34:53 -07:00
4e3e1b075d Setting crate version to 0.2.0-a8c5a16 2024-09-01 15:30:37 -07:00
a8c5a164ff web: clean up version string and reload on mismatch 2024-09-01 15:02:34 -07:00
1f393f1c7f Add server and client build versions 2024-09-01 14:55:51 -07:00
fdaff70231 server: improve cloudflare and grafana image and iframe rendering 2024-09-01 11:05:07 -07:00
7218c13b9e server: address lint 2024-08-31 16:18:47 -07:00
934cb9d91b web: address lint 2024-08-31 16:11:49 -07:00
4faef5e017 web: add scrollbar for read progress 2024-08-31 16:08:06 -07:00
5c813e7350 web: style improvements for figure captions 2024-08-31 15:04:19 -07:00
fb754469ce web: let pullquotes on grafana blog be full width 2024-08-31 14:46:38 -07:00
548b5a0ab0 server: extract image title and alt attributes into figure captions 2024-08-31 14:43:04 -07:00
f77d0776c4 web: style tweaks for <em> 2024-08-31 14:42:19 -07:00
e73f70af8f Fix new post read/unread handling 2024-08-31 13:49:03 -07:00
a9e6120f81 web: don't make slashdot pull quotes italic 2024-08-31 13:36:21 -07:00
090a010a63 server: fix thread id for news posts 2024-08-31 13:23:25 -07:00
85c762a297 web: add class for mail vs news-post bodies 2024-08-31 11:54:19 -07:00
a8d5617cf2 Treat email and news posts as distinct types on the frontend and backend 2024-08-31 11:40:06 -07:00
760cec01a8 Refactor thread responses into an enum.
Lays ground work for different types of views, i.e. email, news, docs, etc.
2024-08-26 21:48:53 -07:00
446fcfe37f server: fix url for graphiql 2024-08-26 21:48:25 -07:00
71de3ef8ae server: add ability to slurp contents from site 2024-08-25 19:37:53 -07:00
d98d429b5c notmuch: add TODO 2024-08-25 19:37:37 -07:00
cf5a6fadfd server: sort dependencies 2024-08-24 09:26:52 -07:00
9a078cd238 server: only add "view on site" link if it's not in the html body 2024-08-19 10:57:09 -07:00
a81a803cca server: include default chrome CSS as a baseline for news threads 2024-08-19 10:47:38 -07:00
816587b688 server: fix download of chrome default CSS 2024-08-19 10:47:14 -07:00
4083c58bbd server: add chrome default styles
From:
https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/html/resources/html.css
2024-08-19 10:31:59 -07:00
8769e5acd4 server: fix counting issue w/ notmuch (messages vs threads) 2024-08-18 14:18:15 -07:00
3edf9fdb5d web: fix age display when less than 1 minute 2024-08-18 12:55:39 -07:00
ac0ce29c76 web: preserve checked boxes on search refresh 2024-08-18 11:04:31 -07:00
5279578c64 server: fix inline image loading 2024-08-17 16:33:53 -07:00
632f64261e server: fix notmuch paging bug 2024-08-15 16:21:27 -07:00
b5e25eef78 server: fix paging if only notmuch results are found 2024-08-15 14:58:23 -07:00
8a237bf8e1 server: add link to news posts back to original article 2024-08-12 21:14:32 -07:00
c5def6c0e3 web: allow clicking anywhere in the subject line in search results 2024-08-12 20:54:16 -07:00
d1cfc77148 server: more news title/body cleanup, and don't search news so much 2024-08-12 20:53:48 -07:00
c314e3c798 web: make whole row of search results clickable
No longer allow searching by tag by clicking on chiclet
2024-08-06 21:37:38 -07:00
7c5ef96ff0 server: fix paging bug where p1->p2->p1 wouldn't show consistent results 2024-08-06 21:15:10 -07:00
474cf38180 server: cargo sqlx prepare 2024-08-06 20:55:05 -07:00
e81a452dfb web: scroll to top when viewing a new tag 2024-08-06 20:54:25 -07:00
e570202ba2 Merge news and email search results 2024-08-06 20:44:25 -07:00
a84c9f0eaf server: address some lint 2024-08-05 15:54:26 -07:00
530bd8e350 Inline mvp and custom override CSS when rendering RSS posts 2024-08-05 15:47:31 -07:00
359e798cfa server: going with mvp.css not normalize.css 2024-08-04 21:23:05 -07:00
d7d257a6b5 https://andybrewer.github.io/mvp/mvp.css 2024-08-04 21:22:34 -07:00
9ad9ff6879 https://necolas.github.io/normalize.css/8.0.1/normalize.css 2024-08-03 21:31:09 -07:00
56bc1cf7ed server: escape RSS feeds that are HTML escaped 2024-08-03 11:29:20 -07:00
e0863ac085 web: more robust avatar initial filtering 2024-07-29 17:29:15 -07:00
d5fa89b38c web: show tag list in all modalities. WIP 2024-07-29 08:48:44 -07:00
605af13a37 web: monospace font for plain text emails 2024-07-29 08:32:28 -07:00
3838cbd6e2 cargo fix 2024-07-24 11:08:47 -07:00
c76df0ef90 web: update copy icon in more places 2024-07-24 11:06:38 -07:00
cd77d302df web: small icon tweak for copying email addresses 2024-07-24 11:03:32 -07:00
71348d562d version bump 2024-07-24 11:03:26 -07:00
b6ae46db93 Move cargo config up a directory 2024-07-22 16:56:13 -07:00
6cb84054ed Only build server by default 2024-07-22 16:48:47 -07:00
7b511c1673 Fix cleanhtml build 2024-07-22 16:41:14 -07:00
bfd5e12bea Make URL joining more robust 2024-07-22 16:39:59 -07:00
ad8fb77857 Add copy to clipboard links to from/to/cc addresses 2024-07-22 16:04:25 -07:00
831466ddda Add mark read/unread support for news 2024-07-22 14:43:05 -07:00
4ee34444ae Move thread: and id: prefixing to server side.
This paves way for better news: support
2024-07-22 14:26:48 -07:00
879ddb112e Remove some logging and fix a comment 2024-07-22 14:26:24 -07:00
331fb4f11b Fix build 2024-07-22 12:19:45 -07:00
4e5275ca0e cargo sqlx prepare 2024-07-22 12:19:38 -07:00
78 changed files with 10117 additions and 2442 deletions

.cargo/config.toml (new file, 9 lines)

@@ -0,0 +1,9 @@
[build]
rustflags = [ "--cfg=web_sys_unstable_apis" ]
[registry]
global-credential-providers = ["cargo:token"]
[registries.xinu]
index = "sparse+https://git.z.xinu.tv/api/packages/wathiede/cargo/"

Cargo.lock (generated, 4403 lines changed)

File diff suppressed because it is too large.

Cargo.toml

@@ -1,12 +1,10 @@
 [workspace]
 resolver = "2"
-members = [
-    "web",
-    "server",
-    "notmuch",
-    "procmail2notmuch",
-    "shared"
-]
+default-members = ["server"]
+members = ["web", "server", "notmuch", "procmail2notmuch", "shared"]
+
+[profile.dev]
+opt-level = 1
 
 [profile.release]
 lto = true

Makefile (new file, 7 lines)

@@ -0,0 +1,7 @@
.PHONY: release
release:
	(cd server; cargo sqlx prepare && git add .sqlx; git commit -m "cargo sqlx prepare" .sqlx || true)
	bash scripts/update-crate-version.sh
	git push

all: release

dev.sh (4 lines changed)

@@ -1,7 +1,7 @@
 cd -- "$( dirname -- "${BASH_SOURCE[0]}" )"
 tmux new-session -d -s letterbox-dev
 tmux rename-window web
-tmux send-keys "cd web; trunk serve -w ../shared -w ../notmuch -w ./" C-m
+tmux send-keys "cd web; trunk serve -w ../.git -w ../shared -w ../notmuch -w ./" C-m
 tmux new-window -n server
-tmux send-keys "cd server; cargo watch -c -x run -w ../shared -w ../notmuch -w ./" C-m
+tmux send-keys "cd server; cargo watch -c -x run -w ../.git -w ../shared -w ../notmuch -w ./" C-m
 tmux attach -d -t letterbox-dev

notmuch/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "notmuch"
-version = "0.1.0"
+version = "0.0.115"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -10,6 +10,7 @@ log = "0.4.14"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = { version = "1.0", features = ["unbounded_depth"] }
 thiserror = "1.0.30"
+tracing = "0.1.41"
 
 [dev-dependencies]
 itertools = "0.10.1"

notmuch/src/lib.rs

@@ -213,8 +213,9 @@ use std::{
     process::Command,
 };
 
-use log::info;
+use log::{error, info};
 use serde::{Deserialize, Serialize};
+use tracing::instrument;
 
 /// # Number of seconds since the Epoch
 pub type UnixTime = isize;
@@ -465,6 +466,8 @@
     config_path: Option<PathBuf>,
 }
 
+// TODO: rewrite to use tokio::process::Command and make everything async to see if that helps with
+// concurrency being more parallel.
 impl Notmuch {
     pub fn with_config<P: AsRef<Path>>(config_path: P) -> Notmuch {
         Notmuch {
@@ -472,6 +475,7 @@
         }
     }
 
+    #[instrument(skip_all)]
     pub fn new(&self) -> Result<Vec<u8>, NotmuchError> {
         self.run_notmuch(["new"])
     }
@@ -480,6 +484,7 @@
         self.run_notmuch(std::iter::empty::<&str>())
     }
 
+    #[instrument(skip_all, fields(query=query))]
     pub fn tags_for_query(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
         let res = self.run_notmuch(["search", "--format=json", "--output=tags", query])?;
         Ok(serde_json::from_slice(&res)?)
@@ -489,16 +494,19 @@
         self.tags_for_query("*")
     }
 
+    #[instrument(skip_all, fields(tag=tag,search_term=search_term))]
     pub fn tag_add(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
         self.run_notmuch(["tag", &format!("+{tag}"), search_term])?;
         Ok(())
     }
 
+    #[instrument(skip_all, fields(tag=tag,search_term=search_term))]
     pub fn tag_remove(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
         self.run_notmuch(["tag", &format!("-{tag}"), search_term])?;
         Ok(())
     }
 
+    #[instrument(skip_all, fields(query=query,offset=offset,limit=limit))]
     pub fn search(
         &self,
         query: &str,
@@ -507,23 +515,35 @@
     ) -> Result<SearchSummary, NotmuchError> {
         let query = if query.is_empty() { "*" } else { query };
 
-        let res = self.run_notmuch([
-            "search",
-            "--format=json",
-            &format!("--offset={offset}"),
-            &format!("--limit={limit}"),
-            query,
-        ])?;
-        Ok(serde_json::from_slice(&res)?)
+        let res = self
+            .run_notmuch([
+                "search",
+                "--format=json",
+                &format!("--offset={offset}"),
+                &format!("--limit={limit}"),
+                query,
+            ])
+            .inspect_err(|err| error!("failed to notmuch search for query '{query}': {err}"))?;
+        Ok(serde_json::from_slice(&res).unwrap_or_else(|err| {
+            error!("failed to decode search result for query '{query}': {err}");
+            SearchSummary(Vec::new())
+        }))
     }
 
+    #[instrument(skip_all, fields(query=query))]
     pub fn count(&self, query: &str) -> Result<usize, NotmuchError> {
+        // NOTE: --output=threads is technically more correct, but really slow
+        // TODO: find a fast thread count path
+        // let res = self.run_notmuch(["count", "--output=threads", query])?;
         let res = self.run_notmuch(["count", query])?;
         // Strip '\n' from res.
-        let s = std::str::from_utf8(&res[..res.len() - 1])?;
-        Ok(s.parse()?)
+        let s = std::str::from_utf8(&res)?.trim();
+        Ok(s.parse()
+            .inspect_err(|err| error!("failed to parse count for query '{query}': {err}"))
+            .unwrap_or(0))
     }
 
+    #[instrument(skip_all, fields(query=query))]
     pub fn show(&self, query: &str) -> Result<ThreadSet, NotmuchError> {
         let slice = self.run_notmuch([
             "show",
@@ -542,6 +562,7 @@
         Ok(val)
     }
 
+    #[instrument(skip_all, fields(query=query,part=part))]
     pub fn show_part(&self, query: &str, part: usize) -> Result<Part, NotmuchError> {
         let slice = self.run_notmuch([
             "show",
@@ -561,20 +582,24 @@
         Ok(val)
     }
 
+    #[instrument(skip_all, fields(id=id))]
     pub fn show_original(&self, id: &MessageId) -> Result<Vec<u8>, NotmuchError> {
         self.show_original_part(id, 0)
     }
 
+    #[instrument(skip_all, fields(id=id,part=part))]
     pub fn show_original_part(&self, id: &MessageId, part: usize) -> Result<Vec<u8>, NotmuchError> {
         let res = self.run_notmuch(["show", "--part", &part.to_string(), id])?;
         Ok(res)
     }
 
+    #[instrument(skip_all, fields(query=query))]
     pub fn message_ids(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
         let res = self.run_notmuch(["search", "--output=messages", "--format=json", query])?;
         Ok(serde_json::from_slice(&res)?)
     }
 
+    #[instrument(skip_all, fields(query=query))]
     pub fn files(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
         let res = self.run_notmuch(["search", "--output=files", "--format=json", query])?;
         Ok(serde_json::from_slice(&res)?)
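The repeated pattern in these hunks is tracing's #[instrument] attribute: skip_all keeps arguments off the span unless explicitly re-added via fields(...), so each notmuch call gets a span carrying just the query or tag it ran with. A minimal, self-contained sketch of the effect (assuming the tracing and tracing-subscriber crates; count_threads is a hypothetical stand-in for the methods above):

use tracing::{info, instrument};

// skip_all: don't Debug-format any argument; fields(query = query) records it explicitly.
#[instrument(skip_all, fields(query = query))]
fn count_threads(query: &str) -> usize {
    // Emitted inside the span, so this event carries the `query` field.
    info!("running notmuch count");
    0
}

fn main() {
    tracing_subscriber::fmt().init();
    count_threads("tag:inbox");
}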

procmail2notmuch/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "procmail2notmuch"
-version = "0.1.0"
+version = "0.0.115"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

scripts/update-crate-version.sh (new file, 5 lines)

@@ -0,0 +1,5 @@
#!env bash
set -e -x
cargo-set-version set-version --bump patch
VERSION="$(awk -F\" '/^version/ {print $2}' server/Cargo.toml)"
git commit Cargo.lock */Cargo.toml -m "Bumping version to ${VERSION:?}"

.sqlx/query-113694cd5bf0d2582ff3a635776daa608fe88abe1185958c4215646c92335afb.json (new file)

@@ -0,0 +1,64 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n date,\n is_read,\n link,\n site,\n summary,\n title,\n name,\n homepage\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n uid = $1\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "date",
"type_info": "Timestamp"
},
{
"ordinal": 1,
"name": "is_read",
"type_info": "Bool"
},
{
"ordinal": 2,
"name": "link",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "site",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "summary",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 7,
"name": "homepage",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
true,
true,
false,
true,
true,
true,
true,
true
]
},
"hash": "113694cd5bf0d2582ff3a635776daa608fe88abe1185958c4215646c92335afb"
}

.sqlx/query-126e16a4675e8d79f330b235f9e1b8614ab1e1526e4e69691c5ebc70d54a42ef.json (new file)

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "\nSELECT\n url\nFROM email_photo ep\nJOIN email_address ea\nON ep.id = ea.email_photo_id\nWHERE\n address = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "url",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "126e16a4675e8d79f330b235f9e1b8614ab1e1526e4e69691c5ebc70d54a42ef"
}
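These .sqlx JSON files are the offline cache written by cargo sqlx prepare (the "cargo sqlx prepare" commits above); each hash keys the describe metadata for one query!() invocation, letting the crate type-check its SQL without a live database. A hedged sketch of the call the file above backs (assuming a sqlx PgPool named pool and an address: &str in scope; the real invocation must match the cached SQL string byte-for-byte):

// With SQLX_OFFLINE=true this compiles against the cached describe data above.
let photo_url = sqlx::query!(
    "SELECT url FROM email_photo ep JOIN email_address ea ON ep.id = ea.email_photo_id WHERE address = $1",
    address
)
.fetch_optional(&pool)
.await?
.map(|row| row.url);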

.sqlx/query-2dcbedef656e1b725c5ba4fb67d31ce7962d8714449b2fb630f49a7ed1acc270.json (new file)

@@ -0,0 +1,32 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n site,\n name,\n count (\n NOT is_read\n OR NULL\n ) unread\nFROM\n post AS p\n JOIN feed AS f ON p.site = f.slug --\n -- TODO: figure this out to make the query faster when only looking for unread\n --WHERE\n -- (\n -- NOT $1\n -- OR NOT is_read\n -- )\nGROUP BY\n 1,\n 2\nORDER BY\n site\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "site",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "unread",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
true,
true,
null
]
},
"hash": "2dcbedef656e1b725c5ba4fb67d31ce7962d8714449b2fb630f49a7ed1acc270"
}

.sqlx/query-99114d4840067acb12d9a41ef036bdd8ecf87cfdde8ce4985821485816af5213.json (new file)

@@ -0,0 +1,56 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n site,\n date,\n is_read,\n title,\n uid,\n name\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n ($1::text IS NULL OR site = $1)\n AND (\n NOT $2\n OR NOT is_read\n )\n AND (\n $5 :: text IS NULL\n OR to_tsvector('english', summary) @@ websearch_to_tsquery('english', $5)\n )\nORDER BY\n date DESC,\n title OFFSET $3\nLIMIT\n $4\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "site",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "date",
"type_info": "Timestamp"
},
{
"ordinal": 2,
"name": "is_read",
"type_info": "Bool"
},
{
"ordinal": 3,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "uid",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "name",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text",
"Bool",
"Int8",
"Int8",
"Text"
]
},
"nullable": [
true,
true,
true,
true,
false,
true
]
},
"hash": "99114d4840067acb12d9a41ef036bdd8ecf87cfdde8ce4985821485816af5213"
}

.sqlx/query-b39147b9d06171cb742141eda4675688cb702fb284758b1224ed3aa2d7f3b3d9.json (new file)

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n post\nSET\n is_read = $1\nWHERE\n uid = $2\n",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "b39147b9d06171cb742141eda4675688cb702fb284758b1224ed3aa2d7f3b3d9"
}

.sqlx/query-dabd12987369cb273c0191d46645c376439d246d5a697340574c6afdac93d2cc.json (new file)

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "\nSELECT id\nFROM feed\nWHERE slug = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "dabd12987369cb273c0191d46645c376439d246d5a697340574c6afdac93d2cc"
}

.sqlx/query-e118f546c628661023aa25803bb29affb6cd25eca63246e5ace5b90a845d76ac.json (new file)

@@ -0,0 +1,24 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n COUNT(*) count\nFROM\n post\nWHERE\n (\n $1 :: text IS NULL\n OR site = $1\n )\n AND (\n NOT $2\n OR NOT is_read\n )\n AND (\n $3 :: text IS NULL\n OR to_tsvector('english', summary) @@ websearch_to_tsquery('english', $3)\n )\n",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Text",
"Bool",
"Text"
]
},
"nullable": [
null
]
},
"hash": "e118f546c628661023aa25803bb29affb6cd25eca63246e5ace5b90a845d76ac"
}

.sqlx/query-e2a448aaf4fe92fc1deda10bf844f6b9225d35758cba7c9f337c1a730aee41bd.json (new file)

@@ -0,0 +1,24 @@
{
"db_name": "PostgreSQL",
"query": "\nINSERT INTO feed ( name, slug, url, homepage, selector )\nVALUES ( $1, $2, $3, '', '' )\nRETURNING id\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Text",
"Text",
"Text"
]
},
"nullable": [
false
]
},
"hash": "e2a448aaf4fe92fc1deda10bf844f6b9225d35758cba7c9f337c1a730aee41bd"
}

server/Cargo.toml

@@ -1,33 +1,52 @@
 [package]
-name = "server"
-version = "0.1.0"
+name = "letterbox-server"
+version = "0.0.115"
 edition = "2021"
-default-run = "server"
+default-run = "letterbox-server"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
-notmuch = { path = "../notmuch" }
-shared = { path = "../shared" }
-serde_json = "1.0.87"
-thiserror = "1.0.37"
-serde = { version = "1.0.147", features = ["derive"] }
-log = "0.4.17"
-tokio = "1.26.0"
-glog = "0.1.0"
-urlencoding = "2.1.3"
-async-graphql = { version = "6.0.11", features = ["log"] }
-async-graphql-rocket = "6.0.11"
-rocket_cors = "0.6.0"
-memmap = "0.7.0"
-mailparse = "0.15.0"
 ammonia = "3.3.0"
-lol_html = "1.2.0"
-css-inline = "0.13.0"
 anyhow = "1.0.79"
-maplit = "1.0.2"
+async-graphql = { version = "7", features = ["log"] }
+async-graphql-rocket = "7"
+async-trait = "0.1.81"
+build-info = "0.0.38"
+cacher = { version = "0.1.0", registry = "xinu" }
+chrono = "0.4.39"
+clap = { version = "4.5.23", features = ["derive"] }
+css-inline = "0.13.0"
+html-escape = "0.2.13"
 linkify = "0.10.0"
-sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio", "time"] }
+log = "0.4.17"
+lol_html = "1.2.0"
+mailparse = "0.15.0"
+maplit = "1.0.2"
+memmap = "0.7.0"
+notmuch = { path = "../notmuch" }
+opentelemetry = "0.27.1"
+reqwest = { version = "0.12.7", features = ["blocking"] }
+rocket = { version = "0.5.0-rc.2", features = ["json"] }
+rocket_cors = "0.6.0"
+scraper = "0.20.0"
+serde = { version = "1.0.147", features = ["derive"] }
+serde_json = "1.0.87"
+shared = { path = "../shared" }
+sqlx = { version = "0.8.2", features = ["postgres", "runtime-tokio", "time"] }
+tantivy = { version = "0.22.0", optional = true }
+thiserror = "1.0.37"
+tokio = "1.26.0"
+tracing = "0.1.41"
 url = "2.5.2"
+urlencoding = "2.1.3"
+#xtracing = { path = "../../xtracing" }
+#xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
+xtracing = { version = "0.1.0", registry = "xinu" }
+
+[build-dependencies]
+build-info-build = "0.0.38"
+
+[features]
+#default = [ "tantivy" ]
+tantivy = ["dep:tantivy"]

server/Rocket.toml

@@ -1,6 +1,8 @@
 [release]
 address = "0.0.0.0"
 port = 9345
+newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
+newsreader_tantivy_db_path = "../target/database/newsreader"
 
 [debug]
 address = "0.0.0.0"
@@ -8,3 +10,5 @@ port = 9345
 # Uncomment to make it production like.
 #log_level = "critical"
 newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
+newsreader_tantivy_db_path = "../target/database/newsreader"
+slurp_cache_path = "/net/nasx/x/letterbox/slurp"

server/build.rs (new file, 5 lines)

@@ -0,0 +1,5 @@
fn main() {
    // Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
    // and `build_info::format!` in the main program.
    build_info_build::build_script();
}


@@ -0,0 +1,3 @@
DROP INDEX IF EXISTS post_summary_idx;
DROP INDEX IF EXISTS post_site_idx;
DROP INDEX IF EXISTS post_title_idx;


@@ -0,0 +1,3 @@
CREATE INDEX post_summary_idx ON post USING GIN (to_tsvector('english', summary));
CREATE INDEX post_site_idx ON post USING GIN (to_tsvector('english', site));
CREATE INDEX post_title_idx ON post USING GIN (to_tsvector('english', title));
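A GIN index over to_tsvector('english', …) is only consulted when a query's predicate repeats that same expression; the reworked search and count SQL later in this diff do exactly that. A minimal sqlx sketch of the matching shape (assuming a PgPool named pool and user-supplied terms: &str):

let hits = sqlx::query!(
    "SELECT uid, title FROM post WHERE to_tsvector('english', summary) @@ websearch_to_tsquery('english', $1)",
    terms
)
.fetch_all(&pool)
.await?;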


@@ -0,0 +1,24 @@
BEGIN;
ALTER TABLE IF EXISTS public."Email" DROP CONSTRAINT IF EXISTS email_avatar_fkey;
ALTER TABLE IF EXISTS public."EmailDisplayName" DROP CONSTRAINT IF EXISTS email_id_fk;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_to_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_cc_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_from_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_header_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_file_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_body_id_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_thread_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_tag_fkey;
DROP TABLE IF EXISTS public."Email";
DROP TABLE IF EXISTS public."EmailDisplayName";
DROP TABLE IF EXISTS public."Message";
DROP TABLE IF EXISTS public."Header";
DROP TABLE IF EXISTS public."File";
DROP TABLE IF EXISTS public."Avatar";
DROP TABLE IF EXISTS public."Body";
DROP TABLE IF EXISTS public."Thread";
DROP TABLE IF EXISTS public."Tag";
END;


@@ -0,0 +1,174 @@
-- This script was generated by the ERD tool in pgAdmin 4.
-- Please log an issue at https://github.com/pgadmin-org/pgadmin4/issues/new/choose if you find any bugs, including reproduction steps.
BEGIN;
ALTER TABLE IF EXISTS public."Email" DROP CONSTRAINT IF EXISTS email_avatar_fkey;
ALTER TABLE IF EXISTS public."EmailDisplayName" DROP CONSTRAINT IF EXISTS email_id_fk;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_to_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_cc_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_from_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_header_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_file_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_body_id_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_thread_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_tag_fkey;
CREATE TABLE IF NOT EXISTS public."Email"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
address text NOT NULL,
avatar_id integer,
PRIMARY KEY (id),
CONSTRAINT avatar_id UNIQUE (avatar_id)
);
CREATE TABLE IF NOT EXISTS public."EmailDisplayName"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
email_id integer NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS public."Message"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
subject text,
"from" integer,
"to" integer,
cc integer,
header_id integer,
hash text NOT NULL,
file_id integer NOT NULL,
date timestamp with time zone NOT NULL,
unread boolean NOT NULL,
body_id integer NOT NULL,
thread_id integer NOT NULL,
tag_id integer,
CONSTRAINT body_id UNIQUE (body_id)
);
CREATE TABLE IF NOT EXISTS public."Header"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
key text NOT NULL,
value text NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS public."File"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
path text NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS public."Avatar"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
url text NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS public."Body"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
text text NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS public."Thread"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS public."Tag"
(
id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
name text NOT NULL,
display text,
fg_color integer,
bg_color integer,
PRIMARY KEY (id)
);
ALTER TABLE IF EXISTS public."Email"
ADD CONSTRAINT email_avatar_fkey FOREIGN KEY (avatar_id)
REFERENCES public."Avatar" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."EmailDisplayName"
ADD CONSTRAINT email_id_fk FOREIGN KEY (email_id)
REFERENCES public."Email" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."Message"
ADD CONSTRAINT message_to_fkey FOREIGN KEY ("to")
REFERENCES public."Email" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."Message"
ADD CONSTRAINT message_cc_fkey FOREIGN KEY (cc)
REFERENCES public."Email" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."Message"
ADD CONSTRAINT message_from_fkey FOREIGN KEY ("from")
REFERENCES public."Email" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."Message"
ADD CONSTRAINT message_header_fkey FOREIGN KEY (header_id)
REFERENCES public."Header" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."Message"
ADD CONSTRAINT message_file_fkey FOREIGN KEY (file_id)
REFERENCES public."File" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."Message"
ADD CONSTRAINT message_body_id_fkey FOREIGN KEY (body_id)
REFERENCES public."Body" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."Message"
ADD CONSTRAINT message_thread_fkey FOREIGN KEY (thread_id)
REFERENCES public."Thread" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
ALTER TABLE IF EXISTS public."Message"
ADD CONSTRAINT message_tag_fkey FOREIGN KEY (tag_id)
REFERENCES public."Tag" (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
NOT VALID;
END;


@@ -0,0 +1,3 @@
-- Add down migration script here
ALTER TABLE
post DROP CONSTRAINT post_link_key;


@@ -0,0 +1,28 @@
WITH dupes AS (
SELECT
uid,
link,
Row_number() over(
PARTITION by link
ORDER BY
link
) AS RowNumber
FROM
post
)
DELETE FROM
post
WHERE
uid IN (
SELECT
uid
FROM
dupes
WHERE
RowNumber > 1
);
ALTER TABLE
post
ADD
UNIQUE (link);
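With UNIQUE (link) in place, re-slurping a feed would abort on the first duplicate unless the insert path tolerates conflicts. One idiomatic option is ON CONFLICT DO NOTHING; this is an assumption about the insert code, which is not part of this diff, and the column list is abridged to fields visible elsewhere here:

// Hypothetical insert; skips rows whose link already exists instead of erroring.
sqlx::query!(
    "INSERT INTO post (uid, site, title, summary, link, date, is_read)
     VALUES ($1, $2, $3, $4, $5, $6, false)
     ON CONFLICT (link) DO NOTHING",
    uid, site, title, summary, link, date
)
.execute(&pool)
.await?;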


@@ -0,0 +1,7 @@
ALTER TABLE
post
ALTER COLUMN
link DROP NOT NULL;
ALTER TABLE
post DROP CONSTRAINT link;


@@ -0,0 +1,17 @@
DELETE FROM
post
WHERE
link IS NULL
OR link = '';
ALTER TABLE
post
ALTER COLUMN
link
SET
NOT NULL;
ALTER TABLE
post
ADD
CONSTRAINT link CHECK (link <> '');


@@ -0,0 +1,3 @@
DROP TABLE IF EXISTS email_address;
DROP TABLE IF EXISTS photo;
DROP TABLE IF EXISTS google_person;


@@ -0,0 +1,19 @@
-- Add up migration script here
CREATE TABLE IF NOT EXISTS google_person (
id SERIAL PRIMARY KEY,
resource_name TEXT NOT NULL UNIQUE,
display_name TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS email_photo (
id SERIAL PRIMARY KEY,
google_person_id INTEGER REFERENCES google_person (id) UNIQUE,
url TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS email_address (
id SERIAL PRIMARY KEY,
address TEXT NOT NULL UNIQUE,
email_photo_id INTEGER REFERENCES email_photo (id),
google_person_id INTEGER REFERENCES google_person (id)
);


@@ -0,0 +1,5 @@
-- Add down migration script here
DROP INDEX post_summary_idx;
CREATE INDEX post_summary_idx ON post USING gin (
to_tsvector('english', summary)
);


@@ -0,0 +1,11 @@
-- Something like this around summary in the idx w/ tsvector
DROP INDEX post_summary_idx;
CREATE INDEX post_summary_idx ON post USING gin (to_tsvector(
'english',
regexp_replace(
regexp_replace(summary, '<[^>]+>', ' ', 'g'),
'\s+',
' ',
'g'
)
));
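The comment at the top is the operative constraint: Postgres matches an expression index only when the query repeats the expression verbatim, so searches against this index must wrap summary in the same nested regexp_replace (this migration pairs with the "server: strip html from search index of summaries" commit in the list above). A hedged sqlx sketch of the matching predicate:

let hits = sqlx::query!(
    r"SELECT uid FROM post
      WHERE to_tsvector('english', regexp_replace(regexp_replace(summary, '<[^>]+>', ' ', 'g'), '\s+', ' ', 'g'))
            @@ websearch_to_tsquery('english', $1)",
    terms
)
.fetch_all(&pool)
.await?;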

server/sql/all-posts.sql (new file, 14 lines)

@@ -0,0 +1,14 @@
SELECT
site,
title,
summary,
link,
date,
is_read,
uid,
p.id id
FROM
post AS p
JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts
ORDER BY
date DESC;

server/sql/all-uids.sql (new file, 6 lines)

@@ -0,0 +1,6 @@
SELECT
uid
FROM
post AS p
JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts
;


@@ -3,8 +3,15 @@ SELECT
 FROM
     post
 WHERE
-    site = $1
+    (
+        $1 :: text IS NULL
+        OR site = $1
+    )
     AND (
         NOT $2
         OR NOT is_read
     )
+    AND (
+        $3 :: text IS NULL
+        OR to_tsvector('english', summary) @@ websearch_to_tsquery('english', $3)
+    )


@@ -0,0 +1,14 @@
SELECT
site AS "site!",
title AS "title!",
summary AS "summary!",
link AS "link!",
date AS "date!",
is_read AS "is_read!",
uid AS "uid!",
p.id id
FROM
post p
JOIN feed f ON p.site = f.slug
WHERE
uid = ANY ($1);


@@ -0,0 +1,6 @@
UPDATE
post
SET
is_read = $1
WHERE
uid = $2


@@ -0,0 +1,14 @@
SELECT
site,
date,
is_read,
title,
uid,
name
FROM
post p
JOIN feed f ON p.site = f.slug
WHERE
uid = ANY ($1)
ORDER BY
date DESC;


@@ -1,4 +1,5 @@
 SELECT
+    site,
     date,
     is_read,
     title,
@@ -8,11 +9,15 @@ FROM
     post p
     JOIN feed f ON p.site = f.slug
 WHERE
-    site = $1
+    ($1::text IS NULL OR site = $1)
     AND (
         NOT $2
         OR NOT is_read
     )
+    AND (
+        $5 :: text IS NULL
+        OR to_tsvector('english', summary) @@ websearch_to_tsquery('english', $5)
+    )
 ORDER BY
     date DESC,
     title OFFSET $3


@@ -1,6 +1,6 @@
 use std::fs;
 
-use server::sanitize_html;
+use letterbox_server::sanitize_html;
 
 fn main() -> anyhow::Result<()> {
     let mut args = std::env::args().skip(1);
@@ -9,7 +9,7 @@ fn main() -> anyhow::Result<()> {
     println!("Sanitizing {src} into {dst}");
     let bytes = fs::read(src)?;
     let html = String::from_utf8_lossy(&bytes);
-    let html = sanitize_html(&html, "")?;
+    let html = sanitize_html(&html, "", &None)?;
     fs::write(dst, html)?;
 
     Ok(())

server/src/bin/email2db.rs (new file, 22 lines)

@@ -0,0 +1,22 @@
use clap::Parser;
use letterbox_server::mail::read_mail_to_db;
use sqlx::postgres::PgPool;

/// Add certain emails as posts in newsfeed app.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
    /// DB URL, something like postgres://newsreader@nixos-07.h.xinu.tv/newsreader
    #[arg(short, long)]
    db_url: String,
    /// path to parse
    path: String,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
    let args = Args::parse();
    let pool = PgPool::connect(&args.db_url).await?;
    read_mail_to_db(&pool, &args.path).await?;
    Ok(())
}

server/src/main.rs

@@ -1,10 +1,20 @@
+// Rocket generates a lot of warnings for handlers
+// TODO: figure out why
+#![allow(unreachable_patterns)]
 #[macro_use]
 extern crate rocket;
 
 use std::{error::Error, io::Cursor, str::FromStr};
 
-use async_graphql::{http::GraphiQLSource, EmptySubscription, Schema};
+use async_graphql::{extensions, http::GraphiQLSource, EmptySubscription, Schema};
 use async_graphql_rocket::{GraphQLQuery, GraphQLRequest, GraphQLResponse};
-use glog::Flags;
+#[cfg(feature = "tantivy")]
+use letterbox_server::tantivy::TantivyConnection;
+use letterbox_server::{
+    config::Config,
+    error::ServerError,
+    graphql::{Attachment, GraphqlSchema, Mutation, QueryRoot},
+    nm::{attachment_bytes, cid_attachment_bytes},
+};
 use notmuch::{Notmuch, NotmuchError, ThreadSet};
 use rocket::{
     fairing::AdHoc,
@@ -15,24 +25,8 @@
     Response, State,
 };
 use rocket_cors::{AllowedHeaders, AllowedOrigins};
-use serde::Deserialize;
-use server::{
-    error::ServerError,
-    graphql::{Attachment, GraphqlSchema, Mutation, QueryRoot},
-    nm::{attachment_bytes, cid_attachment_bytes},
-};
 use sqlx::postgres::PgPool;
 
-#[derive(Deserialize)]
-struct Config {
-    newsreader_database_url: String,
-}
-
-#[get("/refresh")]
-async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
-    Ok(Json(String::from_utf8_lossy(&nm.new()?).to_string()))
-}
-
 #[get("/show/<query>/pretty")]
 async fn show_pretty(
     nm: &State<Notmuch>,
@@ -164,7 +158,7 @@
 
 #[rocket::get("/")]
 fn graphiql() -> content::RawHtml<String> {
-    content::RawHtml(GraphiQLSource::build().endpoint("/graphql").finish())
+    content::RawHtml(GraphiQLSource::build().endpoint("/api/graphql").finish())
 }
 
 #[rocket::get("/graphql?<query..>")]
@@ -182,14 +176,9 @@
 #[rocket::main]
 async fn main() -> Result<(), Box<dyn Error>> {
-    glog::new()
-        .init(Flags {
-            colorlogtostderr: true,
-            //alsologtostderr: true, // use logtostderr to only write to stderr and not to files
-            logtostderr: true,
-            ..Default::default()
-        })
-        .unwrap();
+    let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
+    build_info::build_info!(fn bi);
+    info!("Build Info: {}", shared::build_version(bi));
 
     let allowed_origins = AllowedOrigins::all();
     let cors = rocket_cors::CorsOptions {
         allowed_origins,
@@ -208,7 +197,6 @@
             shared::urls::MOUNT_POINT,
             routes![
                 original,
-                refresh,
                 show_pretty,
                 show,
                 graphql_query,
@@ -223,12 +211,24 @@
         .attach(AdHoc::config::<Config>());
 
     let config: Config = rkt.figment().extract()?;
+    if !std::fs::exists(&config.slurp_cache_path)? {
+        info!("Creating slurp cache @ '{}'", &config.slurp_cache_path);
+        std::fs::create_dir_all(&config.slurp_cache_path)?;
+    }
     let pool = PgPool::connect(&config.newsreader_database_url).await?;
+    sqlx::migrate!("./migrations").run(&pool).await?;
+
+    #[cfg(feature = "tantivy")]
+    let tantivy_conn = TantivyConnection::new(&config.newsreader_tantivy_db_path)?;
 
     let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
         .data(Notmuch::default())
-        .data(pool.clone())
-        .extension(async_graphql::extensions::Logger)
-        .finish();
+        .data(config)
+        .data(pool.clone());
+    #[cfg(feature = "tantivy")]
+    let schema = schema.data(tantivy_conn);
+    let schema = schema.extension(extensions::Logger).finish();
 
     let rkt = rkt.manage(schema).manage(pool).manage(Notmuch::default());
     //.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))

File diff suppressed because it is too large

server/src/config.rs Normal file

@@ -0,0 +1,7 @@
use serde::Deserialize;
#[derive(Deserialize)]
pub struct Config {
pub newsreader_database_url: String,
pub newsreader_tantivy_db_path: String,
pub slurp_cache_path: String,
}
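// Illustrative only (the values below are hypothetical): Rocket's figment
// populates these fields from Rocket.toml or from ROCKET_-prefixed environment
// variables, e.g.
//
//   [default]
//   newsreader_database_url = "postgres://localhost/newsreader"
//   newsreader_tantivy_db_path = "/var/lib/letterbox/tantivy"
//   slurp_cache_path = "/var/cache/letterbox/slurp"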

server/src/custom.css Normal file

@@ -0,0 +1,8 @@
pre {
background-color: var(--color-bg);
color: var(--color-text);
}
code {
background-color: var(--color-bg-secondary);
}

server/src/error.rs

@@ -1,9 +1,11 @@
use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error};

use mailparse::MailParseError;
#[cfg(feature = "tantivy")]
use tantivy::{query::QueryParserError, TantivyError};
use thiserror::Error;

use crate::TransformError;

#[derive(Error, Debug)]
pub enum ServerError {
@@ -19,8 +21,8 @@ pub enum ServerError {
PartNotFound,
#[error("sqlx error: {0}")]
SQLXError(#[from] sqlx::Error),
#[error("html transform error: {0}")]
TransformError(#[from] TransformError),
#[error("UTF8 error: {0}")]
Utf8Error(#[from] Utf8Error),
#[error("FromUTF8 error: {0}")]
@@ -29,6 +31,12 @@ pub enum ServerError {
StringError(String),
#[error("invalid url: {0}")]
UrlParseError(#[from] url::ParseError),
#[cfg(feature = "tantivy")]
#[error("tantivy error: {0}")]
TantivyError(#[from] TantivyError),
#[cfg(feature = "tantivy")]
#[error("tantivy query parse error: {0}")]
QueryParseError(#[from] QueryParserError),
#[error("impossible: {0}")]
InfaillibleError(#[from] Infallible),
}

server/src/graphql.rs

@@ -1,13 +1,20 @@
use std::{fmt, str::FromStr};

use async_graphql::{
connection::{self, Connection, Edge, OpaqueCursor},
Context, EmptySubscription, Enum, Error, FieldResult, InputObject, Object, Schema,
SimpleObject, Union,
};
use log::info;
use notmuch::Notmuch;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use tokio::join;
use tracing::instrument;

#[cfg(feature = "tantivy")]
use crate::tantivy::TantivyConnection;
use crate::{config::Config, newsreader, nm, Query};

/// # Number of seconds since the Epoch
pub type UnixTime = isize;
@@ -15,6 +22,26 @@ pub type UnixTime = isize;
/// # Thread ID, sans "thread:"
pub type ThreadId = String;
#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
pub enum Corpus {
Notmuch,
Newsreader,
Tantivy,
}
impl FromStr for Corpus {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(match s {
"notmuch" => Corpus::Notmuch,
"newsreader" => Corpus::Newsreader,
"tantivy" => Corpus::Tantivy,
s => return Err(format!("unknown corpus: '{s}'")),
})
}
}
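// A hedged usage sketch (test module added for illustration; not part of the diff):
#[cfg(test)]
mod corpus_from_str_examples {
    use super::*;

    #[test]
    fn parses_known_corpora() {
        assert_eq!("notmuch".parse::<Corpus>(), Ok(Corpus::Notmuch));
        assert_eq!("tantivy".parse::<Corpus>(), Ok(Corpus::Tantivy));
        // Unknown values surface the offending string in the error.
        assert!("rss".parse::<Corpus>().is_err());
    }
}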
// TODO: add is_read field and remove all use of 'tag:unread'
#[derive(Debug, SimpleObject)]
pub struct ThreadSummary {
pub thread: ThreadId,
@@ -29,10 +56,29 @@ pub struct ThreadSummary {
pub authors: String,
pub subject: String,
pub tags: Vec<String>,
pub corpus: Corpus,
}

#[derive(Debug, Union)]
pub enum Thread {
Email(EmailThread),
News(NewsPost),
}

#[derive(Debug, SimpleObject)]
pub struct NewsPost {
pub thread_id: String,
pub is_read: bool,
pub slug: String,
pub site: String,
pub title: String,
pub body: String,
pub url: String,
pub timestamp: i64,
}

#[derive(Debug, SimpleObject)]
pub struct EmailThread {
pub thread_id: String,
pub subject: String,
pub messages: Vec<Message>,
@@ -191,31 +237,78 @@ impl Body {
pub struct Email {
pub name: Option<String>,
pub addr: Option<String>,
pub photo_url: Option<String>,
}

impl fmt::Display for Email {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
match (&self.name, &self.addr) {
(Some(name), Some(addr)) => write!(f, "{name} <{addr}>")?,
(Some(name), None) => write!(f, "{name}")?,
(None, Some(addr)) => write!(f, "{addr}")?,
(None, None) => write!(f, "<UNKNOWN>")?,
}
Ok(())
}
}

#[derive(SimpleObject)]
pub struct Tag {
pub name: String,
pub fg_color: String,
pub bg_color: String,
pub unread: usize,
}
#[derive(Serialize, Deserialize, Debug, InputObject)]
struct SearchCursor {
newsreader_offset: i32,
notmuch_offset: i32,
#[cfg(feature = "tantivy")]
tantivy_offset: i32,
}
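// Sketch of the cursor's role (an assumption about async-graphql's OpaqueCursor,
// which serializes the value and base64-encodes it): the client sees one opaque
// string while the server keeps an independent offset per corpus, e.g.
//
//   SearchCursor { newsreader_offset: 40, notmuch_offset: 17, tantivy_offset: 3 }
//
// means "resume newsreader at row 41, notmuch at result 18, tantivy at hit 4".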
fn request_id() -> String {
let now = std::time::SystemTime::now();
let nanos = now
.duration_since(std::time::SystemTime::UNIX_EPOCH)
.unwrap_or_default()
.as_nanos();
format!("{nanos:x}")
}
pub struct QueryRoot;

#[Object]
impl QueryRoot {
async fn version<'ctx>(&self, _ctx: &Context<'ctx>) -> Result<String, Error> {
build_info::build_info!(fn bi);
Ok(shared::build_version(bi))
}
#[instrument(skip_all, fields(query=query, request_id=request_id()))]
async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
#[cfg(feature = "tantivy")]
let tantivy = ctx.data_unchecked::<TantivyConnection>();

let newsreader_query: Query = query.parse()?;
let newsreader_count = newsreader::count(pool, &newsreader_query).await?;
let notmuch_count = nm::count(nm, &newsreader_query).await?;
#[cfg(feature = "tantivy")]
let tantivy_count = tantivy.count(&newsreader_query).await?;
#[cfg(not(feature = "tantivy"))]
let tantivy_count = 0;
let total = newsreader_count + notmuch_count + tantivy_count;
info!("count {newsreader_query:?} newsreader count {newsreader_count} notmuch count {notmuch_count} tantivy count {tantivy_count} total {total}");
Ok(total)
}
// TODO: this function doesn't get parallelism, possibly because notmuch is sync and blocks;
// rewrite that with tokio::process::Command
#[instrument(skip_all, fields(query=query, request_id=request_id()))]
async fn search<'ctx>(
&self,
ctx: &Context<'ctx>,
@@ -224,19 +317,156 @@ impl QueryRoot {
first: Option<i32>,
last: Option<i32>,
query: String,
) -> Result<Connection<OpaqueCursor<SearchCursor>, ThreadSummary>, Error> {
info!("search({after:?} {before:?} {first:?} {last:?} {query:?})");
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
#[cfg(feature = "tantivy")]
let tantivy = ctx.data_unchecked::<TantivyConnection>();

Ok(connection::query(
after,
before,
first,
last,
|after: Option<OpaqueCursor<SearchCursor>>,
before: Option<OpaqueCursor<SearchCursor>>,
first: Option<usize>,
last: Option<usize>| async move {
info!(
"search(after {:?} before {:?} first {first:?} last {last:?} query: {query:?})",
after.as_ref().map(|v| &v.0),
before.as_ref().map(|v| &v.0)
);
let newsreader_after = after.as_ref().map(|sc| sc.newsreader_offset);
let notmuch_after = after.as_ref().map(|sc| sc.notmuch_offset);
#[cfg(feature = "tantivy")]
let tantivy_after = after.as_ref().map(|sc| sc.tantivy_offset);
let newsreader_before = before.as_ref().map(|sc| sc.newsreader_offset);
let notmuch_before = before.as_ref().map(|sc| sc.notmuch_offset);
#[cfg(feature = "tantivy")]
let tantivy_before = before.as_ref().map(|sc| sc.tantivy_offset);
let first = first.map(|v| v as i32);
let last = last.map(|v| v as i32);
let query: Query = query.parse()?;
info!("newsreader_query {query:?}");
let newsreader_fut = newsreader_search(
pool,
newsreader_after,
newsreader_before,
first,
last,
&query,
);
let notmuch_fut =
notmuch_search(nm, notmuch_after, notmuch_before, first, last, &query);
#[cfg(feature = "tantivy")]
let tantivy_fut = tantivy_search(
tantivy,
pool,
tantivy_after,
tantivy_before,
first,
last,
&query,
);
#[cfg(not(feature = "tantivy"))]
let tantivy_fut =
async { Ok::<Vec<ThreadSummaryCursor>, async_graphql::Error>(Vec::new()) };
let (newsreader_results, notmuch_results, tantivy_results) =
join!(newsreader_fut, notmuch_fut, tantivy_fut);
let newsreader_results = newsreader_results?;
let notmuch_results = notmuch_results?;
let tantivy_results = tantivy_results?;
info!(
"newsreader_results ({}) notmuch_results ({}) tantivy_results ({})",
newsreader_results.len(),
notmuch_results.len(),
tantivy_results.len()
);
let mut results: Vec<_> = newsreader_results
.into_iter()
.chain(notmuch_results)
.chain(tantivy_results)
.collect();
// The leading '-' is to reverse sort
results.sort_by_key(|item| match item {
ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
#[cfg(feature = "tantivy")]
ThreadSummaryCursor::Tantivy(_, ts) => -ts.timestamp,
});
let mut has_next_page = before.is_some();
if let Some(first) = first {
let first = first as usize;
if results.len() > first {
has_next_page = true;
results.truncate(first);
}
}
let mut has_previous_page = after.is_some();
if let Some(last) = last {
let last = last as usize;
if results.len() > last {
has_previous_page = true;
results.truncate(last);
}
}
let mut connection = Connection::new(has_previous_page, has_next_page);
// Set starting offset as the value from cursor to preserve state if no results from a corpus survived the truncation
let mut newsreader_offset =
after.as_ref().map(|sc| sc.newsreader_offset).unwrap_or(0);
let mut notmuch_offset = after.as_ref().map(|sc| sc.notmuch_offset).unwrap_or(0);
#[cfg(feature = "tantivy")]
let tantivy_offset = after.as_ref().map(|sc| sc.tantivy_offset).unwrap_or(0);
info!(
"newsreader_offset ({}) notmuch_offset ({})",
newsreader_offset, notmuch_offset,
);
connection.edges.extend(results.into_iter().map(|item| {
let thread_summary;
match item {
ThreadSummaryCursor::Newsreader(offset, ts) => {
thread_summary = ts;
newsreader_offset = offset;
}
ThreadSummaryCursor::Notmuch(offset, ts) => {
thread_summary = ts;
notmuch_offset = offset;
}
#[cfg(feature = "tantivy")]
ThreadSummaryCursor::Tantivy(offset, ts) => {
thread_summary = ts;
tantivy_offset = offset;
}
}
let cur = OpaqueCursor(SearchCursor {
newsreader_offset,
notmuch_offset,
#[cfg(feature = "tantivy")]
tantivy_offset,
});
Edge::new(cur, thread_summary)
}));
Ok::<_, async_graphql::Error>(connection)
},
)
.await?)
}

#[instrument(skip_all, fields(request_id=request_id()))]
async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
@@ -245,27 +475,84 @@ impl QueryRoot {
tags.append(&mut nm::tags(nm, needs_unread)?);
Ok(tags)
}

#[instrument(skip_all, fields(thread_id=thread_id, request_id=request_id()))]
async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
let config = ctx.data_unchecked::<Config>();
let debug_content_tree = ctx
.look_ahead()
.field("messages")
.field("body")
.field("contentTree")
.exists();
if newsreader::is_newsreader_thread(&thread_id) {
Ok(newsreader::thread(config, pool, thread_id).await?)
} else {
Ok(nm::thread(nm, pool, thread_id, debug_content_tree).await?)
}
}
}
#[derive(Debug)]
enum ThreadSummaryCursor {
Newsreader(i32, ThreadSummary),
Notmuch(i32, ThreadSummary),
#[cfg(feature = "tantivy")]
Tantivy(i32, ThreadSummary),
}
async fn newsreader_search(
pool: &PgPool,
after: Option<i32>,
before: Option<i32>,
first: Option<i32>,
last: Option<i32>,
query: &Query,
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
Ok(newsreader::search(pool, after, before, first, last, &query)
.await?
.into_iter()
.map(|(cur, ts)| ThreadSummaryCursor::Newsreader(cur, ts))
.collect())
}
async fn notmuch_search(
nm: &Notmuch,
after: Option<i32>,
before: Option<i32>,
first: Option<i32>,
last: Option<i32>,
query: &Query,
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
Ok(nm::search(nm, after, before, first, last, &query)
.await?
.into_iter()
.map(|(cur, ts)| ThreadSummaryCursor::Notmuch(cur, ts))
.collect())
}
#[cfg(feature = "tantivy")]
async fn tantivy_search(
tantivy: &TantivyConnection,
pool: &PgPool,
after: Option<i32>,
before: Option<i32>,
first: Option<i32>,
last: Option<i32>,
query: &Query,
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
Ok(tantivy
.search(pool, after, before, first, last, &query)
.await?
.into_iter()
.map(|(cur, ts)| ThreadSummaryCursor::Tantivy(cur, ts))
.collect())
}
pub struct Mutation;

#[Object]
impl Mutation {
#[instrument(skip_all, fields(query=query, unread=unread, request_id=request_id()))]
async fn set_read_status<'ctx>(
&self,
ctx: &Context<'ctx>,
@@ -273,14 +560,18 @@ impl Mutation {
unread: bool,
) -> Result<bool, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
let pool = ctx.data_unchecked::<PgPool>();
#[cfg(feature = "tantivy")]
let tantivy = ctx.data_unchecked::<TantivyConnection>();

let query: Query = query.parse()?;
newsreader::set_read_status(pool, &query, unread).await?;
#[cfg(feature = "tantivy")]
tantivy.reindex_thread(pool, &query).await?;
nm::set_read_status(nm, &query, unread).await?;
Ok(true)
}
#[instrument(skip_all, fields(query=query, tag=tag, request_id=request_id()))]
async fn tag_add<'ctx>(
&self,
ctx: &Context<'ctx>,
@@ -292,6 +583,7 @@ impl Mutation {
nm.tag_add(&tag, &query)?;
Ok(true)
}
#[instrument(skip_all, fields(query=query, tag=tag, request_id=request_id()))]
async fn tag_remove<'ctx>(
&self,
ctx: &Context<'ctx>,
@@ -303,6 +595,30 @@ impl Mutation {
nm.tag_remove(&tag, &query)?;
Ok(true)
}
/// Drop and recreate the tantivy index. Warning: this is slow.
#[cfg(feature = "tantivy")]
async fn drop_and_load_index<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
let tantivy = ctx.data_unchecked::<TantivyConnection>();
let pool = ctx.data_unchecked::<PgPool>();
tantivy.drop_and_load_index()?;
tantivy.reindex_all(pool).await?;
Ok(true)
}
#[instrument(skip_all, fields(request_id=request_id()))]
async fn refresh<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
let nm = ctx.data_unchecked::<Notmuch>();
info!("{}", String::from_utf8_lossy(&nm.new()?));
#[cfg(feature = "tantivy")]
{
let tantivy = ctx.data_unchecked::<TantivyConnection>();
let pool = ctx.data_unchecked::<PgPool>();
// TODO: parallelize
tantivy.refresh(pool).await?;
}
Ok(true)
}
}

pub type GraphqlSchema = Schema<QueryRoot, Mutation, EmptySubscription>;

server/src/lib.rs

@@ -1,22 +1,357 @@
pub mod config;
pub mod error;
pub mod graphql;
pub mod mail;
pub mod newsreader;
pub mod nm;
#[cfg(feature = "tantivy")]
pub mod tantivy;

use std::{collections::HashMap, convert::Infallible, fmt, str::FromStr, sync::Arc};

use async_trait::async_trait;
use cacher::{Cacher, FilesystemCacher};
use css_inline::{CSSInliner, InlineError, InlineOptions};
use linkify::{LinkFinder, LinkKind};
use log::{error, info, warn};
use lol_html::{
element, errors::RewritingError, html_content::ContentType, rewrite_str, text,
RewriteStrSettings,
};
use maplit::{hashmap, hashset};
use scraper::{Html, Selector};
use sqlx::types::time::PrimitiveDateTime;
use thiserror::Error;
use tokio::sync::Mutex;
use url::Url;
use crate::{
error::ServerError,
graphql::{Corpus, ThreadSummary},
newsreader::is_newsreader_thread,
nm::is_notmuch_thread_or_id,
};
const NEWSREADER_TAG_PREFIX: &'static str = "News/";
const NEWSREADER_THREAD_PREFIX: &'static str = "news:";
// TODO: figure out how to use Cow
#[async_trait]
trait Transformer: Send + Sync {
fn should_run(&self, _addr: &Option<Url>, _html: &str) -> bool {
true
}
// TODO: should html be something like `html_escape` uses:
// <S: ?Sized + AsRef<str>>(text: &S) -> Cow<str>
async fn transform(&self, addr: &Option<Url>, html: &str) -> Result<String, TransformError>;
}
// TODO: how would we make this more generic to allow good implementations of Transformer outside
// of this module?
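// A minimal sketch of implementing Transformer outside this module (the `Nop`
// type is hypothetical, added here for illustration): `should_run` cheaply gates
// the potentially expensive async `transform` pass.
struct Nop;

#[async_trait]
impl Transformer for Nop {
    fn should_run(&self, _addr: &Option<Url>, _html: &str) -> bool {
        false // never fires, so transform() is skipped by the pipeline
    }

    async fn transform(&self, _addr: &Option<Url>, html: &str) -> Result<String, TransformError> {
        Ok(html.to_string())
    }
}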
#[derive(Error, Debug)]
pub enum TransformError {
#[error("lol-html rewrite error: {0}")]
RewritingError(#[from] RewritingError),
#[error("css inline error: {0}")]
InlineError(#[from] InlineError),
#[error("failed to fetch url error: {0}")]
ReqwestError(#[from] reqwest::Error),
#[error("failed to parse HTML: {0}")]
HtmlParsingError(String),
}
struct SanitizeHtml<'a> {
cid_prefix: &'a str,
base_url: &'a Option<Url>,
}
#[async_trait]
impl<'a> Transformer for SanitizeHtml<'a> {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
Ok(sanitize_html(html, self.cid_prefix, self.base_url)?)
}
}
struct EscapeHtml;
#[async_trait]
impl Transformer for EscapeHtml {
fn should_run(&self, _: &Option<Url>, html: &str) -> bool {
html.contains("&")
}
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
Ok(html_escape::decode_html_entities(html).to_string())
}
}
struct StripHtml;
#[async_trait]
impl Transformer for StripHtml {
fn should_run(&self, _: &Option<Url>, html: &str) -> bool {
// Lame test
html.contains("<")
}
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
let mut text = String::new();
let element_content_handlers = vec![text!("*", |t| {
text += t.as_str();
Ok(())
})];
let _ = rewrite_str(
html,
RewriteStrSettings {
element_content_handlers,
..RewriteStrSettings::default()
},
)?;
Ok(text)
}
}
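// Usage sketch (illustrative): StripHtml only fires when the input looks like
// markup, and returns the concatenated text nodes, e.g.
//
//   StripHtml.should_run(&None, "plain text")   // false
//   StripHtml.transform(&None, "<b>hi</b>")     // Ok("hi")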
struct InlineRemoteStyle<'a> {
base_url: &'a Option<Url>,
}
#[async_trait]
impl<'a> Transformer for InlineRemoteStyle<'a> {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
let css = concat!(
"/* chrome-default.css */\n",
include_str!("chrome-default.css"),
"\n/* mvp.css */\n",
include_str!("mvp.css"),
"\n/* Xinu Specific overrides */\n",
include_str!("custom.css"),
);
let inline_opts = InlineOptions {
//inline_style_tags: true,
//keep_style_tags: false,
//keep_link_tags: true,
base_url: self.base_url.clone(),
//load_remote_stylesheets: true,
//preallocate_node_capacity: 32,
..InlineOptions::default()
};
//info!("HTML:\n{html}");
info!("base_url: {:#?}", self.base_url);
Ok(
match CSSInliner::options()
.base_url(self.base_url.clone())
.build()
.inline(&html)
{
Ok(inlined_html) => inlined_html,
Err(err) => {
error!("failed to inline remote CSS: {err}");
html.to_string()
}
},
)
}
}
struct InlineStyle;
#[async_trait]
impl Transformer for InlineStyle {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
let css = concat!(
"/* chrome-default.css */\n",
include_str!("chrome-default.css"),
"\n/* mvp.css */\n",
include_str!("mvp.css"),
"\n/* Xinu Specific overrides */\n",
include_str!("custom.css"),
);
let inline_opts = InlineOptions {
inline_style_tags: true,
keep_style_tags: false,
keep_link_tags: true,
base_url: None,
load_remote_stylesheets: true,
extra_css: Some(css.into()),
preallocate_node_capacity: 32,
..InlineOptions::default()
};
//info!("HTML:\n{html}");
Ok(match CSSInliner::new(inline_opts).inline(&html) {
Ok(inlined_html) => inlined_html,
Err(err) => {
error!("failed to inline CSS: {err}");
html.to_string()
}
})
}
}
/// FrameImages extracts any alt or title text on images and places it as a
/// caption below the image. It also handles data-src and data-cfsrc attributes.
struct FrameImages;
#[async_trait]
impl Transformer for FrameImages {
async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
Ok(rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![
element!("img[data-src]", |el| {
let src = el
.get_attribute("data-src")
.unwrap_or("https://placehold.co/600x400".to_string());
el.set_attribute("src", &src)?;
Ok(())
}),
element!("img[data-cfsrc]", |el| {
let src = el
.get_attribute("data-cfsrc")
.unwrap_or("https://placehold.co/600x400".to_string());
el.set_attribute("src", &src)?;
Ok(())
}),
element!("img[alt], img[title]", |el| {
let src = el
.get_attribute("src")
.unwrap_or("https://placehold.co/600x400".to_string());
let alt = el.get_attribute("alt");
let title = el.get_attribute("title");
let mut frags =
vec!["<figure>".to_string(), format!(r#"<img src="{src}">"#)];
alt.map(|t| {
if !t.is_empty() {
frags.push(format!("<figcaption>Alt: {t}</figcaption>"))
}
});
title.map(|t| {
if !t.is_empty() {
frags.push(format!("<figcaption>Title: {t}</figcaption>"))
}
});
frags.push("</figure>".to_string());
el.replace(&frags.join("\n"), ContentType::Html);
Ok(())
}),
],
..RewriteStrSettings::default()
},
)?)
}
}
struct AddOutlink;
#[async_trait]
impl Transformer for AddOutlink {
fn should_run(&self, link: &Option<Url>, html: &str) -> bool {
if let Some(link) = link {
link.scheme().starts_with("http") && !html.contains(link.as_str())
} else {
false
}
}
async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
if let Some(link) = link {
Ok(format!(
r#"
{html}
<div><a href="{}">View on site</a></div>
"#,
link
))
} else {
Ok(html.to_string())
}
}
}
struct SlurpContents {
cacher: Arc<Mutex<FilesystemCacher>>,
inline_css: bool,
site_selectors: HashMap<String, Vec<Selector>>,
}
impl SlurpContents {
fn get_selectors(&self, link: &Url) -> Option<&[Selector]> {
for (host, selector) in self.site_selectors.iter() {
if link.host_str().map(|h| h.contains(host)).unwrap_or(false) {
return Some(&selector);
}
}
None
}
}
#[async_trait]
impl Transformer for SlurpContents {
fn should_run(&self, link: &Option<Url>, _: &str) -> bool {
if let Some(link) = link {
return self.get_selectors(link).is_some();
}
false
}
async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
let Some(link) = link else {
return Ok(html.to_string());
};
let Some(selectors) = self.get_selectors(&link) else {
return Ok(html.to_string());
};
let cacher = self.cacher.lock().await;
let body = if let Some(body) = cacher.get(link.as_str()) {
info!("cache hit for {link}");
String::from_utf8_lossy(&body).to_string()
} else {
let body = reqwest::get(link.as_str()).await?.text().await?;
cacher.set(link.as_str(), body.as_bytes());
body
};
let body = Arc::new(body);
let base_url = Some(link.clone());
let body = if self.inline_css {
let inner_body = Arc::clone(&body);
let res = tokio::task::spawn_blocking(move || {
let res = CSSInliner::options()
.base_url(base_url)
.build()
.inline(&inner_body);
match res {
Ok(inlined_html) => inlined_html,
Err(err) => {
error!("failed to inline remote CSS: {err}");
// The outer `body` Arc is still alive here, so into_inner() would be None; clone instead.
(*inner_body).clone()
}
}
})
.await;
match res {
Ok(inlined_html) => inlined_html,
Err(err) => {
error!("failed to spawn inline remote CSS: {err}");
Arc::into_inner(body).expect("failed to take body out of Arc")
}
}
} else {
Arc::into_inner(body).expect("failed to take body out of Arc")
};
let doc = Html::parse_document(&body);
let mut results = Vec::new();
for selector in selectors {
for frag in doc.select(&selector) {
results.push(frag.html())
// TODO: figure out how to warn if there were no hits
//warn!("couldn't find '{:?}' in {}", selector, link);
}
}
Ok(results.join("<br>"))
}
} }
pub fn linkify_html(text: &str) -> String {
@@ -50,31 +385,33 @@ pub fn linkify_html(text: &str) -> String {
pub fn sanitize_html(
html: &str,
cid_prefix: &str,
base_url: &Option<Url>,
) -> Result<String, TransformError> {
let inline_opts = InlineOptions {
inline_style_tags: true,
keep_style_tags: true,
keep_link_tags: false,
base_url: None,
load_remote_stylesheets: false,
extra_css: None,
preallocate_node_capacity: 32,
..InlineOptions::default()
};
let html = match CSSInliner::new(inline_opts).inline(&html) {
Ok(inlined_html) => inlined_html,
Err(err) => {
error!("failed to inline CSS: {err}");
html.to_string()
}
};
let mut element_content_handlers = vec![
// Open links in new tab
element!("a[href]", |el| {
el.set_attribute("target", "_blank").unwrap();
Ok(())
}),
// Replace mixed part CID images with URL
element!("img[src]", |el| {
let src = el
@@ -95,28 +432,60 @@ pub fn sanitize_html(
el.set_attribute("src", &src)?;
Ok(())
}),
// Add https to href with //<domain name>
element!("link[href]", |el| {
info!("found link[href] {el:?}");
let mut href = el.get_attribute("href").expect("href was required");
if href.starts_with("//") {
warn!("adding https to {href}");
href.insert_str(0, "https:");
}
el.set_attribute("href", &href)?;
Ok(())
}),
// Add https to src with //<domain name>
element!("style[src]", |el| {
let mut src = el.get_attribute("src").expect("src was required");
if src.starts_with("//") {
src.insert_str(0, "https:");
}
el.set_attribute("src", &src)?;
Ok(())
}),
];
if let Some(base_url) = base_url {
element_content_handlers.extend(vec![
// Make links with relative URLs absolute
element!("a[href]", |el| {
if let Some(Ok(href)) = el.get_attribute("href").map(|href| base_url.join(&href)) {
el.set_attribute("href", &href.as_str()).unwrap();
}
Ok(())
}),
// Make images with relative srcs absolute
element!("img[src]", |el| {
if let Some(Ok(src)) = el.get_attribute("src").map(|src| base_url.join(&src)) {
el.set_attribute("src", &src.as_str()).unwrap();
}
Ok(())
}),
]);
}
let html = rewrite_str(
&html,
RewriteStrSettings {
element_content_handlers,
..RewriteStrSettings::default()
},
)?;
// Defaults don't allow style, but we want to preserve that.
// TODO: remove 'class' if rendering mails moves to a two-phase process where abstract message
// types are collected, sanitized, and then grouped together as one big HTML doc
@@ -164,6 +533,7 @@ pub fn sanitize_html(
"hgroup",
"hr",
"i",
"iframe", // wathiede
"img",
"ins",
"kbd",
@@ -172,6 +542,7 @@ pub fn sanitize_html(
"map",
"mark",
"nav",
"noscript", // wathiede
"ol",
"p",
"pre",
@@ -225,6 +596,9 @@ pub fn sanitize_html(
"hr" => hashset![
"align", "size", "width"
],
"iframe" => hashset![
"src", "allow", "allowfullscreen"
],
"img" => hashset![
"align", "alt", "height", "src", "width"
],
@@ -260,19 +634,225 @@ pub fn sanitize_html(
],
];

let html = ammonia::Builder::default()
.tags(tags)
.tag_attributes(tag_attributes)
.generic_attributes(attributes)
.clean(&html)
.to_string();
Ok(html)
}
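// Usage sketch (illustrative): with no base URL the relative-URL handlers are
// skipped; inline styles are inlined, `//host` links get an https: scheme, and
// ammonia strips anything outside the allow-lists above, e.g.
//
//   let clean = sanitize_html(r#"<p onclick="x()">hi</p>"#, "cid/", &None)?;
//   // ammonia drops the onclick handler: <p>hi</p>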
fn compute_offset_limit(
after: Option<i32>,
before: Option<i32>,
first: Option<i32>,
last: Option<i32>,
) -> (i32, i32) {
let default_page_size = 100;
match (after, before, first, last) {
// Reasonable defaults
(None, None, None, None) => (0, default_page_size),
(None, None, Some(first), None) => (0, first),
(Some(after), None, None, None) => (after + 1, default_page_size),
(Some(after), None, Some(first), None) => (after + 1, first),
(None, Some(before), None, None) => (0.max(before - default_page_size), default_page_size),
(None, Some(before), None, Some(last)) => (0.max(before - last), last),
(None, None, None, Some(_)) => {
panic!("specifying last and no before doesn't make sense")
}
(None, None, Some(_), Some(_)) => {
panic!("specifying first and last doesn't make sense")
}
(None, Some(_), Some(_), _) => {
panic!("specifying before and first doesn't make sense")
}
(Some(_), Some(_), _, _) => {
panic!("specifying after and before doesn't make sense")
}
(Some(_), None, None, Some(_)) => {
panic!("specifying after and last doesn't make sense")
}
(Some(_), None, Some(_), Some(_)) => {
panic!("specifying after, first and last doesn't make sense")
}
}
}
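// Worked examples (test module added for illustration) of the pagination math above.
#[cfg(test)]
mod compute_offset_limit_examples {
    use super::compute_offset_limit;

    #[test]
    fn forward_and_backward() {
        // First page of 20 rows starts at offset 0.
        assert_eq!(compute_offset_limit(None, None, Some(20), None), (0, 20));
        // `after` names the last row already seen, so resume one past it.
        assert_eq!(compute_offset_limit(Some(40), None, Some(20), None), (41, 20));
        // Backing up: the 10 rows that end just before row 50.
        assert_eq!(compute_offset_limit(None, Some(50), None, Some(10)), (40, 10));
    }
}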
#[derive(Debug, Default)]
pub struct Query {
pub unread_only: bool,
pub tags: Vec<String>,
pub uids: Vec<String>,
pub remainder: Vec<String>,
pub is_notmuch: bool,
pub is_newsreader: bool,
pub is_tantivy: bool,
pub corpus: Option<Corpus>,
}
impl fmt::Display for Query {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
if self.unread_only {
write!(f, "is:unread ")?;
}
for tag in &self.tags {
write!(f, "tag:{tag} ")?;
}
for uid in &self.uids {
write!(f, "id:{uid} ")?;
}
if self.is_notmuch {
write!(f, "is:mail ")?;
}
if self.is_newsreader {
write!(f, "is:newsreader ")?;
}
if let Some(c) = self.corpus {
// Lowercase so the output round-trips through FromStr ("corpus:notmuch", not "corpus:Notmuch").
write!(f, "corpus:{} ", format!("{c:?}").to_lowercase())?;
}
for rem in &self.remainder {
write!(f, "{rem} ")?;
}
Ok(())
}
}
impl Query {
// Converts the internal state of Query to something suitable for notmuch queries. Removes any
// letterbox-specific '<key>:<value>' tags.
fn to_notmuch(&self) -> String {
let mut parts = Vec::new();
if !self.is_notmuch {
return String::new();
}
if self.unread_only {
parts.push("is:unread".to_string());
}
for tag in &self.tags {
parts.push(format!("tag:{tag}"));
}
for uid in &self.uids {
parts.push(uid.clone());
}
parts.extend(self.remainder.clone());
parts.join(" ")
}
}
impl FromStr for Query {
type Err = Infallible;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut unread_only = false;
let mut tags = Vec::new();
let mut uids = Vec::new();
let mut remainder = Vec::new();
let mut is_notmuch = false;
let mut is_newsreader = false;
let mut is_tantivy = false;
let mut corpus = None;
for word in s.split_whitespace() {
if word == "is:unread" {
unread_only = true
} else if word.starts_with("tag:") {
tags.push(word["tag:".len()..].to_string());
/*
} else if word.starts_with("tag:") {
// Any tag that doesn't match site_prefix should explicitly set the site to something not in the
// database
site = Some(NON_EXISTENT_SITE_NAME.to_string());
*/
} else if word.starts_with("corpus:") {
let c = word["corpus:".len()..].to_string();
corpus = c.parse::<Corpus>().map(|c| Some(c)).unwrap_or_else(|e| {
warn!("Error parsing corpus '{c}': {e:?}");
None
});
} else if is_newsreader_thread(word) {
uids.push(word.to_string());
} else if is_notmuch_thread_or_id(word) {
uids.push(word.to_string());
} else if word == "is:mail" || word == "is:email" || word == "is:notmuch" {
is_notmuch = true;
} else if word == "is:news" {
is_newsreader = true;
} else if word == "is:newsreader" {
is_newsreader = true;
} else {
remainder.push(word.to_string());
}
}
// If we don't see any explicit filters for a corpus, flip them all on
if corpus.is_none() && !(is_notmuch || is_tantivy || is_newsreader) {
is_notmuch = true;
is_newsreader = true;
is_tantivy = true;
}
Ok(Query {
unread_only,
tags,
uids,
remainder,
is_notmuch,
is_newsreader,
is_tantivy,
corpus,
})
}
}
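// Parsing sketch (test module added for illustration): with no explicit is:/corpus:
// filter, every corpus stays enabled.
#[cfg(test)]
mod query_parse_examples {
    use super::Query;

    #[test]
    fn mixed_query_fans_out() {
        let q: Query = "is:unread tag:News/hackaday".parse().unwrap();
        assert!(q.unread_only);
        assert_eq!(q.tags, vec!["News/hackaday"]);
        // No is:mail / is:news / corpus: filter, so all corpora stay on.
        assert!(q.is_notmuch && q.is_newsreader && q.is_tantivy);
    }
}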
pub struct ThreadSummaryRecord {
pub site: Option<String>,
pub date: Option<PrimitiveDateTime>,
pub is_read: Option<bool>,
pub title: Option<String>,
pub uid: String,
pub name: Option<String>,
pub corpus: Corpus,
}
async fn thread_summary_from_row(r: ThreadSummaryRecord) -> ThreadSummary {
let site = r.site.unwrap_or("UNKOWN TAG".to_string());
let mut tags = vec![format!("{NEWSREADER_TAG_PREFIX}{site}")];
if !r.is_read.unwrap_or(true) {
tags.push("unread".to_string());
};
let mut title = r.title.unwrap_or("NO TITLE".to_string());
title = clean_title(&title).await.expect("failed to clean title");
ThreadSummary {
thread: format!("{NEWSREADER_THREAD_PREFIX}{}", r.uid),
timestamp: r
.date
.expect("post missing date")
.assume_utc()
.unix_timestamp() as isize,
date_relative: format!("{:?}", r.date),
//date_relative: "TODO date_relative".to_string(),
matched: 0,
total: 1,
authors: r.name.unwrap_or_else(|| site.clone()),
subject: title,
tags,
corpus: r.corpus,
}
}
async fn clean_title(title: &str) -> Result<String, ServerError> {
// Wrap the title in <html> once so the HTML parsers have a root to work with
let mut title = format!("<html>{title}</html>");
let title_transformers: Vec<Box<dyn Transformer>> =
vec![Box::new(EscapeHtml), Box::new(StripHtml)];
for t in title_transformers.iter() {
if t.should_run(&None, &title) {
title = t.transform(&None, &title).await?;
}
}
Ok(title)
}
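// Pipeline sketch (illustrative): decode entities first, then strip tags, so a
// feed title like
//
//   "Ben &amp; Jerry <b>news</b>"
//
// comes out as "Ben & Jerry news".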

server/src/mail.rs Normal file

@@ -0,0 +1,113 @@
use std::{fs::File, io::Read};
use mailparse::{
addrparse_header, dateparse, parse_mail, MailHeaderMap, MailParseError, ParsedMail,
};
use sqlx::postgres::PgPool;
use thiserror::Error;
use tracing::info;
#[derive(Error, Debug)]
pub enum MailError {
#[error("missing from header")]
MissingFrom,
#[error("missing from header display name")]
MissingFromDisplayName,
#[error("missing subject header")]
MissingSubject,
#[error("missing html part")]
MissingHtmlPart,
#[error("missing message ID")]
MissingMessageId,
#[error("missing date")]
MissingDate,
#[error("DB error {0}")]
SqlxError(#[from] sqlx::Error),
#[error("IO error {0}")]
IOError(#[from] std::io::Error),
#[error("mail parse error {0}")]
MailParseError(#[from] MailParseError),
}
pub async fn read_mail_to_db(pool: &PgPool, path: &str) -> Result<(), MailError> {
let mut file = File::open(path)?;
let mut buffer = Vec::new();
file.read_to_end(&mut buffer)?;
let m = parse_mail(&buffer)?;
let subject = m
.headers
.get_first_value("subject")
.ok_or(MailError::MissingSubject)?;
let from = addrparse_header(
m.headers
.get_first_header("from")
.ok_or(MailError::MissingFrom)?,
)?;
let from = from.extract_single_info().ok_or(MailError::MissingFrom)?;
let name = from.display_name.ok_or(MailError::MissingFromDisplayName)?;
let slug = name.to_lowercase().replace(' ', "-");
let url = from.addr;
let message_id = m
.headers
.get_first_value("Message-ID")
.ok_or(MailError::MissingMessageId)?;
let uid = &message_id;
let feed_id = find_feed(&pool, &name, &slug, &url).await?;
let date = dateparse(
&m.headers
.get_first_value("Date")
.ok_or(MailError::MissingDate)?,
)?;
println!("Feed: {feed_id} Subject: {}", subject);
if let Some(_m) = first_html(&m) {
info!("add email {slug} {subject} {message_id} {date} {uid} {url}");
} else {
return Err(MailError::MissingHtmlPart.into());
}
Ok(())
}
fn first_html<'m>(m: &'m ParsedMail<'m>) -> Option<&'m ParsedMail<'m>> {
for ele in m.parts() {
if ele.ctype.mimetype == "text/html" {
return Some(ele);
}
}
None
}
async fn find_feed(pool: &PgPool, name: &str, slug: &str, url: &str) -> Result<i32, MailError> {
match sqlx::query!(
r#"
SELECT id
FROM feed
WHERE slug = $1
"#,
slug
)
.fetch_one(pool)
.await
{
Err(sqlx::Error::RowNotFound) => {
let rec = sqlx::query!(
r#"
INSERT INTO feed ( name, slug, url, homepage, selector )
VALUES ( $1, $2, $3, '', '' )
RETURNING id
"#,
name,
slug,
url
)
.fetch_one(pool)
.await?;
return Ok(rec.id);
}
Ok(rec) => return Ok(rec.id),
Err(e) => return Err(e.into()),
};
}
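// Usage sketch (hypothetical path and connection string): ingest one .eml file.
//
//   let pool = PgPool::connect("postgres://localhost/newsreader").await?;
//   read_mail_to_db(&pool, "/path/to/message.eml").await?;
//
// The sender's display name becomes the feed (slugified), and mails without a
// text/html part are rejected with MailError::MissingHtmlPart.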

server/src/mvp.css Normal file

@@ -0,0 +1,498 @@
/* MVP.css v1.15 - https://github.com/andybrewer/mvp */
/* :root content stored in client side index.html */
html {
scroll-behavior: smooth;
}
@media (prefers-reduced-motion: reduce) {
html {
scroll-behavior: auto;
}
}
/* Layout */
article aside {
background: var(--color-secondary-accent);
border-left: 4px solid var(--color-secondary);
padding: 0.01rem 0.8rem;
}
body {
background: var(--color-bg);
color: var(--color-text);
font-family: var(--font-family);
line-height: var(--line-height);
margin: 0;
overflow-x: hidden;
padding: 0;
}
footer,
header,
main {
margin: 0 auto;
max-width: var(--width-content);
padding: 3rem 1rem;
}
hr {
background-color: var(--color-bg-secondary);
border: none;
height: 1px;
margin: 4rem 0;
width: 100%;
}
section {
display: flex;
flex-wrap: wrap;
justify-content: var(--justify-important);
}
section img,
article img {
max-width: 100%;
}
section pre {
overflow: auto;
}
section aside {
border: 1px solid var(--color-bg-secondary);
border-radius: var(--border-radius);
box-shadow: var(--box-shadow) var(--color-shadow);
margin: 1rem;
padding: 1.25rem;
width: var(--width-card);
}
section aside:hover {
box-shadow: var(--box-shadow) var(--color-bg-secondary);
}
[hidden] {
display: none;
}
/* Headers */
article header,
div header,
main header {
padding-top: 0;
}
header {
text-align: var(--justify-important);
}
header a b,
header a em,
header a i,
header a strong {
margin-left: 0.5rem;
margin-right: 0.5rem;
}
header nav img {
margin: 1rem 0;
}
section header {
padding-top: 0;
width: 100%;
}
/* Nav */
nav {
align-items: center;
display: flex;
font-weight: bold;
justify-content: space-between;
margin-bottom: 7rem;
}
nav ul {
list-style: none;
padding: 0;
}
nav ul li {
display: inline-block;
margin: 0 0.5rem;
position: relative;
text-align: left;
}
/* Nav Dropdown */
nav ul li:hover ul {
display: block;
}
nav ul li ul {
background: var(--color-bg);
border: 1px solid var(--color-bg-secondary);
border-radius: var(--border-radius);
box-shadow: var(--box-shadow) var(--color-shadow);
display: none;
height: auto;
left: -2px;
padding: .5rem 1rem;
position: absolute;
top: 1.7rem;
white-space: nowrap;
width: auto;
z-index: 1;
}
nav ul li ul::before {
/* fill gap above to make mousing over them easier */
content: "";
position: absolute;
left: 0;
right: 0;
top: -0.5rem;
height: 0.5rem;
}
nav ul li ul li,
nav ul li ul li a {
display: block;
}
/* Typography */
code,
samp {
background-color: var(--color-accent);
border-radius: var(--border-radius);
color: var(--color-text);
display: inline-block;
margin: 0 0.1rem;
padding: 0 0.5rem;
}
details {
margin: 1.3rem 0;
}
details summary {
font-weight: bold;
cursor: pointer;
}
h1,
h2,
h3,
h4,
h5,
h6 {
line-height: var(--line-height);
text-wrap: balance;
}
mark {
padding: 0.1rem;
}
ol li,
ul li {
padding: 0.2rem 0;
}
p {
margin: 0.75rem 0;
padding: 0;
width: 100%;
}
pre {
margin: 1rem 0;
max-width: var(--width-card-wide);
padding: 1rem 0;
}
pre code,
pre samp {
display: block;
max-width: var(--width-card-wide);
padding: 0.5rem 2rem;
white-space: pre-wrap;
}
small {
color: var(--color-text-secondary);
}
sup {
background-color: var(--color-secondary);
border-radius: var(--border-radius);
color: var(--color-bg);
font-size: xx-small;
font-weight: bold;
margin: 0.2rem;
padding: 0.2rem 0.3rem;
position: relative;
top: -2px;
}
/* Links */
a {
color: var(--color-link);
display: inline-block;
font-weight: bold;
text-decoration: underline;
}
a:hover {
filter: brightness(var(--hover-brightness));
}
a:active {
filter: brightness(var(--active-brightness));
}
a b,
a em,
a i,
a strong,
button,
input[type="submit"] {
border-radius: var(--border-radius);
display: inline-block;
font-size: medium;
font-weight: bold;
line-height: var(--line-height);
margin: 0.5rem 0;
padding: 1rem 2rem;
}
button,
input[type="submit"] {
font-family: var(--font-family);
}
button:hover,
input[type="submit"]:hover {
cursor: pointer;
filter: brightness(var(--hover-brightness));
}
button:active,
input[type="submit"]:active {
filter: brightness(var(--active-brightness));
}
a b,
a strong,
button,
input[type="submit"] {
background-color: var(--color-link);
border: 2px solid var(--color-link);
color: var(--color-bg);
}
a em,
a i {
border: 2px solid var(--color-link);
border-radius: var(--border-radius);
color: var(--color-link);
display: inline-block;
padding: 1rem 2rem;
}
article aside a {
color: var(--color-secondary);
}
/* Images */
figure {
margin: 0;
padding: 0;
}
figure img {
max-width: 100%;
}
figure figcaption {
color: var(--color-text-secondary);
}
/* Forms */
button:disabled,
input:disabled {
background: var(--color-bg-secondary);
border-color: var(--color-bg-secondary);
color: var(--color-text-secondary);
cursor: not-allowed;
}
button[disabled]:hover,
input[type="submit"][disabled]:hover {
filter: none;
}
form {
border: 1px solid var(--color-bg-secondary);
border-radius: var(--border-radius);
box-shadow: var(--box-shadow) var(--color-shadow);
display: block;
max-width: var(--width-card-wide);
min-width: var(--width-card);
padding: 1.5rem;
text-align: var(--justify-normal);
}
form header {
margin: 1.5rem 0;
padding: 1.5rem 0;
}
input,
label,
select,
textarea {
display: block;
font-size: inherit;
max-width: var(--width-card-wide);
}
input[type="checkbox"],
input[type="radio"] {
display: inline-block;
}
input[type="checkbox"]+label,
input[type="radio"]+label {
display: inline-block;
font-weight: normal;
position: relative;
top: 1px;
}
input[type="range"] {
padding: 0.4rem 0;
}
input,
select,
textarea {
border: 1px solid var(--color-bg-secondary);
border-radius: var(--border-radius);
margin-bottom: 1rem;
padding: 0.4rem 0.8rem;
}
input[type="text"],
input[type="password"] textarea {
width: calc(100% - 1.6rem);
}
input[readonly],
textarea[readonly] {
background-color: var(--color-bg-secondary);
}
label {
font-weight: bold;
margin-bottom: 0.2rem;
}
/* Popups */
dialog {
border: 1px solid var(--color-bg-secondary);
border-radius: var(--border-radius);
box-shadow: var(--box-shadow) var(--color-shadow);
position: fixed;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
width: 50%;
z-index: 999;
}
/* Tables */
table {
border: 1px solid var(--color-bg-secondary);
border-radius: var(--border-radius);
border-spacing: 0;
display: inline-block;
max-width: 100%;
overflow-x: auto;
padding: 0;
white-space: nowrap;
}
table td,
table th,
table tr {
padding: 0.4rem 0.8rem;
text-align: var(--justify-important);
}
table thead {
background-color: var(--color-table);
border-collapse: collapse;
border-radius: var(--border-radius);
color: var(--color-bg);
margin: 0;
padding: 0;
}
table thead tr:first-child th:first-child {
border-top-left-radius: var(--border-radius);
}
table thead tr:first-child th:last-child {
border-top-right-radius: var(--border-radius);
}
table thead th:first-child,
table tr td:first-child {
text-align: var(--justify-normal);
}
table tr:nth-child(even) {
background-color: var(--color-accent);
}
/* Quotes */
blockquote {
display: block;
font-size: x-large;
line-height: var(--line-height);
margin: 1rem auto;
max-width: var(--width-card-medium);
padding: 1.5rem 1rem;
text-align: var(--justify-important);
}
blockquote footer {
color: var(--color-text-secondary);
display: block;
font-size: small;
line-height: var(--line-height);
padding: 1.5rem 0;
}
/* Scrollbars */
* {
scrollbar-width: thin;
scrollbar-color: var(--color-scrollbar) transparent;
}
*::-webkit-scrollbar {
width: 5px;
height: 5px;
}
*::-webkit-scrollbar-track {
background: transparent;
}
*::-webkit-scrollbar-thumb {
background-color: var(--color-scrollbar);
border-radius: 10px;
}

server/src/newsreader.rs

@@ -1,154 +1,166 @@
use std::sync::Arc;

use cacher::FilesystemCacher;
use log::info;
use maplit::hashmap;
use scraper::Selector;
use shared::compute_color;
use sqlx::postgres::PgPool;
use tokio::sync::Mutex;
use tracing::instrument;
use url::Url;

use crate::{
clean_title, compute_offset_limit,
config::Config,
error::ServerError,
graphql::{Corpus, NewsPost, Tag, Thread, ThreadSummary},
thread_summary_from_row, AddOutlink, EscapeHtml, FrameImages, InlineRemoteStyle, Query,
SanitizeHtml, SlurpContents, ThreadSummaryRecord, Transformer, NEWSREADER_TAG_PREFIX,
NEWSREADER_THREAD_PREFIX,
};
pub fn is_newsreader_query(query: &Query) -> bool {
query.is_newsreader || query.corpus == Some(Corpus::Newsreader)
}

pub fn is_newsreader_thread(query: &str) -> bool {
query.starts_with(NEWSREADER_THREAD_PREFIX)
}

pub fn extract_thread_id(query: &str) -> &str {
if query.starts_with(NEWSREADER_THREAD_PREFIX) {
&query[NEWSREADER_THREAD_PREFIX.len()..]
} else {
query
}
}

pub fn extract_site(tag: &str) -> &str {
&tag[NEWSREADER_TAG_PREFIX.len()..]
}

pub fn make_news_tag(tag: &str) -> String {
format!("tag:{NEWSREADER_TAG_PREFIX}{tag}")
}

fn site_from_tags(tags: &[String]) -> Option<String> {
for t in tags {
if t.starts_with(NEWSREADER_TAG_PREFIX) {
return Some(extract_site(t).to_string());
}
}
None
}

#[instrument(name = "newsreader::count", skip_all, fields(query=%query))]
pub async fn count(pool: &PgPool, query: &Query) -> Result<usize, ServerError> {
if !is_newsreader_query(query) {
return Ok(0);
}
let site = site_from_tags(&query.tags);
if !query.tags.is_empty() && site.is_none() {
// Newsreader can only handle all-sites read/unread queries; anything with a non-site tag
// isn't supported
return Ok(0);
}
let search_term = query.remainder.join(" ");
let search_term = search_term.trim();
let search_term = if search_term.is_empty() {
None
} else {
Some(search_term)
};
// TODO: add support for looking for search_term in title and site
let row = sqlx::query_file!("sql/count.sql", site, query.unread_only, search_term)
.fetch_one(pool)
.await?;
Ok(row.count.unwrap_or(0).try_into().unwrap_or(0))
}
#[instrument(name = "newsreader::search", skip_all, fields(query=%query))]
pub async fn search( pub async fn search(
pool: &PgPool, pool: &PgPool,
after: Option<String>, after: Option<i32>,
before: Option<String>, before: Option<i32>,
first: Option<i32>, first: Option<i32>,
last: Option<i32>, last: Option<i32>,
query: String, query: &Query,
) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> { ) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
let query: Query = query.parse()?; info!("search({after:?} {before:?} {first:?} {last:?} {query:?}");
info!("news search query {query:?}"); if !is_newsreader_query(query) {
let site = query.site.expect("search has no site"); return Ok(Vec::new());
connection::query( }
after, let site = site_from_tags(&query.tags);
before, if !query.tags.is_empty() && site.is_none() {
first, // Newsreader can only handle all sites read/unread queries, anything with a non-site tag
last, // isn't supported
|after: Option<usize>, before: Option<usize>, first, last| async move { return Ok(Vec::new());
info!("search page info {after:#?}, {before:#?}, {first:#?}, {last:#?}"); }
let default_page_size = 100;
let (offset, limit) = match (after, before, first, last) {
// Reasonable defaults
(None, None, None, None) => (0, default_page_size),
(None, None, Some(first), None) => (0, first),
(Some(after), None, None, None) => (after, default_page_size),
(Some(after), None, Some(first), None) => (after, first),
(None, Some(before), None, None) => {
(before.saturating_sub(default_page_size), default_page_size)
}
(None, Some(before), None, Some(last)) => (before.saturating_sub(last), last),
(None, None, None, Some(_)) => {
panic!("specifying last and no before doesn't make sense")
}
(None, None, Some(_), Some(_)) => {
panic!("specifying first and last doesn't make sense")
}
(None, Some(_), Some(_), _) => {
panic!("specifying before and first doesn't make sense")
}
(Some(_), Some(_), _, _) => {
panic!("specifying after and before doesn't make sense")
}
(Some(_), None, None, Some(_)) => {
panic!("specifying after and last doesn't make sense")
}
(Some(_), None, Some(_), Some(_)) => {
panic!("specifying after, first and last doesn't make sense")
}
};
// The +1 is to see if there are more pages of data available.
let limit = limit + 1;
info!("search page offset {offset} limit {limit}"); let (offset, mut limit) = compute_offset_limit(after, before, first, last);
let rows = sqlx::query_file!( if before.is_none() {
"sql/threads.sql", // When searching forward, the +1 is to see if there are more pages of data available.
site, // Searching backwards implies there's more pages forward, because the value represented by
query.unread_only, // `before` is on the next page.
offset as i64, limit = limit + 1;
limit as i64 }
)
.fetch_all(pool)
.await?;
let mut slice = rows info!(
.into_iter() "search offset {offset} limit {limit} site {site:?} unread_only {}",
.map(|r| { query.unread_only
let tags = if r.is_read.unwrap_or(false) { );
vec![site.clone()] let search_term = query.remainder.join(" ");
} else { let search_term = search_term.trim();
vec!["unread".to_string(), site.clone()] let search_term = if search_term.is_empty() {
}; None
ThreadSummary { } else {
thread: format!("{THREAD_PREFIX}{}", r.uid), Some(search_term)
timestamp: r };
.date
.expect("post missing date") // TODO: add support for looking for search_term in title and site
.assume_utc() let rows = sqlx::query_file!(
.unix_timestamp() as isize, "sql/threads.sql",
date_relative: "TODO date_relative".to_string(), site,
matched: 0, query.unread_only,
total: 1, offset as i64,
authors: r.name.unwrap_or_else(|| site.clone()), limit as i64,
subject: r.title.unwrap_or("NO TITLE".to_string()), search_term
tags,
}
})
.collect::<Vec<_>>();
let has_more = slice.len() == limit;
let mut connection = Connection::new(offset > 0, has_more);
if has_more {
slice.pop();
};
connection.edges.extend(
slice
.into_iter()
.enumerate()
.map(|(idx, item)| Edge::new(offset + idx, item)),
);
Ok::<_, async_graphql::Error>(connection)
},
) )
.await .fetch_all(pool)
.await?;
let mut res = Vec::new();
for (i, r) in rows.into_iter().enumerate() {
res.push((
i as i32 + offset,
thread_summary_from_row(ThreadSummaryRecord {
site: r.site,
date: r.date,
is_read: r.is_read,
title: r.title,
uid: r.uid,
name: r.name,
corpus: Corpus::Newsreader,
})
.await,
));
}
Ok(res)
} }
#[instrument(name = "newsreader::tags", skip_all, fields(needs_unread=%_needs_unread))]
pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> { pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
// TODO: optimize query by using needs_unread // TODO: optimize query by using needs_unread
let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?; let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?;
let tags = tags let tags = tags
.into_iter() .into_iter()
.map(|tag| { .map(|tag| {
let mut hasher = DefaultHasher::new();
tag.site.hash(&mut hasher);
let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0); let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
let name = format!("{TAG_PREFIX}{}", tag.site.expect("tag must have site")); let name = format!(
"{NEWSREADER_TAG_PREFIX}{}",
tag.site.expect("tag must have site")
);
let hex = compute_color(&name);
Tag { Tag {
name, name,
fg_color: "white".to_string(), fg_color: "white".to_string(),
@@ -160,122 +172,139 @@ pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, Server
Ok(tags) Ok(tags)
} }
-pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerError> {
+#[instrument(name = "newsreader::thread", skip_all, fields(thread_id=%thread_id))]
+pub async fn thread(
+    config: &Config,
+    pool: &PgPool,
+    thread_id: String,
+) -> Result<Thread, ServerError> {
    let id = thread_id
-        .strip_prefix(THREAD_PREFIX)
-        .expect("news thread doesn't start with '{THREAD_PREFIX}'")
+        .strip_prefix(NEWSREADER_THREAD_PREFIX)
+        .expect("news thread doesn't start with '{NEWSREADER_THREAD_PREFIX}'")
        .to_string();
    let r = sqlx::query_file!("sql/thread.sql", id)
        .fetch_one(pool)
        .await?;
-    let site = r.site.unwrap_or("NO SITE".to_string());
-    let tags = if r.is_read.unwrap_or(false) {
-        vec![site.clone()]
-    } else {
-        vec!["unread".to_string(), site.clone()]
-    };
-    let default_homepage = "http://no-homepage";
-    let homepage = Url::parse(
-        &r.homepage
-            .map(|h| {
-                if h.is_empty() {
-                    default_homepage.to_string()
-                } else {
-                    h
-                }
-            })
-            .unwrap_or(default_homepage.to_string()),
-    )?;
-    let link = Url::parse(
-        &r.link
-            .as_ref()
-            .map(|h| {
-                if h.is_empty() {
-                    default_homepage.to_string()
-                } else {
-                    h.to_string()
-                }
-            })
-            .unwrap_or(default_homepage.to_string()),
-    )?;
-    let addr = r.link.as_ref().map(|link| {
-        if link.contains('@') {
-            link.clone()
-        } else {
-            if let Ok(url) = homepage.join(&link) {
-                url.to_string()
-            } else {
-                link.clone()
-            }
-        }
-    });
-    let html = r.summary.unwrap_or("NO SUMMARY".to_string());
-    // TODO: add site specific cleanups. For example:
-    // * Grafana does <div class="image-wrapp"><img class="lazyload>"<img src="/media/...>"</img></div>
-    // * Some sites appear to be HTML encoded, unencode them, i.e. imperialviolet
-    let html = sanitize_html(&html, "", &link)?;
-    let body = Body::Html(Html {
-        html,
-        content_tree: "".to_string(),
-    });
-    let title = r.title.unwrap_or("NO TITLE".to_string());
-    let from = Some(Email {
-        name: r.name,
-        addr: addr.map(|a| a.to_string()),
-    });
-    Ok(Thread {
-        thread_id,
-        subject: title.clone(),
-        messages: vec![Message {
-            id,
-            from,
-            to: Vec::new(),
-            cc: Vec::new(),
-            subject: Some(title),
-            timestamp: Some(
-                r.date
-                    .expect("post missing date")
-                    .assume_utc()
-                    .unix_timestamp(),
-            ),
-            headers: Vec::new(),
-            body,
-            path: "".to_string(),
-            attachments: Vec::new(),
-            tags,
-        }],
-    })
-}
-#[derive(Debug)]
-struct Query {
-    unread_only: bool,
-    site: Option<String>,
-    remainder: Vec<String>,
-}
-impl FromStr for Query {
-    type Err = Infallible;
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        let mut unread_only = false;
-        let mut site = None;
-        let mut remainder = Vec::new();
-        let site_prefix = format!("tag:{TAG_PREFIX}");
-        for word in s.split_whitespace() {
-            if word == "is:unread" {
-                unread_only = true
-            } else if word.starts_with(&site_prefix) {
-                site = Some(word[site_prefix.len()..].to_string())
-            } else {
-                remainder.push(word.to_string());
-            }
-        }
-        Ok(Query {
-            unread_only,
-            site,
-            remainder,
-        })
-    }
-}
+    let slug = r.site.unwrap_or("no-slug".to_string());
+    let site = r.name.unwrap_or("NO SITE".to_string());
+    // TODO: remove the various places that have this as an Option
+    let link = Some(Url::parse(&r.link)?);
+    let mut body = r.summary.unwrap_or("NO SUMMARY".to_string());
+    let cacher = Arc::new(Mutex::new(FilesystemCacher::new(&config.slurp_cache_path)?));
+    let body_tranformers: Vec<Box<dyn Transformer>> = vec![
+        Box::new(SlurpContents {
+            cacher,
+            // TODO: make this true when bulma is finally removed
+            inline_css: false,
+            site_selectors: hashmap![
+                "atmeta.com".to_string() => vec![
+                    Selector::parse("div.entry-content").unwrap(),
+                ],
+                "blog.prusa3d.com".to_string() => vec![
+                    Selector::parse("article.content .post-block").unwrap(),
+                ],
+                "blog.cloudflare.com".to_string() => vec![
+                    Selector::parse(".author-lists .author-name-tooltip").unwrap(),
+                    Selector::parse(".post-full-content").unwrap()
+                ],
+                "blog.zsa.io".to_string() => vec![
+                    Selector::parse("section.blog-article").unwrap(),
+                ],
+                "engineering.fb.com".to_string() => vec![
+                    Selector::parse("article").unwrap(),
+                ],
+                "grafana.com".to_string() => vec![
+                    Selector::parse(".blog-content").unwrap(),
+                ],
+                "hackaday.com".to_string() => vec![
+                    Selector::parse("div.entry-featured-image").unwrap(),
+                    Selector::parse("div.entry-content").unwrap()
+                ],
+                "ingowald.blog".to_string() => vec![
+                    Selector::parse("article").unwrap(),
+                ],
+                "jvns.ca".to_string() => vec![
+                    Selector::parse("article").unwrap(),
+                ],
+                "mitchellh.com".to_string() => vec![Selector::parse("div.w-full").unwrap()],
+                "natwelch.com".to_string() => vec![
+                    Selector::parse("article div.prose").unwrap(),
+                ],
+                "rustacean-station.org".to_string() => vec![
+                    Selector::parse("article").unwrap(),
+                ],
+                "slashdot.org".to_string() => vec![
+                    Selector::parse("span.story-byline").unwrap(),
+                    Selector::parse("div.p").unwrap(),
+                ],
+                "trofi.github.io".to_string() => vec![
+                    Selector::parse("#content").unwrap(),
+                ],
+                "www.redox-os.org".to_string() => vec![
+                    Selector::parse("div.content").unwrap(),
+                ],
+                "www.smbc-comics.com".to_string() => vec![
+                    Selector::parse("img#cc-comic").unwrap(),
+                    Selector::parse("div#aftercomic img").unwrap(),
+                ],
+            ],
+        }),
+        Box::new(FrameImages),
+        Box::new(AddOutlink),
+        // TODO: causes doubling of images in cloudflare blogs
+        //Box::new(EscapeHtml),
+        Box::new(SanitizeHtml {
+            cid_prefix: "",
+            base_url: &link,
+        }),
+    ];
+    for t in body_tranformers.iter() {
+        if t.should_run(&link, &body) {
+            body = t.transform(&link, &body).await?;
+        }
+    }
+    let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
+    let is_read = r.is_read.unwrap_or(false);
+    let timestamp = r
+        .date
+        .expect("post missing date")
+        .assume_utc()
+        .unix_timestamp();
+    Ok(Thread::News(NewsPost {
+        thread_id,
+        is_read,
+        slug,
+        site,
+        title,
+        body,
+        url: link
+            .as_ref()
+            .map(|url| url.to_string())
+            .unwrap_or("NO URL".to_string()),
+        timestamp,
+    }))
+}
+#[instrument(name = "newsreader::set_read_status", skip_all, fields(query=%query,unread=%unread))]
+pub async fn set_read_status<'ctx>(
+    pool: &PgPool,
+    query: &Query,
+    unread: bool,
+) -> Result<bool, ServerError> {
+    // TODO: make single query when query.uids.len() > 1
+    let uids: Vec<_> = query
+        .uids
+        .iter()
+        .filter(|uid| is_newsreader_thread(uid))
+        .map(
+            |uid| extract_thread_id(uid), // TODO strip prefix
+        )
+        .collect();
+    for uid in uids {
+        sqlx::query_file!("sql/set_unread.sql", !unread, uid)
+            .execute(pool)
+            .await?;
+    }
+    Ok(true)
+}
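Editor's note: the module-local Query parser removed above has moved to a shared crate::Query (imported in the next file). For reference, the removed FromStr impl accepted strings like the following; the value of TAG_PREFIX is not shown in this diff, so "news/" here is an assumption:

// Illustrative only: behavior of the removed FromStr impl, assuming TAG_PREFIX = "news/".
let q: Query = "is:unread tag:news/slashdot.org rust async".parse().unwrap();
assert!(q.unread_only);
assert_eq!(q.site.as_deref(), Some("slashdot.org")); // "tag:news/" prefix stripped
assert_eq!(q.remainder, vec!["rust".to_string(), "async".to_string()]);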

View File

@@ -5,32 +5,43 @@ use std::{
    time::Instant,
};
-use async_graphql::connection::{self, Connection, Edge};
use log::{error, info, warn};
-use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
+use mailparse::{parse_content_type, parse_mail, MailHeader, MailHeaderMap, ParsedMail};
use memmap::MmapOptions;
use notmuch::Notmuch;
-use url::Url;
+use sqlx::PgPool;
+use tracing::instrument;
use crate::{
+    compute_offset_limit,
    error::ServerError,
    graphql::{
-        Attachment, Body, DispositionType, Email, Header, Html, Message, PlainText, Tag, Thread,
-        ThreadSummary, UnhandledContentType,
+        Attachment, Body, Corpus, DispositionType, Email, EmailThread, Header, Html, Message,
+        PlainText, Tag, Thread, ThreadSummary, UnhandledContentType,
    },
-    linkify_html, sanitize_html,
+    linkify_html, InlineStyle, Query, SanitizeHtml, Transformer,
};
-const TEXT_PLAIN: &'static str = "text/plain";
-const TEXT_HTML: &'static str = "text/html";
const IMAGE_JPEG: &'static str = "image/jpeg";
+const IMAGE_PJPEG: &'static str = "image/pjpeg";
const IMAGE_PNG: &'static str = "image/png";
+const MESSAGE_RFC822: &'static str = "message/rfc822";
const MULTIPART_ALTERNATIVE: &'static str = "multipart/alternative";
const MULTIPART_MIXED: &'static str = "multipart/mixed";
const MULTIPART_RELATED: &'static str = "multipart/related";
+const TEXT_HTML: &'static str = "text/html";
+const TEXT_PLAIN: &'static str = "text/plain";
const MAX_RAW_MESSAGE_SIZE: usize = 100_000;
fn is_notmuch_query(query: &Query) -> bool {
query.is_notmuch || query.corpus == Some(Corpus::Notmuch)
}
pub fn is_notmuch_thread_or_id(id: &str) -> bool {
id.starts_with("id:") || id.starts_with("thread:")
}
// TODO(wathiede): decide good error type
pub fn threadset_to_messages(thread_set: notmuch::ThreadSet) -> Result<Vec<Message>, ServerError> {
    for t in thread_set.0 {
@@ -39,48 +50,45 @@ pub fn threadset_to_messages(thread_set: notmuch::ThreadSet) -> Result<Vec<Message>, ServerError> {
    Ok(Vec::new())
}
-pub async fn count(nm: &Notmuch, query: &str) -> Result<usize, ServerError> {
-    Ok(nm.count(query)?)
+#[instrument(name="nm::count", skip_all, fields(query=%query))]
+pub async fn count(nm: &Notmuch, query: &Query) -> Result<usize, ServerError> {
+    if !is_notmuch_query(query) {
+        return Ok(0);
+    }
+    let query = query.to_notmuch();
+    Ok(nm.count(&query)?)
}
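Editor's note: `Query::to_notmuch` belongs to the shared crate::Query, whose implementation is outside this diff. A hedged sketch of the translation it plausibly performs, purely as an assumption:

// Hypothetical: rebuild a notmuch query string from the parsed fields.
// The real implementation lives in crate::Query and may differ.
impl Query {
    fn to_notmuch(&self) -> String {
        let mut parts: Vec<String> = self.tags.iter().map(|t| format!("tag:{t}")).collect();
        if self.unread_only {
            parts.push("tag:unread".to_string());
        }
        parts.extend(self.remainder.iter().cloned());
        parts.join(" ")
    }
}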
#[instrument(name="nm::search", skip_all, fields(query=%query))]
pub async fn search( pub async fn search(
nm: &Notmuch, nm: &Notmuch,
after: Option<String>, after: Option<i32>,
before: Option<String>, before: Option<i32>,
first: Option<i32>, first: Option<i32>,
last: Option<i32>, last: Option<i32>,
query: String, query: &Query,
) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> { ) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
connection::query( if !is_notmuch_query(query) {
after, return Ok(Vec::new());
before, }
first, let query = query.to_notmuch();
last, let (offset, mut limit) = compute_offset_limit(after, before, first, last);
|after, before, first, last| async move { if before.is_none() {
let total = nm.count(&query)?; // When searching forward, the +1 is to see if there are more pages of data available.
let (first, last) = if let (None, None) = (first, last) { // Searching backwards implies there's more pages forward, because the value represented by
info!("neither first nor last set, defaulting first to 20"); // `before` is on the next page.
(Some(20), None) limit = limit + 1;
} else { }
(first, last) Ok(nm
}; .search(&query, offset as usize, limit as usize)?
.0
let mut start = after.map(|after| after + 1).unwrap_or(0); .into_iter()
let mut end = before.unwrap_or(total); .enumerate()
if let Some(first) = first { .map(|(i, ts)| {
end = (start + first).min(end); (
} offset + i as i32,
if let Some(last) = last { ThreadSummary {
start = if last > end - start { end } else { end - last }; thread: format!("thread:{}", ts.thread),
}
let count = end - start;
let slice: Vec<ThreadSummary> = nm
.search(&query, start, count)?
.0
.into_iter()
.map(|ts| ThreadSummary {
thread: ts.thread,
timestamp: ts.timestamp, timestamp: ts.timestamp,
date_relative: ts.date_relative, date_relative: ts.date_relative,
matched: ts.matched, matched: ts.matched,
@@ -88,22 +96,14 @@ pub async fn search(
authors: ts.authors, authors: ts.authors,
subject: ts.subject, subject: ts.subject,
tags: ts.tags, tags: ts.tags,
}) corpus: Corpus::Notmuch,
.collect(); },
)
let mut connection = Connection::new(start > 0, end < total); })
connection.edges.extend( .collect())
slice
.into_iter()
.enumerate()
.map(|(idx, item)| Edge::new(start + idx, item)),
);
Ok::<_, async_graphql::Error>(connection)
},
)
.await
} }
#[instrument(name="nm::tags", skip_all, fields(needs_unread=needs_unread))]
pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> { pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
let now = Instant::now(); let now = Instant::now();
let unread_msg_cnt: HashMap<String, usize> = if needs_unread { let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
@@ -145,8 +145,10 @@ pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
Ok(tags) Ok(tags)
} }
#[instrument(name="nm::thread", skip_all, fields(thread_id=thread_id))]
pub async fn thread( pub async fn thread(
nm: &Notmuch, nm: &Notmuch,
pool: &PgPool,
thread_id: String, thread_id: String,
debug_content_tree: bool, debug_content_tree: bool,
) -> Result<Thread, ServerError> { ) -> Result<Thread, ServerError> {
@@ -159,7 +161,7 @@ pub async fn thread(
let mmap = unsafe { MmapOptions::new().map(&file)? }; let mmap = unsafe { MmapOptions::new().map(&file)? };
let m = parse_mail(&mmap)?; let m = parse_mail(&mmap)?;
let from = email_addresses(&path, &m, "from")?; let from = email_addresses(&path, &m, "from")?;
let from = match from.len() { let mut from = match from.len() {
0 => None, 0 => None,
1 => from.into_iter().next(), 1 => from.into_iter().next(),
_ => { _ => {
@@ -171,6 +173,16 @@ pub async fn thread(
from.into_iter().next() from.into_iter().next()
} }
}; };
match from.as_mut() {
Some(from) => {
if let Some(addr) = from.addr.as_mut() {
let photo_url = photo_url_for_email_address(&pool, &addr).await?;
from.photo_url = photo_url;
}
}
_ => (),
}
let to = email_addresses(&path, &m, "to")?; let to = email_addresses(&path, &m, "to")?;
let cc = email_addresses(&path, &m, "cc")?; let cc = email_addresses(&path, &m, "cc")?;
let subject = m.headers.get_first_value("subject"); let subject = m.headers.get_first_value("subject");
@@ -179,8 +191,10 @@ pub async fn thread(
        .get_first_value("date")
        .and_then(|d| mailparse::dateparse(&d).ok());
    let cid_prefix = shared::urls::cid_prefix(None, &id);
-    let base_url = Url::parse("https://there-should-be-no-relative-urls-in-email").unwrap();
-    let body = match extract_body(&m, &id)? {
+    let base_url = None;
+    let mut part_addr = Vec::new();
+    part_addr.push(id.to_string());
+    let body = match extract_body(&m, &mut part_addr)? {
        Body::PlainText(PlainText { text, content_tree }) => {
            let text = if text.len() > MAX_RAW_MESSAGE_SIZE {
                format!(
@@ -193,17 +207,29 @@ pub async fn thread(
            };
            Body::Html(Html {
-                html: format!(
-                    r#"<p class="view-part-text-plain">{}</p>"#,
-                    // Trim newlines to prevent excessive white space at the beginning/end of
-                    // presentation. Leave tabs and spaces in case plain text attempts to center a
-                    // header on the first line.
-                    sanitize_html(
-                        &linkify_html(&text.trim_matches('\n')),
-                        &cid_prefix,
-                        &base_url
-                    )?
-                ),
+                html: {
+                    let body_tranformers: Vec<Box<dyn Transformer>> = vec![
+                        Box::new(InlineStyle),
+                        Box::new(SanitizeHtml {
+                            cid_prefix: &cid_prefix,
+                            base_url: &base_url,
+                        }),
+                    ];
+                    let mut html = linkify_html(&text.trim_matches('\n'));
+                    for t in body_tranformers.iter() {
+                        if t.should_run(&None, &html) {
+                            html = t.transform(&None, &html).await?;
+                        }
+                    }
+                    format!(
+                        r#"<p class="view-part-text-plain">{}</p>"#,
+                        // Trim newlines to prevent excessive white space at the beginning/end of
+                        // presentation. Leave tabs and spaces in case plain text attempts to center
+                        // a header on the first line.
+                        html
+                    )
+                },
                content_tree: if debug_content_tree {
                    render_content_type_tree(&m)
                } else {
@@ -211,8 +237,27 @@ pub async fn thread(
                },
            })
        }
-        Body::Html(Html { html, content_tree }) => Body::Html(Html {
-            html: sanitize_html(&html, &cid_prefix, &base_url)?,
+        Body::Html(Html {
+            mut html,
+            content_tree,
+        }) => Body::Html(Html {
+            html: {
+                let body_tranformers: Vec<Box<dyn Transformer>> = vec![
+                    // TODO: this breaks things like emails from calendar
+                    //Box::new(InlineStyle),
+                    Box::new(SanitizeHtml {
+                        cid_prefix: &cid_prefix,
+                        base_url: &base_url,
+                    }),
+                ];
+                for t in body_tranformers.iter() {
+                    if t.should_run(&None, &html) {
+                        html = t.transform(&None, &html).await?;
+                    }
+                }
+                html
+            },
            content_tree: if debug_content_tree {
                render_content_type_tree(&m)
            } else {
@@ -248,7 +293,7 @@ pub async fn thread(
    // TODO(wathiede): parse message and fill out attachments
    let attachments = extract_attachments(&m, &id)?;
    messages.push(Message {
-        id,
+        id: format!("id:{id}"),
        from,
        to,
        cc,
@@ -270,11 +315,11 @@ pub async fn thread(
        .next()
        .and_then(|m| m.subject.clone())
        .unwrap_or("(NO SUBJECT)".to_string());
-    Ok(Thread {
+    Ok(Thread::Email(EmailThread {
        thread_id,
        subject,
        messages,
-    })
+    }))
}
fn email_addresses(
@@ -296,6 +341,7 @@ fn email_addresses(
            mailparse::MailAddr::Single(s) => addrs.push(Email {
                name: s.display_name,
                addr: Some(s.addr),
+                photo_url: None,
            }), //println!("Single: {s}"),
        }
    }
@@ -310,12 +356,14 @@ fn email_addresses(
            addrs.push(Email {
                name: Some(name.to_string()),
                addr: Some(addr.to_string()),
+                photo_url: None,
            });
        }
    } else {
        addrs.push(Email {
            name: Some(v),
            addr: None,
+            photo_url: None,
        });
    }
}
@@ -376,16 +424,14 @@ pub fn attachment_bytes(nm: &Notmuch, id: &str, idx: &[usize]) -> Result<Attachment, ServerError> {
    Err(ServerError::PartNotFound)
}
-fn extract_body(m: &ParsedMail, id: &str) -> Result<Body, ServerError> {
-    let mut part_addr = Vec::new();
-    part_addr.push(id.to_string());
+fn extract_body(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    let body = m.get_body()?;
    let ret = match m.ctype.mimetype.as_str() {
        TEXT_PLAIN => return Ok(Body::text(body)),
        TEXT_HTML => return Ok(Body::html(body)),
-        MULTIPART_MIXED => extract_mixed(m, &mut part_addr),
-        MULTIPART_ALTERNATIVE => extract_alternative(m, &mut part_addr),
-        MULTIPART_RELATED => extract_related(m, &mut part_addr),
+        MULTIPART_MIXED => extract_mixed(m, part_addr),
+        MULTIPART_ALTERNATIVE => extract_alternative(m, part_addr),
+        MULTIPART_RELATED => extract_related(m, part_addr),
        _ => extract_unhandled(m),
    };
    if let Err(err) = ret {
@@ -425,7 +471,7 @@ fn extract_alternative(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == MULTIPART_MIXED {
-            return extract_related(sp, part_addr);
+            return extract_mixed(sp, part_addr);
        }
    }
    for sp in &m.subparts {
@@ -454,13 +500,16 @@ fn extract_alternative(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
// multipart/mixed defines multiple types of content, all of which should be presented to the user
// 'serially'.
fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
-    //todo!("add some sort of visual indicator there are unhandled types, i.e. .ics files");
    let handled_types = vec![
+        IMAGE_JPEG,
+        IMAGE_PJPEG,
+        IMAGE_PNG,
+        MESSAGE_RFC822,
        MULTIPART_ALTERNATIVE,
        MULTIPART_RELATED,
        TEXT_HTML,
        TEXT_PLAIN,
-        IMAGE_JPEG,
-        IMAGE_PNG,
    ];
    let mut unhandled_types: Vec<_> = m
        .subparts
@@ -476,11 +525,12 @@ fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    for (idx, sp) in m.subparts.iter().enumerate() {
        part_addr.push(idx.to_string());
        match sp.ctype.mimetype.as_str() {
+            MESSAGE_RFC822 => parts.push(extract_rfc822(&sp, part_addr)?),
            MULTIPART_RELATED => parts.push(extract_related(sp, part_addr)?),
            MULTIPART_ALTERNATIVE => parts.push(extract_alternative(sp, part_addr)?),
            TEXT_PLAIN => parts.push(Body::text(sp.get_body()?)),
            TEXT_HTML => parts.push(Body::html(sp.get_body()?)),
-            IMAGE_JPEG | IMAGE_PNG => {
+            IMAGE_PJPEG | IMAGE_JPEG | IMAGE_PNG => {
                let pcd = sp.get_content_disposition();
                let filename = pcd
                    .params
@@ -489,8 +539,10 @@ fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
                    .unwrap_or("".to_string());
                // Only add inline images; attachments are handled as an attribute of the top-level Message and rendered separately client-side.
                if pcd.disposition == mailparse::DispositionType::Inline {
+                    // TODO: make URL generation more programmatic based on what the frontend has
+                    // mapped
                    parts.push(Body::html(format!(
-                        r#"<img src="/view/attachment/{}/{}/{filename}">"#,
+                        r#"<img src="/api/view/attachment/{}/{}/{filename}">"#,
                        part_addr[0],
                        part_addr
                            .iter()
@@ -501,13 +553,25 @@ fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
                    )));
                }
            }
-            _ => (),
+            mt => parts.push(unhandled_html(MULTIPART_MIXED, mt)),
        }
        part_addr.pop();
    }
    Ok(flatten_body_parts(&parts))
}
fn unhandled_html(parent_type: &str, child_type: &str) -> Body {
Body::Html(Html {
html: format!(
r#"
<div class="p-4 error">
Unhandled mimetype {child_type} in a {parent_type} message
</div>
"#
),
content_tree: String::new(),
})
}
fn flatten_body_parts(parts: &[Body]) -> Body {
    let html = parts
        .iter()
@@ -518,7 +582,7 @@ fn flatten_body_parts(parts: &[Body]) -> Body {
                // Trim newlines to prevent excessive white space at the beginning/end of
                // presentation. Leave tabs and spaces in case plain text attempts to center a
                // header on the first line.
-                linkify_html(&text.trim_matches('\n'))
+                linkify_html(&html_escape::encode_text(text).trim_matches('\n'))
            )
        }
        Body::Html(Html { html, .. }) => html.clone(),
@@ -529,14 +593,14 @@ fn flatten_body_parts(parts: &[Body]) -> Body {
                // Trim newlines to prevent excessive white space at the beginning/end of
                // presentation. Leave tabs and spaces in case plain text attempts to center a
                // header on the first line.
-                linkify_html(&text.trim_matches('\n'))
+                linkify_html(&html_escape::encode_text(text).trim_matches('\n'))
            )
        }
    })
    .collect::<Vec<_>>()
    .join("\n");
-    info!("flatten_body_parts {} {html}", parts.len());
+    info!("flatten_body_parts {}", parts.len());
    Body::html(html)
}
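Editor's note: the switch to `html_escape::encode_text` means text/plain parts are HTML-escaped before linkification, so a literal `<` or `&` in a plain-text body can no longer be interpreted as markup once the parts are joined into one HTML string. Roughly:

// Illustration of the escaping change (html_escape crate).
let text = "see <config> & visit http://example.com";
let escaped = html_escape::encode_text(text);
assert_eq!(escaped, "see &lt;config&gt; &amp; visit http://example.com");
// linkify_html then only needs to wrap the URL in an <a> tag.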
@@ -547,6 +611,7 @@ fn extract_related(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
        TEXT_HTML,
        TEXT_PLAIN,
        IMAGE_JPEG,
+        IMAGE_PJPEG,
        IMAGE_PNG,
    ];
    let mut unhandled_types: Vec<_> = m
@@ -561,7 +626,10 @@ fn extract_related(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    }
    for (i, sp) in m.subparts.iter().enumerate() {
-        if sp.ctype.mimetype == IMAGE_PNG || sp.ctype.mimetype == IMAGE_JPEG {
+        if sp.ctype.mimetype == IMAGE_PNG
+            || sp.ctype.mimetype == IMAGE_JPEG
+            || sp.ctype.mimetype == IMAGE_PJPEG
+        {
            info!("sp.ctype {:#?}", sp.ctype);
            //info!("sp.headers {:#?}", sp.headers);
            if let Some(cid) = sp.headers.get_first_value("Content-Id") {
@@ -628,6 +696,7 @@ fn walk_attachments_inner<T, F: Fn(&ParsedMail, &[usize]) -> Option<T> + Copy>(
fn extract_attachments(m: &ParsedMail, id: &str) -> Result<Vec<Attachment>, ServerError> {
    let mut attachments = Vec::new();
    for (idx, sp) in m.subparts.iter().enumerate() {
+        info!("sp: {:?}", sp.headers);
        if let Some(attachment) = extract_attachment(sp, id, &[idx]) {
            // Filter out inline attachments; they're flattened into the body of the message.
            if attachment.disposition == DispositionType::Attachment {
@@ -640,11 +709,22 @@ fn extract_attachments(m: &ParsedMail, id: &str) -> Result<Vec<Attachment>, ServerError> {
fn extract_attachment(m: &ParsedMail, id: &str, idx: &[usize]) -> Option<Attachment> {
    let pcd = m.get_content_disposition();
-    // TODO: do we need to handle empty filename attachments, or should we change the definition of
-    // Attachment::filename?
-    let Some(filename) = pcd.params.get("filename").map(|f| f.clone()) else {
-        return None;
-    };
+    let pct = m
+        .get_headers()
+        .get_first_value("Content-Type")
+        .map(|s| parse_content_type(&s));
+    let filename = match (
+        pcd.params.get("filename").map(|f| f.clone()),
+        pct.map(|pct| pct.params.get("name").map(|f| f.clone())),
+    ) {
+        // Use the filename from Content-Disposition
+        (Some(filename), _) => filename,
+        // Use the filename from Content-Type
+        (_, Some(Some(name))) => name,
+        // No known filename, assume it's not an attachment
+        _ => return None,
+    };
+    info!("filename {filename}");
    // TODO: grab this from somewhere
    let content_id = None;
@@ -672,6 +752,44 @@ fn extract_attachment(m: &ParsedMail, id: &str, idx: &[usize]) -> Option<Attachment> {
        bytes,
    });
}
fn email_address_strings(emails: &[Email]) -> Vec<String> {
emails
.iter()
.map(|e| e.to_string())
.inspect(|e| info!("e {e}"))
.collect()
}
fn extract_rfc822(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
fn extract_headers(m: &ParsedMail) -> Result<Body, ServerError> {
let path = "<in-memory>";
let from = email_address_strings(&email_addresses(path, &m, "from")?).join(", ");
let to = email_address_strings(&email_addresses(path, &m, "to")?).join(", ");
let cc = email_address_strings(&email_addresses(path, &m, "cc")?).join(", ");
let date = m.headers.get_first_value("date").unwrap_or(String::new());
let subject = m
.headers
.get_first_value("subject")
.unwrap_or(String::new());
let text = format!(
r#"
---------- Forwarded message ----------
From: {from}
To: {to}
CC: {cc}
Date: {date}
Subject: {subject}
"#
);
Ok(Body::text(text))
}
let inner_body = m.get_body()?;
let inner_m = parse_mail(inner_body.as_bytes())?;
let headers = extract_headers(&inner_m)?;
let body = extract_body(&inner_m, part_addr)?;
Ok(flatten_body_parts(&[headers, body]))
}
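Editor's note: for a message/rfc822 subpart, the flattened output now begins with a plain-text header block in the style of a client-forwarded message. With invented values, the rendered prefix looks like:

---------- Forwarded message ----------
From: Ada Lovelace <ada@example.com>
To: Bill Thiede <bill@example.com>
CC:
Date: Tue, 14 Jan 2025 12:05:03 -0800
Subject: Re: engine notes

followed by the extracted body of the inner message.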
pub fn get_attachment_filename(header_value: &str) -> &str {
    info!("get_attachment_filename {header_value}");
@@ -752,3 +870,46 @@ fn render_content_type_tree(m: &ParsedMail) -> String {
        SKIP_HEADERS.join("\n ")
    )
}
#[instrument(name="nm::set_read_status", skip_all, fields(query=%query, unread=unread))]
pub async fn set_read_status<'ctx>(
nm: &Notmuch,
query: &Query,
unread: bool,
) -> Result<bool, ServerError> {
let uids: Vec<_> = query
.uids
.iter()
.filter(|uid| is_notmuch_thread_or_id(uid))
.collect();
info!("set_read_status({unread} {uids:?})");
for uid in uids {
if unread {
nm.tag_add("unread", uid)?;
} else {
nm.tag_remove("unread", uid)?;
}
}
Ok(true)
}
async fn photo_url_for_email_address(
pool: &PgPool,
addr: &str,
) -> Result<Option<String>, ServerError> {
let row = sqlx::query!(
r#"
SELECT
url
FROM email_photo ep
JOIN email_address ea
ON ep.id = ea.email_photo_id
WHERE
address = $1
"#,
addr
)
.fetch_optional(pool)
.await?;
Ok(row.map(|r| r.url))
}

353
server/src/tantivy.rs Normal file
View File

@@ -0,0 +1,353 @@
use std::collections::HashSet;
use log::{debug, error, info, warn};
use sqlx::{postgres::PgPool, types::time::PrimitiveDateTime};
use tantivy::{
collector::{DocSetCollector, TopDocs},
doc, query,
query::{AllQuery, BooleanQuery, Occur, QueryParser, TermQuery},
schema::{Facet, IndexRecordOption, Value},
DocAddress, Index, IndexReader, Searcher, TantivyDocument, TantivyError, Term,
};
use tracing::{info_span, instrument, Instrument};
use crate::{
compute_offset_limit,
error::ServerError,
graphql::{Corpus, ThreadSummary},
newsreader::{extract_thread_id, is_newsreader_thread},
thread_summary_from_row, Query, ThreadSummaryRecord,
};
pub fn is_tantivy_query(query: &Query) -> bool {
query.is_tantivy || query.corpus == Some(Corpus::Tantivy)
}
pub struct TantivyConnection {
db_path: String,
index: Index,
reader: IndexReader,
}
fn get_index(db_path: &str) -> Result<Index, TantivyError> {
Ok(match Index::open_in_dir(db_path) {
Ok(idx) => idx,
Err(err) => {
warn!("Failed to open {db_path}: {err}");
create_news_db(db_path)?;
Index::open_in_dir(db_path)?
}
})
}
impl TantivyConnection {
pub fn new(tantivy_db_path: &str) -> Result<TantivyConnection, TantivyError> {
let index = get_index(tantivy_db_path)?;
let reader = index.reader()?;
Ok(TantivyConnection {
db_path: tantivy_db_path.to_string(),
index,
reader,
})
}
#[instrument(name = "tantivy::refresh", skip_all)]
pub async fn refresh(&self, pool: &PgPool) -> Result<(), ServerError> {
let start_time = std::time::Instant::now();
let p_uids: Vec<_> = sqlx::query_file!("sql/all-uids.sql")
.fetch_all(pool)
.instrument(info_span!("postgres query"))
.await?
.into_iter()
.map(|r| r.uid)
.collect();
info!(
"refresh from postgres got {} uids in {}",
p_uids.len(),
start_time.elapsed().as_secs_f32()
);
let t_span = info_span!("tantivy query");
let _enter = t_span.enter();
let start_time = std::time::Instant::now();
let (searcher, _query) = self.searcher_and_query(&Query::default())?;
let docs = searcher.search(&AllQuery, &DocSetCollector)?;
let uid = self.index.schema().get_field("uid")?;
let t_uids: Vec<_> = docs
.into_iter()
.map(|doc_address| {
searcher
.doc(doc_address)
.map(|doc: TantivyDocument| {
debug!("doc: {doc:#?}");
doc.get_first(uid)
.expect("uid")
.as_str()
.expect("as_str")
.to_string()
})
.expect("searcher.doc")
})
.collect();
drop(_enter);
info!(
"refresh tantivy got {} uids in {}",
t_uids.len(),
start_time.elapsed().as_secs_f32()
);
let t_set: HashSet<_> = t_uids.into_iter().collect();
let need: Vec<_> = p_uids
.into_iter()
.filter(|uid| !t_set.contains(uid.as_str()))
.collect();
if !need.is_empty() {
info!(
"need to reindex {} uids: {:?}...",
need.len(),
&need[..need.len().min(10)]
);
}
let batch_size = 1000;
let uids: Vec<_> = need[..need.len().min(batch_size)]
.into_iter()
.cloned()
.collect();
self.reindex_uids(pool, &uids).await
}
#[instrument(skip(self, pool))]
async fn reindex_uids(&self, pool: &PgPool, uids: &[String]) -> Result<(), ServerError> {
if uids.is_empty() {
return Ok(());
}
// TODO: add SlurpContents and convert HTML to text
let pool: &PgPool = pool;
let mut index_writer = self.index.writer(50_000_000)?;
let schema = self.index.schema();
let site = schema.get_field("site")?;
let title = schema.get_field("title")?;
let summary = schema.get_field("summary")?;
let link = schema.get_field("link")?;
let date = schema.get_field("date")?;
let is_read = schema.get_field("is_read")?;
let uid = schema.get_field("uid")?;
let id = schema.get_field("id")?;
let tag = schema.get_field("tag")?;
info!("reindexing {} posts", uids.len());
let rows = sqlx::query_file_as!(PostgresDoc, "sql/posts-from-uids.sql", uids)
.fetch_all(pool)
.await?;
if uids.len() != rows.len() {
error!(
"Had {} uids and only got {} rows: uids {uids:?}",
uids.len(),
rows.len()
);
}
for r in rows {
let id_term = Term::from_field_text(uid, &r.uid);
index_writer.delete_term(id_term);
let slug = r.site;
let tag_facet = Facet::from(&format!("/News/{slug}"));
index_writer.add_document(doc!(
site => slug.clone(),
title => r.title,
// TODO: clean and extract text from HTML
summary => r.summary,
link => r.link,
date => tantivy::DateTime::from_primitive(r.date),
is_read => r.is_read,
uid => r.uid,
id => r.id as u64,
tag => tag_facet,
))?;
}
info_span!("IndexWriter.commit").in_scope(|| index_writer.commit())?;
info_span!("IndexReader.reload").in_scope(|| self.reader.reload())?;
Ok(())
}
#[instrument(name = "tantivy::reindex_thread", skip_all, fields(query=%query))]
pub async fn reindex_thread(&self, pool: &PgPool, query: &Query) -> Result<(), ServerError> {
let uids: Vec<_> = query
.uids
.iter()
.filter(|uid| is_newsreader_thread(uid))
.map(|uid| extract_thread_id(uid).to_string())
.collect();
Ok(self.reindex_uids(pool, &uids).await?)
}
#[instrument(name = "tantivy::reindex_all", skip_all)]
pub async fn reindex_all(&self, pool: &PgPool) -> Result<(), ServerError> {
let rows = sqlx::query_file!("sql/all-posts.sql")
.fetch_all(pool)
.await?;
let uids: Vec<String> = rows.into_iter().map(|r| r.uid).collect();
self.reindex_uids(pool, &uids).await?;
Ok(())
}
fn searcher_and_query(
&self,
query: &Query,
) -> Result<(Searcher, Box<dyn query::Query>), ServerError> {
// TODO: only create one reader
// From https://tantivy-search.github.io/examples/basic_search.html
// "For a search server you will typically create one reader for the entire lifetime of
// your program, and acquire a new searcher for every single request."
//
// I think there's some challenge in making the reader work if we reindex, so the reader may
// need to be stored indirectly, and be recreated on reindex.
// I think creating a reader takes 200-300 ms.
let schema = self.index.schema();
let searcher = self.reader.searcher();
let title = schema.get_field("title")?;
let summary = schema.get_field("summary")?;
let query_parser = QueryParser::for_index(&self.index, vec![title, summary]);
// Tantivy uses '*' to match all docs, not empty string
let term = &query.remainder.join(" ");
let term = if term.is_empty() { "*" } else { term };
info!("query_parser('{term}')");
let tantivy_query = query_parser.parse_query(&term)?;
let tag = schema.get_field("tag")?;
let is_read = schema.get_field("is_read")?;
let mut terms = vec![(Occur::Must, tantivy_query)];
for t in &query.tags {
let facet = Facet::from(&format!("/{t}"));
let facet_term = Term::from_facet(tag, &facet);
let facet_term_query = Box::new(TermQuery::new(facet_term, IndexRecordOption::Basic));
terms.push((Occur::Must, facet_term_query));
}
if query.unread_only {
info!("searching for unread only");
let term = Term::from_field_bool(is_read, false);
terms.push((
Occur::Must,
Box::new(TermQuery::new(term, IndexRecordOption::Basic)),
));
}
let search_query = BooleanQuery::new(terms);
Ok((searcher, Box::new(search_query)))
}
#[instrument(name="tantivy::count", skip_all, fields(query=%query))]
pub async fn count(&self, query: &Query) -> Result<usize, ServerError> {
if !is_tantivy_query(query) {
return Ok(0);
}
info!("tantivy::count {query:?}");
use tantivy::collector::Count;
let (searcher, query) = self.searcher_and_query(&query)?;
Ok(searcher.search(&query, &Count)?)
}
#[instrument(name="tantivy::search", skip_all, fields(query=%query))]
pub async fn search(
&self,
pool: &PgPool,
after: Option<i32>,
before: Option<i32>,
first: Option<i32>,
last: Option<i32>,
query: &Query,
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
if !is_tantivy_query(query) {
return Ok(Vec::new());
}
let (offset, mut limit) = compute_offset_limit(after, before, first, last);
if before.is_none() {
// When searching forward, the +1 is to see if there are more pages of data available.
// Searching backwards implies there's more pages forward, because the value represented by
// `before` is on the next page.
limit = limit + 1;
}
let (searcher, search_query) = self.searcher_and_query(&query)?;
info!("Tantivy::search(query '{query:?}', off {offset}, lim {limit}, search_query {search_query:?})");
let top_docs = searcher.search(
&search_query,
&TopDocs::with_limit(limit as usize)
.and_offset(offset as usize)
.order_by_u64_field("date", tantivy::index::Order::Desc),
)?;
info!("search found {} docs", top_docs.len());
let uid = self.index.schema().get_field("uid")?;
let uids = top_docs
.into_iter()
.map(|(_, doc_address): (u64, DocAddress)| {
searcher.doc(doc_address).map(|doc: TantivyDocument| {
debug!("doc: {doc:#?}");
doc.get_first(uid)
.expect("doc missing uid")
.as_str()
.expect("doc str missing")
.to_string()
})
})
.collect::<Result<Vec<String>, TantivyError>>()?;
//let uids = format!("'{}'", uids.join("','"));
info!("uids {uids:?}");
let rows = sqlx::query_file!("sql/threads-from-uid.sql", &uids as &[String])
.fetch_all(pool)
.await?;
let mut res = Vec::new();
info!("found {} hits joining w/ tantivy", rows.len());
for (i, r) in rows.into_iter().enumerate() {
res.push((
i as i32 + offset,
thread_summary_from_row(ThreadSummaryRecord {
site: r.site,
date: r.date,
is_read: r.is_read,
title: r.title,
uid: r.uid,
name: r.name,
corpus: Corpus::Tantivy,
})
.await,
));
}
Ok(res)
}
pub fn drop_and_load_index(&self) -> Result<(), TantivyError> {
create_news_db(&self.db_path)
}
}
fn create_news_db(tantivy_db_path: &str) -> Result<(), TantivyError> {
info!("create_news_db");
// Don't care if directory didn't exist
let _ = std::fs::remove_dir_all(tantivy_db_path);
std::fs::create_dir_all(tantivy_db_path)?;
use tantivy::schema::*;
let mut schema_builder = Schema::builder();
schema_builder.add_text_field("site", STRING | STORED);
schema_builder.add_text_field("title", TEXT | STORED);
schema_builder.add_text_field("summary", TEXT);
schema_builder.add_text_field("link", STRING | STORED);
schema_builder.add_date_field("date", FAST | INDEXED | STORED);
schema_builder.add_bool_field("is_read", FAST | INDEXED | STORED);
schema_builder.add_text_field("uid", STRING | STORED);
schema_builder.add_u64_field("id", FAST);
schema_builder.add_facet_field("tag", FacetOptions::default());
let schema = schema_builder.build();
Index::create_in_dir(tantivy_db_path, schema)?;
Ok(())
}
struct PostgresDoc {
site: String,
title: String,
summary: String,
link: String,
date: PrimitiveDateTime,
is_read: bool,
uid: String,
id: i32,
}
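Editor's note: putting the new module together, the wiring looks roughly like this. The path is invented, a configured PgPool is assumed, and the Query construction assumes crate::Query derives Default (it is used as `Query::default()` above); all method calls are the ones defined in this file:

// Sketch: open/create the index, backfill one batch of missing uids, then search.
let conn = TantivyConnection::new("/var/lib/letterbox/tantivy")?; // assumed path
conn.refresh(&pool).await?;
let query = Query {
    is_tantivy: true,
    remainder: vec!["rust".to_string()],
    ..Query::default()
};
let hits = conn.search(&pool, None, None, Some(20), None, &query).await?;
for (cursor, summary) in hits {
    println!("{cursor}: {}", summary.subject);
}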

View File

@@ -0,0 +1,59 @@
<!DOCTYPE html>
<html>
<head>
<meta charset=utf-8 />
<meta name="viewport" content="user-scalable=no, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, minimal-ui">
<title>GraphQL Playground</title>
<link rel="stylesheet" href="//cdn.jsdelivr.net/npm/graphql-playground-react/build/static/css/index.css" />
<link rel="shortcut icon" href="//cdn.jsdelivr.net/npm/graphql-playground-react/build/favicon.png" />
<script src="//cdn.jsdelivr.net/npm/graphql-playground-react/build/static/js/middleware.js"></script>
</head>
<body>
<div id="root">
<style>
body {
background-color: rgb(23, 42, 58);
font-family: Open Sans, sans-serif;
height: 90vh;
}
#root {
height: 100%;
width: 100%;
display: flex;
align-items: center;
justify-content: center;
}
.loading {
font-size: 32px;
font-weight: 200;
color: rgba(255, 255, 255, .6);
margin-left: 20px;
}
img {
width: 78px;
height: 78px;
}
.title {
font-weight: 400;
}
</style>
<img src='//cdn.jsdelivr.net/npm/graphql-playground-react/build/logo.png' alt=''>
<div class="loading"> Loading
<span class="title">GraphQL Playground</span>
</div>
</div>
<script>window.addEventListener('load', function (event) {
GraphQLPlayground.init(document.getElementById('root'), {
// options as 'endpoint' belong here
endpoint: "/api/graphql",
})
})</script>
</body>
</html>

View File

@@ -1,10 +1,11 @@
[package]
name = "shared"
-version = "0.1.0"
+version = "0.0.115"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
+build-info = "0.0.38"
notmuch = { path = "../notmuch" }
serde = { version = "1.0.147", features = ["derive"] }

View File

@@ -1,3 +1,6 @@
+use std::hash::{DefaultHasher, Hash, Hasher};
+use build_info::{BuildInfo, VersionControl};
use notmuch::SearchSummary;
use serde::{Deserialize, Serialize};
@@ -33,3 +36,23 @@ pub mod urls {
        }
    }
}
pub fn build_version(bi: fn() -> &'static BuildInfo) -> String {
fn commit(git: &Option<VersionControl>) -> String {
let Some(VersionControl::Git(git)) = git else {
return String::new();
};
let mut s = vec!["-".to_string(), git.commit_short_id.clone()];
if let Some(branch) = &git.branch {
s.push(format!(" ({branch})"));
}
s.join("")
}
let bi = bi();
format!("v{}{}", bi.crate_info.version, commit(&bi.version_control)).to_string()
}
pub fn compute_color(data: &str) -> String {
let mut hasher = DefaultHasher::new();
data.hash(&mut hasher);
format!("#{:06x}", hasher.finish() % (1 << 24))
}
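Editor's note: compute_color replaces the inline hashing previously done in newsreader::tags. Hashing the tag name to a 24-bit value gives each feed a stable badge color (stable per input within a Rust release; DefaultHasher's algorithm is not guaranteed across releases). For example:

let c1 = compute_color("news/slashdot.org");
let c2 = compute_color("news/slashdot.org");
assert_eq!(c1, c2); // deterministic per input
assert!(c1.starts_with('#') && c1.len() == 7); // "#rrggbb"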

View File

@@ -1,5 +1,5 @@
[package]
-version = "0.1.0"
+version = "0.0.115"
name = "letterbox"
repository = "https://github.com/seed-rs/seed-quickstart"
authors = ["Bill Thiede <git@xinu.tv>"]
@@ -7,7 +7,10 @@ description = "App Description"
categories = ["category"]
license = "MIT"
readme = "./README.md"
-edition = "2018"
+edition = "2021"
+[build-dependencies]
+build-info-build = "0.0.38"
[dev-dependencies]
wasm-bindgen-test = "0.3.33"
@@ -17,10 +20,10 @@ console_error_panic_hook = "0.1.7"
log = "0.4.17"
seed = { version = "0.10.0", features = ["routing"] }
#seed = "0.9.2"
-console_log = {git = "http://git-private.h.xinu.tv/wathiede/console_log.git"}
+console_log = { version = "0.1.0", registry = "xinu" }
serde = { version = "1.0.147", features = ["derive"] }
-notmuch = {path = "../notmuch"}
-shared = {path = "../shared"}
+notmuch = { path = "../notmuch" }
+shared = { path = "../shared" }
itertools = "0.10.5"
serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
chrono = "0.4.31"
@@ -29,6 +32,8 @@ thiserror = "1.0.50"
seed_hooks = { git = "https://github.com/wathiede/styles_hooks", package = "seed_hooks", branch = "main" }
gloo-net = { version = "0.4.0", features = ["json", "serde_json"] }
human_format = "1.1.0"
+build-info = "0.0.38"
+wasm-bindgen = "0.2.95"
[package.metadata.wasm-pack.profile.release]
wasm-opt = ['-Os']
@@ -36,6 +41,10 @@ wasm-opt = ['-Os']
[dependencies.web-sys]
version = "0.3.58"
features = [
+    "Clipboard",
+    "DomRect",
+    "Element",
    "MediaQueryList",
-    "Window"
+    "Navigator",
+    "Window",
]

View File

@@ -1,8 +0,0 @@
.PHONY: all
APP=letterbox
# Build in release mode and push to minio for serving.
all:
trunk build --release
mc mirror m/$(APP)/ /tmp/$(APP)-$(shell date +%s)
mc mirror --overwrite --remove dist/ m/$(APP)/

View File

@@ -1,5 +1,5 @@
[build]
-release = true
+release = false
[serve]
# The address to serve on.
@@ -9,6 +9,11 @@ port = 6758
[[proxy]]
backend = "http://localhost:9345/api/"
+[[hooks]]
+stage = "pre_build"
+command = "printf"
+command_arguments = ["\\033c"]
#[[hooks]]
#stage = "pre_build"
#command = "cargo"

5
web/build.rs Normal file
View File

@@ -0,0 +1,5 @@
fn main() {
// Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
// and `build_info::format!` in the main program.
build_info_build::build_script();
}

View File

@@ -14,6 +14,7 @@ query FrontPageQuery($query: String!, $after: String $before: String, $first: In
      subject
      authors
      tags
+      corpus
    }
  }
  tags {
@@ -22,4 +23,5 @@ query FrontPageQuery($query: String!, $after: String $before: String, $first: In
    fgColor
    unread
  }
+  version
}

View File

@@ -0,0 +1,3 @@
mutation RefreshMutation {
refresh
}

View File

@@ -2,6 +2,28 @@
"data": { "data": {
"__schema": { "__schema": {
"directives": [ "directives": [
{
"args": [
{
"defaultValue": "\"No longer supported\"",
"description": "A reason for why it is deprecated, formatted using Markdown syntax",
"name": "reason",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
],
"description": "Marks an element of a GraphQL schema as no longer supported.",
"locations": [
"FIELD_DEFINITION",
"ARGUMENT_DEFINITION",
"INPUT_FIELD_DEFINITION",
"ENUM_VALUE"
],
"name": "deprecated"
},
{
"args": [
{
@@ -27,6 +49,14 @@
],
"name": "include"
},
{
"args": [],
"description": "Indicates that an Input Object is a OneOf Input Object (and thus requires\n exactly one of its field be provided)",
"locations": [
"INPUT_OBJECT"
],
"name": "oneOf"
},
{
"args": [
{
@@ -51,6 +81,29 @@
"INLINE_FRAGMENT" "INLINE_FRAGMENT"
], ],
"name": "skip" "name": "skip"
},
{
"args": [
{
"defaultValue": null,
"description": "URL that specifies the behavior of this scalar.",
"name": "url",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
],
"description": "Provides a scalar specification URL for specifying the behavior of custom scalar types.",
"locations": [
"SCALAR"
],
"name": "specifiedBy"
}
],
"mutationType": {
@@ -232,6 +285,35 @@
"name": "Boolean", "name": "Boolean",
"possibleTypes": null "possibleTypes": null
}, },
{
"description": null,
"enumValues": [
{
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "NOTMUCH"
},
{
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "NEWSREADER"
},
{
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "TANTIVY"
}
],
"fields": null,
"inputFields": null,
"interfaces": null,
"kind": "ENUM",
"name": "Corpus",
"possibleTypes": null
},
{
"description": null,
"enumValues": [
@@ -282,6 +364,18 @@
"name": "String", "name": "String",
"ofType": null "ofType": null
} }
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "photoUrl",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
],
"inputFields": null,
@@ -290,6 +384,73 @@
"name": "Email", "name": "Email",
"possibleTypes": null "possibleTypes": null
}, },
{
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "threadId",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "subject",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "messages",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "Message",
"ofType": null
}
}
}
}
}
],
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"name": "EmailThread",
"possibleTypes": null
},
{
"description": "The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point).",
"enumValues": null,
@@ -783,6 +944,22 @@
"ofType": null "ofType": null
} }
} }
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "refresh",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
}
}
],
"inputFields": null,
@@ -791,6 +968,145 @@
"name": "Mutation", "name": "Mutation",
"possibleTypes": null "possibleTypes": null
}, },
{
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "threadId",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "isRead",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "slug",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "site",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "title",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "body",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "url",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "timestamp",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
}
}
],
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"name": "NewsPost",
"possibleTypes": null
},
{
"description": "Information about pagination in a connection",
"enumValues": null,
@@ -905,6 +1221,22 @@
"description": null, "description": null,
"enumValues": null, "enumValues": null,
"fields": [ "fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "version",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [
{
@@ -1056,7 +1388,7 @@
"kind": "NON_NULL", "kind": "NON_NULL",
"name": null, "name": null,
"ofType": { "ofType": {
"kind": "OBJECT", "kind": "UNION",
"name": "Thread", "name": "Thread",
"ofType": null "ofType": null
} }
@@ -1157,69 +1489,23 @@
{
"description": null,
"enumValues": null,
-"fields": [
-{
-"args": [],
-"deprecationReason": null,
-"description": null,
-"isDeprecated": false,
-"name": "threadId",
-"type": {
-"kind": "NON_NULL",
-"name": null,
-"ofType": {
-"kind": "SCALAR",
-"name": "String",
-"ofType": null
-}
-}
-},
-{
-"args": [],
-"deprecationReason": null,
-"description": null,
-"isDeprecated": false,
-"name": "subject",
-"type": {
-"kind": "NON_NULL",
-"name": null,
-"ofType": {
-"kind": "SCALAR",
-"name": "String",
-"ofType": null
-}
-}
-},
-{
-"args": [],
-"deprecationReason": null,
-"description": null,
-"isDeprecated": false,
-"name": "messages",
-"type": {
-"kind": "NON_NULL",
-"name": null,
-"ofType": {
-"kind": "LIST",
-"name": null,
-"ofType": {
-"kind": "NON_NULL",
-"name": null,
-"ofType": {
-"kind": "OBJECT",
-"name": "Message",
-"ofType": null
-}
-}
-}
-}
-}
-],
+"fields": null,
"inputFields": null,
-"interfaces": [],
-"kind": "OBJECT",
+"interfaces": null,
+"kind": "UNION",
"name": "Thread",
-"possibleTypes": null
+"possibleTypes": [
+{
+"kind": "OBJECT",
+"name": "EmailThread",
+"ofType": null
+},
+{
+"kind": "OBJECT",
+"name": "NewsPost",
+"ofType": null
+}
+]
},
{
"description": null,
@@ -1360,6 +1646,22 @@
}
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "corpus",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "ENUM",
"name": "Corpus",
"ofType": null
}
}
}
],
"inputFields": null,
@@ -1586,7 +1888,22 @@
}
},
{
"args": [], "args": [
{
"defaultValue": "false",
"description": null,
"name": "includeDeprecated",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
}
}
],
"deprecationReason": null, "deprecationReason": null,
"description": null, "description": null,
"isDeprecated": false, "isDeprecated": false,
@@ -1857,7 +2174,22 @@
}
},
{
"args": [], "args": [
{
"defaultValue": "false",
"description": null,
"name": "includeDeprecated",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
}
}
],
"deprecationReason": null, "deprecationReason": null,
"description": null, "description": null,
"isDeprecated": false, "isDeprecated": false,
@@ -1990,6 +2322,34 @@
"name": "String", "name": "String",
"ofType": null "ofType": null
} }
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "isDeprecated",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "deprecationReason",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
],
"inputFields": null,
@@ -2002,6 +2362,22 @@
"description": "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes\nall available types and directives on the server, as well as the entry\npoints for query, mutation, and subscription operations.", "description": "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes\nall available types and directives on the server, as well as the entry\npoints for query, mutation, and subscription operations.",
"enumValues": null, "enumValues": null,
"fields": [ "fields": [
{
"args": [],
"deprecationReason": null,
"description": "description of __Schema for newer graphiql introspection schema\nrequirements",
"isDeprecated": false,
"name": "description",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
@@ -2252,7 +2628,22 @@
}
},
{
"args": [], "args": [
{
"defaultValue": "false",
"description": null,
"name": "includeDeprecated",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
}
}
],
"deprecationReason": null, "deprecationReason": null,
"description": null, "description": null,
"isDeprecated": false, "isDeprecated": false,

View File

@@ -1,47 +1,61 @@
query ShowThreadQuery($threadId: String!) {
  thread(threadId: $threadId) {
-    threadId,
-    subject
-    messages {
-      id
-      subject
-      tags
-      from {
-        name
-        addr
-      }
-      to {
-        name
-        addr
-      }
-      cc {
-        name
-        addr
-      }
-      timestamp
-      body {
-        __typename
-        ... on UnhandledContentType {
-          contents
-          contentTree
-        }
-        ... on PlainText {
-          contents
-          contentTree
-        }
-        ... on Html {
-          contents
-          contentTree
-        }
-      }
-      path
-      attachments {
-        id
-        idx
-        filename
-        contentType
-        contentId
-        size
-      }
-    }
+    __typename ... on NewsPost {
+      threadId
+      isRead
+      slug
+      site
+      title
+      body
+      url
+      timestamp
+      # TODO: unread
+    }
+    __typename ... on EmailThread {
+      threadId,
+      subject
+      messages {
+        id
+        subject
+        tags
+        from {
+          name
+          addr
+          photoUrl
+        }
+        to {
+          name
+          addr
+        }
+        cc {
+          name
+          addr
+        }
+        timestamp
+        body {
+          __typename
+          ... on UnhandledContentType {
+            contents
+            contentTree
+          }
+          ... on PlainText {
+            contents
+            contentTree
+          }
+          ... on Html {
+            contents
+            contentTree
+          }
+        }
+        path
+        attachments {
+          id
+          idx
+          filename
+          contentType
+          contentId
+          size
+        }
+      }
+    }
  }
}
@@ -51,4 +65,5 @@ query ShowThreadQuery($threadId: String!) {
    fgColor
    unread
  }
  version
}


@@ -1,4 +1,4 @@
DEV_HOST=localhost
DEV_PORT=9345
-graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/graphql --output schema.json
+graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql --output schema.json
git diff schema.json


@@ -4,28 +4,20 @@
<head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
-   <link rel="stylesheet" href="https://jenil.github.io/bulmaswatch/cyborg/bulmaswatch.min.css">
-   <!--
-   <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.4/css/bulma.min.css">
-   -->
-   <!--
-   <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@1.0.0/css/bulma.min.css">
-   -->
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.3.0/css/all.min.css"
        integrity="sha512-SzlrxWUlpfuzQ+pcUCosxcglQRNAq/DZjVsC0lE40xsADsfeQoEypE+enwcOiGjk/bSuGGKHEyjSoQ1zVisanQ=="
        crossorigin="anonymous" referrerpolicy="no-referrer" />
    <link rel="icon" href="https://static.xinu.tv/favicon/letterbox.svg" />
-   <link data-trunk rel="css" href="static/style.css" />
-   <!-- Pretty checkboxes from https://justboil.github.io/bulma-checkbox/ -->
-   <link data-trunk rel="css" href="static/main.css" />
    <!-- tall thin font for user icon -->
    <link rel="preconnect" href="https://fonts.googleapis.com">
    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
    <link href="https://fonts.googleapis.com/css2?family=Poppins:wght@700&display=swap" rel="stylesheet">
-   <!-- <link data-trunk rel="css" href="static/site-specific.css" /> -->
+   <link data-trunk rel="tailwind-css" href="./src/tailwind.css" />
</head>
<body>
    <section id="app"></section>
</body>
</html>


@@ -1,10 +1,3 @@
-use gloo_net::{http::Request, Error};
-use log::info;
-const BASE_URL: &str = "/api";
-pub fn refresh() -> String {
-    format!("{BASE_URL}/refresh")
-}
pub mod urls {
    use seed::Url;
    pub fn search(query: &str, page: usize) -> Url {
@@ -19,9 +12,3 @@ pub mod urls {
        Url::new().set_hash_path(["t", tid])
    }
}
-pub async fn refresh_request() -> Result<(), Error> {
-    let t = Request::get(&refresh()).send().await?.text().await?;
-    info!("refresh {t}");
-    Ok(())
-}


@@ -44,6 +44,14 @@ pub struct AddTagMutation;
)]
pub struct RemoveTagMutation;
#[derive(GraphQLQuery)]
#[graphql(
    schema_path = "graphql/schema.json",
    query_path = "graphql/refresh.graphql",
    response_derives = "Debug"
)]
pub struct RefreshMutation;
pub async fn send_graphql<Body, Resp>(body: Body) -> Result<graphql_client::Response<Resp>, Error>
where
    Body: Serialize,
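The GraphQLQuery derive resolves schema_path and query_path at compile time and emits a snake-cased module (here refresh_mutation) holding Variables and ResponseData. A minimal sketch of driving the new mutation through this file's send_graphql helper, matching how state.rs invokes it; the error type is assumed to be gloo_net::Error as elsewhere in the app:

use graphql_client::GraphQLQuery;

async fn refresh() -> Result<(), gloo_net::Error> {
    // Build the POST body from the derived module, send it, and log the payload.
    let body = RefreshMutation::build_query(refresh_mutation::Variables {});
    let resp = send_graphql::<_, refresh_mutation::ResponseData>(body).await?;
    log::info!("refresh response: {:?}", resp.data);
    Ok(())
}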


@@ -1,12 +1,12 @@
use std::collections::HashSet;
use graphql_client::GraphQLQuery;
-use log::{error, info};
+use log::{debug, error, info, warn};
use seed::{prelude::*, *};
use thiserror::Error;
+use web_sys::HtmlElement;
use crate::{
-    api,
    api::urls,
    consts::SEARCH_RESULTS_PER_PAGE,
    graphql,
@@ -27,6 +27,8 @@ pub fn unread_query() -> &'static str {
// `init` describes what should happen when your app started.
pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
    let version = shared::build_version(bi);
    info!("Build Info: {}", version);
    if url.hash().is_none() {
        orders.request_url(urls::search(unread_query(), 0));
    } else {
@@ -37,12 +39,20 @@ pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
    // 'notmuch new' on the server periodically?
    orders.stream(streams::interval(30_000, || Msg::RefreshStart));
    orders.subscribe(on_url_changed);
    orders.stream(streams::window_event(Ev::Scroll, |_| Msg::WindowScrolled));
    build_info::build_info!(fn bi);
    Model {
        context: Context::None,
        query: "".to_string(),
        refreshing_state: RefreshingState::None,
        tags: None,
        read_completion_ratio: 0.,
        content_el: ElRef::<HtmlElement>::default(),
        versions: Version {
            client: version,
            server: None,
        },
    }
}
@@ -100,7 +110,17 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
        Msg::Noop => {}
        Msg::RefreshStart => {
            model.refreshing_state = RefreshingState::Loading;
-           orders.perform_cmd(async move { Msg::RefreshDone(api::refresh_request().await.err()) });
+           orders.perform_cmd(async move {
+               Msg::RefreshDone(
+                   send_graphql::<_, graphql::refresh_mutation::ResponseData>(
+                       graphql::RefreshMutation::build_query(
+                           graphql::refresh_mutation::Variables {},
+                       ),
+                   )
+                   .await
+                   .err(),
+               )
+           });
        }
        Msg::RefreshDone(err) => {
            model.refreshing_state = if let Some(err) = err {
@@ -108,10 +128,16 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            } else {
                RefreshingState::None
            };
-           orders.perform_cmd(async move { Msg::Reload });
+           orders.perform_cmd(async move { Msg::Refresh });
+       }
+       Msg::Refresh => {
+           orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
        }
        Msg::Reload => {
-           orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
+           window()
+               .location()
+               .reload()
+               .expect("failed to reload window");
        }
        Msg::OnResize => (),
@@ -154,6 +180,12 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            Context::None => (), // do nothing (yet?)
        };
    }
    Msg::GoToSearchResults => {
        let url = urls::search(&model.query, 0);
        info!("GoToSearchRestuls Start");
        orders.request_url(url);
        info!("GoToSearchRestuls End");
    }
    Msg::UpdateQuery(query) => model.query = query,
    Msg::SearchQuery(query) => {
@@ -161,7 +193,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
        }
        Msg::SetUnread(query, unread) => {
-           let search_url = urls::search(&model.query, 0).to_string();
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::mark_read_mutation::ResponseData>,
@@ -176,15 +207,10 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
                if let Err(e) = res {
                    error!("Failed to set read for {query} to {unread}: {e}");
                }
-               seed::window()
-                   .location()
-                   .set_href(&search_url)
-                   .expect("failed to change location");
-               Msg::Noop
+               Msg::Refresh
            });
        }
        Msg::AddTag(query, tag) => {
-           let search_url = urls::search(&model.query, 0).to_string();
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::add_tag_mutation::ResponseData>,
@@ -199,15 +225,10 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
                if let Err(e) = res {
                    error!("Failed to add tag {tag} to {query}: {e}");
                }
-               seed::window()
-                   .location()
-                   .set_href(&search_url)
-                   .expect("failed to change location");
-               Msg::Noop
+               Msg::GoToSearchResults
            });
        }
        Msg::RemoveTag(query, tag) => {
-           let search_url = urls::search(&model.query, 0).to_string();
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::remove_tag_mutation::ResponseData>,
@@ -223,11 +244,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
error!("Failed to remove tag {tag} to {query}: {e}"); error!("Failed to remove tag {tag} to {query}: {e}");
} }
// TODO: reconsider this behavior // TODO: reconsider this behavior
seed::window() Msg::GoToSearchResults
.location()
.set_href(&search_url)
.expect("failed to change location");
Msg::Noop
}); });
} }
@@ -286,18 +303,37 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
                        .map(|t| Tag {
                            name: t.name,
                            bg_color: t.bg_color,
-                           fg_color: t.fg_color,
                            unread: t.unread,
                        })
                        .collect(),
                );
+               info!("pager {:#?}", data.search.page_info);
+               let selected_threads = 'context: {
+                   if let Context::SearchResult {
+                       results,
+                       selected_threads,
+                       ..
+                   } = &model.context
+                   {
+                       let old: HashSet<_> = results.iter().map(|n| &n.thread).collect();
+                       let new: HashSet<_> = data.search.nodes.iter().map(|n| &n.thread).collect();
+                       if old == new {
+                           break 'context selected_threads.clone();
+                       }
+                   }
+                   HashSet::new()
+               };
                model.context = Context::SearchResult {
                    query: model.query.clone(),
                    results: data.search.nodes,
                    count: data.count as usize,
                    pager: data.search.page_info,
-                   selected_threads: HashSet::new(),
+                   selected_threads,
                };
+               orders.send_msg(Msg::UpdateServerVersion(data.version));
+               // Generate signal so progress bar is reset
+               orders.send_msg(Msg::WindowScrolled);
            }
            Msg::ShowThreadRequest { thread_id } => {
@@ -319,30 +355,37 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
                        .map(|t| Tag {
                            name: t.name,
                            bg_color: t.bg_color,
-                           fg_color: t.fg_color,
                            unread: t.unread,
                        })
                        .collect(),
                );
-               let mut open_messages: HashSet<_> = data
-                   .thread
-                   .messages
-                   .iter()
-                   .filter(|msg| msg.tags.iter().any(|t| t == "unread"))
-                   .map(|msg| msg.id.clone())
-                   .collect();
-               if open_messages.is_empty() {
-                   open_messages = data
-                       .thread
-                       .messages
-                       .iter()
-                       .map(|msg| msg.id.clone())
-                       .collect();
+               match &data.thread {
+                   graphql::show_thread_query::ShowThreadQueryThread::EmailThread(
+                       ShowThreadQueryThreadOnEmailThread { messages, .. },
+                   ) => {
+                       let mut open_messages: HashSet<_> = messages
+                           .iter()
+                           .filter(|msg| msg.tags.iter().any(|t| t == "unread"))
+                           .map(|msg| msg.id.clone())
+                           .collect();
+                       if open_messages.is_empty() {
+                           open_messages = messages.iter().map(|msg| msg.id.clone()).collect();
+                       }
+                       model.context = Context::ThreadResult {
+                           thread: data.thread,
+                           open_messages,
+                       };
+                   }
+                   graphql::show_thread_query::ShowThreadQueryThread::NewsPost(..) => {
+                       model.context = Context::ThreadResult {
+                           thread: data.thread,
+                           open_messages: HashSet::new(),
+                       };
+                   }
                }
-               model.context = Context::ThreadResult {
-                   thread: data.thread,
-                   open_messages,
-               };
+               orders.send_msg(Msg::UpdateServerVersion(data.version));
+               // Generate signal so progress bar is reset
+               orders.send_msg(Msg::WindowScrolled);
            }
            Msg::ShowThreadResult(bad) => {
                error!("show_thread_query error: {bad:#?}");
@@ -372,7 +415,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            {
                let threads = selected_threads
                    .iter()
-                   .map(|tid| format!("thread:{tid}"))
+                   .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
@@ -387,7 +430,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            {
                let threads = selected_threads
                    .iter()
-                   .map(|tid| format!("thread:{tid}"))
+                   .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
@@ -402,7 +445,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            {
                let threads = selected_threads
                    .iter()
-                   .map(|tid| format!("thread:{tid}"))
+                   .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
@@ -417,7 +460,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            {
                let threads = selected_threads
                    .iter()
-                   .map(|tid| format!("thread:{tid}"))
+                   .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
@@ -452,6 +495,68 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            }
        }
        Msg::MultiMsg(msgs) => msgs.into_iter().for_each(|msg| update(msg, model, orders)),
Msg::CopyToClipboard(text) => {
let clipboard = seed::window()
.navigator()
.clipboard()
.expect("couldn't get clipboard");
orders.perform_cmd(async move {
wasm_bindgen_futures::JsFuture::from(clipboard.write_text(&text))
.await
.expect("failed to copy to clipboard");
});
}
Msg::WindowScrolled => {
info!("WindowScrolled");
if let Some(el) = model.content_el.get() {
let ih = window()
.inner_height()
.expect("window height")
.unchecked_into::<js_sys::Number>()
.value_of();
let r = el.get_bounding_client_rect();
info!("r {r:?} ih {ih}");
if r.height() < ih {
// The whole content fits in the window, no scrollbar
orders.send_msg(Msg::SetProgress(0.));
return;
}
let end: f64 = r.height() - ih;
if end < 0. {
orders.send_msg(Msg::SetProgress(0.));
return;
}
// Flip Y, normally it's 0-point when the top of the content hits the top of the
// screen and goes negative from there.
let y = -r.y();
let ratio: f64 = (y / end).max(0.);
debug!(
"WindowScrolled ih {ih} end {end} ratio {ratio:.02} {}x{} @ {},{}",
r.width(),
r.height(),
r.x(),
r.y()
);
orders.send_msg(Msg::SetProgress(ratio));
} else {
orders.send_msg(Msg::SetProgress(0.));
}
}
Msg::SetProgress(ratio) => {
model.read_completion_ratio = ratio;
}
Msg::UpdateServerVersion(version) => {
if version != model.versions.client {
warn!(
"Server ({}) and client ({}) version mismatch, reloading",
version, model.versions.client
);
orders.send_msg(Msg::Reload);
}
model.versions.server = Some(version);
}
    }
}
// `Model` describes our app state.
@@ -460,6 +565,15 @@ pub struct Model {
    pub context: Context,
    pub refreshing_state: RefreshingState,
    pub tags: Option<Vec<Tag>>,
pub read_completion_ratio: f64,
pub content_el: ElRef<HtmlElement>,
pub versions: Version,
}
#[derive(Debug)]
pub struct Version {
pub client: String,
pub server: Option<String>,
}
#[derive(Error, Debug)]
@@ -493,7 +607,6 @@ pub enum Context {
pub struct Tag {
    pub name: String,
    pub bg_color: String,
-   pub fg_color: String,
    pub unread: i64,
}
@@ -507,6 +620,8 @@ pub enum RefreshingState {
pub enum Msg {
    Noop,
    // Tell the client to refresh its state
Refresh,
// Tell the client to reload whole page from server
    Reload,
    // Window has changed size
    OnResize,
@@ -515,6 +630,7 @@ pub enum Msg {
    RefreshDone(Option<gloo_net::Error>),
    NextPage,
    PreviousPage,
GoToSearchResults,
    UpdateQuery(String),
    SearchQuery(String),
@@ -542,6 +658,8 @@ pub enum Msg {
    SelectionSetNone,
    SelectionSetAll,
    SelectionAddTag(String),
#[allow(dead_code)]
// TODO
    SelectionRemoveTag(String),
    SelectionMarkAsRead,
    SelectionMarkAsUnread,
@@ -551,4 +669,10 @@ pub enum Msg {
    MessageCollapse(String),
    MessageExpand(String),
    MultiMsg(Vec<Msg>),
CopyToClipboard(String),
WindowScrolled,
SetProgress(f64),
UpdateServerVersion(String),
}

web/src/tailwind.css (new file)

@@ -0,0 +1,3 @@
@tailwind base;
@tailwind components;
@tailwind utilities;


@@ -1,136 +0,0 @@
use seed::{prelude::*, *};
use seed_hooks::{state_access::CloneState, topo, use_state};
use crate::{
api::urls,
state::{Context, Model, Msg, Tag},
view::{self, view_header, view_search_results},
};
#[topo::nested]
pub(super) fn view(model: &Model) -> Node<Msg> {
log::info!("tablet::view");
let show_icon_text = true;
// Do two queries, one without `unread` so it loads fast, then a second with unread.
let content = match &model.context {
Context::None => div![h1!["Loading"]],
Context::ThreadResult {
thread,
open_messages,
} => view::thread(thread, open_messages, show_icon_text),
Context::SearchResult {
query,
results,
count,
pager,
selected_threads,
} => view_search_results(
&query,
results.as_slice(),
*count,
pager,
selected_threads,
show_icon_text,
),
};
fn view_tag_li(display_name: &str, indent: usize, t: &Tag, search_unread: bool) -> Node<Msg> {
let href = if search_unread {
urls::search(&format!("is:unread tag:{}", t.name), 0)
} else {
urls::search(&format!("tag:{}", t.name), 0)
};
li![a![
attrs! {
At::Href => href
},
(0..indent).map(|_| span![C!["tag-indent"], ""]),
i![
C!["tag-tag", "fa-solid", "fa-tag"],
style! {
//"--fa-primary-color" => t.fg_color,
St::Color => t.bg_color,
},
],
display_name,
IF!(t.unread>0 => format!(" ({})", t.unread))
]]
}
fn matches(a: &[&str], b: &[&str]) -> usize {
std::iter::zip(a.iter(), b.iter())
.take_while(|(a, b)| a == b)
.count()
}
fn view_tag_list<'a>(
tags: impl Iterator<Item = &'a Tag>,
search_unread: bool,
) -> Vec<Node<Msg>> {
let mut lis = Vec::new();
let mut last = Vec::new();
for t in tags {
let parts: Vec<_> = t.name.split('/').collect();
let mut n = matches(&last, &parts);
if n <= parts.len() - 2 && parts.len() > 1 {
// Synthesize fake tags for proper indenting.
for i in n..parts.len() - 1 {
let display_name = parts[n];
lis.push(view_tag_li(
&display_name,
n,
&Tag {
name: parts[..i + 1].join("/"),
bg_color: "#fff".to_string(),
fg_color: "#000".to_string(),
unread: 0,
},
search_unread,
));
}
n = parts.len() - 1;
}
let display_name = parts[n];
lis.push(view_tag_li(&display_name, n, t, search_unread));
last = parts;
}
lis
}
let unread = model
.tags
.as_ref()
.map(|tags| tags.iter().filter(|t| t.unread > 0).collect())
.unwrap_or(Vec::new());
let tags_open = use_state(|| false);
let force_tags_open = unread.is_empty();
div![
C!["main-content"],
aside![
C!["tags-menu", "menu"],
IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),
IF!(!unread.is_empty() => ul![C!["menu-list"], view_tag_list(unread.into_iter(),true)]),
p![
C!["menu-label"],
IF!(!force_tags_open =>
i![C![
"fa-solid",
if tags_open.get() {
"fa-angle-up"
} else {
"fa-angle-down"
}
]]),
" Tags",
ev(Ev::Click, move |_| {
tags_open.set(!tags_open.get());
})
],
ul![
C!["menu-list"],
IF!(force_tags_open||tags_open.get() => model.tags.as_ref().map(|tags| view_tag_list(tags.iter(),false))),
]
],
div![
view_header(&model.query, &model.refreshing_state),
content,
view_header(&model.query, &model.refreshing_state),
]
]
}


@@ -1,113 +0,0 @@
use std::collections::HashSet;
use seed::{prelude::*, *};
use crate::{
api::urls,
graphql::front_page_query::*,
state::{Context, Model, Msg},
view::{self, human_age, pretty_authors, search_toolbar, set_title, tags_chiclet, view_header},
};
pub(super) fn view(model: &Model) -> Node<Msg> {
log::info!("tablet::view");
let show_icon_text = false;
let content = match &model.context {
Context::None => div![h1!["Loading"]],
Context::ThreadResult {
thread,
open_messages,
} => view::thread(thread, open_messages, show_icon_text),
Context::SearchResult {
query,
results,
count,
pager,
selected_threads,
} => search_results(
&query,
results.as_slice(),
*count,
pager,
selected_threads,
show_icon_text,
),
};
div![
view_header(&model.query, &model.refreshing_state),
content,
view_header(&model.query, &model.refreshing_state),
]
}
fn search_results(
query: &str,
results: &[FrontPageQuerySearchNodes],
count: usize,
pager: &FrontPageQuerySearchPageInfo,
selected_threads: &HashSet<String>,
show_icon_text: bool,
) -> Node<Msg> {
if query.is_empty() {
set_title("all mail");
} else {
set_title(query);
}
let rows = results.iter().map(|r| {
let tid = r.thread.clone();
let check_tid = r.thread.clone();
let datetime = human_age(r.timestamp as i64);
let unread_idx = r.tags.iter().position(|e| e == &"unread");
let mut tags = r.tags.clone();
if let Some(idx) = unread_idx {
tags.remove(idx);
};
div![
C!["row"],
label![
C!["b-checkbox", "checkbox", "is-large"],
input![attrs! {
At::Type=>"checkbox",
At::Checked=>selected_threads.contains(&tid).as_at_value(),
}],
span![C!["check"]],
ev(Ev::Input, move |e| {
if let Some(input) = e
.target()
.as_ref()
.expect("failed to get reference to target")
.dyn_ref::<web_sys::HtmlInputElement>()
{
if input.checked() {
Msg::SelectionAddThread(check_tid)
} else {
Msg::SelectionRemoveThread(check_tid)
}
} else {
Msg::Noop
}
}),
],
a![
C!["has-text-light", "summary"],
IF!(unread_idx.is_some() => C!["unread"]),
attrs! {
At::Href => urls::thread(&tid)
},
div![C!["subject"], &r.subject],
span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
div![
span![C!["is-size-7"], tags_chiclet(&tags, true)],
span![C!["is-size-7", "float-right", "date"], datetime]
]
]
]
});
let show_bulk_edit = !selected_threads.is_empty();
div![
C!["search-results"],
search_toolbar(count, pager, show_bulk_edit, show_icon_text),
div![C!["index"], rows],
search_toolbar(count, pager, show_bulk_edit, show_icon_text),
]
}

File diff suppressed because it is too large.


@@ -1,41 +0,0 @@
use seed::{prelude::*, *};
use crate::{
state::{Context, Model, Msg},
view::{self, view_header, view_search_results},
};
pub(super) fn view(model: &Model) -> Node<Msg> {
log::info!("tablet::view");
let show_icon_text = false;
// Do two queries, one without `unread` so it loads fast, then a second with unread.
let content = match &model.context {
Context::None => div![h1!["Loading"]],
Context::ThreadResult {
thread,
open_messages,
} => view::thread(thread, open_messages, show_icon_text),
Context::SearchResult {
query,
results,
count,
pager,
selected_threads,
} => view_search_results(
&query,
results.as_slice(),
*count,
pager,
selected_threads,
show_icon_text,
),
};
div![
C!["main-content"],
div![
view_header(&model.query, &model.refreshing_state),
content,
view_header(&model.query, &model.refreshing_state),
]
]
}


@@ -1,268 +0,0 @@
/* Bulma Utilities */
.b-checkbox.checkbox {
-webkit-touch-callout: none;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
/* Box-shadow on hover */
.b-checkbox.checkbox {
outline: none;
display: inline-flex;
align-items: center;
}
.b-checkbox.checkbox:not(.button) {
margin-right: 0.5em;
}
.b-checkbox.checkbox:not(.button) + .checkbox:last-child {
margin-right: 0;
}
.b-checkbox.checkbox input[type=checkbox] {
position: absolute;
left: 0;
opacity: 0;
outline: none;
z-index: -1;
}
.b-checkbox.checkbox input[type=checkbox] + .check {
width: 1.25em;
height: 1.25em;
flex-shrink: 0;
border-radius: 4px;
border: 2px solid #7a7a7a;
transition: background 150ms ease-out;
background: transparent;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check {
background: #00d1b2 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:%23fff' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #00d1b2;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-white {
background: white url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:%230a0a0a' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: white;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-black {
background: #0a0a0a url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:white' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #0a0a0a;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-light {
background: whitesmoke url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:rgba(0, 0, 0, 0.7)' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: whitesmoke;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-dark {
background: #363636 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:%23fff' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #363636;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-primary {
background: #00d1b2 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:%23fff' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #00d1b2;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-link {
background: #485fc7 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:%23fff' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #485fc7;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-info {
background: #3e8ed0 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:%23fff' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #3e8ed0;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-success {
background: #48c78e url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:%23fff' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #48c78e;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-warning {
background: #ffe08a url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:rgba(0, 0, 0, 0.7)' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #ffe08a;
}
.b-checkbox.checkbox input[type=checkbox]:checked + .check.is-danger {
background: #f14668 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Cpath style='fill:%23fff' d='M 0.04038059,0.6267767 0.14644661,0.52071068 0.42928932,0.80355339 0.3232233,0.90961941 z M 0.21715729,0.80355339 0.85355339,0.16715729 0.95961941,0.2732233 0.3232233,0.90961941 z'%3E%3C/path%3E%3C/svg%3E") no-repeat center center;
border-color: #f14668;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check {
background: #00d1b2 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:%23fff' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #00d1b2;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-white, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-white {
background: white url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:%230a0a0a' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: white;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-black, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-black {
background: #0a0a0a url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:white' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #0a0a0a;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-light, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-light {
background: whitesmoke url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:rgba(0, 0, 0, 0.7)' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: whitesmoke;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-dark, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-dark {
background: #363636 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:%23fff' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #363636;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-primary, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-primary {
background: #00d1b2 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:%23fff' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #00d1b2;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-link, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-link {
background: #485fc7 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:%23fff' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #485fc7;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-info, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-info {
background: #3e8ed0 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:%23fff' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #3e8ed0;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-success, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-success {
background: #48c78e url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:%23fff' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #48c78e;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-warning, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-warning {
background: #ffe08a url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:rgba(0, 0, 0, 0.7)' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #ffe08a;
}
.b-checkbox.checkbox input[type=checkbox]:indeterminate + .check.is-danger, .b-checkbox.checkbox input[type=checkbox].is-indeterminate + .check.is-danger {
background: #f14668 url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1 1'%3E%3Crect style='fill:%23fff' width='0.7' height='0.2' x='.15' y='.4'%3E%3C/rect%3E%3C/svg%3E") no-repeat center center;
border-color: #f14668;
}
.b-checkbox.checkbox input[type=checkbox]:focus + .check {
box-shadow: 0 0 0.5em rgba(122, 122, 122, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check {
box-shadow: 0 0 0.5em rgba(0, 209, 178, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-white {
box-shadow: 0 0 0.5em rgba(255, 255, 255, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-black {
box-shadow: 0 0 0.5em rgba(10, 10, 10, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-light {
box-shadow: 0 0 0.5em rgba(245, 245, 245, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-dark {
box-shadow: 0 0 0.5em rgba(54, 54, 54, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-primary {
box-shadow: 0 0 0.5em rgba(0, 209, 178, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-link {
box-shadow: 0 0 0.5em rgba(72, 95, 199, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-info {
box-shadow: 0 0 0.5em rgba(62, 142, 208, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-success {
box-shadow: 0 0 0.5em rgba(72, 199, 142, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-warning {
box-shadow: 0 0 0.5em rgba(255, 224, 138, 0.8);
}
.b-checkbox.checkbox input[type=checkbox]:focus:checked + .check.is-danger {
box-shadow: 0 0 0.5em rgba(241, 70, 104, 0.8);
}
.b-checkbox.checkbox .control-label {
padding-left: calc(0.75em - 1px);
}
.b-checkbox.checkbox.button {
display: flex;
}
.b-checkbox.checkbox[disabled] {
opacity: 0.5;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check {
border-color: #00d1b2;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-white {
border-color: white;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-black {
border-color: #0a0a0a;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-light {
border-color: whitesmoke;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-dark {
border-color: #363636;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-primary {
border-color: #00d1b2;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-link {
border-color: #485fc7;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-info {
border-color: #3e8ed0;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-success {
border-color: #48c78e;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-warning {
border-color: #ffe08a;
}
.b-checkbox.checkbox:hover input[type=checkbox]:not(:disabled) + .check.is-danger {
border-color: #f14668;
}
.b-checkbox.checkbox.is-small {
border-radius: 2px;
font-size: 0.75rem;
}
.b-checkbox.checkbox.is-medium {
font-size: 1.25rem;
}
.b-checkbox.checkbox.is-large {
font-size: 1.5rem;
}


@@ -0,0 +1,60 @@
.body figcaption {
color: var(--color-text) !important;
}
.body.news-post em {
border: 0 !important;
font-style: italic;
margin: inherit !important;
padding: inherit !important;
}
.body.news-post hr {
background-color: #aaa !important;
margin: .25rem 0 !important;
}
.body.news-post .number {
align-items: inherit;
background-color: inherit;
border-radius: inherit;
display: inherit;
font-size: inherit;
height: inherit;
justify-content: inherit;
margin-right: inherit;
min-width: inherit;
padding: inherit;
text-align: inherit;
vertical-align: inherit;
}
.body.news-post.site-saturday-morning-breakfast-cereal {
display: flex;
align-items: center;
justify-content: center;
flex-direction: column;
}
.body.news-post.site-slashdot i {
border-left: 2px solid #ddd;
display: block;
font-style: normal !important;
margin-bottom: 1em;
margin-top: 1em;
padding-left: 1em;
}
.body.news-post.site-news-on-redox-your-next-gen-os h1,
.body.news-post.site-news-on-redox-your-next-gen-os h2,
.body.news-post.site-news-on-redox-your-next-gen-os h3,
.body.news-post.site-news-on-redox-your-next-gen-os h4,
.body.news-post.site-news-on-redox-your-next-gen-os h5 {
color: var(--color-text) !important;
}
.body.mail code,
.body.mail pre {
color: var(--color-text);
background-color: var(--color-bg-secondary);
}


@@ -1,33 +1,44 @@
-.message {
-    display: inline-block;
-    padding: 0.5em;
-    width: 100%;
+:root {
+    --active-brightness: 0.85;
+    --border-radius: 5px;
+    --box-shadow: 2px 2px 10px;
+    --color-accent: #118bee15;
+    --color-bg: #fff;
+    --color-bg-secondary: #e9e9e9;
+    --color-link: #118bee;
+    --color-secondary: #920de9;
+    --color-secondary-accent: #920de90b;
+    --color-shadow: #f4f4f4;
+    --color-table: #118bee;
+    --color-text: #000;
+    --color-text-secondary: #999;
+    --color-scrollbar: #cacae8;
+    --font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
+    --hover-brightness: 1.2;
+    --justify-important: center;
+    --justify-normal: left;
+    --line-height: 1.5;
+    /*
+    --width-card: 285px;
+    --width-card-medium: 460px;
+    --width-card-wide: 800px;
+    */
+    --width-content: 1080px;
}
-.message .header table td {
-    border: 0;
-    padding: 0;
-}
-.message .header .media-right {
-    padding: 1rem;
-}
-.message .headers {
-    position: relative;
-    width: 100%;
-}
-.message .headers .read-status {
-    position: absolute;
-    right: 1em;
-    top: 0em;
-}
-.message .headers .header {
-    overflow: clip;
-    text-overflow: ellipsis;
-    white-space: nowrap;
-}
+@media (prefers-color-scheme: dark) {
+    :root[color-mode="user"] {
+        --color-accent: #0097fc4f;
+        --color-bg: #333;
+        --color-bg-secondary: #555;
+        --color-link: #0097fc;
+        --color-secondary: #e20de9;
+        --color-secondary-accent: #e20de94f;
+        --color-shadow: #bbbbbb20;
+        --color-table: #0097fc;
+        --color-text: #f7f7f7;
+        --color-text-secondary: #aaa;
+    }
+}
.message .body {
@@ -65,8 +76,9 @@
}
.view-part-text-plain {
-   padding: 0.5em;
+   font-family: monospace;
    overflow-wrap: break-word;
+   padding: 0.5em;
    white-space: pre-wrap;
    word-break: break-word;
    word-wrap: break-word;
@@ -167,6 +179,10 @@ input::placeholder,
    padding: 1em;
}
.search-results>nav {
margin: 1.25rem;
}
.tablet .thread h3,
.mobile .thread h3 {
    overflow-wrap: break-word;
@@ -189,8 +205,6 @@ input::placeholder,
    width: 100%;
}
-.search-results .row .checkbox {}
.search-results .row .summary {
    min-width: 0;
    width: 100%;
@@ -202,16 +216,13 @@ input::placeholder,
    white-space: nowrap;
}
-.search-results td.subject {}
-.search-results .subject .tag {}
.search-results .subject .text {
-   padding-left: 0.5rem;
-   width: 100%;
+   display: inline-block;
    overflow: hidden;
+   padding-left: 0.5rem;
    text-overflow: ellipsis;
    white-space: nowrap;
+   width: 100%;
}
.search-results .row .from {
@@ -300,3 +311,14 @@ display: none;
.button.spam {
    color: #f00;
}
progress.read-progress {
border-radius: 0;
position: fixed;
top: 0;
z-index: 999;
}
progress.read-progress.is-small {
height: .25rem;
}

web/tailwind.config.js (new file)

@@ -0,0 +1,9 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
content: ['./src/**/*.rs'],
theme: {
extend: {},
},
plugins: [],
}
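Because content points at ./src/**/*.rs, Tailwind's scanner pulls candidate class names out of string literals in the Rust sources, so classes written inside Seed's C![...] macros are picked up without any HTML templates. A hypothetical view fragment of the kind that glob exposes:

use seed::{prelude::*, *};

// Hypothetical helper; the string literals inside C![] are what
// Tailwind's content scanner extracts class names from.
fn badge<Ms>(label: &str) -> Node<Ms> {
    span![C!["inline-block", "rounded", "px-2", "text-sm"], label]
}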