Compare commits
956 Commits
server-sid...letterbox-
.cargo/config.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
+[build]
+rustflags = [ "--cfg=web_sys_unstable_apis" ]
+
+[registry]
+global-credential-providers = ["cargo:token"]
+
+[registries.xinu]
+index = "sparse+https://git.z.xinu.tv/api/packages/wathiede/cargo/"
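
For context, the [registries.xinu] entry is what the workspace's private-registry dependencies and release tooling rely on. A minimal sketch of both directions of use; the dependency form appears later in this diff, while the publish invocation is illustrative:

# In a dependent crate's Cargo.toml (this exact form appears later in this diff):
letterbox-notmuch = { version = "0.17.9", registry = "xinu" }

# Publishing a workspace crate to the same registry (illustrative invocation):
cargo publish --registry=xinu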
.envrc (new file, 10 lines)
@@ -0,0 +1,10 @@
+source_up
+
+export DATABASE_USER="newsreader";
+export DATABASE_NAME="newsreader";
+export DATABASE_HOST="nixos-07.h.xinu.tv";
+export DATABASE_URL="postgres://${DATABASE_USER}@${DATABASE_HOST}/${DATABASE_NAME}";
+export PROD_DATABASE_USER="newsreader";
+export PROD_DATABASE_NAME="newsreader";
+export PROD_DATABASE_HOST="postgres.h.xinu.tv";
+export PROD_DATABASE_URL="postgres://${PROD_DATABASE_USER}@${PROD_DATABASE_HOST}/${PROD_DATABASE_NAME}";
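
The `source_up` line implies this file is loaded by direnv. A sketch of how the variables would then be consumed (the psql call is illustrative; sqlx really does read DATABASE_URL when caching query metadata):

direnv allow                       # trust and load the .envrc on entering the directory
psql "$DATABASE_URL"               # interactive session against the dev database
cd server && cargo sqlx prepare    # sqlx reads DATABASE_URL to type-check query!() macros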
.gitea/workflows/rust.yml (new file, 67 lines)
@@ -0,0 +1,67 @@
+on: [push]
+
+name: Continuous integration
+
+jobs:
+  check:
+    name: Check
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
+      - run: cargo check
+
+  test:
+    name: Test Suite
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
+      - run: cargo test
+
+  trunk:
+    name: Trunk
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          toolchain: nightly
+          target: wasm32-unknown-unknown
+      - run: cargo install trunk
+      - run: cd web; trunk build
+
+  fmt:
+    name: Rustfmt
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          components: rustfmt
+      - name: Rustfmt Check
+        uses: actions-rust-lang/rustfmt@v1
+
+  build:
+    name: build
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
+      - run: cargo build
+
+  udeps:
+    name: Disallow unused dependencies
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          toolchain: nightly
+
+      - name: Run cargo-udeps
+        uses: aig787/cargo-udeps-action@v1
+        with:
+          version: 'latest'
+          args: '--all-targets'
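
The same gates can be reproduced locally before pushing; a sketch mirroring the workflow's steps (cargo-udeps needs the nightly toolchain, as above):

cargo check
cargo test
cargo fmt --check
cargo +nightly udeps --all-targets
(cd web && trunk build)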
Cargo.lock (generated, 7106 lines)
File diff suppressed because it is too large.
Cargo.toml (20 lines changed)
@@ -1,12 +1,18 @@
 [workspace]
 resolver = "2"
-members = [
-    "web",
-    "server",
-    "notmuch",
-    "procmail2notmuch",
-    "shared"
-]
+default-members = ["server"]
+members = ["web", "server", "notmuch", "procmail2notmuch", "shared"]
+
+[workspace.package]
+authors = ["Bill Thiede <git@xinu.tv>"]
+edition = "2021"
+license = "UNLICENSED"
+publish = ["xinu"]
+version = "0.17.22"
+repository = "https://git.z.xinu.tv/wathiede/letterbox"
+
+[profile.dev]
+opt-level = 1
 
 [profile.release]
 lto = true
Justfile (new file, 19 lines)
@@ -0,0 +1,19 @@
+export CARGO_INCREMENTAL := "0"
+export RUSTFLAGS := "-D warnings"
+
+default:
+    @echo "Run: just patch|minor|major"
+
+major: (_release "major")
+minor: (_release "minor")
+patch: (_release "patch")
+
+sqlx-prepare:
+    cd server; cargo sqlx prepare && git add .sqlx; git commit -m "cargo sqlx prepare" .sqlx || true
+
+pull:
+    git pull
+
+
+_release level: pull sqlx-prepare
+    cargo-release release -x {{ level }} --workspace --no-confirm --registry=xinu
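
Cutting a release is then a single recipe. Per the recipe dependencies above, `just patch` expands to roughly:

just patch
# roughly equivalent to:
git pull
(cd server; cargo sqlx prepare && git add .sqlx; git commit -m "cargo sqlx prepare" .sqlx || true)
cargo-release release -x patch --workspace --no-confirm --registry=xinu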
Makefile (new file, 7 lines)
@@ -0,0 +1,7 @@
+.PHONEY: release
+release:
+	(cd server; cargo sqlx prepare && git add .sqlx; git commit -m "cargo sqlx prepare" .sqlx || true)
+	bash scripts/update-crate-version.sh
+	git push
+
+all: release
dev.sh (4 lines changed)
@@ -1,7 +1,7 @@
 cd -- "$( dirname -- "${BASH_SOURCE[0]}" )"
 tmux new-session -d -s letterbox-dev
 tmux rename-window web
-tmux send-keys "cd web; trunk serve --release --address 0.0.0.0 --port 6758 --proxy-backend http://localhost:9345/ --proxy-rewrite=/api/ -w ../shared -w ../notmuch -w ./" C-m
+tmux send-keys "cd web; trunk serve -w ../.git -w ../shared -w ../notmuch -w ./" C-m
 tmux new-window -n server
-tmux send-keys "cd server; cargo watch -x run -w ../shared -w ../notmuch -w ./" C-m
+tmux send-keys "cd server; cargo watch -c -w ../.git -w ../shared -w ../notmuch -w ./ -x 'run postgres://newsreader@nixos-07.h.xinu.tv/newsreader ../target/database/newsreader /tmp/letterbox/slurp'" C-m
 tmux attach -d -t letterbox-dev
notmuch/Cargo.toml
@@ -1,17 +1,24 @@
 [package]
-name = "notmuch"
-version = "0.1.0"
-edition = "2021"
+name = "letterbox-notmuch"
+exclude = ["/testdata"]
+description = "Wrapper for calling notmuch cli"
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+repository.workspace = true
+version.workspace = true
 
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-log = "0.4.14"
+log = "0.4.27"
+mailparse = "0.16.1"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = { version = "1.0", features = ["unbounded_depth"] }
-thiserror = "1.0.30"
+thiserror = "2.0.12"
+tracing = "0.1.41"
 
 [dev-dependencies]
-itertools = "0.10.1"
+itertools = "0.14.0"
 pretty_assertions = "1"
-rayon = "1.5"
+rayon = "1.10"
notmuch/src/lib.rs
@@ -207,14 +207,16 @@
 //! ```
 
 use std::{
+    collections::HashMap,
     ffi::OsStr,
-    io::{self, BufRead, BufReader, Lines},
+    io::{self},
     path::{Path, PathBuf},
-    process::{Child, ChildStdout, Command, Stdio},
+    process::Command,
 };
 
-use log::info;
+use log::{error, info};
 use serde::{Deserialize, Serialize};
+use tracing::instrument;
 
 /// # Number of seconds since the Epoch
 pub type UnixTime = isize;
@@ -269,6 +271,12 @@ pub struct Headers {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub bcc: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(alias = "Delivered-To")]
+    pub delivered_to: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(alias = "X-Original-To")]
+    pub x_original_to: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
     pub reply_to: Option<String>,
     pub date: String,
 }
@@ -454,15 +462,21 @@ pub enum NotmuchError {
     SerdeJson(#[from] serde_json::Error),
     #[error("failed to parse bytes as str")]
     Utf8Error(#[from] std::str::Utf8Error),
+    #[error("failed to parse bytes as String")]
+    StringUtf8Error(#[from] std::string::FromUtf8Error),
     #[error("failed to parse str as int")]
     ParseIntError(#[from] std::num::ParseIntError),
+    #[error("failed to parse mail: {0}")]
+    MailParseError(#[from] mailparse::MailParseError),
 }
 
-#[derive(Default)]
+#[derive(Clone, Default)]
 pub struct Notmuch {
     config_path: Option<PathBuf>,
 }
 
+// TODO: rewrite to use tokio::process::Command and make everything async to see if that helps with
+// concurrency being more parallel.
 impl Notmuch {
     pub fn with_config<P: AsRef<Path>>(config_path: P) -> Notmuch {
         Notmuch {
@@ -470,6 +484,7 @@ impl Notmuch {
         }
     }
 
+    #[instrument(skip_all)]
     pub fn new(&self) -> Result<Vec<u8>, NotmuchError> {
         self.run_notmuch(["new"])
     }
@@ -478,38 +493,88 @@ impl Notmuch {
         self.run_notmuch(std::iter::empty::<&str>())
     }
 
+    #[instrument(skip_all, fields(query=query))]
+    pub fn tags_for_query(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
+        let res = self.run_notmuch(["search", "--format=json", "--output=tags", query])?;
+        Ok(serde_json::from_slice(&res)?)
+    }
+
+    pub fn tags(&self) -> Result<Vec<String>, NotmuchError> {
+        self.tags_for_query("*")
+    }
+
+    pub fn tag_add(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
+        self.tags_add(tag, &[search_term])
+    }
+
+    #[instrument(skip_all, fields(tag=tag,search_term=?search_term))]
+    pub fn tags_add(&self, tag: &str, search_term: &[&str]) -> Result<(), NotmuchError> {
+        let tag = format!("+{tag}");
+        let mut args = vec!["tag", &tag];
+        args.extend(search_term);
+        self.run_notmuch(&args)?;
+        Ok(())
+    }
+
+    pub fn tag_remove(&self, tag: &str, search_term: &str) -> Result<(), NotmuchError> {
+        self.tags_remove(tag, &[search_term])
+    }
+    #[instrument(skip_all, fields(tag=tag,search_term=?search_term))]
+    pub fn tags_remove(&self, tag: &str, search_term: &[&str]) -> Result<(), NotmuchError> {
+        let tag = format!("-{tag}");
+        let mut args = vec!["tag", &tag];
+        args.extend(search_term);
+        self.run_notmuch(&args)?;
+        Ok(())
+    }
+
+    #[instrument(skip_all, fields(query=query,offset=offset,limit=limit))]
     pub fn search(
         &self,
         query: &str,
        offset: usize,
         limit: usize,
     ) -> Result<SearchSummary, NotmuchError> {
-        let res = self.run_notmuch([
-            "search",
-            "--format=json",
-            &format!("--offset={offset}"),
-            &format!("--limit={limit}"),
-            query,
-        ])?;
-        Ok(serde_json::from_slice(&res)?)
+        let query = if query.is_empty() { "*" } else { query };
+
+        let res = self
+            .run_notmuch([
+                "search",
+                "--format=json",
+                &format!("--offset={offset}"),
+                &format!("--limit={limit}"),
+                query,
+            ])
+            .inspect_err(|err| error!("failed to notmuch search for query '{query}': {err}"))?;
+        Ok(serde_json::from_slice(&res).unwrap_or_else(|err| {
+            error!("failed to decode search result for query '{query}': {err}");
+            SearchSummary(Vec::new())
+        }))
     }
 
+    #[instrument(skip_all, fields(query=query))]
     pub fn count(&self, query: &str) -> Result<usize, NotmuchError> {
+        // NOTE: --output=threads is technically more correct, but really slow
+        // TODO: find a fast thread count path
+        // let res = self.run_notmuch(["count", "--output=threads", query])?;
         let res = self.run_notmuch(["count", query])?;
         // Strip '\n' from res.
-        let s = std::str::from_utf8(&res[..res.len() - 1])?;
-        Ok(s.parse()?)
+        let s = std::str::from_utf8(&res)?.trim();
+        Ok(s.parse()
+            .inspect_err(|err| error!("failed to parse count for query '{query}': {err}"))
+            .unwrap_or(0))
     }
 
+    #[instrument(skip_all, fields(query=query))]
     pub fn show(&self, query: &str) -> Result<ThreadSet, NotmuchError> {
         let slice = self.run_notmuch([
             "show",
             "--include-html=true",
-            "--entire-thread=true",
+            "--entire-thread=false",
             "--format=json",
             query,
         ])?;
-        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here an
+        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
         // use that for parsing in rust.
         let s = String::from_utf8_lossy(&slice);
         let mut deserializer = serde_json::Deserializer::from_str(&s);
@@ -519,6 +584,7 @@ impl Notmuch {
         Ok(val)
     }
 
+    #[instrument(skip_all, fields(query=query,part=part))]
     pub fn show_part(&self, query: &str, part: usize) -> Result<Part, NotmuchError> {
         let slice = self.run_notmuch([
             "show",
@@ -528,7 +594,7 @@ impl Notmuch {
             &format!("--part={}", part),
             query,
         ])?;
-        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here an
+        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
         // use that for parsing in rust.
         let s = String::from_utf8_lossy(&slice);
         let mut deserializer = serde_json::Deserializer::from_str(&s);
@@ -538,21 +604,107 @@ impl Notmuch {
         Ok(val)
     }
 
+    #[instrument(skip_all, fields(id=id))]
     pub fn show_original(&self, id: &MessageId) -> Result<Vec<u8>, NotmuchError> {
         self.show_original_part(id, 0)
     }
 
+    #[instrument(skip_all, fields(id=id,part=part))]
     pub fn show_original_part(&self, id: &MessageId, part: usize) -> Result<Vec<u8>, NotmuchError> {
+        let id = if id.starts_with("id:") {
+            id
+        } else {
+            &format!("id:{id}")
+        };
         let res = self.run_notmuch(["show", "--part", &part.to_string(), id])?;
         Ok(res)
     }
 
-    pub fn message_ids(&self, query: &str) -> Result<Lines<BufReader<ChildStdout>>, NotmuchError> {
-        let mut child = self.run_notmuch_pipe(["search", "--output=messages", query])?;
-        Ok(BufReader::new(child.stdout.take().unwrap()).lines())
+    #[instrument(skip_all, fields(query=query))]
+    pub fn message_ids(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
+        let res = self.run_notmuch(["search", "--output=messages", "--format=json", query])?;
+        Ok(serde_json::from_slice(&res)?)
     }
 
-    // TODO(wathiede): implement tags() based on "notmuch search --output=tags '*'"
+    #[instrument(skip_all, fields(query=query))]
+    pub fn files(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
+        let res = self.run_notmuch(["search", "--output=files", "--format=json", query])?;
+        Ok(serde_json::from_slice(&res)?)
+    }
+
+    #[instrument(skip_all)]
+    pub fn unread_recipients(&self) -> Result<HashMap<String, usize>, NotmuchError> {
+        let slice = self.run_notmuch([
+            "show",
+            "--include-html=false",
+            "--entire-thread=false",
+            "--body=false",
+            "--format=json",
+            // Arbitrary limit to prevent too much work
+            "--limit=1000",
+            "is:unread",
+        ])?;
+        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
+        // use that for parsing in rust.
+        let s = String::from_utf8_lossy(&slice);
+        let mut deserializer = serde_json::Deserializer::from_str(&s);
+        deserializer.disable_recursion_limit();
+        let ts: ThreadSet = serde::de::Deserialize::deserialize(&mut deserializer)?;
+        deserializer.end()?;
+        let mut r = HashMap::new();
+        fn collect_from_thread_node(
+            r: &mut HashMap<String, usize>,
+            tn: &ThreadNode,
+        ) -> Result<(), NotmuchError> {
+            let Some(msg) = &tn.0 else {
+                return Ok(());
+            };
+            let mut addrs = vec![];
+            let hdr = &msg.headers.to;
+            if let Some(to) = hdr {
+                addrs.push(to);
+            } else {
+                let hdr = &msg.headers.x_original_to;
+                if let Some(to) = hdr {
+                    addrs.push(to);
+                } else {
+                    let hdr = &msg.headers.delivered_to;
+                    if let Some(to) = hdr {
+                        addrs.push(to);
+                    };
+                };
+            };
+            let hdr = &msg.headers.cc;
+            if let Some(cc) = hdr {
+                addrs.push(cc);
+            };
+            for recipient in addrs {
+                mailparse::addrparse(&recipient)?
+                    .into_inner()
+                    .iter()
+                    .for_each(|a| {
+                        let mailparse::MailAddr::Single(si) = a else {
+                            return;
+                        };
+                        let addr = &si.addr;
+
+                        if addr == "couchmoney@gmail.com" || addr.ends_with("@xinu.tv") {
+                            *r.entry(addr.to_lowercase()).or_default() += 1;
+                        }
+                    });
+            }
+            Ok(())
+        }
+        for t in ts.0 {
+            for tn in t.0 {
+                collect_from_thread_node(&mut r, &tn)?;
+                for sub_tn in tn.1 {
+                    collect_from_thread_node(&mut r, &sub_tn)?;
+                }
+            }
+        }
+        Ok(r)
+    }
+
     fn run_notmuch<I, S>(&self, args: I) -> Result<Vec<u8>, NotmuchError>
     where
@@ -568,21 +720,6 @@ impl Notmuch {
         let out = cmd.output()?;
         Ok(out.stdout)
     }
-
-    fn run_notmuch_pipe<I, S>(&self, args: I) -> Result<Child, NotmuchError>
-    where
-        I: IntoIterator<Item = S>,
-        S: AsRef<OsStr>,
-    {
-        let mut cmd = Command::new("notmuch");
-        if let Some(config_path) = &self.config_path {
-            cmd.arg("--config").arg(config_path);
-        }
-        cmd.args(args);
-        info!("{:?}", &cmd);
-        let child = cmd.stdout(Stdio::piped()).spawn()?;
-        Ok(child)
-    }
 }
 
 #[cfg(test)]
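
For orientation, a minimal sketch of driving the expanded API from a dependent crate. The crate name and method signatures are taken from the diff above; the queries and tag names are illustrative:

use letterbox_notmuch::Notmuch;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Shells out to `notmuch` with the user's default config;
    // with_config() would point it at an explicit config file instead.
    let nm = Notmuch::default();

    // count() now trims trailing whitespace and logs-and-defaults to 0
    // on a parse failure instead of returning an error.
    println!("unread: {}", nm.count("is:unread")?);

    // The new slice-based API tags several search terms in one notmuch invocation.
    nm.tags_add("newsletter", &["from:news@example.com", "from:weekly@example.com"])?;

    // message_ids() returns a materialized Vec<String> (JSON output) instead of
    // a line iterator over a child process pipe.
    for id in nm.message_ids("tag:unprocessed")? {
        println!("{id}");
    }
    Ok(())
}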
notmuch/tests (integration test; filename not captured in this view)
@@ -1,11 +1,10 @@
 use std::{
     error::Error,
     io::{stdout, Write},
-    time::{Duration, Instant},
+    time::Instant,
 };
 
-use itertools::Itertools;
-use notmuch::{Notmuch, NotmuchError, SearchSummary, ThreadSet};
+use letterbox_notmuch::Notmuch;
 use rayon::iter::{ParallelBridge, ParallelIterator};
 
 #[test]
@@ -23,11 +22,11 @@ fn parse_one() -> Result<(), Box<dyn Error>> {
     let total = nm.count("*")? as f32;
     let start = Instant::now();
     nm.message_ids("*")?
+        .iter()
         .enumerate()
         .par_bridge()
         .for_each(|(i, msg)| {
-            let msg = msg.expect("failed to unwrap msg");
-            let ts = nm
+            let _ts = nm
                 .show(&msg)
                 .expect(&format!("failed to show msg: {}", msg));
             //println!("{:?}", ts);
@@ -77,11 +76,9 @@ fn parse_bulk() -> Result<(), Box<dyn Error>> {
         .into_iter()
         .enumerate()
         //.par_bridge()
-        .for_each(|(i, chunk)| {
-            let msgs: Result<Vec<_>, _> = chunk.collect();
-            let msgs = msgs.expect("failed to unwrap msg");
+        .for_each(|(i, msgs)| {
             let query = msgs.join(" OR ");
-            let ts = nm
+            let _ts = nm
                 .show(&query)
                 .expect(&format!("failed to show msgs: {}", query));
             //println!("{:?}", ts);
procmail2notmuch/Cargo.toml
@@ -1,9 +1,20 @@
 [package]
-name = "procmail2notmuch"
-version = "0.1.0"
-edition = "2021"
+name = "letterbox-procmail2notmuch"
+description = "Tool for generating notmuch rules from procmail"
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+repository.workspace = true
+version.workspace = true
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-anyhow = "1.0.69"
+anyhow = "1.0.98"
+clap = { version = "4.5.37", features = ["derive", "env"] }
+letterbox-notmuch = { version = "0.17.9", registry = "xinu" }
+letterbox-shared = { version = "0.17.9", registry = "xinu" }
+serde = { version = "1.0.219", features = ["derive"] }
+sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio"] }
+tokio = { version = "1.44.2", features = ["rt", "macros", "rt-multi-thread"] }
procmail2notmuch/src/main.rs
@@ -1,210 +1,36 @@
-use std::{convert::Infallible, io::Write, str::FromStr};
+use std::{collections::HashMap, io::Write};
 
-#[derive(Debug, Default)]
-enum MatchType {
-    From,
-    Sender,
-    To,
-    Cc,
-    Subject,
-    List,
-    DeliveredTo,
-    XForwardedTo,
-    ReplyTo,
-    XOriginalTo,
-    XSpam,
-    Body,
-    #[default]
-    Unknown,
-}
-
-#[derive(Debug, Default)]
-struct Match {
-    match_type: MatchType,
-    needle: String,
-}
-
-#[derive(Debug, Default)]
-struct Rule {
-    matches: Vec<Match>,
-    tags: Vec<String>,
-}
-
-fn unescape(s: &str) -> String {
-    s.replace('\\', "")
-}
-
-fn cleanup_match(prefix: &str, s: &str) -> String {
-    unescape(&s[prefix.len()..]).replace(".*", "")
-}
-
-mod matches {
-    pub const TO: &'static str = "TO";
-    pub const CC: &'static str = "Cc";
-    pub const TOCC: &'static str = "(TO|Cc)";
-    pub const FROM: &'static str = "From";
-    pub const SENDER: &'static str = "Sender";
-    pub const SUBJECT: &'static str = "Subject";
-    pub const DELIVERED_TO: &'static str = "Delivered-To";
-    pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
-    pub const REPLY_TO: &'static str = "Reply-To";
-    pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
-    pub const LIST_ID: &'static str = "List-ID";
-    pub const X_SPAM: &'static str = "X-Spam";
-    pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
-}
-
-impl FromStr for Match {
-    type Err = Infallible;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        // Examples:
-        // "* 1^0 ^TOsonyrewards.com@xinu.tv"
-        // "* ^TOsonyrewards.com@xinu.tv"
-        let mut it = s.split_whitespace().skip(1);
-        let mut needle = it.next().unwrap();
-        if needle == "1^0" {
-            needle = it.next().unwrap();
-        }
-        let mut needle = vec![needle];
-        needle.extend(it);
-        let needle = needle.join(" ");
-        let first = needle.chars().nth(0).unwrap_or(' ');
-        use matches::*;
-        if first == '^' {
-            let needle = &needle[1..];
-            if needle.starts_with(TO) {
-                return Ok(Match {
-                    match_type: MatchType::To,
-                    needle: cleanup_match(TO, needle),
-                });
-            } else if needle.starts_with(FROM) {
-                return Ok(Match {
-                    match_type: MatchType::From,
-                    needle: cleanup_match(FROM, needle),
-                });
-            } else if needle.starts_with(CC) {
-                return Ok(Match {
-                    match_type: MatchType::Cc,
-                    needle: cleanup_match(CC, needle),
-                });
-            } else if needle.starts_with(TOCC) {
-                return Ok(Match {
-                    match_type: MatchType::To,
-                    needle: cleanup_match(TOCC, needle),
-                });
-            } else if needle.starts_with(SENDER) {
-                return Ok(Match {
-                    match_type: MatchType::Sender,
-                    needle: cleanup_match(SENDER, needle),
-                });
-            } else if needle.starts_with(SUBJECT) {
-                return Ok(Match {
-                    match_type: MatchType::Subject,
-                    needle: cleanup_match(SUBJECT, needle),
-                });
-            } else if needle.starts_with(X_ORIGINAL_TO) {
-                return Ok(Match {
-                    match_type: MatchType::XOriginalTo,
-                    needle: cleanup_match(X_ORIGINAL_TO, needle),
-                });
-            } else if needle.starts_with(LIST_ID) {
-                return Ok(Match {
-                    match_type: MatchType::List,
-                    needle: cleanup_match(LIST_ID, needle),
-                });
-            } else if needle.starts_with(REPLY_TO) {
-                return Ok(Match {
-                    match_type: MatchType::ReplyTo,
-                    needle: cleanup_match(REPLY_TO, needle),
-                });
-            } else if needle.starts_with(X_SPAM_FLAG) {
-                return Ok(Match {
-                    match_type: MatchType::XSpam,
-                    needle: '*'.to_string(),
-                });
-            } else if needle.starts_with(X_SPAM) {
-                return Ok(Match {
-                    match_type: MatchType::XSpam,
-                    needle: '*'.to_string(),
-                });
-            } else if needle.starts_with(DELIVERED_TO) {
-                return Ok(Match {
-                    match_type: MatchType::DeliveredTo,
-                    needle: cleanup_match(DELIVERED_TO, needle),
-                });
-            } else if needle.starts_with(X_FORWARDED_TO) {
-                return Ok(Match {
-                    match_type: MatchType::XForwardedTo,
-                    needle: cleanup_match(X_FORWARDED_TO, needle),
-                });
-            } else {
-                unreachable!("needle: '{needle}'")
-            }
-        } else {
-            return Ok(Match {
-                match_type: MatchType::Body,
-                needle: cleanup_match("", &needle),
-            });
-        }
-    }
-}
-
-fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()> {
-    // TODO(wathiede): if reindexing this many tags is too slow, see if combining rules per tag is
-    // faster.
-    let mut lines = Vec::new();
-    for r in rules {
-        for m in &r.matches {
-            for t in &r.tags {
-                if let MatchType::Unknown = m.match_type {
-                    eprintln!("rule has unknown match {:?}", r);
-                    continue;
-                }
-
-                let rule = match m.match_type {
-                    MatchType::From => "from:",
-                    // TODO(wathiede): something more specific?
-                    MatchType::Sender => "from:",
-                    MatchType::To => "to:",
-                    MatchType::Cc => "to:",
-                    MatchType::Subject => "subject:",
-                    MatchType::List => "List-ID:",
-                    MatchType::Body => "",
-                    // TODO(wathiede): these will probably require adding fields to notmuch
-                    // index. Handle them later.
-                    MatchType::DeliveredTo
-                    | MatchType::XForwardedTo
-                    | MatchType::ReplyTo
-                    | MatchType::XOriginalTo
-                    | MatchType::XSpam => continue,
-                    MatchType::Unknown => unreachable!(),
-                };
-                // Preserve unread status if run with --remove-all
-                lines.push(format!(
-                    r#"-unprocessed +{} +unread -- is:unread tag:unprocessed {}"{}""#,
-                    t, rule, m.needle
-                ));
-                lines.push(format!(
-                    // TODO(wathiede): this assumes `notmuch new` is configured to add
-                    // `tag:unprocessed` to all new mail.
-                    r#"-unprocessed +{} -- tag:unprocessed {}"{}""#,
-                    t, rule, m.needle
-                ));
-            }
-        }
-    }
-    lines.sort();
-    for l in lines {
-        writeln!(w, "{l}")?;
-    }
-    Ok(())
-}
-
-fn main() -> anyhow::Result<()> {
-    let input = "/home/wathiede/dotfiles/procmailrc";
+use clap::{Parser, Subcommand};
+use letterbox_shared::{cleanup_match, Match, MatchType, Rule};
+use sqlx::{types::Json, PgPool};
+
+#[derive(Debug, Subcommand)]
+enum Mode {
+    Debug,
+    Notmuchrc,
+    LoadSql {
+        #[arg(short, long)]
+        dsn: String,
+    },
+}
+
+/// Simple program to greet a person
+#[derive(Parser, Debug)]
+#[command(version, about, long_about = None)]
+struct Args {
+    #[arg(short, long, default_value = "/home/wathiede/dotfiles/procmailrc")]
+    input: String,
+
+    #[command(subcommand)]
+    mode: Mode,
+}
+
+#[tokio::main]
+async fn main() -> anyhow::Result<()> {
+    let args = Args::parse();
     let mut rules = Vec::new();
     let mut cur_rule = Rule::default();
-    for l in std::fs::read_to_string(input)?.lines() {
+    for l in std::fs::read_to_string(args.input)?.lines() {
         let l = if let Some(idx) = l.find('#') {
             &l[..idx]
         } else {
@@ -222,6 +48,9 @@ fn main() -> anyhow::Result<()> {
         match first {
             ':' => {
                 // start of rule
+
+                // If carbon-copy flag present, don't stop on match
+                cur_rule.stop_on_match = !l.contains('c');
             }
             '*' => {
                 // add to current rule
@@ -230,26 +59,119 @@ fn main() -> anyhow::Result<()> {
             }
             '.' => {
                 // delivery to folder
-                cur_rule.tags.push(cleanup_match(
+                cur_rule.tag = cleanup_match(
                     "",
                     &l.replace('.', "/")
                         .replace(' ', "")
                         .trim_matches('/')
                         .to_string(),
-                ));
+                );
                 rules.push(cur_rule);
                 cur_rule = Rule::default();
             }
+            '/' => cur_rule = Rule::default(), // Ex. /dev/null
             '|' => cur_rule = Rule::default(), // external command
             '$' => {
                 // TODO(wathiede): tag messages with no other tag as 'inbox'
-                cur_rule.tags.push(cleanup_match("", "inbox"));
+                cur_rule.tag = cleanup_match("", "inbox");
                 rules.push(cur_rule);
                 cur_rule = Rule::default();
             } // variable, should only be $DEFAULT in my config
-            _ => panic!("Unhandled first character '{}' {}", first, l),
+            _ => panic!("Unhandled first character '{}'\nLine: {}", first, l),
         }
     }
-    notmuch_from_rules(std::io::stdout(), &rules)?;
+    match args.mode {
+        Mode::Debug => print_rules(&rules),
+        Mode::Notmuchrc => notmuch_from_rules(std::io::stdout(), &rules)?,
+        Mode::LoadSql { dsn } => load_sql(&dsn, &rules).await?,
+    }
+    Ok(())
+}
+
+fn print_rules(rules: &[Rule]) {
+    let mut tally = HashMap::new();
+    for r in rules {
+        for m in &r.matches {
+            *tally.entry(m.match_type).or_insert(0) += 1;
+        }
+    }
+    let mut sorted: Vec<_> = tally.iter().map(|(k, v)| (v, k)).collect();
+    sorted.sort();
+    sorted.reverse();
+    for (v, k) in sorted {
+        println!("{k:?}: {v}");
+    }
+}
+
+fn notmuch_from_rules<W: Write>(mut w: W, rules: &[Rule]) -> anyhow::Result<()> {
+    // TODO(wathiede): if reindexing this many tags is too slow, see if combining rules per tag is
+    // faster.
+    let mut lines = Vec::new();
+    for r in rules {
+        for m in &r.matches {
+            let t = &r.tag;
+            if let MatchType::Unknown = m.match_type {
+                eprintln!("rule has unknown match {:?}", r);
+                continue;
+            }
+
+            let rule = match m.match_type {
+                MatchType::From => "from:",
+                // TODO(wathiede): something more specific?
+                MatchType::Sender => "from:",
+                MatchType::To => "to:",
+                MatchType::Cc => "to:",
+                MatchType::Subject => "subject:",
+                MatchType::ListId => "List-ID:",
+                MatchType::Body => "",
+                // TODO(wathiede): these will probably require adding fields to notmuch
+                // index. Handle them later.
+                MatchType::DeliveredTo
+                | MatchType::XForwardedTo
+                | MatchType::ReplyTo
+                | MatchType::XOriginalTo
+                | MatchType::XSpam => continue,
+                MatchType::Unknown => unreachable!(),
+            };
+            // Preserve unread status if run with --remove-all
+            lines.push(format!(
+                r#"-unprocessed +{} +unread -- is:unread tag:unprocessed {}"{}""#,
+                t, rule, m.needle
+            ));
+            lines.push(format!(
+                // TODO(wathiede): this assumes `notmuch new` is configured to add
+                // `tag:unprocessed` to all new mail.
+                r#"-unprocessed +{} -- tag:unprocessed {}"{}""#,
+                t, rule, m.needle
+            ));
+        }
+    }
+    lines.sort();
+    for l in lines {
+        writeln!(w, "{l}")?;
+    }
+    Ok(())
+}
+
+async fn load_sql(dsn: &str, rules: &[Rule]) -> anyhow::Result<()> {
+    let pool = PgPool::connect(dsn).await?;
+    println!("clearing email_rule table");
+    sqlx::query!("DELETE FROM email_rule")
+        .execute(&pool)
+        .await?;
+
+    for (order, rule) in rules.iter().enumerate() {
+        println!("inserting {order}: {rule:?}");
+        sqlx::query!(
+            r#"
+INSERT INTO email_rule (sort_order, rule)
+VALUES ($1, $2)
+            "#,
+            order as i32,
+            Json(rule) as _
+        )
+        .execute(&pool)
+        .await?;
+    }
     Ok(())
 }
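
The clap-based CLI replaces the old hard-coded path and single output mode. Assuming clap's default kebab-case subcommand naming, usage would look roughly like:

cargo run -- debug                            # tally match types parsed from the procmailrc
cargo run -- notmuchrc > notmuch.tags         # emit notmuch tagging rules to stdout
cargo run -- load-sql --dsn "$DATABASE_URL"   # replace the email_rule table contents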
|
|||||||
10
procmail2notmuch/update.sh
Executable file
10
procmail2notmuch/update.sh
Executable file
@@ -0,0 +1,10 @@
|
|||||||
|
set -e
|
||||||
|
cd ~/dotfiles
|
||||||
|
git diff
|
||||||
|
scp nasx:.procmailrc procmailrc
|
||||||
|
git diff
|
||||||
|
cd ~/src/xinu.tv/letterbox/procmail2notmuch
|
||||||
|
cargo run > /tmp/notmuch.tags
|
||||||
|
mv /tmp/notmuch.tags ~/dotfiles/notmuch.tags
|
||||||
|
cd ~/dotfiles
|
||||||
|
git diff
|
||||||
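The script regenerates notmuch.tags from the current .procmailrc (pulled from host nasx) and stages both in ~/dotfiles; the bare cargo run presumably invokes the Notmuchrc mode shown above, since its stdout is redirected into the tags file.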

renovate.json (new file, +6)
@@ -0,0 +1,6 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": [
    "config:recommended"
  ]
}

scripts/update-crate-version.sh (new executable file, +5)
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
set -e -x
cargo-set-version set-version --bump patch
VERSION="$(awk -F\" '/^version/ {print $2}' server/Cargo.toml)"
git commit Cargo.lock */Cargo.toml -m "Bumping version to ${VERSION:?}"

server/.sqlx/query-126e16a4675e8d79f330b235f9e1b8614ab1e1526e4e69691c5ebc70d54a42ef.json (generated, new file, +22)
@@ -0,0 +1,22 @@
{
  "db_name": "PostgreSQL",
  "query": "\nSELECT\n url\nFROM email_photo ep\nJOIN email_address ea\nON ep.id = ea.email_photo_id\nWHERE\n address = $1\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "url",
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": [
      false
    ]
  },
  "hash": "126e16a4675e8d79f330b235f9e1b8614ab1e1526e4e69691c5ebc70d54a42ef"
}
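The server/.sqlx/query-*.json files in this and the following hunks are sqlx's offline query metadata, as written out by cargo sqlx prepare: each records a query's parameter and result-column types so the query! macros can type-check builds without a live database connection.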

server/.sqlx/query-2dcbedef656e1b725c5ba4fb67d31ce7962d8714449b2fb630f49a7ed1acc270.json (generated, new file, +32)
@@ -0,0 +1,32 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT\n site,\n name,\n count (\n NOT is_read\n OR NULL\n ) unread\nFROM\n post AS p\n JOIN feed AS f ON p.site = f.slug --\n -- TODO: figure this out to make the query faster when only looking for unread\n --WHERE\n -- (\n -- NOT $1\n -- OR NOT is_read\n -- )\nGROUP BY\n 1,\n 2\nORDER BY\n site\n",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "site",
        "type_info": "Text"
      },
      {
        "ordinal": 1,
        "name": "name",
        "type_info": "Text"
      },
      {
        "ordinal": 2,
        "name": "unread",
        "type_info": "Int8"
      }
    ],
    "parameters": {
      "Left": []
    },
    "nullable": [
      true,
      true,
      null
    ]
  },
  "hash": "2dcbedef656e1b725c5ba4fb67d31ce7962d8714449b2fb630f49a7ed1acc270"
}

server/.sqlx/query-383221a94bc3746322ba78e41cde37994440ee67dc32e88d2394c51211bde6cd.json (generated, new file, +70)
@@ -0,0 +1,70 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT\n date,\n is_read,\n link,\n site,\n summary,\n clean_summary,\n title,\n name,\n homepage\nFROM\n post AS p\nINNER JOIN feed AS f ON p.site = f.slug\nWHERE\n uid = $1\n",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "date",
        "type_info": "Timestamp"
      },
      {
        "ordinal": 1,
        "name": "is_read",
        "type_info": "Bool"
      },
      {
        "ordinal": 2,
        "name": "link",
        "type_info": "Text"
      },
      {
        "ordinal": 3,
        "name": "site",
        "type_info": "Text"
      },
      {
        "ordinal": 4,
        "name": "summary",
        "type_info": "Text"
      },
      {
        "ordinal": 5,
        "name": "clean_summary",
        "type_info": "Text"
      },
      {
        "ordinal": 6,
        "name": "title",
        "type_info": "Text"
      },
      {
        "ordinal": 7,
        "name": "name",
        "type_info": "Text"
      },
      {
        "ordinal": 8,
        "name": "homepage",
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": [
      true,
      true,
      false,
      true,
      true,
      true,
      true,
      true,
      true
    ]
  },
  "hash": "383221a94bc3746322ba78e41cde37994440ee67dc32e88d2394c51211bde6cd"
}

server/.sqlx/query-3d271b404f06497a5dcde68cf6bf07291d70fa56058ea736ac24e91d33050c04.json (generated, new file, +32)
@@ -0,0 +1,32 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT\n p.id,\n link,\n clean_summary\nFROM\n post AS p\nINNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts\nWHERE\n search_summary IS NULL\n -- TODO remove AND link ~ '^<'\nORDER BY\n ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)\nLIMIT 100;\n",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Int4"
      },
      {
        "ordinal": 1,
        "name": "link",
        "type_info": "Text"
      },
      {
        "ordinal": 2,
        "name": "clean_summary",
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Left": []
    },
    "nullable": [
      false,
      false,
      true
    ]
  },
  "hash": "3d271b404f06497a5dcde68cf6bf07291d70fa56058ea736ac24e91d33050c04"
}

server/.sqlx/query-6c5b0a96f45f78795732ea428cc01b4eab28b7150aa37387e7439a6b0b58e88c.json (generated, new file, +20)
@@ -0,0 +1,20 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT rule as \"rule: Json<Rule>\"\n FROM email_rule\n ORDER BY sort_order\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "rule: Json<Rule>",
        "type_info": "Jsonb"
      }
    ],
    "parameters": {
      "Left": []
    },
    "nullable": [
      false
    ]
  },
  "hash": "6c5b0a96f45f78795732ea428cc01b4eab28b7150aa37387e7439a6b0b58e88c"
}

server/.sqlx/query-8c1b3c78649135e98b89092237750088433f7ff1b7c2ddeedec553406ea9f203.json (generated, new file, +24)
@@ -0,0 +1,24 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT COUNT(*) AS count\nFROM\n post\nWHERE\n (\n $1::text IS NULL\n OR site = $1\n )\n AND (\n NOT $2\n OR NOT is_read\n )\n AND (\n $3::text IS NULL\n OR TO_TSVECTOR('english', search_summary)\n @@ WEBSEARCH_TO_TSQUERY('english', $3)\n )\n",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "count",
        "type_info": "Int8"
      }
    ],
    "parameters": {
      "Left": [
        "Text",
        "Bool",
        "Text"
      ]
    },
    "nullable": [
      null
    ]
  },
  "hash": "8c1b3c78649135e98b89092237750088433f7ff1b7c2ddeedec553406ea9f203"
}

server/.sqlx/query-b39147b9d06171cb742141eda4675688cb702fb284758b1224ed3aa2d7f3b3d9.json (generated, new file, +15)
@@ -0,0 +1,15 @@
{
  "db_name": "PostgreSQL",
  "query": "UPDATE\n post\nSET\n is_read = $1\nWHERE\n uid = $2\n",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Bool",
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "b39147b9d06171cb742141eda4675688cb702fb284758b1224ed3aa2d7f3b3d9"
}

server/.sqlx/query-ef8327f039dbfa8f4e59b7a77a6411252a346bf51cf940024a17d9fbb2df173c.json (generated, new file, +15)
@@ -0,0 +1,15 @@
{
  "db_name": "PostgreSQL",
  "query": "UPDATE post SET search_summary = $1 WHERE id = $2",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text",
        "Int4"
      ]
    },
    "nullable": []
  },
  "hash": "ef8327f039dbfa8f4e59b7a77a6411252a346bf51cf940024a17d9fbb2df173c"
}

server/.sqlx/query-fc4607f02cc76a5f3a6629cce4507c74f52ae44820897b47365da3f339d1da06.json (generated, new file, +56)
@@ -0,0 +1,56 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT\n site,\n date,\n is_read,\n title,\n uid,\n name\nFROM\n post p\n JOIN feed f ON p.site = f.slug\nWHERE\n ($1::text IS NULL OR site = $1)\n AND (\n NOT $2\n OR NOT is_read\n )\n AND (\n $5 :: text IS NULL\n OR to_tsvector('english', search_summary) @@ websearch_to_tsquery('english', $5)\n )\nORDER BY\n date DESC,\n title OFFSET $3\nLIMIT\n $4\n",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "site",
        "type_info": "Text"
      },
      {
        "ordinal": 1,
        "name": "date",
        "type_info": "Timestamp"
      },
      {
        "ordinal": 2,
        "name": "is_read",
        "type_info": "Bool"
      },
      {
        "ordinal": 3,
        "name": "title",
        "type_info": "Text"
      },
      {
        "ordinal": 4,
        "name": "uid",
        "type_info": "Text"
      },
      {
        "ordinal": 5,
        "name": "name",
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Left": [
        "Text",
        "Bool",
        "Int8",
        "Int8",
        "Text"
      ]
    },
    "nullable": [
      true,
      true,
      true,
      true,
      false,
      true
    ]
  },
  "hash": "fc4607f02cc76a5f3a6629cce4507c74f52ae44820897b47365da3f339d1da06"
}

@@ -1,23 +1,59 @@
 [package]
-name = "server"
+name = "letterbox-server"
-version = "0.1.0"
+default-run = "letterbox-server"
-edition = "2021"
+description = "Backend for letterbox"
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+repository.workspace = true
+version.workspace = true

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-rocket = { version = "0.5.0-rc.2", features = [ "json" ] }
-rocket_cors = { git = "https://github.com/lawliet89/rocket_cors", branch = "master" }
-notmuch = { path = "../notmuch" }
-shared = { path = "../shared" }
-serde_json = "1.0.87"
-thiserror = "1.0.37"
-serde = { version = "1.0.147", features = ["derive"] }
-log = "0.4.17"
-tokio = "1.26.0"
-glog = "0.1.0"
+ammonia = "4.1.0"
+anyhow = "1.0.98"
+async-graphql = { version = "7", features = ["log"] }
+async-graphql-axum = "7.0.16"
+async-trait = "0.1.88"
+axum = { version = "0.8.3", features = ["ws"] }
+axum-macros = "0.5.0"
+build-info = "0.0.40"
+cacher = { version = "0.2.0", registry = "xinu" }
+chrono = "0.4.40"
+clap = { version = "4.5.37", features = ["derive"] }
+css-inline = "0.14.4"
+futures = "0.3.31"
+headers = "0.4.0"
+html-escape = "0.2.13"
+letterbox-notmuch = { path = "../notmuch", version = "0.17.22", registry = "xinu" }
+letterbox-shared = { path = "../shared", version = "0.17.22", registry = "xinu" }
+linkify = "0.10.0"
+lol_html = "2.3.0"
+mailparse = "0.16.1"
+maplit = "1.0.2"
+memmap = "0.7.0"
+regex = "1.11.1"
+reqwest = { version = "0.12.15", features = ["blocking"] }
+scraper = "0.23.1"
+serde = { version = "1.0.219", features = ["derive"] }
+serde_json = "1.0.140"
+sqlx = { version = "0.8.5", features = ["postgres", "runtime-tokio", "time"] }
+tantivy = { version = "0.24.1", optional = true }
+thiserror = "2.0.12"
+tokio = "1.44.2"
+tower-http = { version = "0.6.2", features = ["trace"] }
+tracing = "0.1.41"
+url = "2.5.4"
+urlencoding = "2.1.3"
+#xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
+#xtracing = { path = "../../xtracing" }
+xtracing = { version = "0.3.2", registry = "xinu" }

-[dependencies.rocket_contrib]
-version = "0.4.11"
-default-features = false
-features = ["json"]
+[build-dependencies]
+build-info-build = "0.0.40"
+
+[features]
+#default = [ "tantivy" ]
+tantivy = ["dep:tantivy"]

@@ -1,9 +1,13 @@
 [release]
 address = "0.0.0.0"
 port = 9345
+newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
+newsreader_tantivy_db_path = "../target/database/newsreader"

 [debug]
-address = "0.0.0.0"
 port = 9345
 # Uncomment to make it production like.
 #log_level = "critical"
+newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
+newsreader_tantivy_db_path = "../target/database/newsreader"
+slurp_cache_path = "/tmp/letterbox/slurp"

server/build.rs (new file, +5)
@@ -0,0 +1,5 @@
fn main() {
    // Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
    // and `build_info::format!` in the main program.
    build_info_build::build_script();
}

@@ -0,0 +1,3 @@
DROP INDEX IF EXISTS post_summary_idx;
DROP INDEX IF EXISTS post_site_idx;
DROP INDEX IF EXISTS post_title_idx;

@@ -0,0 +1,3 @@
CREATE INDEX post_summary_idx ON post USING GIN (to_tsvector('english', summary));
CREATE INDEX post_site_idx ON post USING GIN (to_tsvector('english', site));
CREATE INDEX post_title_idx ON post USING GIN (to_tsvector('english', title));

@@ -0,0 +1,24 @@
BEGIN;

ALTER TABLE IF EXISTS public."Email" DROP CONSTRAINT IF EXISTS email_avatar_fkey;
ALTER TABLE IF EXISTS public."EmailDisplayName" DROP CONSTRAINT IF EXISTS email_id_fk;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_to_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_cc_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_from_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_header_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_file_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_body_id_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_thread_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_tag_fkey;

DROP TABLE IF EXISTS public."Email";
DROP TABLE IF EXISTS public."EmailDisplayName";
DROP TABLE IF EXISTS public."Message";
DROP TABLE IF EXISTS public."Header";
DROP TABLE IF EXISTS public."File";
DROP TABLE IF EXISTS public."Avatar";
DROP TABLE IF EXISTS public."Body";
DROP TABLE IF EXISTS public."Thread";
DROP TABLE IF EXISTS public."Tag";

END;

server/migrations/20241218010438_create-email-tables.up.sql (new file, +174)
@@ -0,0 +1,174 @@
-- This script was generated by the ERD tool in pgAdmin 4.
-- Please log an issue at https://github.com/pgadmin-org/pgadmin4/issues/new/choose if you find any bugs, including reproduction steps.
BEGIN;

ALTER TABLE IF EXISTS public."Email" DROP CONSTRAINT IF EXISTS email_avatar_fkey;
ALTER TABLE IF EXISTS public."EmailDisplayName" DROP CONSTRAINT IF EXISTS email_id_fk;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_to_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_cc_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_from_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_header_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_file_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_body_id_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_thread_fkey;
ALTER TABLE IF EXISTS public."Message" DROP CONSTRAINT IF EXISTS message_tag_fkey;

CREATE TABLE IF NOT EXISTS public."Email"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    address text NOT NULL,
    avatar_id integer,
    PRIMARY KEY (id),
    CONSTRAINT avatar_id UNIQUE (avatar_id)
);

CREATE TABLE IF NOT EXISTS public."EmailDisplayName"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    email_id integer NOT NULL,
    PRIMARY KEY (id)
);

CREATE TABLE IF NOT EXISTS public."Message"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    subject text,
    "from" integer,
    "to" integer,
    cc integer,
    header_id integer,
    hash text NOT NULL,
    file_id integer NOT NULL,
    date timestamp with time zone NOT NULL,
    unread boolean NOT NULL,
    body_id integer NOT NULL,
    thread_id integer NOT NULL,
    tag_id integer,
    CONSTRAINT body_id UNIQUE (body_id)
);

CREATE TABLE IF NOT EXISTS public."Header"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    key text NOT NULL,
    value text NOT NULL,
    PRIMARY KEY (id)
);

CREATE TABLE IF NOT EXISTS public."File"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    path text NOT NULL,
    PRIMARY KEY (id)
);

CREATE TABLE IF NOT EXISTS public."Avatar"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    url text NOT NULL,
    PRIMARY KEY (id)
);

CREATE TABLE IF NOT EXISTS public."Body"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    text text NOT NULL,
    PRIMARY KEY (id)
);

CREATE TABLE IF NOT EXISTS public."Thread"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    PRIMARY KEY (id)
);

CREATE TABLE IF NOT EXISTS public."Tag"
(
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    name text NOT NULL,
    display text,
    fg_color integer,
    bg_color integer,
    PRIMARY KEY (id)
);

ALTER TABLE IF EXISTS public."Email"
    ADD CONSTRAINT email_avatar_fkey FOREIGN KEY (avatar_id)
    REFERENCES public."Avatar" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."EmailDisplayName"
    ADD CONSTRAINT email_id_fk FOREIGN KEY (email_id)
    REFERENCES public."Email" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."Message"
    ADD CONSTRAINT message_to_fkey FOREIGN KEY ("to")
    REFERENCES public."Email" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."Message"
    ADD CONSTRAINT message_cc_fkey FOREIGN KEY (cc)
    REFERENCES public."Email" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."Message"
    ADD CONSTRAINT message_from_fkey FOREIGN KEY ("from")
    REFERENCES public."Email" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."Message"
    ADD CONSTRAINT message_header_fkey FOREIGN KEY (header_id)
    REFERENCES public."Header" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."Message"
    ADD CONSTRAINT message_file_fkey FOREIGN KEY (file_id)
    REFERENCES public."File" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."Message"
    ADD CONSTRAINT message_body_id_fkey FOREIGN KEY (body_id)
    REFERENCES public."Body" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."Message"
    ADD CONSTRAINT message_thread_fkey FOREIGN KEY (thread_id)
    REFERENCES public."Thread" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

ALTER TABLE IF EXISTS public."Message"
    ADD CONSTRAINT message_tag_fkey FOREIGN KEY (tag_id)
    REFERENCES public."Tag" (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
    NOT VALID;

END;

@@ -0,0 +1,3 @@
-- Add down migration script here
ALTER TABLE
    post DROP CONSTRAINT post_link_key;

server/migrations/20241225054216_post-link-unique.up.sql (new file, +28)
@@ -0,0 +1,28 @@
WITH dupes AS (
    SELECT
        uid,
        link,
        Row_number() over(
            PARTITION by link
            ORDER BY
                link
        ) AS RowNumber
    FROM
        post
)
DELETE FROM
    post
WHERE
    uid IN (
        SELECT
            uid
        FROM
            dupes
        WHERE
            RowNumber > 1
    );

ALTER TABLE
    post
ADD
    UNIQUE (link);

@@ -0,0 +1,7 @@
ALTER TABLE
    post
ALTER COLUMN
    link DROP NOT NULL;

ALTER TABLE
    post DROP CONSTRAINT link;

server/migrations/20241225180250_post-link-non-empty.up.sql (new file, +17)
@@ -0,0 +1,17 @@
DELETE FROM
    post
WHERE
    link IS NULL
    OR link = '';

ALTER TABLE
    post
ALTER COLUMN
    link
SET
    NOT NULL;

ALTER TABLE
    post
ADD
    CONSTRAINT link CHECK (link <> '');

@@ -0,0 +1,3 @@
DROP TABLE IF EXISTS email_address;
DROP TABLE IF EXISTS email_photo;
DROP TABLE IF EXISTS google_person;

server/migrations/20250114045217_create-email-photos.up.sql (new file, +19)
@@ -0,0 +1,19 @@
-- Add up migration script here
CREATE TABLE IF NOT EXISTS google_person (
    id SERIAL PRIMARY KEY,
    resource_name TEXT NOT NULL UNIQUE,
    display_name TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS email_photo (
    id SERIAL PRIMARY KEY,
    google_person_id INTEGER REFERENCES google_person (id) UNIQUE,
    url TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS email_address (
    id SERIAL PRIMARY KEY,
    address TEXT NOT NULL UNIQUE,
    email_photo_id INTEGER REFERENCES email_photo (id),
    google_person_id INTEGER REFERENCES google_person (id)
);

@@ -0,0 +1,5 @@
-- Add down migration script here
DROP INDEX post_summary_idx;
CREATE INDEX post_summary_idx ON post USING gin (
    to_tsvector('english', summary)
);

server/migrations/20250117225159_strip-html-index.up.sql (new file, +11)
@@ -0,0 +1,11 @@
-- Something like this around summary in the idx w/ tsvector
DROP INDEX post_summary_idx;
CREATE INDEX post_summary_idx ON post USING gin (to_tsvector(
    'english',
    regexp_replace(
        regexp_replace(summary, '<[^>]+>', ' ', 'g'),
        '\s+',
        ' ',
        'g'
    )
));
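To make the nested regexp_replace concrete: the inner call maps markup such as <p>Hello</p> <b>world</b> to " Hello   world " (each tag becomes a single space), and the outer call collapses the whitespace runs to " Hello world " before to_tsvector tokenizes it.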

@@ -0,0 +1,2 @@
-- Add down migration script here
DROP INDEX nzb_posts_created_at_idx;

@@ -0,0 +1,2 @@
-- Add up migration script here
CREATE INDEX nzb_posts_created_at_idx ON nzb_posts USING btree (created_at);

server/migrations/20250128234348_add-search-summary.down.sql (new file, +15)
@@ -0,0 +1,15 @@
-- Add down migration script here
BEGIN;
DROP INDEX IF EXISTS post_search_summary_idx;
ALTER TABLE post DROP search_summary;

-- CREATE INDEX post_summary_idx ON post USING gin (to_tsvector(
--     'english',
--     regexp_replace(
--         regexp_replace(summary, '<[^>]+>', ' ', 'g'),
--         '\s+',
--         ' ',
--         'g'
--     )
-- ));
COMMIT;

server/migrations/20250128234348_add-search-summary.up.sql (new file, +14)
@@ -0,0 +1,14 @@
-- Add up migration script here
BEGIN;
DROP INDEX IF EXISTS post_summary_idx;
ALTER TABLE post ADD search_summary TEXT;
CREATE INDEX post_search_summary_idx ON post USING gin (
    to_tsvector('english', search_summary)
);
UPDATE post SET search_summary = regexp_replace(
    regexp_replace(summary, '<[^>]+>', ' ', 'g'),
    '\s+',
    ' ',
    'g'
);
COMMIT;
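This is the index the search queries below rely on: server/sql/count.sql and server/sql/threads.sql both filter with TO_TSVECTOR('english', search_summary) @@ WEBSEARCH_TO_TSQUERY('english', $n), so full-text search runs against the pre-stripped search_summary column rather than the raw HTML in summary.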

server/migrations/20250330175930_update-nzfinder-link.sql (new file, +20)
@@ -0,0 +1,20 @@
-- Bad examples:
-- https://nzbfinder.ws/getnzb/d2c3e5a08abadd985dccc6a574122892030b6a9a.nzb&i=95972&r=b55082d289937c050dedc203c9653850
-- https://nzbfinder.ws/getnzb?id=45add174-7da4-4445-bf2b-a67dbbfc07fe.nzb&r=b55082d289937c050dedc203c9653850
-- https://nzbfinder.ws/api/v1/getnzb?id=82486020-c192-4fa0-a7e7-798d7d72e973.nzb&r=b55082d289937c050dedc203c9653850
UPDATE nzb_posts
SET link =
    regexp_replace(
        regexp_replace(
            regexp_replace(
                link,
                'https://nzbfinder.ws/getnzb/',
                'https://nzbfinder.ws/api/v1/getnzb?id='
            ),
            'https://nzbfinder.ws/getnzb',
            'https://nzbfinder.ws/api/v1/getnzb'
        ),
        '&r=',
        '&apikey='
    )
;
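Tracing the first bad example through the three nested replacements: getnzb/ becomes api/v1/getnzb?id= (so the second, bare-getnzb replacement no longer matches), and &r= becomes &apikey=, yielding https://nzbfinder.ws/api/v1/getnzb?id=d2c3e5a08abadd985dccc6a574122892030b6a9a.nzb&i=95972&apikey=b55082d289937c050dedc203c9653850.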

server/migrations/20250419202131_email-rules.down.sql (new file, +3)
@@ -0,0 +1,3 @@
DROP TABLE IF EXISTS email_rule;

-- Add down migration script here

server/migrations/20250419202131_email-rules.up.sql (new file, +5)
@@ -0,0 +1,5 @@
CREATE TABLE IF NOT EXISTS email_rule (
    id integer NOT NULL GENERATED ALWAYS AS IDENTITY,
    sort_order integer NOT NULL,
    rule jsonb NOT NULL
);
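A minimal sketch of reading this table back on the server side, mirroring the captured sqlx query above (SELECT rule as "rule: Json<Rule>" ... ORDER BY sort_order); the function name and the anyhow error type here are illustrative, not part of the diff:

use sqlx::{types::Json, PgPool};

// Illustrative helper, not from the diff. Assumes `Rule` implements
// serde::Deserialize, which Json<Rule> decoding requires.
async fn load_rules(pool: &PgPool) -> anyhow::Result<Vec<Rule>> {
    let rows = sqlx::query!(
        r#"
        SELECT rule as "rule: Json<Rule>"
        FROM email_rule
        ORDER BY sort_order
        "#
    )
    .fetch_all(pool)
    .await?;
    // Json<T> is a transparent wrapper; `.0` unwraps the decoded Rule.
    Ok(rows.into_iter().map(|row| row.rule.0).collect())
}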

server/sql/all-posts.sql (new file, +14)
@@ -0,0 +1,14 @@
SELECT
    site,
    title,
    summary,
    link,
    date,
    is_read,
    uid,
    p.id id
FROM
    post AS p
    JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts
ORDER BY
    date DESC;

server/sql/all-uids.sql (new file, +6)
@@ -0,0 +1,6 @@
SELECT
    uid
FROM
    post AS p
    JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts
;

server/sql/count.sql (new file, +17)
@@ -0,0 +1,17 @@
SELECT COUNT(*) AS count
FROM
    post
WHERE
    (
        $1::text IS NULL
        OR site = $1
    )
    AND (
        NOT $2
        OR NOT is_read
    )
    AND (
        $3::text IS NULL
        OR TO_TSVECTOR('english', search_summary)
        @@ WEBSEARCH_TO_TSQUERY('english', $3)
    )

server/sql/need-search-summary.sql (new file, +13)
@@ -0,0 +1,13 @@
SELECT
    p.id,
    link,
    clean_summary
FROM
    post AS p
    INNER JOIN feed AS f ON p.site = f.slug -- necessary to weed out nzb posts
WHERE
    search_summary IS NULL
    -- TODO remove AND link ~ '^<'
ORDER BY
    ROW_NUMBER() OVER (PARTITION BY site ORDER BY date DESC)
LIMIT 100;

server/sql/posts-from-uids.sql (new file, +14)
@@ -0,0 +1,14 @@
SELECT
    site AS "site!",
    title AS "title!",
    summary AS "summary!",
    link AS "link!",
    date AS "date!",
    is_read AS "is_read!",
    uid AS "uid!",
    p.id id
FROM
    post p
    JOIN feed f ON p.site = f.slug
WHERE
    uid = ANY ($1);

server/sql/set_unread.sql (new file, +6)
@@ -0,0 +1,6 @@
UPDATE
    post
SET
    is_read = $1
WHERE
    uid = $2

server/sql/tags.sql (new file, +21)
@@ -0,0 +1,21 @@
SELECT
    site,
    name,
    count (
        NOT is_read
        OR NULL
    ) unread
FROM
    post AS p
    JOIN feed AS f ON p.site = f.slug --
    -- TODO: figure this out to make the query faster when only looking for unread
    --WHERE
    -- (
    --     NOT $1
    --     OR NOT is_read
    -- )
GROUP BY
    1,
    2
ORDER BY
    site

server/sql/thread.sql (new file, +15)
@@ -0,0 +1,15 @@
SELECT
    date,
    is_read,
    link,
    site,
    summary,
    clean_summary,
    title,
    name,
    homepage
FROM
    post AS p
    INNER JOIN feed AS f ON p.site = f.slug
WHERE
    uid = $1

server/sql/threads-from-uid.sql (new file, +14)
@@ -0,0 +1,14 @@
SELECT
    site,
    date,
    is_read,
    title,
    uid,
    name
FROM
    post p
    JOIN feed f ON p.site = f.slug
WHERE
    uid = ANY ($1)
ORDER BY
    date DESC;

server/sql/threads.sql (new file, +25)
@@ -0,0 +1,25 @@
SELECT
    site,
    date,
    is_read,
    title,
    uid,
    name
FROM
    post p
    JOIN feed f ON p.site = f.slug
WHERE
    ($1::text IS NULL OR site = $1)
    AND (
        NOT $2
        OR NOT is_read
    )
    AND (
        $5 :: text IS NULL
        OR to_tsvector('english', search_summary) @@ websearch_to_tsquery('english', $5)
    )
ORDER BY
    date DESC,
    title OFFSET $3
LIMIT
    $4

server/sql/too-long-for-search.sql (new file, +13)
@@ -0,0 +1,13 @@
select t.id, tt.tokid, tt.alias, length(t.token), t.token from (
    select id, (ts_parse('default',
        -- regexp_replace(
        --     regexp_replace(summary, '<[^>]+>', ' ', 'g'),
        --     '\s+',
        --     ' ',
        --     'g'
        -- )
        summary
    )).* from post) t
inner join ts_token_type('default') tt
    on t.tokid = tt.tokid
where length(token) >= 2*1024;

server/src/bin/cleanhtml.rs (new file, +16)
@@ -0,0 +1,16 @@
use std::fs;

use letterbox_server::sanitize_html;

fn main() -> anyhow::Result<()> {
    let mut args = std::env::args().skip(1);
    let src = args.next().expect("source not specified");
    let dst = args.next().expect("destination not specified");
    println!("Sanitizing {src} into {dst}");
    let bytes = fs::read(src)?;
    let html = String::from_utf8_lossy(&bytes);
    let html = sanitize_html(&html, "", &None)?;
    fs::write(dst, html)?;

    Ok(())
}
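Usage is positional, e.g. cargo run --bin cleanhtml -- input.html cleaned.html (the file names here are placeholders): the first argument is read, passed through the crate's sanitize_html, and written to the second.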

server/src/bin/inline_html.rs (new file, +21)
@@ -0,0 +1,21 @@
use std::fs;

use url::Url;

fn main() -> anyhow::Result<()> {
    println!("PWD: {}", std::env::current_dir()?.display());
    let _url = "https://slashdot.org/story/25/01/24/1813201/walgreens-replaced-fridge-doors-with-smart-screens-its-now-a-200-million-fiasco?utm_source=rss1.0mainlinkanon&utm_medium=feed";
    let _url = "https://hackaday.com/2025/01/24/hackaday-podcast-episode-305-caustic-clocks-practice-bones-and-brick-layers/";
    let _url = "https://theonion.com/monster-devastated-to-see-film-depicting-things-he-told-guillermo-del-toro-in-confidence/";
    let _url = "https://trofi.github.io/posts/330-another-nix-language-nondeterminism-example.html";
    let _url = "https://blog.cloudflare.com/ddos-threat-report-for-2024-q4/";
    let url = "https://trofi.github.io/posts/330-another-nix-language-nondeterminism-example.html";
    let body = reqwest::blocking::get(url)?.text()?;
    let output = "/tmp/h2md/output.html";
    let inliner = css_inline::CSSInliner::options()
        .base_url(Url::parse(url).ok())
        .build();
    let inlined = inliner.inline(&body)?;
    fs::write(output, inlined)?;
    Ok(())
}

server/src/bin/letterbox-server.rs (new file, +343)
@@ -0,0 +1,343 @@
// Rocket generates a lot of warnings for handlers
// TODO: figure out why
#![allow(unreachable_patterns)]
use std::{error::Error, net::SocketAddr, sync::Arc, time::Duration};

use async_graphql::{extensions, http::GraphiQLSource, Schema};
use async_graphql_axum::{GraphQL, GraphQLSubscription};
// allows extracting the IP of the connecting user
use axum::extract::connect_info::ConnectInfo;
use axum::{
    extract::{self, ws::WebSocketUpgrade, Query, State},
    http::{header, StatusCode},
    response::{self, IntoResponse, Response},
    routing::{any, get, post},
    Router,
};
use cacher::FilesystemCacher;
use clap::Parser;
use letterbox_notmuch::Notmuch;
#[cfg(feature = "tantivy")]
use letterbox_server::tantivy::TantivyConnection;
use letterbox_server::{
    graphql::{compute_catchup_ids, Attachment, MutationRoot, QueryRoot, SubscriptionRoot},
    nm::{attachment_bytes, cid_attachment_bytes, label_unprocessed},
    ws::ConnectionTracker,
};
use letterbox_shared::WebsocketMessage;
use serde::Deserialize;
use sqlx::postgres::PgPool;
use tokio::{net::TcpListener, sync::Mutex};
use tower_http::trace::{DefaultMakeSpan, TraceLayer};
use tracing::{error, info};

// Make our own error that wraps `ServerError`.
struct AppError(letterbox_server::ServerError);

// Tell axum how to convert `AppError` into a response.
impl IntoResponse for AppError {
    fn into_response(self) -> Response {
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Something went wrong: {}", self.0),
        )
            .into_response()
    }
}
// This enables using `?` on functions that return `Result<_, letterbox_server::Error>` to turn
// them into `Result<_, AppError>`. That way you don't need to do that manually.
impl<E> From<E> for AppError
where
    E: Into<letterbox_server::ServerError>,
{
    fn from(err: E) -> Self {
        Self(err.into())
    }
}

fn inline_attachment_response(attachment: Attachment) -> impl IntoResponse {
    info!("attachment filename {:?}", attachment.filename);
    let mut hdr_map = headers::HeaderMap::new();
    if let Some(filename) = attachment.filename {
        hdr_map.insert(
            header::CONTENT_DISPOSITION,
            format!(r#"inline; filename="{}""#, filename)
                .parse()
                .unwrap(),
        );
    }
    if let Some(ct) = attachment.content_type {
        hdr_map.insert(header::CONTENT_TYPE, ct.parse().unwrap());
    }
    info!("hdr_map {hdr_map:?}");
    (hdr_map, attachment.bytes).into_response()
}

fn download_attachment_response(attachment: Attachment) -> impl IntoResponse {
    info!("attachment filename {:?}", attachment.filename);
    let mut hdr_map = headers::HeaderMap::new();
    if let Some(filename) = attachment.filename {
        hdr_map.insert(
            header::CONTENT_DISPOSITION,
            format!(r#"attachment; filename="{}""#, filename)
                .parse()
                .unwrap(),
        );
    }
    if let Some(ct) = attachment.content_type {
        hdr_map.insert(header::CONTENT_TYPE, ct.parse().unwrap());
    }
    info!("hdr_map {hdr_map:?}");
    (hdr_map, attachment.bytes).into_response()
}

#[axum_macros::debug_handler]
async fn view_attachment(
    State(AppState { nm, .. }): State<AppState>,
    extract::Path((id, idx, _)): extract::Path<(String, String, String)>,
) -> Result<impl IntoResponse, AppError> {
    let mid = if id.starts_with("id:") {
        id.to_string()
    } else {
        format!("id:{}", id)
    };
    info!("view attachment {mid} {idx}");
    let idx: Vec<_> = idx
        .split('.')
        .map(|s| s.parse().expect("not a usize"))
        .collect();
    let attachment = attachment_bytes(&nm, &mid, &idx)?;
    Ok(inline_attachment_response(attachment))
}

async fn download_attachment(
    State(AppState { nm, .. }): State<AppState>,
    extract::Path((id, idx, _)): extract::Path<(String, String, String)>,
) -> Result<impl IntoResponse, AppError> {
    let mid = if id.starts_with("id:") {
        id.to_string()
    } else {
        format!("id:{}", id)
    };
    info!("download attachment {mid} {idx}");
    let idx: Vec<_> = idx
        .split('.')
        .map(|s| s.parse().expect("not a usize"))
        .collect();
    let attachment = attachment_bytes(&nm, &mid, &idx)?;
    Ok(download_attachment_response(attachment))
}

async fn view_cid(
    State(AppState { nm, .. }): State<AppState>,
    extract::Path((id, cid)): extract::Path<(String, String)>,
) -> Result<impl IntoResponse, AppError> {
    let mid = if id.starts_with("id:") {
        id.to_string()
    } else {
        format!("id:{}", id)
    };
    info!("view cid attachment {mid} {cid}");
    let attachment = cid_attachment_bytes(&nm, &mid, &cid)?;
    Ok(inline_attachment_response(attachment))
}

// TODO make this work with gitea message ids like `wathiede/letterbox/pulls/91@git.z.xinu.tv`
async fn view_original(
    State(AppState { nm, .. }): State<AppState>,
    extract::Path(id): extract::Path<String>,
) -> Result<impl IntoResponse, AppError> {
    info!("view_original {id}");
    let bytes = nm.show_original(&id)?;
    let s = String::from_utf8_lossy(&bytes).to_string();
    Ok(s.into_response())
}

async fn graphiql() -> impl IntoResponse {
    response::Html(
        GraphiQLSource::build()
            .endpoint("/api/graphql/")
            .subscription_endpoint("/api/graphql/ws")
            .finish(),
    )
}

async fn start_ws(
    ws: WebSocketUpgrade,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
    State(AppState {
        connection_tracker, ..
    }): State<AppState>,
) -> impl IntoResponse {
    info!("initiating websocket connection for {addr}");
    ws.on_upgrade(async move |socket| connection_tracker.lock().await.add_peer(socket, addr).await)
}

#[derive(Debug, Deserialize)]
struct NotificationParams {
    delay_ms: Option<u64>,
    num_unprocessed: Option<usize>,
}

async fn send_refresh_websocket_handler(
    State(AppState {
        nm,
        pool,
        connection_tracker,
        ..
    }): State<AppState>,
    params: Query<NotificationParams>,
) -> impl IntoResponse {
    info!("send_refresh_websocket_handler params {params:?}");
    if let Some(delay_ms) = params.delay_ms {
        let delay = Duration::from_millis(delay_ms);
        info!("sleeping {delay:?}");
        tokio::time::sleep(delay).await;
    }
    let limit = match params.num_unprocessed {
        Some(0) => None,
        Some(limit) => Some(limit),
        None => Some(10),
    };

    let mut ids = None;
    match label_unprocessed(&nm, &pool, false, limit, "tag:unprocessed").await {
        Ok(i) => ids = Some(i),
        Err(err) => error!("Failed to label_unprocessed: {err:?}"),
    };
    connection_tracker
        .lock()
        .await
        .send_message_all(WebsocketMessage::RefreshMessages)
        .await;
    if let Some(ids) = ids {
        format!("{ids:?}")
    } else {
        "refresh triggered".to_string()
    }
}

async fn watch_new(
    nm: Notmuch,
    pool: PgPool,
    conn_tracker: Arc<Mutex<ConnectionTracker>>,
    poll_time: Duration,
) -> Result<(), async_graphql::Error> {
    async fn watch_new_iteration(
        nm: &Notmuch,
        pool: &PgPool,
        conn_tracker: Arc<Mutex<ConnectionTracker>>,
        old_ids: &[String],
    ) -> Result<Vec<String>, async_graphql::Error> {
        let ids = compute_catchup_ids(&nm, &pool, "is:unread").await?;
        info!("old_ids: {} ids: {}", old_ids.len(), ids.len());
        if old_ids != ids {
            label_unprocessed(&nm, &pool, false, Some(100), "tag:unprocessed").await?;
            conn_tracker
                .lock()
                .await
                .send_message_all(WebsocketMessage::RefreshMessages)
                .await
        }
        Ok(ids)
    }
    let mut old_ids = Vec::new();
    loop {
        old_ids = match watch_new_iteration(&nm, &pool, conn_tracker.clone(), &old_ids).await {
            Ok(old_ids) => old_ids,
            Err(err) => {
                error!("watch_new_iteration failed: {err:?}");
                continue;
            }
        };
        tokio::time::sleep(poll_time).await;
    }
}

#[derive(Clone)]
struct AppState {
    nm: Notmuch,
    pool: PgPool,
    connection_tracker: Arc<Mutex<ConnectionTracker>>,
}

#[derive(Parser)]
#[command(version, about, long_about = None)]
struct Cli {
    #[arg(short, long, default_value = "0.0.0.0:9345")]
    addr: SocketAddr,
    newsreader_database_url: String,
    newsreader_tantivy_db_path: String,
    slurp_cache_path: String,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let cli = Cli::parse();
    let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
    build_info::build_info!(fn bi);
    info!("Build Info: {}", letterbox_shared::build_version(bi));
    if !std::fs::exists(&cli.slurp_cache_path)? {
        info!("Creating slurp cache @ '{}'", &cli.slurp_cache_path);
        std::fs::create_dir_all(&cli.slurp_cache_path)?;
    }
    let pool = PgPool::connect(&cli.newsreader_database_url).await?;
    let nm = Notmuch::default();
    sqlx::migrate!("./migrations").run(&pool).await?;
    #[cfg(feature = "tantivy")]
    let tantivy_conn = TantivyConnection::new(&cli.newsreader_tantivy_db_path)?;

    let cacher = FilesystemCacher::new(&cli.slurp_cache_path)?;
    let schema = Schema::build(QueryRoot, MutationRoot, SubscriptionRoot)
        .data(nm.clone())
        .data(cacher)
        .data(pool.clone());

    let schema = schema.extension(extensions::Logger).finish();

    let connection_tracker = Arc::new(Mutex::new(ConnectionTracker::default()));
    let ct = Arc::clone(&connection_tracker);
    let poll_time = Duration::from_secs(60);
    let _h = tokio::spawn(watch_new(nm.clone(), pool.clone(), ct, poll_time));

    let api_routes = Router::new()
        .route(
            "/download/attachment/{id}/{idx}/{*rest}",
            get(download_attachment),
        )
        .route("/view/attachment/{id}/{idx}/{*rest}", get(view_attachment))
        .route("/original/{id}", get(view_original))
        .route("/cid/{id}/{cid}", get(view_cid))
        .route("/ws", any(start_ws))
        .route_service("/graphql/ws", GraphQLSubscription::new(schema.clone()))
        .route(
            "/graphql/",
            get(graphiql).post_service(GraphQL::new(schema.clone())),
        );

    let notification_routes = Router::new()
        .route("/mail", post(send_refresh_websocket_handler))
        .route("/news", post(send_refresh_websocket_handler));
    let app = Router::new()
        .nest("/api", api_routes)
        .nest("/notification", notification_routes)
        .with_state(AppState {
            nm,
            pool,
            connection_tracker,
        })
        .layer(
            TraceLayer::new_for_http()
                .make_span_with(DefaultMakeSpan::default().include_headers(true)),
        );

    let listener = TcpListener::bind(cli.addr).await.unwrap();
    tracing::info!("listening on {}", listener.local_addr().unwrap());
    axum::serve(
        listener,
        app.into_make_service_with_connect_info::<SocketAddr>(),
    )
    .await
    .unwrap();
    Ok(())
}
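To illustrate the refresh endpoint wired up above, a minimal client sketch; the host and port are assumptions based on the Cli default bind address of 0.0.0.0:9345, and the query parameters come from NotificationParams:

// Sketch only: exercises POST /notification/mail with both optional params.
// reqwest's blocking client is already a dependency of this crate.
fn main() -> anyhow::Result<()> {
    let resp = reqwest::blocking::Client::new()
        .post("http://localhost:9345/notification/mail?delay_ms=500&num_unprocessed=10")
        .send()?;
    println!("{}", resp.text()?);
    Ok(())
}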

server/src/bin/test-labeling.rs (new file, +39)
@@ -0,0 +1,39 @@
use std::error::Error;

use clap::Parser;
use letterbox_notmuch::Notmuch;
use letterbox_server::nm::label_unprocessed;
use sqlx::postgres::PgPool;
use tracing::info;

#[derive(Parser)]
#[command(version, about, long_about = None)]
struct Cli {
    #[arg(short, long)]
    newsreader_database_url: String,
    #[arg(short, long, default_value = "10")]
    /// Set to 0 to process all matches
    messages_to_process: usize,
    #[arg(short, long, default_value = "false")]
    execute: bool,
    /// Process messages matching this notmuch query
    #[arg(short, long, default_value = "tag:unprocessed")]
    query: String,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let cli = Cli::parse();
    let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
    build_info::build_info!(fn bi);
    info!("Build Info: {}", letterbox_shared::build_version(bi));
    let pool = PgPool::connect(&cli.newsreader_database_url).await?;
    let nm = Notmuch::default();
    let limit = if cli.messages_to_process > 0 {
        Some(cli.messages_to_process)
    } else {
        None
    };
    label_unprocessed(&nm, &pool, !cli.execute, limit, &cli.query).await?;
    Ok(())
}
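A usage sketch (flag names follow from the clap derive above; the DSN is a placeholder): cargo run --bin test-labeling -- --newsreader-database-url postgres://newsreader@localhost/newsreader --query tag:unprocessed runs as a dry run, since execute defaults to false; add --execute to actually apply labels.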

server/src/chrome-default.css (new file, +1841)
(Diff suppressed because the file is too large.)

server/src/config.rs (new file, +7)
@@ -0,0 +1,7 @@
use serde::Deserialize;
#[derive(Deserialize)]
pub struct Config {
    pub newsreader_database_url: String,
    pub newsreader_tantivy_db_path: String,
    pub slurp_cache_path: String,
}

server/src/error.rs (new file, +42)
@@ -0,0 +1,42 @@
use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error};

use mailparse::MailParseError;
#[cfg(feature = "tantivy")]
use tantivy::{query::QueryParserError, TantivyError};
use thiserror::Error;

use crate::TransformError;

#[derive(Error, Debug)]
pub enum ServerError {
    #[error("notmuch: {0}")]
    NotmuchError(#[from] letterbox_notmuch::NotmuchError),
    #[error("flatten")]
    FlattenError,
    #[error("mail parse error: {0}")]
    MailParseError(#[from] MailParseError),
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    #[error("attachment not found")]
    PartNotFound,
    #[error("sqlx error: {0}")]
    SQLXError(#[from] sqlx::Error),
    #[error("html transform error: {0}")]
    TransformError(#[from] TransformError),
    #[error("UTF8 error: {0}")]
    Utf8Error(#[from] Utf8Error),
    #[error("FromUTF8 error: {0}")]
    FromUtf8Error(#[from] FromUtf8Error),
    #[error("error: {0}")]
    StringError(String),
    #[error("invalid url: {0}")]
    UrlParseError(#[from] url::ParseError),
    #[cfg(feature = "tantivy")]
    #[error("tantivy error: {0}")]
    TantivyError(#[from] TantivyError),
    #[cfg(feature = "tantivy")]
    #[error("tantivy query parse error: {0}")]
    QueryParseError(#[from] QueryParserError),
    #[error("impossible: {0}")]
    InfaillibleError(#[from] Infallible),
}
692
server/src/graphql.rs
Normal file
692
server/src/graphql.rs
Normal file
@@ -0,0 +1,692 @@
|
|||||||
|
use std::{fmt, str::FromStr};

use async_graphql::{
    connection::{self, Connection, Edge, OpaqueCursor},
    futures_util::Stream,
    Context, Enum, Error, FieldResult, InputObject, Object, Schema, SimpleObject, Subscription,
    Union,
};
use cacher::FilesystemCacher;
use futures::stream;
use letterbox_notmuch::Notmuch;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use tokio::join;
use tracing::{info, instrument};

#[cfg(feature = "tantivy")]
use crate::tantivy::TantivyConnection;
use crate::{newsreader, nm, nm::label_unprocessed, Query};

/// # Number of seconds since the Epoch
pub type UnixTime = isize;

/// # Thread ID, sans "thread:"
pub type ThreadId = String;

#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
pub enum Corpus {
    Notmuch,
    Newsreader,
    Tantivy,
}

impl FromStr for Corpus {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "notmuch" => Corpus::Notmuch,
            "newsreader" => Corpus::Newsreader,
            "tantivy" => Corpus::Tantivy,
            s => return Err(format!("unknown corpus: '{s}'")),
        })
    }
}
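
A quick illustration (a test sketch, not part of the diff): corpus names parse case-sensitively, and unknown names surface the formatted error from `from_str`.

#[cfg(test)]
mod corpus_tests {
    use super::Corpus;

    #[test]
    fn parse_corpus_names() {
        // Known, lowercase names parse; anything else is an error.
        assert_eq!("tantivy".parse::<Corpus>(), Ok(Corpus::Tantivy));
        assert!("Tantivy".parse::<Corpus>().is_err());
    }
}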

// TODO: add is_read field and remove all use of 'tag:unread'
#[derive(Debug, SimpleObject)]
pub struct ThreadSummary {
    pub thread: ThreadId,
    pub timestamp: UnixTime,
    /// user-friendly timestamp
    pub date_relative: String,
    /// number of matched messages
    pub matched: isize,
    /// total messages in thread
    pub total: isize,
    /// comma-separated names with | between matched and unmatched
    pub authors: String,
    pub subject: String,
    pub tags: Vec<String>,
    pub corpus: Corpus,
}

#[derive(Debug, Union)]
pub enum Thread {
    Email(EmailThread),
    News(NewsPost),
}

#[derive(Debug, SimpleObject)]
pub struct NewsPost {
    pub thread_id: String,
    pub is_read: bool,
    pub slug: String,
    pub site: String,
    pub title: String,
    pub body: String,
    pub url: String,
    pub timestamp: i64,
}

#[derive(Debug, SimpleObject)]
pub struct EmailThread {
    pub thread_id: String,
    pub subject: String,
    pub messages: Vec<Message>,
}

#[derive(Debug, SimpleObject)]
pub struct Message {
    // Message-ID for message, prepend `id:<id>` to search in notmuch
    pub id: String,
    // First From header found in email
    pub from: Option<Email>,
    // All To headers found in email
    pub to: Vec<Email>,
    // All CC headers found in email
    pub cc: Vec<Email>,
    // X-Original-To header found in email
    pub x_original_to: Option<Email>,
    // Delivered-To header found in email
    pub delivered_to: Option<Email>,
    // First Subject header found in email
    pub subject: Option<String>,
    // Parsed Date header, if found and valid
    pub timestamp: Option<i64>,
    // Headers
    pub headers: Vec<Header>,
    // The body contents
    pub body: Body,
    // On disk location of message
    pub path: String,
    pub attachments: Vec<Attachment>,
    pub tags: Vec<String>,
}

// Content-Type: image/jpeg; name="PXL_20231125_204826860.jpg"
// Content-Disposition: attachment; filename="PXL_20231125_204826860.jpg"
// Content-Transfer-Encoding: base64
// Content-ID: <f_lponoluo1>
// X-Attachment-Id: f_lponoluo1
#[derive(Default, Debug, SimpleObject)]
pub struct Attachment {
    pub id: String,
    pub idx: String,
    pub filename: Option<String>,
    pub size: usize,
    pub content_type: Option<String>,
    pub content_id: Option<String>,
    pub disposition: DispositionType,
    pub bytes: Vec<u8>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Disposition {
    pub r#type: DispositionType,
    pub filename: Option<String>,
    pub size: Option<usize>,
}

#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
pub enum DispositionType {
    Inline,
    Attachment,
}

impl From<mailparse::DispositionType> for DispositionType {
    fn from(value: mailparse::DispositionType) -> Self {
        match value {
            mailparse::DispositionType::Inline => DispositionType::Inline,
            mailparse::DispositionType::Attachment => DispositionType::Attachment,
            dt => panic!("unhandled DispositionType {dt:?}"),
        }
    }
}

impl Default for DispositionType {
    fn default() -> Self {
        DispositionType::Attachment
    }
}

#[derive(Debug, SimpleObject)]
pub struct Header {
    pub key: String,
    pub value: String,
}

#[derive(Debug)]
pub struct UnhandledContentType {
    pub text: String,
    pub content_tree: String,
}

#[Object]
impl UnhandledContentType {
    async fn contents(&self) -> &str {
        &self.text
    }
    async fn content_tree(&self) -> &str {
        &self.content_tree
    }
}

#[derive(Debug)]
pub struct PlainText {
    pub text: String,
    pub content_tree: String,
}

#[Object]
impl PlainText {
    async fn contents(&self) -> &str {
        &self.text
    }
    async fn content_tree(&self) -> &str {
        &self.content_tree
    }
}

#[derive(Debug)]
pub struct Html {
    pub html: String,
    pub content_tree: String,
}

#[Object]
impl Html {
    async fn contents(&self) -> &str {
        &self.html
    }
    async fn content_tree(&self) -> &str {
        &self.content_tree
    }
    async fn headers(&self) -> Vec<Header> {
        Vec::new()
    }
}

#[derive(Debug, Union)]
pub enum Body {
    UnhandledContentType(UnhandledContentType),
    PlainText(PlainText),
    Html(Html),
}

impl Body {
    pub fn html(html: String) -> Body {
        Body::Html(Html {
            html,
            content_tree: "".to_string(),
        })
    }
    pub fn text(text: String) -> Body {
        Body::PlainText(PlainText {
            text,
            content_tree: "".to_string(),
        })
    }
}

#[derive(Debug, SimpleObject)]
pub struct Email {
    pub name: Option<String>,
    pub addr: Option<String>,
    pub photo_url: Option<String>,
}

impl fmt::Display for Email {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        match (&self.name, &self.addr) {
            (Some(name), Some(addr)) => write!(f, "{name} <{addr}>")?,
            (Some(name), None) => write!(f, "{name}")?,
            (None, Some(addr)) => write!(f, "{addr}")?,
            (None, None) => write!(f, "<UNKNOWN>")?,
        }
        Ok(())
    }
}

#[derive(SimpleObject)]
pub struct Tag {
    pub name: String,
    pub fg_color: String,
    pub bg_color: String,
    pub unread: usize,
}

#[derive(Serialize, Deserialize, Debug, InputObject)]
struct SearchCursor {
    newsreader_offset: i32,
    notmuch_offset: i32,
    #[cfg(feature = "tantivy")]
    tantivy_offset: i32,
}
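
`OpaqueCursor` wraps this struct and serializes it (via serde) into the single opaque string handed to GraphQL clients, so one cursor carries an independent offset into each corpus. An illustrative sketch with made-up values:

#[allow(dead_code)]
fn example_cursor() -> SearchCursor {
    // Each corpus advances at its own pace while the merged result stream
    // stays globally sorted by timestamp.
    SearchCursor {
        newsreader_offset: 17,
        notmuch_offset: 42,
        #[cfg(feature = "tantivy")]
        tantivy_offset: 3,
    }
}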

fn request_id() -> String {
    let now = std::time::SystemTime::now();
    let nanos = now
        .duration_since(std::time::SystemTime::UNIX_EPOCH)
        .unwrap_or_default()
        .as_nanos();
    format!("{nanos:x}")
}

pub struct QueryRoot;
#[Object]
impl QueryRoot {
    async fn version<'ctx>(&self, _ctx: &Context<'ctx>) -> Result<String, Error> {
        build_info::build_info!(fn bi);
        Ok(letterbox_shared::build_version(bi))
    }
    #[instrument(skip_all, fields(query=query, rid=request_id()))]
    async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        #[cfg(feature = "tantivy")]
        let tantivy = ctx.data_unchecked::<TantivyConnection>();

        let newsreader_query: Query = query.parse()?;

        let newsreader_count = newsreader::count(pool, &newsreader_query).await?;
        let notmuch_count = nm::count(nm, &newsreader_query).await?;
        #[cfg(feature = "tantivy")]
        let tantivy_count = tantivy.count(&newsreader_query).await?;
        #[cfg(not(feature = "tantivy"))]
        let tantivy_count = 0;

        let total = newsreader_count + notmuch_count + tantivy_count;
        info!("count {newsreader_query:?} newsreader count {newsreader_count} notmuch count {notmuch_count} tantivy count {tantivy_count} total {total}");
        Ok(total)
    }
    #[instrument(skip_all, fields(query=query, rid=request_id()))]
    async fn catchup<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        query: String,
    ) -> Result<Vec<String>, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        compute_catchup_ids(nm, pool, &query).await
    }

    // TODO: this function doesn't get parallelism, possibly because notmuch is sync and blocks;
    // rewrite that with tokio::process::Command
    #[instrument(skip_all, fields(query=query, rid=request_id()))]
    async fn search<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        after: Option<String>,
        before: Option<String>,
        first: Option<i32>,
        last: Option<i32>,
        query: String,
    ) -> Result<Connection<OpaqueCursor<SearchCursor>, ThreadSummary>, Error> {
        info!("search({after:?} {before:?} {first:?} {last:?} {query:?})");
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        #[cfg(feature = "tantivy")]
        let tantivy = ctx.data_unchecked::<TantivyConnection>();

        Ok(connection::query(
            after,
            before,
            first,
            last,
            |after: Option<OpaqueCursor<SearchCursor>>,
             before: Option<OpaqueCursor<SearchCursor>>,
             first: Option<usize>,
             last: Option<usize>| async move {
                info!(
                    "search(after {:?} before {:?} first {first:?} last {last:?} query: {query:?})",
                    after.as_ref().map(|v| &v.0),
                    before.as_ref().map(|v| &v.0)
                );
                let newsreader_after = after.as_ref().map(|sc| sc.newsreader_offset);
                let notmuch_after = after.as_ref().map(|sc| sc.notmuch_offset);
                #[cfg(feature = "tantivy")]
                let tantivy_after = after.as_ref().map(|sc| sc.tantivy_offset);

                let newsreader_before = before.as_ref().map(|sc| sc.newsreader_offset);
                let notmuch_before = before.as_ref().map(|sc| sc.notmuch_offset);
                #[cfg(feature = "tantivy")]
                let tantivy_before = before.as_ref().map(|sc| sc.tantivy_offset);
                let first = first.map(|v| v as i32);
                let last = last.map(|v| v as i32);

                let query: Query = query.parse()?;
                info!("newsreader_query {query:?}");

                let newsreader_fut = newsreader_search(
                    pool,
                    newsreader_after,
                    newsreader_before,
                    first,
                    last,
                    &query,
                );
                let notmuch_fut =
                    notmuch_search(nm, notmuch_after, notmuch_before, first, last, &query);
                #[cfg(feature = "tantivy")]
                let tantivy_fut = tantivy_search(
                    tantivy,
                    pool,
                    tantivy_after,
                    tantivy_before,
                    first,
                    last,
                    &query,
                );
                #[cfg(not(feature = "tantivy"))]
                let tantivy_fut =
                    async { Ok::<Vec<ThreadSummaryCursor>, async_graphql::Error>(Vec::new()) };

                let (newsreader_results, notmuch_results, tantivy_results) =
                    join!(newsreader_fut, notmuch_fut, tantivy_fut);

                let newsreader_results = newsreader_results?;
                let notmuch_results = notmuch_results?;
                let tantivy_results = tantivy_results?;
                info!(
                    "newsreader_results ({}) notmuch_results ({}) tantivy_results ({})",
                    newsreader_results.len(),
                    notmuch_results.len(),
                    tantivy_results.len()
                );

                let mut results: Vec<_> = newsreader_results
                    .into_iter()
                    .chain(notmuch_results)
                    .chain(tantivy_results)
                    .collect();

                // The leading '-' is to reverse sort
                results.sort_by_key(|item| match item {
                    ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
                    ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
                    #[cfg(feature = "tantivy")]
                    ThreadSummaryCursor::Tantivy(_, ts) => -ts.timestamp,
                });

                let mut has_next_page = before.is_some();
                if let Some(first) = first {
                    let first = first as usize;
                    if results.len() > first {
                        has_next_page = true;
                        results.truncate(first);
                    }
                }

                let mut has_previous_page = after.is_some();
                if let Some(last) = last {
                    let last = last as usize;
                    if results.len() > last {
                        has_previous_page = true;
                        results.truncate(last);
                    }
                }

                let mut connection = Connection::new(has_previous_page, has_next_page);
                // Set starting offset as the value from cursor to preserve state if no results
                // from a corpus survived the truncation
                let mut newsreader_offset =
                    after.as_ref().map(|sc| sc.newsreader_offset).unwrap_or(0);
                let mut notmuch_offset = after.as_ref().map(|sc| sc.notmuch_offset).unwrap_or(0);
                #[cfg(feature = "tantivy")]
                let mut tantivy_offset = after.as_ref().map(|sc| sc.tantivy_offset).unwrap_or(0);

                info!(
                    "newsreader_offset ({}) notmuch_offset ({})",
                    newsreader_offset, notmuch_offset,
                );

                connection.edges.extend(results.into_iter().map(|item| {
                    let thread_summary;
                    match item {
                        ThreadSummaryCursor::Newsreader(offset, ts) => {
                            thread_summary = ts;
                            newsreader_offset = offset;
                        }
                        ThreadSummaryCursor::Notmuch(offset, ts) => {
                            thread_summary = ts;
                            notmuch_offset = offset;
                        }
                        #[cfg(feature = "tantivy")]
                        ThreadSummaryCursor::Tantivy(offset, ts) => {
                            thread_summary = ts;
                            tantivy_offset = offset;
                        }
                    }
                    let cur = OpaqueCursor(SearchCursor {
                        newsreader_offset,
                        notmuch_offset,
                        #[cfg(feature = "tantivy")]
                        tantivy_offset,
                    });
                    Edge::new(cur, thread_summary)
                }));
                Ok::<_, async_graphql::Error>(connection)
            },
        )
        .await?)
    }

    #[instrument(skip_all, fields(rid=request_id()))]
    async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        let needs_unread = ctx.look_ahead().field("unread").exists();
        let mut tags = newsreader::tags(pool, needs_unread).await?;
        tags.append(&mut nm::tags(nm, needs_unread)?);
        Ok(tags)
    }
    #[instrument(skip_all, fields(thread_id=thread_id, rid=request_id()))]
    async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let cacher = ctx.data_unchecked::<FilesystemCacher>();
        let pool = ctx.data_unchecked::<PgPool>();
        let debug_content_tree = ctx
            .look_ahead()
            .field("messages")
            .field("body")
            .field("contentTree")
            .exists();
        if newsreader::is_newsreader_thread(&thread_id) {
            Ok(newsreader::thread(cacher, pool, thread_id).await?)
        } else {
            Ok(nm::thread(nm, pool, thread_id, debug_content_tree).await?)
        }
    }
}

#[derive(Debug)]
enum ThreadSummaryCursor {
    Newsreader(i32, ThreadSummary),
    Notmuch(i32, ThreadSummary),
    #[cfg(feature = "tantivy")]
    Tantivy(i32, ThreadSummary),
}

async fn newsreader_search(
    pool: &PgPool,
    after: Option<i32>,
    before: Option<i32>,
    first: Option<i32>,
    last: Option<i32>,
    query: &Query,
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
    Ok(newsreader::search(pool, after, before, first, last, query)
        .await?
        .into_iter()
        .map(|(cur, ts)| ThreadSummaryCursor::Newsreader(cur, ts))
        .collect())
}

async fn notmuch_search(
    nm: &Notmuch,
    after: Option<i32>,
    before: Option<i32>,
    first: Option<i32>,
    last: Option<i32>,
    query: &Query,
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
    Ok(nm::search(nm, after, before, first, last, query)
        .await?
        .into_iter()
        .map(|(cur, ts)| ThreadSummaryCursor::Notmuch(cur, ts))
        .collect())
}

#[cfg(feature = "tantivy")]
async fn tantivy_search(
    tantivy: &TantivyConnection,
    pool: &PgPool,
    after: Option<i32>,
    before: Option<i32>,
    first: Option<i32>,
    last: Option<i32>,
    query: &Query,
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
    Ok(tantivy
        .search(pool, after, before, first, last, query)
        .await?
        .into_iter()
        .map(|(cur, ts)| ThreadSummaryCursor::Tantivy(cur, ts))
        .collect())
}

pub struct MutationRoot;
#[Object]
impl MutationRoot {
    #[instrument(skip_all, fields(query=query, unread=unread, rid=request_id()))]
    async fn set_read_status<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        query: String,
        unread: bool,
    ) -> Result<bool, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        #[cfg(feature = "tantivy")]
        let tantivy = ctx.data_unchecked::<TantivyConnection>();

        let query: Query = query.parse()?;
        newsreader::set_read_status(pool, &query, unread).await?;
        #[cfg(feature = "tantivy")]
        tantivy.reindex_thread(pool, &query).await?;
        nm::set_read_status(nm, &query, unread).await?;
        Ok(true)
    }
    #[instrument(skip_all, fields(query=query, tag=tag, rid=request_id()))]
    async fn tag_add<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        query: String,
        tag: String,
    ) -> Result<bool, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        info!("tag_add({tag}, {query})");
        nm.tag_add(&tag, &query)?;
        Ok(true)
    }
    #[instrument(skip_all, fields(query=query, tag=tag, rid=request_id()))]
    async fn tag_remove<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        query: String,
        tag: String,
    ) -> Result<bool, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        info!("tag_remove({tag}, {query})");
        nm.tag_remove(&tag, &query)?;
        Ok(true)
    }
    /// Drop and recreate the tantivy index. Warning: this is slow.
    #[cfg(feature = "tantivy")]
    async fn drop_and_load_index<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
        let tantivy = ctx.data_unchecked::<TantivyConnection>();
        let pool = ctx.data_unchecked::<PgPool>();

        tantivy.drop_and_load_index()?;
        tantivy.reindex_all(pool).await?;

        Ok(true)
    }
    #[instrument(skip_all, fields(rid=request_id()))]
    async fn refresh<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let cacher = ctx.data_unchecked::<FilesystemCacher>();
        let pool = ctx.data_unchecked::<PgPool>();
        info!("{}", String::from_utf8_lossy(&nm.new()?));
        newsreader::refresh(pool, cacher).await?;

        // Process email labels
        label_unprocessed(nm, pool, false, Some(10), "tag:unprocessed").await?;

        #[cfg(feature = "tantivy")]
        {
            let tantivy = ctx.data_unchecked::<TantivyConnection>();
            // TODO: parallelize
            tantivy.refresh(pool).await?;
        }
        Ok(true)
    }
}

pub struct SubscriptionRoot;
#[Subscription]
impl SubscriptionRoot {
    async fn values(&self, _ctx: &Context<'_>) -> Result<impl Stream<Item = usize>, Error> {
        Ok(stream::iter(0..10))
    }
}

pub type GraphqlSchema = Schema<QueryRoot, MutationRoot, SubscriptionRoot>;

#[instrument(skip_all, fields(query=query))]
pub async fn compute_catchup_ids(
    nm: &Notmuch,
    pool: &PgPool,
    query: &str,
) -> Result<Vec<String>, Error> {
    let query: Query = query.parse()?;
    // TODO: implement optimized versions of fetching just IDs
    let newsreader_fut = newsreader_search(pool, None, None, None, None, &query);
    let notmuch_fut = notmuch_search(nm, None, None, None, None, &query);
    let (newsreader_results, notmuch_results) = join!(newsreader_fut, notmuch_fut);

    let newsreader_results = newsreader_results?;
    let notmuch_results = notmuch_results?;
    info!(
        "newsreader_results ({}) notmuch_results ({})",
        newsreader_results.len(),
        notmuch_results.len(),
    );

    let mut results: Vec<_> = newsreader_results
        .into_iter()
        .chain(notmuch_results)
        .collect();
    // The leading '-' is to reverse sort. Tantivy results are not fetched
    // here, but the matches must still cover the cfg'd variant.
    results.sort_by_key(|item| match item {
        ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
        ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
        #[cfg(feature = "tantivy")]
        ThreadSummaryCursor::Tantivy(_, ts) => -ts.timestamp,
    });
    let ids = results
        .into_iter()
        .map(|r| match r {
            ThreadSummaryCursor::Newsreader(_, ts) => ts.thread,
            ThreadSummaryCursor::Notmuch(_, ts) => ts.thread,
            #[cfg(feature = "tantivy")]
            ThreadSummaryCursor::Tantivy(_, ts) => ts.thread,
        })
        .collect();
    Ok(ids)
}

962 server/src/lib.rs Normal file
@@ -0,0 +1,962 @@
pub mod config;
pub mod error;
pub mod graphql;
pub mod newsreader;
pub mod nm;
pub mod ws;

#[cfg(feature = "tantivy")]
pub mod tantivy;

use std::{
    collections::{HashMap, HashSet},
    convert::Infallible,
    fmt,
    str::FromStr,
    sync::Arc,
};

use async_trait::async_trait;
use cacher::{Cacher, FilesystemCacher};
use css_inline::{CSSInliner, InlineError, InlineOptions};
pub use error::ServerError;
use linkify::{LinkFinder, LinkKind};
use lol_html::{
    element, errors::RewritingError, html_content::ContentType, rewrite_str, text,
    RewriteStrSettings,
};
use maplit::{hashmap, hashset};
use regex::Regex;
use reqwest::StatusCode;
use scraper::{Html, Selector};
use sqlx::types::time::PrimitiveDateTime;
use thiserror::Error;
use tracing::{debug, error, info, warn};
use url::Url;

use crate::{
    graphql::{Corpus, ThreadSummary},
    newsreader::is_newsreader_thread,
    nm::is_notmuch_thread_or_id,
};

const NEWSREADER_TAG_PREFIX: &str = "News/";
const NEWSREADER_THREAD_PREFIX: &str = "news:";

// TODO: figure out how to use Cow
#[async_trait]
trait Transformer: Send + Sync {
    fn should_run(&self, _addr: &Option<Url>, _html: &str) -> bool {
        true
    }
    // TODO: should html be something like `html_escape` uses:
    // <S: ?Sized + AsRef<str>>(text: &S) -> Cow<str>
    async fn transform(&self, addr: &Option<Url>, html: &str) -> Result<String, TransformError>;
}
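
A new pass only has to supply `should_run` (a cheap guard) and `transform`. A minimal sketch of a custom pass against this trait; the `Uppercase` transformer is hypothetical, purely for illustration:

#[allow(dead_code)]
struct Uppercase;

#[async_trait]
impl Transformer for Uppercase {
    // Cheap guard: skip the pass when there is nothing to change.
    fn should_run(&self, _addr: &Option<Url>, html: &str) -> bool {
        html.chars().any(char::is_lowercase)
    }
    async fn transform(&self, _addr: &Option<Url>, html: &str) -> Result<String, TransformError> {
        Ok(html.to_uppercase())
    }
}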

// TODO: how would we make this more generic to allow good implementations of Transformer outside
// of this module?
#[derive(Error, Debug)]
pub enum TransformError {
    #[error("lol-html rewrite error: {0}")]
    RewritingError(#[from] RewritingError),
    #[error("css inline error: {0}")]
    InlineError(#[from] InlineError),
    #[error("failed to fetch url error: {0}")]
    ReqwestError(#[from] reqwest::Error),
    #[error("failed to parse HTML: {0}")]
    HtmlParsingError(String),
    #[error("got a retryable error code {0} for {1}")]
    RetryableHttpStatusError(StatusCode, String),
}

struct SanitizeHtml<'a> {
    cid_prefix: &'a str,
    base_url: &'a Option<Url>,
}

#[async_trait]
impl<'a> Transformer for SanitizeHtml<'a> {
    async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
        Ok(sanitize_html(html, self.cid_prefix, self.base_url)?)
    }
}

struct EscapeHtml;

#[async_trait]
impl Transformer for EscapeHtml {
    fn should_run(&self, _: &Option<Url>, html: &str) -> bool {
        html.contains("&")
    }
    async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
        Ok(html_escape::decode_html_entities(html).to_string())
    }
}

struct StripHtml;

#[async_trait]
impl Transformer for StripHtml {
    fn should_run(&self, link: &Option<Url>, html: &str) -> bool {
        debug!("StripHtml should_run {link:?} {}", html.contains("<"));
        // Lame test
        html.contains("<")
    }
    async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
        debug!("StripHtml {link:?}");
        let mut text = String::new();
        let element_content_handlers = vec![
            element!("style", |el| {
                el.remove();
                Ok(())
            }),
            element!("script", |el| {
                el.remove();
                Ok(())
            }),
        ];
        let html = rewrite_str(
            html,
            RewriteStrSettings {
                element_content_handlers,
                ..RewriteStrSettings::default()
            },
        )?;
        let element_content_handlers = vec![text!("*", |t| {
            text += t.as_str();
            Ok(())
        })];
        let _ = rewrite_str(
            &html,
            RewriteStrSettings {
                element_content_handlers,
                ..RewriteStrSettings::default()
            },
        )?;
        let re = Regex::new(r"\s+").expect("failed to parse regex");
        let text = re.replace_all(&text, " ").to_string();

        Ok(text)
    }
}

struct InlineStyle;

#[async_trait]
impl Transformer for InlineStyle {
    async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
        let css = concat!(
            "/* chrome-default.css */\n",
            include_str!("chrome-default.css"),
            //"\n/* mvp.css */\n",
            //include_str!("mvp.css"),
            //"\n/* Xinu Specific overrides */\n",
            //include_str!("custom.css"),
        );
        let inline_opts = InlineOptions {
            inline_style_tags: true,
            keep_style_tags: false,
            keep_link_tags: true,
            base_url: None,
            load_remote_stylesheets: true,
            extra_css: Some(css.into()),
            preallocate_node_capacity: 32,
            ..InlineOptions::default()
        };

        //info!("HTML:\n{html}");
        Ok(match CSSInliner::new(inline_opts).inline(html) {
            Ok(inlined_html) => inlined_html,
            Err(err) => {
                error!("failed to inline CSS: {err}");
                html.to_string()
            }
        })
    }
}

/// FrameImages extracts any alt or title text on images and places it as a
/// caption below said image. It also handles data-src and data-cfsrc attributes.
struct FrameImages;

#[async_trait]
impl Transformer for FrameImages {
    async fn transform(&self, _: &Option<Url>, html: &str) -> Result<String, TransformError> {
        Ok(rewrite_str(
            html,
            RewriteStrSettings {
                element_content_handlers: vec![
                    element!("img[data-src]", |el| {
                        let src = el
                            .get_attribute("data-src")
                            .unwrap_or("https://placehold.co/600x400".to_string());
                        el.set_attribute("src", &src)?;

                        Ok(())
                    }),
                    element!("img[data-cfsrc]", |el| {
                        let src = el
                            .get_attribute("data-cfsrc")
                            .unwrap_or("https://placehold.co/600x400".to_string());
                        el.set_attribute("src", &src)?;

                        Ok(())
                    }),
                    element!("img[alt], img[title]", |el| {
                        let src = el
                            .get_attribute("src")
                            .unwrap_or("https://placehold.co/600x400".to_string());
                        let alt = el.get_attribute("alt");
                        let title = el.get_attribute("title");
                        let mut frags =
                            vec!["<figure>".to_string(), format!(r#"<img src="{src}">"#)];
                        if let Some(t) = alt {
                            if !t.is_empty() {
                                frags.push(format!("<figcaption>Alt: {t}</figcaption>"))
                            }
                        }
                        if let Some(t) = title {
                            if !t.is_empty() {
                                frags.push(format!("<figcaption>Title: {t}</figcaption>"))
                            }
                        }
                        frags.push("</figure>".to_string());
                        el.replace(&frags.join("\n"), ContentType::Html);

                        Ok(())
                    }),
                ],
                ..RewriteStrSettings::default()
            },
        )?)
    }
}
struct AddOutlink;

#[async_trait]
impl Transformer for AddOutlink {
    fn should_run(&self, link: &Option<Url>, html: &str) -> bool {
        if let Some(link) = link {
            link.scheme().starts_with("http") && !html.contains(link.as_str())
        } else {
            false
        }
    }
    async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
        if let Some(link) = link {
            Ok(format!(
                r#"
{html}
<div><a href="{}">View on site</a></div>
"#,
                link
            ))
        } else {
            Ok(html.to_string())
        }
    }
}

struct SlurpContents<'c> {
    cacher: &'c FilesystemCacher,
    inline_css: bool,
    site_selectors: HashMap<String, Vec<Selector>>,
}

impl<'c> SlurpContents<'c> {
    fn get_selectors(&self, link: &Url) -> Option<&[Selector]> {
        for (host, selector) in self.site_selectors.iter() {
            if link.host_str().map(|h| h.contains(host)).unwrap_or(false) {
                return Some(selector);
            }
        }
        None
    }
}

#[async_trait]
impl<'c> Transformer for SlurpContents<'c> {
    fn should_run(&self, link: &Option<Url>, html: &str) -> bool {
        debug!("SlurpContents should_run {link:?}");
        let mut will_slurp = false;
        if let Some(link) = link {
            will_slurp = self.get_selectors(link).is_some();
        }
        if !will_slurp && self.inline_css {
            return InlineStyle {}.should_run(link, html);
        }
        will_slurp
    }
    async fn transform(&self, link: &Option<Url>, html: &str) -> Result<String, TransformError> {
        debug!("SlurpContents {link:?}");
        let retryable_status: HashSet<StatusCode> = vec![
            StatusCode::UNAUTHORIZED,
            StatusCode::FORBIDDEN,
            StatusCode::REQUEST_TIMEOUT,
            StatusCode::TOO_MANY_REQUESTS,
        ]
        .into_iter()
        .collect();
        if let Some(test_link) = link {
            // If SlurpContents is configured for inline CSS, but no
            // configuration found for this site, use the local InlineStyle
            // transform.
            if self.inline_css && self.get_selectors(test_link).is_none() {
                debug!("local inline CSS for {link:?}");
                return InlineStyle {}.transform(link, html).await;
            }
        }
        let Some(link) = link else {
            return Ok(html.to_string());
        };
        let Some(selectors) = self.get_selectors(link) else {
            return Ok(html.to_string());
        };
        let cacher = self.cacher;
        let body = if let Some(body) = cacher.get(link.as_str()) {
            String::from_utf8_lossy(&body).to_string()
        } else {
            let resp = reqwest::get(link.as_str()).await?;
            let status = resp.status();
            if status.is_server_error() {
                error!("status error for {link}: {status}");
                return Ok(html.to_string());
            }
            if retryable_status.contains(&status) {
                error!("retryable error for {link}: {status}");
                return Ok(html.to_string());
            }
            if !status.is_success() {
                error!("unsuccessful for {link}: {status}");
                return Ok(html.to_string());
            }
            let body = resp.text().await?;
            cacher.set(link.as_str(), body.as_bytes());
            body
        };
        let body = Arc::new(body);
        let base_url = Some(link.clone());
        let body = if self.inline_css {
            debug!("inlining CSS for {link}");
            let inner_body = Arc::clone(&body);
            let res = tokio::task::spawn_blocking(move || {
                let css = concat!(
                    "/* chrome-default.css */\n",
                    include_str!("chrome-default.css"),
                    "\n/* vars.css */\n",
                    include_str!("../static/vars.css"),
                    //"\n/* Xinu Specific overrides */\n",
                    //include_str!("custom.css"),
                );
                let res = CSSInliner::options()
                    .base_url(base_url)
                    .extra_css(Some(std::borrow::Cow::Borrowed(css)))
                    .build()
                    .inline(&inner_body);

                match res {
                    Ok(inlined_html) => inlined_html,
                    Err(err) => {
                        error!("failed to inline remote CSS: {err}");
                        // The outer Arc still holds a reference at this point, so
                        // clone the String rather than trying to unwrap the Arc.
                        (*inner_body).clone()
                    }
                }
            })
            .await;
            match res {
                Ok(inlined_html) => inlined_html,
                Err(err) => {
                    error!("failed to spawn inline remote CSS: {err}");
                    Arc::into_inner(body).expect("failed to take body out of Arc")
                }
            }
        } else {
            debug!("using body as-is for {link:?}");
            Arc::into_inner(body).expect("failed to take body out of Arc")
        };

        let doc = Html::parse_document(&body);

        let mut results = Vec::new();
        for selector in selectors {
            for frag in doc.select(selector) {
                results.push(frag.html())
                // TODO: figure out how to warn if there were no hits
                //warn!("couldn't find '{:?}' in {}", selector, link);
            }
        }
        Ok(results.join("<br>"))
    }
}

pub fn linkify_html(text: &str) -> String {
    let mut finder = LinkFinder::new();
    let finder = finder.url_must_have_scheme(false).kinds(&[LinkKind::Url]);
    let mut parts = Vec::new();
    for span in finder.spans(text) {
        // TODO(wathiede): use Cow<str>?
        match span.kind() {
            // Text as-is
            None => parts.push(span.as_str().to_string()),
            // Wrap in anchor tag
            Some(LinkKind::Url) => {
                let text = span.as_str();
                let schema = if text.starts_with("http") {
                    ""
                } else {
                    "http://"
                };
                let a = format!(r#"<a href="{schema}{0}">{0}</a>"#, text);
                parts.push(a);
            }
            _ => todo!("unhandled kind: {:?}", span.kind().unwrap()),
        }
    }
    parts.join("")
}
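
Illustrative use (output shape only; exact span detection follows the linkify crate's heuristics):

#[allow(dead_code)]
fn demo_linkify() {
    let html = linkify_html("release notes at https://rust-lang.org/");
    // Expected shape: the URL is wrapped in an anchor tag and surrounding
    // text passes through unchanged, e.g.
    //   release notes at <a href="https://rust-lang.org/">https://rust-lang.org/</a>
    println!("{html}");
}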

// html contains the content to be cleaned, and cid_prefix is used to resolve mixed part image
// references
pub fn sanitize_html(
    html: &str,
    cid_prefix: &str,
    base_url: &Option<Url>,
) -> Result<String, TransformError> {
    let inline_opts = InlineOptions {
        inline_style_tags: true,
        keep_style_tags: true,
        keep_link_tags: false,
        base_url: None,
        load_remote_stylesheets: false,
        extra_css: None,
        preallocate_node_capacity: 32,
        ..InlineOptions::default()
    };

    let html = match CSSInliner::new(inline_opts).inline(html) {
        Ok(inlined_html) => inlined_html,
        Err(err) => {
            error!("failed to inline CSS: {err}");
            html.to_string()
        }
    };
    let mut element_content_handlers = vec![
        // Remove width and height attributes on elements
        element!("[width],[height]", |el| {
            el.remove_attribute("width");
            el.remove_attribute("height");
            Ok(())
        }),
        // Remove width and height values from inline styles
        element!("[style]", |el| {
            let style = el.get_attribute("style").unwrap();
            let style = style
                .split(";")
                .filter(|s| {
                    let Some((k, _)) = s.split_once(':') else {
                        return true;
                    };
                    match k {
                        "width" | "max-width" | "min-width" | "height" | "max-height"
                        | "min-height" => false,
                        _ => true,
                    }
                })
                .collect::<Vec<_>>()
                .join(";");
            if let Err(e) = el.set_attribute("style", &style) {
                error!("Failed to set style attribute: {e}");
            }
            Ok(())
        }),
        // Open links in new tab
        element!("a[href]", |el| {
            el.set_attribute("target", "_blank").unwrap();

            Ok(())
        }),
        // Replace mixed part CID images with URL
        element!("img[src]", |el| {
            let src = el
                .get_attribute("src")
                .expect("src was required")
                .replace("cid:", cid_prefix);

            el.set_attribute("src", &src)?;

            Ok(())
        }),
        // Only secure image URLs
        element!("img[src]", |el| {
            let src = el
                .get_attribute("src")
                .expect("src was required")
                .replace("http:", "https:");

            el.set_attribute("src", &src)?;

            Ok(())
        }),
        // Add https to href with //<domain name>
        element!("link[href]", |el| {
            info!("found link[href] {el:?}");
            let mut href = el.get_attribute("href").expect("href was required");
            if href.starts_with("//") {
                warn!("adding https to {href}");
                href.insert_str(0, "https:");
            }

            el.set_attribute("href", &href)?;

            Ok(())
        }),
        // Add https to src with //<domain name>
        element!("style[src]", |el| {
            let mut src = el.get_attribute("src").expect("src was required");
            if src.starts_with("//") {
                src.insert_str(0, "https:");
            }

            el.set_attribute("src", &src)?;

            Ok(())
        }),
    ];
    if let Some(base_url) = base_url {
        element_content_handlers.extend(vec![
            // Make links with relative URLs absolute
            element!("a[href]", |el| {
                if let Some(Ok(href)) = el.get_attribute("href").map(|href| base_url.join(&href)) {
                    el.set_attribute("href", href.as_str()).unwrap();
                }

                Ok(())
            }),
            // Make images with relative srcs absolute
            element!("img[src]", |el| {
                if let Some(Ok(src)) = el.get_attribute("src").map(|src| base_url.join(&src)) {
                    el.set_attribute("src", src.as_str()).unwrap();
                }

                Ok(())
            }),
        ]);
    }
    let html = rewrite_str(
        &html,
        RewriteStrSettings {
            element_content_handlers,
            ..RewriteStrSettings::default()
        },
    )?;
    // Defaults don't allow style, but we want to preserve that.
    // TODO: remove 'class' if rendering mails moves to a two-phase process where abstract message
    // types are collected, sanitized, and then grouped together as one big HTML doc
    let attributes = hashset![
        "align", "bgcolor", "class", "color", "height", "lang", "title", "width", "style",
    ];

    let tags = hashset![
        "a",
        "abbr",
        "acronym",
        "area",
        "article",
        "aside",
        "b",
        "bdi",
        "bdo",
        "blockquote",
        "br",
        "caption",
        "center",
        "cite",
        "code",
        "col",
        "colgroup",
        "data",
        "dd",
        "del",
        "details",
        "dfn",
        "div",
        "dl",
        "dt",
        "em",
        "figcaption",
        "figure",
        "footer",
        "h1",
        "h2",
        "h3",
        "h4",
        "h5",
        "h6",
        "header",
        "hgroup",
        "hr",
        "i",
        "iframe", // wathiede
        "img",
        "ins",
        "kbd",
        "li",
        "map",
        "mark",
        "nav",
        "noscript", // wathiede
        "ol",
        "p",
        "pre",
        "q",
        "rp",
        "rt",
        "rtc",
        "ruby",
        "s",
        "samp",
        "small",
        "span",
        "strike",
        "strong",
        "sub",
        "summary",
        "sup",
        "table",
        "tbody",
        "td",
        "th",
        "thead",
        "time",
        "title", // wathiede
        "tr",
        "tt",
        "u",
        "ul",
        "var",
        "wbr",
    ];
    let tag_attributes = hashmap![
        "a" => hashset![
            "href", "hreflang", "target",
        ],
        "bdo" => hashset![
            "dir"
        ],
        "blockquote" => hashset![
            "cite"
        ],
        "col" => hashset![
            "align", "char", "charoff", "span"
        ],
        "colgroup" => hashset![
            "align", "char", "charoff", "span"
        ],
        "del" => hashset![
            "cite", "datetime"
        ],
        "hr" => hashset![
            "align", "size", "width"
        ],
        "iframe" => hashset![
            "src", "allow", "allowfullscreen"
        ],
        "img" => hashset![
            "align", "alt", "height", "src", "width"
        ],
        "ins" => hashset![
            "cite", "datetime"
        ],
        "ol" => hashset![
            "start"
        ],
        "q" => hashset![
            "cite"
        ],
        "table" => hashset![
            "align", "border", "cellpadding", "cellspacing", "char", "charoff", "summary",
        ],
        "tbody" => hashset![
            "align", "char", "charoff"
        ],
        "td" => hashset![
            "align", "char", "charoff", "colspan", "headers", "rowspan"
        ],
        "tfoot" => hashset![
            "align", "char", "charoff"
        ],
        "th" => hashset![
            "align", "char", "charoff", "colspan", "headers", "rowspan", "scope"
        ],
        "thead" => hashset![
            "align", "char", "charoff"
        ],
        "tr" => hashset![
            "align", "char", "charoff"
        ],
    ];

    let html = ammonia::Builder::default()
        .tags(tags)
        .tag_attributes(tag_attributes)
        .generic_attributes(attributes)
        .clean(&html)
        .to_string();

    Ok(html)
}

fn compute_offset_limit(
    after: Option<i32>,
    before: Option<i32>,
    first: Option<i32>,
    last: Option<i32>,
) -> (i32, i32) {
    let default_page_size = 10000;
    match (after, before, first, last) {
        // Reasonable defaults
        (None, None, None, None) => (0, default_page_size),
        (None, None, Some(first), None) => (0, first),
        (Some(after), None, None, None) => (after + 1, default_page_size),
        (Some(after), None, Some(first), None) => (after + 1, first),
        (None, Some(before), None, None) => (0.max(before - default_page_size), default_page_size),
        (None, Some(before), None, Some(last)) => (0.max(before - last), last),
        (None, None, None, Some(_)) => {
            panic!("specifying last and no before doesn't make sense")
        }
        (None, None, Some(_), Some(_)) => {
            panic!("specifying first and last doesn't make sense")
        }
        (None, Some(_), Some(_), _) => {
            panic!("specifying before and first doesn't make sense")
        }
        (Some(_), Some(_), _, _) => {
            panic!("specifying after and before doesn't make sense")
        }
        (Some(_), None, None, Some(_)) => {
            panic!("specifying after and last doesn't make sense")
        }
        (Some(_), None, Some(_), Some(_)) => {
            panic!("specifying after, first and last doesn't make sense")
        }
    }
}
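
A small test sketch (not in the diff) showing how the Relay-style arguments map onto an `(offset, limit)` pair:

#[cfg(test)]
mod offset_limit_tests {
    use super::compute_offset_limit;

    #[test]
    fn relay_args_map_to_offset_and_limit() {
        // No paging arguments: start at 0 with the default page size.
        assert_eq!(compute_offset_limit(None, None, None, None), (0, 10000));
        // Forward paging: skip past the cursor, fetch `first` rows.
        assert_eq!(compute_offset_limit(Some(9), None, Some(20), None), (10, 20));
        // Backward paging: fetch the `last` rows ending before the cursor.
        assert_eq!(compute_offset_limit(None, Some(30), None, Some(10)), (20, 10));
    }
}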

#[derive(Debug, Default)]
pub struct Query {
    pub unread_only: bool,
    pub tags: Vec<String>,
    pub uids: Vec<String>,
    pub remainder: Vec<String>,
    pub is_notmuch: bool,
    pub is_newsreader: bool,
    pub is_tantivy: bool,
    pub corpus: Option<Corpus>,
}

impl fmt::Display for Query {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        if self.unread_only {
            write!(f, "is:unread ")?;
        }
        for tag in &self.tags {
            write!(f, "tag:{tag} ")?;
        }
        for uid in &self.uids {
            write!(f, "id:{uid} ")?;
        }
        if self.is_notmuch {
            write!(f, "is:mail ")?;
        }
        if self.is_newsreader {
            write!(f, "is:news ")?;
        }
        if let Some(c) = self.corpus {
            write!(f, "corpus:{c:?} ")?;
        }
        for rem in &self.remainder {
            write!(f, "{rem} ")?;
        }
        Ok(())
    }
}

impl Query {
    // Converts the internal state of Query to something suitable for notmuch queries. Removes any
    // letterbox-specific '<key>:<value>' terms.
    fn to_notmuch(&self) -> String {
        let mut parts = Vec::new();
        if !self.is_notmuch {
            return String::new();
        }

        if self.unread_only {
            parts.push("is:unread".to_string());
        }
        for tag in &self.tags {
            parts.push(format!("tag:{tag}"));
        }
        for uid in &self.uids {
            parts.push(uid.clone());
        }
        for r in &self.remainder {
            // Rewrite "to:" to include ExtraTo:. ExtraTo: is configured in
            // notmuch-config to index Delivered-To and X-Original-To headers.
            if r.starts_with("to:") {
                parts.push("(".to_string());
                parts.push(r.to_string());
                parts.push("OR".to_string());
                parts.push(r.replace("to:", "ExtraTo:"));
                parts.push(")".to_string());
            } else {
                parts.push(r.to_string());
            }
        }
        parts.join(" ")
    }
}
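
A test sketch (not in the diff) of the `to:` expansion; `to_notmuch` is private, so this would live in the same module:

#[cfg(test)]
mod to_notmuch_tests {
    use super::Query;

    #[test]
    fn to_is_expanded_to_extra_to() {
        let q: Query = "is:mail to:alice@example.com".parse().expect("Infallible");
        assert_eq!(
            q.to_notmuch(),
            "( to:alice@example.com OR ExtraTo:alice@example.com )"
        );
    }
}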

impl FromStr for Query {
    type Err = Infallible;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut unread_only = false;
        let mut tags = Vec::new();
        let mut uids = Vec::new();
        let mut remainder = Vec::new();
        let mut is_notmuch = false;
        let mut is_newsreader = false;
        let mut is_tantivy = false;
        let mut corpus = None;
        for word in s.split_whitespace() {
            if word == "is:unread" {
                unread_only = true
            } else if word.starts_with("tag:") {
                let t = &word["tag:".len()..];
                // Per-address emails are faked as `tag:@<domain>/<username>`, rewrite to `to:` form
                if t.starts_with('@') && t.contains('.') {
                    let t = match t.split_once('/') {
                        None => format!("to:{t}"),
                        Some((domain, user)) => format!("to:{user}{domain}"),
                    };
                    remainder.push(t);
                } else {
                    tags.push(t.to_string());
                };

            /*
            } else if word.starts_with("tag:") {
                // Any tag that doesn't match site_prefix should explicitly set the site to
                // something not in the database
                site = Some(NON_EXISTENT_SITE_NAME.to_string());
            */
            } else if word.starts_with("corpus:") {
                let c = word["corpus:".len()..].to_string();
                corpus = c.parse::<Corpus>().map(Some).unwrap_or_else(|e| {
                    warn!("Error parsing corpus '{c}': {e:?}");
                    None
                });
            } else if is_newsreader_thread(word) {
                uids.push(word.to_string());
            } else if is_notmuch_thread_or_id(word) {
                uids.push(word.to_string());
            } else if word == "is:mail" || word == "is:email" || word == "is:notmuch" {
                is_notmuch = true;
            } else if word == "is:news" || word == "is:newsreader" {
                is_newsreader = true;
            } else {
                remainder.push(word.to_string());
            }
        }
        // If we don't see any explicit filters for a corpus, flip them all on
        if corpus.is_none() && !(is_notmuch || is_tantivy || is_newsreader) {
            is_notmuch = true;
            is_newsreader = true;
            is_tantivy = true;
        }
        Ok(Query {
            unread_only,
            tags,
            uids,
            remainder,
            is_notmuch,
            is_newsreader,
            is_tantivy,
            corpus,
        })
    }
}
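
A sketch (not in the diff) of how one mixed query string decomposes:

#[cfg(test)]
mod query_parse_tests {
    use super::{Corpus, Query};

    #[test]
    fn parse_mixed_query() {
        let q: Query = "is:unread tag:inbox corpus:notmuch rust async"
            .parse()
            .expect("Infallible");
        assert!(q.unread_only);
        assert_eq!(q.tags, vec!["inbox".to_string()]);
        assert_eq!(q.corpus, Some(Corpus::Notmuch));
        assert_eq!(q.remainder, vec!["rust".to_string(), "async".to_string()]);
        // An explicit corpus: term suppresses the flip-everything-on fallback.
        assert!(!q.is_notmuch && !q.is_newsreader && !q.is_tantivy);
    }
}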
pub struct ThreadSummaryRecord {
    pub site: Option<String>,
    pub date: Option<PrimitiveDateTime>,
    pub is_read: Option<bool>,
    pub title: Option<String>,
    pub uid: String,
    pub name: Option<String>,
    pub corpus: Corpus,
}

async fn thread_summary_from_row(r: ThreadSummaryRecord) -> ThreadSummary {
    let site = r.site.unwrap_or("UNKNOWN TAG".to_string());
    let mut tags = vec![format!("{NEWSREADER_TAG_PREFIX}{site}")];
    if !r.is_read.unwrap_or(true) {
        tags.push("unread".to_string());
    };
    let mut title = r.title.unwrap_or("NO TITLE".to_string());
    title = clean_title(&title).await.expect("failed to clean title");
    ThreadSummary {
        thread: format!("{NEWSREADER_THREAD_PREFIX}{}", r.uid),
        timestamp: r
            .date
            .expect("post missing date")
            .assume_utc()
            .unix_timestamp() as isize,
        date_relative: format!("{:?}", r.date),
        //date_relative: "TODO date_relative".to_string(),
        matched: 0,
        total: 1,
        authors: r.name.unwrap_or_else(|| site.clone()),
        subject: title,
        tags,
        corpus: r.corpus,
    }
}
async fn clean_title(title: &str) -> Result<String, ServerError> {
    // Wrap the title in <html> once so the HTML parsers accept it
    let mut title = format!("<html>{title}</html>");
    let title_transformers: Vec<Box<dyn Transformer>> =
        vec![Box::new(EscapeHtml), Box::new(StripHtml)];
    for t in title_transformers.iter() {
        if t.should_run(&None, &title) {
            title = t.transform(&None, &title).await?;
        }
    }
    Ok(title)
}

#[cfg(test)]
mod tests {
    use super::{SanitizeHtml, Transformer};

    #[tokio::test]
    async fn strip_sizes() -> Result<(), Box<dyn std::error::Error>> {
        let ss = SanitizeHtml {
            cid_prefix: "",
            base_url: &None,
        };
        let input = r#"<p width=16 height=16 style="color:blue;width:16px;height:16px;">This el has width and height attributes and inline styles</p>"#;
        let want = r#"<p style="color:blue;">This el has width and height attributes and inline styles</p>"#;
        let got = ss.transform(&None, input).await?;
        assert_eq!(got, want);
        Ok(())
    }
}

@@ -1,164 +0,0 @@
#[macro_use]
extern crate rocket;
mod error;
mod nm;

use std::{error::Error, io::Cursor, str::FromStr};

use glog::Flags;
use notmuch::{Notmuch, NotmuchError};
use rocket::{
    http::{ContentType, Header},
    request::Request,
    response::{Debug, Responder},
    serde::json::Json,
    Response, State,
};
use rocket_cors::{AllowedHeaders, AllowedOrigins};

use crate::error::ServerError;

#[get("/")]
fn hello() -> &'static str {
    "Hello, world!"
}

#[get("/refresh")]
async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
    Ok(Json(String::from_utf8_lossy(&nm.new()?).to_string()))
}

#[get("/search")]
async fn search_all(
    nm: &State<Notmuch>,
) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
    search(nm, "*", None, None).await
}

#[get("/search/<query>?<page>&<results_per_page>")]
async fn search(
    nm: &State<Notmuch>,
    query: &str,
    page: Option<usize>,
    results_per_page: Option<usize>,
) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
    let page = page.unwrap_or(0);
    let results_per_page = results_per_page.unwrap_or(10);
    info!(" search '{query}'");
    let res = shared::SearchResult {
        summary: nm.search(query, page * results_per_page, results_per_page)?,
        query: query.to_string(),
        page,
        results_per_page,
        total: nm.count(query)?,
    };
    Ok(Json(res))
}

#[get("/show/<query>")]
async fn show(
    nm: &State<Notmuch>,
    query: &str,
) -> Result<Json<Vec<shared::Message>>, Debug<ServerError>> {
    let res = nm::threadset_to_messages(nm.show(query).map_err(|e| -> ServerError { e.into() })?)?;
    Ok(Json(res))
}

struct PartResponder {
    bytes: Vec<u8>,
    filename: Option<String>,
}

impl<'r, 'o: 'r> Responder<'r, 'o> for PartResponder {
    fn respond_to(self, _: &'r Request<'_>) -> rocket::response::Result<'o> {
        let mut resp = Response::build();
        if let Some(filename) = self.filename {
            info!("filename {:?}", filename);
            resp.header(Header::new(
                "Content-Disposition",
                format!(r#"attachment; filename="{}""#, filename),
            ))
            .header(ContentType::Binary);
        }
        resp.sized_body(self.bytes.len(), Cursor::new(self.bytes))
            .ok()
    }
}

#[get("/original/<id>/part/<part>")]
async fn original_part(
    nm: &State<Notmuch>,
    id: &str,
    part: usize,
) -> Result<PartResponder, Debug<NotmuchError>> {
    let mid = if id.starts_with("id:") {
        id.to_string()
    } else {
        format!("id:{}", id)
    };
    let meta = nm.show_part(&mid, part)?;
    let res = nm.show_original_part(&mid, part)?;
    Ok(PartResponder {
        bytes: res,
        filename: meta.filename,
    })
}

#[get("/original/<id>")]
async fn original(
    nm: &State<Notmuch>,
    id: &str,
) -> Result<(ContentType, Vec<u8>), Debug<NotmuchError>> {
    let mid = if id.starts_with("id:") {
        id.to_string()
    } else {
        format!("id:{}", id)
    };
    let res = nm.show_original(&mid)?;
    Ok((ContentType::Plain, res))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rocket::main]
|
|
||||||
async fn main() -> Result<(), Box<dyn Error>> {
|
|
||||||
glog::new()
|
|
||||||
.init(Flags {
|
|
||||||
colorlogtostderr: true,
|
|
||||||
//alsologtostderr: true, // use logtostderr to only write to stderr and not to files
|
|
||||||
logtostderr: true,
|
|
||||||
..Default::default()
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
let allowed_origins = AllowedOrigins::all();
|
|
||||||
let cors = rocket_cors::CorsOptions {
|
|
||||||
allowed_origins,
|
|
||||||
allowed_methods: vec!["Get"]
|
|
||||||
.into_iter()
|
|
||||||
.map(|s| FromStr::from_str(s).unwrap())
|
|
||||||
.collect(),
|
|
||||||
allowed_headers: AllowedHeaders::some(&["Authorization", "Accept"]),
|
|
||||||
allow_credentials: true,
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
.to_cors()?;
|
|
||||||
|
|
||||||
let _ = rocket::build()
|
|
||||||
.mount(
|
|
||||||
"/",
|
|
||||||
routes![
|
|
||||||
original_part,
|
|
||||||
original,
|
|
||||||
hello,
|
|
||||||
refresh,
|
|
||||||
search_all,
|
|
||||||
search,
|
|
||||||
show
|
|
||||||
],
|
|
||||||
)
|
|
||||||
.attach(cors)
|
|
||||||
.manage(Notmuch::default())
|
|
||||||
//.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))
|
|
||||||
.launch()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
498 server/src/mvp.css Normal file
@@ -0,0 +1,498 @@
/* MVP.css v1.15 - https://github.com/andybrewer/mvp */

/* :root content stored in client side index.html */

html {
    scroll-behavior: smooth;
}

@media (prefers-reduced-motion: reduce) {
    html {
        scroll-behavior: auto;
    }
}

/* Layout */
article aside {
    background: var(--color-secondary-accent);
    border-left: 4px solid var(--color-secondary);
    padding: 0.01rem 0.8rem;
}

body {
    background: var(--color-bg);
    color: var(--color-text);
    font-family: var(--font-family);
    line-height: var(--line-height);
    margin: 0;
    overflow-x: hidden;
    padding: 0;
}

footer,
header,
main {
    margin: 0 auto;
    max-width: var(--width-content);
    padding: 3rem 1rem;
}

hr {
    background-color: var(--color-bg-secondary);
    border: none;
    height: 1px;
    margin: 4rem 0;
    width: 100%;
}

section {
    display: flex;
    flex-wrap: wrap;
    justify-content: var(--justify-important);
}

section img,
article img {
    max-width: 100%;
}

section pre {
    overflow: auto;
}

section aside {
    border: 1px solid var(--color-bg-secondary);
    border-radius: var(--border-radius);
    box-shadow: var(--box-shadow) var(--color-shadow);
    margin: 1rem;
    padding: 1.25rem;
    width: var(--width-card);
}

section aside:hover {
    box-shadow: var(--box-shadow) var(--color-bg-secondary);
}

[hidden] {
    display: none;
}

/* Headers */
article header,
div header,
main header {
    padding-top: 0;
}

header {
    text-align: var(--justify-important);
}

header a b,
header a em,
header a i,
header a strong {
    margin-left: 0.5rem;
    margin-right: 0.5rem;
}

header nav img {
    margin: 1rem 0;
}

section header {
    padding-top: 0;
    width: 100%;
}

/* Nav */
nav {
    align-items: center;
    display: flex;
    font-weight: bold;
    justify-content: space-between;
    margin-bottom: 7rem;
}

nav ul {
    list-style: none;
    padding: 0;
}

nav ul li {
    display: inline-block;
    margin: 0 0.5rem;
    position: relative;
    text-align: left;
}

/* Nav Dropdown */
nav ul li:hover ul {
    display: block;
}

nav ul li ul {
    background: var(--color-bg);
    border: 1px solid var(--color-bg-secondary);
    border-radius: var(--border-radius);
    box-shadow: var(--box-shadow) var(--color-shadow);
    display: none;
    height: auto;
    left: -2px;
    padding: .5rem 1rem;
    position: absolute;
    top: 1.7rem;
    white-space: nowrap;
    width: auto;
    z-index: 1;
}

nav ul li ul::before {
    /* fill gap above to make mousing over them easier */
    content: "";
    position: absolute;
    left: 0;
    right: 0;
    top: -0.5rem;
    height: 0.5rem;
}

nav ul li ul li,
nav ul li ul li a {
    display: block;
}

/* Typography */
code,
samp {
    background-color: var(--color-accent);
    border-radius: var(--border-radius);
    color: var(--color-text);
    display: inline-block;
    margin: 0 0.1rem;
    padding: 0 0.5rem;
}

details {
    margin: 1.3rem 0;
}

details summary {
    font-weight: bold;
    cursor: pointer;
}

h1,
h2,
h3,
h4,
h5,
h6 {
    line-height: var(--line-height);
    text-wrap: balance;
}

mark {
    padding: 0.1rem;
}

ol li,
ul li {
    padding: 0.2rem 0;
}

p {
    margin: 0.75rem 0;
    padding: 0;
    width: 100%;
}

pre {
    margin: 1rem 0;
    max-width: var(--width-card-wide);
    padding: 1rem 0;
}

pre code,
pre samp {
    display: block;
    max-width: var(--width-card-wide);
    padding: 0.5rem 2rem;
    white-space: pre-wrap;
}

small {
    color: var(--color-text-secondary);
}

sup {
    background-color: var(--color-secondary);
    border-radius: var(--border-radius);
    color: var(--color-bg);
    font-size: xx-small;
    font-weight: bold;
    margin: 0.2rem;
    padding: 0.2rem 0.3rem;
    position: relative;
    top: -2px;
}

/* Links */
a {
    color: var(--color-link);
    display: inline-block;
    font-weight: bold;
    text-decoration: underline;
}

a:hover {
    filter: brightness(var(--hover-brightness));
}

a:active {
    filter: brightness(var(--active-brightness));
}

a b,
a em,
a i,
a strong,
button,
input[type="submit"] {
    border-radius: var(--border-radius);
    display: inline-block;
    font-size: medium;
    font-weight: bold;
    line-height: var(--line-height);
    margin: 0.5rem 0;
    padding: 1rem 2rem;
}

button,
input[type="submit"] {
    font-family: var(--font-family);
}

button:hover,
input[type="submit"]:hover {
    cursor: pointer;
    filter: brightness(var(--hover-brightness));
}

button:active,
input[type="submit"]:active {
    filter: brightness(var(--active-brightness));
}

a b,
a strong,
button,
input[type="submit"] {
    background-color: var(--color-link);
    border: 2px solid var(--color-link);
    color: var(--color-bg);
}

a em,
a i {
    border: 2px solid var(--color-link);
    border-radius: var(--border-radius);
    color: var(--color-link);
    display: inline-block;
    padding: 1rem 2rem;
}

article aside a {
    color: var(--color-secondary);
}

/* Images */
figure {
    margin: 0;
    padding: 0;
}

figure img {
    max-width: 100%;
}

figure figcaption {
    color: var(--color-text-secondary);
}

/* Forms */
button:disabled,
input:disabled {
    background: var(--color-bg-secondary);
    border-color: var(--color-bg-secondary);
    color: var(--color-text-secondary);
    cursor: not-allowed;
}

button[disabled]:hover,
input[type="submit"][disabled]:hover {
    filter: none;
}

form {
    border: 1px solid var(--color-bg-secondary);
    border-radius: var(--border-radius);
    box-shadow: var(--box-shadow) var(--color-shadow);
    display: block;
    max-width: var(--width-card-wide);
    min-width: var(--width-card);
    padding: 1.5rem;
    text-align: var(--justify-normal);
}

form header {
    margin: 1.5rem 0;
    padding: 1.5rem 0;
}

input,
label,
select,
textarea {
    display: block;
    font-size: inherit;
    max-width: var(--width-card-wide);
}

input[type="checkbox"],
input[type="radio"] {
    display: inline-block;
}

input[type="checkbox"]+label,
input[type="radio"]+label {
    display: inline-block;
    font-weight: normal;
    position: relative;
    top: 1px;
}

input[type="range"] {
    padding: 0.4rem 0;
}

input,
select,
textarea {
    border: 1px solid var(--color-bg-secondary);
    border-radius: var(--border-radius);
    margin-bottom: 1rem;
    padding: 0.4rem 0.8rem;
}

input[type="text"],
|
||||||
|
input[type="password"] textarea {
|
||||||
|
width: calc(100% - 1.6rem);
|
||||||
|
}
|
||||||
|
|
||||||
|
input[readonly],
textarea[readonly] {
    background-color: var(--color-bg-secondary);
}

label {
    font-weight: bold;
    margin-bottom: 0.2rem;
}

/* Popups */
dialog {
    border: 1px solid var(--color-bg-secondary);
    border-radius: var(--border-radius);
    box-shadow: var(--box-shadow) var(--color-shadow);
    position: fixed;
    top: 50%;
    left: 50%;
    transform: translate(-50%, -50%);
    width: 50%;
    z-index: 999;
}

/* Tables */
table {
    border: 1px solid var(--color-bg-secondary);
    border-radius: var(--border-radius);
    border-spacing: 0;
    display: inline-block;
    max-width: 100%;
    overflow-x: auto;
    padding: 0;
    white-space: nowrap;
}

table td,
table th,
table tr {
    padding: 0.4rem 0.8rem;
    text-align: var(--justify-important);
}

table thead {
    background-color: var(--color-table);
    border-collapse: collapse;
    border-radius: var(--border-radius);
    color: var(--color-bg);
    margin: 0;
    padding: 0;
}

table thead tr:first-child th:first-child {
    border-top-left-radius: var(--border-radius);
}

table thead tr:first-child th:last-child {
    border-top-right-radius: var(--border-radius);
}

table thead th:first-child,
table tr td:first-child {
    text-align: var(--justify-normal);
}

table tr:nth-child(even) {
    background-color: var(--color-accent);
}

/* Quotes */
blockquote {
    display: block;
    font-size: x-large;
    line-height: var(--line-height);
    margin: 1rem auto;
    max-width: var(--width-card-medium);
    padding: 1.5rem 1rem;
    text-align: var(--justify-important);
}

blockquote footer {
    color: var(--color-text-secondary);
    display: block;
    font-size: small;
    line-height: var(--line-height);
    padding: 1.5rem 0;
}

/* Scrollbars */
* {
    scrollbar-width: thin;
    scrollbar-color: var(--color-scrollbar) transparent;
}

*::-webkit-scrollbar {
    width: 5px;
    height: 5px;
}

*::-webkit-scrollbar-track {
    background: transparent;
}

*::-webkit-scrollbar-thumb {
    background-color: var(--color-scrollbar);
    border-radius: 10px;
}
383 server/src/newsreader.rs Normal file
@@ -0,0 +1,383 @@
use std::collections::HashMap;

use cacher::FilesystemCacher;
use futures::{stream::FuturesUnordered, StreamExt};
use letterbox_shared::compute_color;
use maplit::hashmap;
use scraper::Selector;
use sqlx::postgres::PgPool;
use tracing::{error, info, instrument};
use url::Url;

use crate::{
    clean_title, compute_offset_limit,
    error::ServerError,
    graphql::{Corpus, NewsPost, Tag, Thread, ThreadSummary},
    thread_summary_from_row, AddOutlink, FrameImages, Query, SanitizeHtml, SlurpContents,
    StripHtml, ThreadSummaryRecord, Transformer, NEWSREADER_TAG_PREFIX, NEWSREADER_THREAD_PREFIX,
};

pub fn is_newsreader_query(query: &Query) -> bool {
    query.is_newsreader || query.corpus == Some(Corpus::Newsreader)
}

pub fn is_newsreader_thread(query: &str) -> bool {
    query.starts_with(NEWSREADER_THREAD_PREFIX)
}

pub fn extract_thread_id(query: &str) -> &str {
    if query.starts_with(NEWSREADER_THREAD_PREFIX) {
        &query[NEWSREADER_THREAD_PREFIX.len()..]
    } else {
        query
    }
}

pub fn extract_site(tag: &str) -> &str {
    &tag[NEWSREADER_TAG_PREFIX.len()..]
}

pub fn make_news_tag(tag: &str) -> String {
    format!("tag:{NEWSREADER_TAG_PREFIX}{tag}")
}

fn site_from_tags(tags: &[String]) -> Option<String> {
    for t in tags {
        if t.starts_with(NEWSREADER_TAG_PREFIX) {
            return Some(extract_site(t).to_string());
        }
    }
    None
}

#[instrument(name = "newsreader::count", skip_all, fields(query=%query))]
pub async fn count(pool: &PgPool, query: &Query) -> Result<usize, ServerError> {
    if !is_newsreader_query(query) {
        return Ok(0);
    }
    let site = site_from_tags(&query.tags);
    if !query.tags.is_empty() && site.is_none() {
        // Newsreader can only handle all-sites read/unread queries;
        // anything with a non-site tag isn't supported.
        return Ok(0);
    }

    let search_term = query.remainder.join(" ");
    let search_term = search_term.trim();
    let search_term = if search_term.is_empty() {
        None
    } else {
        Some(search_term)
    };
    // TODO: add support for looking for search_term in title and site
    let row = sqlx::query_file!("sql/count.sql", site, query.unread_only, search_term)
        .fetch_one(pool)
        .await?;
    Ok(row.count.unwrap_or(0).try_into().unwrap_or(0))
}

#[instrument(name = "newsreader::search", skip_all, fields(query=%query))]
|
||||||
|
pub async fn search(
|
||||||
|
pool: &PgPool,
|
||||||
|
after: Option<i32>,
|
||||||
|
before: Option<i32>,
|
||||||
|
first: Option<i32>,
|
||||||
|
last: Option<i32>,
|
||||||
|
query: &Query,
|
||||||
|
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
|
||||||
|
info!("search({after:?} {before:?} {first:?} {last:?} {query:?}");
|
||||||
|
if !is_newsreader_query(query) {
|
||||||
|
return Ok(Vec::new());
|
||||||
|
}
|
||||||
|
let site = site_from_tags(&query.tags);
|
||||||
|
if !query.tags.is_empty() && site.is_none() {
|
||||||
|
// Newsreader can only handle all sites read/unread queries, anything with a non-site tag
|
||||||
|
// isn't supported
|
||||||
|
return Ok(Vec::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
let (offset, mut limit) = compute_offset_limit(after, before, first, last);
|
||||||
|
if before.is_none() {
|
||||||
|
// When searching forward, the +1 is to see if there are more pages of data available.
|
||||||
|
// Searching backwards implies there's more pages forward, because the value represented by
|
||||||
|
// `before` is on the next page.
|
||||||
|
limit = limit + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"search offset {offset} limit {limit} site {site:?} unread_only {}",
|
||||||
|
query.unread_only
|
||||||
|
);
|
||||||
|
let search_term = query.remainder.join(" ");
|
||||||
|
let search_term = search_term.trim();
|
||||||
|
let search_term = if search_term.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(search_term)
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO: add support for looking for search_term in title and site
|
||||||
|
let rows = sqlx::query_file!(
|
||||||
|
"sql/threads.sql",
|
||||||
|
site,
|
||||||
|
query.unread_only,
|
||||||
|
offset as i64,
|
||||||
|
limit as i64,
|
||||||
|
search_term
|
||||||
|
)
|
||||||
|
.fetch_all(pool)
|
||||||
|
.await?;
|
||||||
|
let mut res = Vec::new();
|
||||||
|
for (i, r) in rows.into_iter().enumerate() {
|
||||||
|
res.push((
|
||||||
|
i as i32 + offset,
|
||||||
|
thread_summary_from_row(ThreadSummaryRecord {
|
||||||
|
site: r.site,
|
||||||
|
date: r.date,
|
||||||
|
is_read: r.is_read,
|
||||||
|
title: r.title,
|
||||||
|
uid: r.uid,
|
||||||
|
name: r.name,
|
||||||
|
corpus: Corpus::Newsreader,
|
||||||
|
})
|
||||||
|
.await,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
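The `limit += 1` over-fetch only pays off if the caller treats the extra row purely as a has-more signal and never returns it. That consumer isn't in this hunk; a hypothetical sketch of the trimming step:

// Hypothetical caller: page_size is the requested first/last value
// (the default of 10 is assumed); search() returned up to
// page_size + 1 rows.
let mut rows = search(pool, after, before, first, last, &query).await?;
let page_size = first.or(last).unwrap_or(10) as usize;
let has_next_page = before.is_none() && rows.len() > page_size;
rows.truncate(page_size);
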
#[instrument(name = "newsreader::tags", skip_all, fields(needs_unread=%_needs_unread))]
|
||||||
|
pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
|
||||||
|
// TODO: optimize query by using needs_unread
|
||||||
|
let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?;
|
||||||
|
let tags = tags
|
||||||
|
.into_iter()
|
||||||
|
.map(|tag| {
|
||||||
|
let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
|
||||||
|
let name = format!(
|
||||||
|
"{NEWSREADER_TAG_PREFIX}{}",
|
||||||
|
tag.site.expect("tag must have site")
|
||||||
|
);
|
||||||
|
let hex = compute_color(&name);
|
||||||
|
Tag {
|
||||||
|
name,
|
||||||
|
fg_color: "white".to_string(),
|
||||||
|
bg_color: hex,
|
||||||
|
unread,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
Ok(tags)
|
||||||
|
}
|
||||||
|
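`compute_color` (defined in letterbox-shared, later in this compare) hashes the tag name into a 24-bit hex color, so each site keeps a stable badge color with no stored state. A brief illustration; the tag prefix shown is illustrative, not the actual constant:

use letterbox_shared::compute_color;

// The same tag name always hashes to the same "#rrggbb" string.
let c = compute_color("News/jvns.ca");
assert_eq!(c, compute_color("News/jvns.ca"));
assert!(c.starts_with('#') && c.len() == 7);
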

#[instrument(name = "newsreader::thread", skip_all, fields(thread_id=%thread_id))]
pub async fn thread(
    cacher: &FilesystemCacher,
    pool: &PgPool,
    thread_id: String,
) -> Result<Thread, ServerError> {
    let id = thread_id
        .strip_prefix(NEWSREADER_THREAD_PREFIX)
        .expect("news thread id missing NEWSREADER_THREAD_PREFIX prefix")
        .to_string();

    let r = sqlx::query_file!("sql/thread.sql", id)
        .fetch_one(pool)
        .await?;

    let slug = r.site.unwrap_or("no-slug".to_string());
    let site = r.name.unwrap_or("NO SITE".to_string());
    // TODO: remove the various places that have this as an Option
    let link = Some(Url::parse(&r.link)?);
    let mut body = r.clean_summary.unwrap_or("NO SUMMARY".to_string());
    let body_transformers: Vec<Box<dyn Transformer>> = vec![
        Box::new(SlurpContents {
            cacher,
            inline_css: true,
            site_selectors: slurp_contents_selectors(),
        }),
        Box::new(FrameImages),
        Box::new(AddOutlink),
        // TODO: causes doubling of images in cloudflare blogs
        //Box::new(EscapeHtml),
        Box::new(SanitizeHtml {
            cid_prefix: "",
            base_url: &link,
        }),
    ];
    for t in body_transformers.iter() {
        if t.should_run(&link, &body) {
            body = t.transform(&link, &body).await?;
        }
    }
    let title = clean_title(&r.title.unwrap_or("NO TITLE".to_string())).await?;
    let is_read = r.is_read.unwrap_or(false);
    let timestamp = r
        .date
        .expect("post missing date")
        .assume_utc()
        .unix_timestamp();
    Ok(Thread::News(NewsPost {
        thread_id,
        is_read,
        slug,
        site,
        title,
        body,
        url: link
            .as_ref()
            .map(|url| url.to_string())
            .unwrap_or("NO URL".to_string()),
        timestamp,
    }))
}

#[instrument(name = "newsreader::set_read_status", skip_all, fields(query=%query,unread=%unread))]
|
||||||
|
pub async fn set_read_status<'ctx>(
|
||||||
|
pool: &PgPool,
|
||||||
|
query: &Query,
|
||||||
|
unread: bool,
|
||||||
|
) -> Result<bool, ServerError> {
|
||||||
|
// TODO: make single query when query.uids.len() > 1
|
||||||
|
let uids: Vec<_> = query
|
||||||
|
.uids
|
||||||
|
.iter()
|
||||||
|
.filter(|uid| is_newsreader_thread(uid))
|
||||||
|
.map(
|
||||||
|
|uid| extract_thread_id(uid), // TODO strip prefix
|
||||||
|
)
|
||||||
|
.collect();
|
||||||
|
for uid in uids {
|
||||||
|
sqlx::query_file!("sql/set_unread.sql", !unread, uid)
|
||||||
|
.execute(pool)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
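The TODO above asks for a single round trip when several uids change. Postgres can take the whole list via ANY(); a sketch assuming the table and column names match what `sql/set_unread.sql` (not shown here) targets:

// Hypothetical batched form: bind the uid list once and let Postgres
// match with ANY() instead of issuing one UPDATE per uid.
let uids: Vec<String> = query
    .uids
    .iter()
    .filter(|uid| is_newsreader_thread(uid))
    .map(|uid| extract_thread_id(uid).to_string())
    .collect();
sqlx::query!(
    "UPDATE post SET is_read = $1 WHERE uid = ANY($2)",
    !unread,
    &uids as &[String],
)
.execute(pool)
.await?;
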
#[instrument(name = "newsreader::refresh", skip_all)]
|
||||||
|
pub async fn refresh<'ctx>(pool: &PgPool, cacher: &FilesystemCacher) -> Result<bool, ServerError> {
|
||||||
|
async fn update_search_summary(
|
||||||
|
pool: &PgPool,
|
||||||
|
cacher: &FilesystemCacher,
|
||||||
|
link: String,
|
||||||
|
body: String,
|
||||||
|
id: i32,
|
||||||
|
) -> Result<(), ServerError> {
|
||||||
|
let slurp_contents = SlurpContents {
|
||||||
|
cacher,
|
||||||
|
inline_css: true,
|
||||||
|
site_selectors: slurp_contents_selectors(),
|
||||||
|
};
|
||||||
|
let strip_html = StripHtml;
|
||||||
|
|
||||||
|
info!("adding {link} to search index");
|
||||||
|
let mut body = body;
|
||||||
|
if let Ok(link) = Url::parse(&link) {
|
||||||
|
let link = Some(link);
|
||||||
|
if slurp_contents.should_run(&link, &body) {
|
||||||
|
body = slurp_contents.transform(&link, &body).await?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
error!("failed to parse link: {}", link);
|
||||||
|
}
|
||||||
|
body = strip_html.transform(&None, &body).await?;
|
||||||
|
sqlx::query!(
|
||||||
|
"UPDATE post SET search_summary = $1 WHERE id = $2",
|
||||||
|
body,
|
||||||
|
id
|
||||||
|
)
|
||||||
|
.execute(pool)
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut unordered: FuturesUnordered<_> = sqlx::query_file!("sql/need-search-summary.sql",)
|
||||||
|
.fetch_all(pool)
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|r| {
|
||||||
|
let Some(body) = r.clean_summary else {
|
||||||
|
error!("clean_summary missing for {}", r.link);
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
let id = r.id;
|
||||||
|
Some(update_search_summary(pool, cacher, r.link, body, id))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
while let Some(res) = unordered.next().await {
|
||||||
|
//let res = res;
|
||||||
|
match res {
|
||||||
|
Ok(()) => {}
|
||||||
|
Err(err) => {
|
||||||
|
info!("failed refresh {err:?}");
|
||||||
|
// TODO:
|
||||||
|
//fd.error = Some(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn slurp_contents_selectors() -> HashMap<String, Vec<Selector>> {
    hashmap![
        "atmeta.com".to_string() => vec![
            Selector::parse("div.entry-content").unwrap(),
        ],
        "blog.prusa3d.com".to_string() => vec![
            Selector::parse("article.content .post-block").unwrap(),
        ],
        "blog.cloudflare.com".to_string() => vec![
            Selector::parse(".author-lists .author-name-tooltip").unwrap(),
            Selector::parse(".post-full-content").unwrap()
        ],
        "blog.zsa.io".to_string() => vec![
            Selector::parse("section.blog-article").unwrap(),
        ],
        "engineering.fb.com".to_string() => vec![
            Selector::parse("article").unwrap(),
        ],
        "grafana.com".to_string() => vec![
            Selector::parse(".blog-content").unwrap(),
        ],
        "hackaday.com".to_string() => vec![
            Selector::parse("div.entry-featured-image").unwrap(),
            Selector::parse("div.entry-content").unwrap()
        ],
        "ingowald.blog".to_string() => vec![
            Selector::parse("article").unwrap(),
        ],
        "jvns.ca".to_string() => vec![
            Selector::parse("article").unwrap(),
        ],
        "mitchellh.com".to_string() => vec![Selector::parse("div.w-full").unwrap()],
        "natwelch.com".to_string() => vec![
            Selector::parse("article div.prose").unwrap(),
        ],
        "rustacean-station.org".to_string() => vec![
            Selector::parse("article").unwrap(),
        ],
        "slashdot.org".to_string() => vec![
            Selector::parse("span.story-byline").unwrap(),
            Selector::parse("div.p").unwrap(),
        ],
        "theonion.com".to_string() => vec![
            // Single image joke w/ title
            Selector::parse("article > section > div > figure").unwrap(),
            // Single cartoon
            Selector::parse("article > div > div > figure").unwrap(),
            // Image at top of article
            Selector::parse("article > header > div > div > figure").unwrap(),
            // Article body
            Selector::parse("article .entry-content > *").unwrap(),
        ],
        "trofi.github.io".to_string() => vec![
            Selector::parse("#content").unwrap(),
        ],
        "www.redox-os.org".to_string() => vec![
            Selector::parse("div.content").unwrap(),
        ],
        "www.smbc-comics.com".to_string() => vec![
            Selector::parse("img#cc-comic").unwrap(),
            Selector::parse("div#aftercomic img").unwrap(),
        ],
    ]
}
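Each map entry above names the CSS selectors worth keeping for one host. Presumably `SlurpContents` applies them with the `scraper` crate along these lines (a sketch of the idea, not the actual transformer body):

use scraper::{Html, Selector};

// Keep only the fragments matched by a site's selectors,
// concatenated in document order.
fn extract(html: &str, selectors: &[Selector]) -> String {
    let doc = Html::parse_document(html);
    let mut out = String::new();
    for sel in selectors {
        for el in doc.select(sel) {
            out.push_str(&el.html());
        }
    }
    out
}
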
1107 server/src/nm.rs Normal file
File diff suppressed because it is too large
353 server/src/tantivy.rs Normal file
@@ -0,0 +1,353 @@
use std::collections::HashSet;

use log::{debug, error, info, warn};
use sqlx::{postgres::PgPool, types::time::PrimitiveDateTime};
use tantivy::{
    collector::{DocSetCollector, TopDocs},
    doc, query,
    query::{AllQuery, BooleanQuery, Occur, QueryParser, TermQuery},
    schema::{Facet, IndexRecordOption, Value},
    DocAddress, Index, IndexReader, Searcher, TantivyDocument, TantivyError, Term,
};
use tracing::{info_span, instrument, Instrument};

use crate::{
    compute_offset_limit,
    error::ServerError,
    graphql::{Corpus, ThreadSummary},
    newsreader::{extract_thread_id, is_newsreader_thread},
    thread_summary_from_row, Query, ThreadSummaryRecord,
};

pub fn is_tantivy_query(query: &Query) -> bool {
    query.is_tantivy || query.corpus == Some(Corpus::Tantivy)
}

pub struct TantivyConnection {
    db_path: String,
    index: Index,
    reader: IndexReader,
}

fn get_index(db_path: &str) -> Result<Index, TantivyError> {
    Ok(match Index::open_in_dir(db_path) {
        Ok(idx) => idx,
        Err(err) => {
            warn!("Failed to open {db_path}: {err}");
            create_news_db(db_path)?;
            Index::open_in_dir(db_path)?
        }
    })
}

impl TantivyConnection {
    pub fn new(tantivy_db_path: &str) -> Result<TantivyConnection, TantivyError> {
        let index = get_index(tantivy_db_path)?;
        let reader = index.reader()?;

        Ok(TantivyConnection {
            db_path: tantivy_db_path.to_string(),
            index,
            reader,
        })
    }

    #[instrument(name = "tantivy::refresh", skip_all)]
    pub async fn refresh(&self, pool: &PgPool) -> Result<(), ServerError> {
        let start_time = std::time::Instant::now();
        let p_uids: Vec<_> = sqlx::query_file!("sql/all-uids.sql")
            .fetch_all(pool)
            .instrument(info_span!("postgres query"))
            .await?
            .into_iter()
            .map(|r| r.uid)
            .collect();
        info!(
            "refresh from postgres got {} uids in {}",
            p_uids.len(),
            start_time.elapsed().as_secs_f32()
        );

        let t_span = info_span!("tantivy query");
        let _enter = t_span.enter();
        let start_time = std::time::Instant::now();
        let (searcher, _query) = self.searcher_and_query(&Query::default())?;
        let docs = searcher.search(&AllQuery, &DocSetCollector)?;
        let uid = self.index.schema().get_field("uid")?;
        let t_uids: Vec<_> = docs
            .into_iter()
            .map(|doc_address| {
                searcher
                    .doc(doc_address)
                    .map(|doc: TantivyDocument| {
                        debug!("doc: {doc:#?}");
                        doc.get_first(uid)
                            .expect("uid")
                            .as_str()
                            .expect("as_str")
                            .to_string()
                    })
                    .expect("searcher.doc")
            })
            .collect();
        drop(_enter);

        info!(
            "refresh tantivy got {} uids in {}",
            t_uids.len(),
            start_time.elapsed().as_secs_f32()
        );
        let t_set: HashSet<_> = t_uids.into_iter().collect();
        let need: Vec<_> = p_uids
            .into_iter()
            .filter(|uid| !t_set.contains(uid.as_str()))
            .collect();
        if !need.is_empty() {
            info!(
                "need to reindex {} uids: {:?}...",
                need.len(),
                &need[..need.len().min(10)]
            );
        }
        let batch_size = 1000;
        let uids: Vec<_> = need[..need.len().min(batch_size)]
            .iter()
            .cloned()
            .collect();
        self.reindex_uids(pool, &uids).await
    }

    #[instrument(skip(self, pool))]
    async fn reindex_uids(&self, pool: &PgPool, uids: &[String]) -> Result<(), ServerError> {
        if uids.is_empty() {
            return Ok(());
        }
        // TODO: add SlurpContents and convert HTML to text

        let mut index_writer = self.index.writer(50_000_000)?;
        let schema = self.index.schema();
        let site = schema.get_field("site")?;
        let title = schema.get_field("title")?;
        let summary = schema.get_field("summary")?;
        let link = schema.get_field("link")?;
        let date = schema.get_field("date")?;
        let is_read = schema.get_field("is_read")?;
        let uid = schema.get_field("uid")?;
        let id = schema.get_field("id")?;
        let tag = schema.get_field("tag")?;

        info!("reindexing {} posts", uids.len());
        let rows = sqlx::query_file_as!(PostgresDoc, "sql/posts-from-uids.sql", uids)
            .fetch_all(pool)
            .await?;

        if uids.len() != rows.len() {
            error!(
                "Had {} uids and only got {} rows: uids {uids:?}",
                uids.len(),
                rows.len()
            );
        }
        for r in rows {
            let id_term = Term::from_field_text(uid, &r.uid);
            index_writer.delete_term(id_term);
            let slug = r.site;
            let tag_facet = Facet::from(&format!("/News/{slug}"));
            index_writer.add_document(doc!(
                site => slug.clone(),
                title => r.title,
                // TODO: clean and extract text from HTML
                summary => r.summary,
                link => r.link,
                date => tantivy::DateTime::from_primitive(r.date),
                is_read => r.is_read,
                uid => r.uid,
                id => r.id as u64,
                tag => tag_facet,
            ))?;
        }

        info_span!("IndexWriter.commit").in_scope(|| index_writer.commit())?;
        info_span!("IndexReader.reload").in_scope(|| self.reader.reload())?;
        Ok(())
    }

#[instrument(name = "tantivy::reindex_thread", skip_all, fields(query=%query))]
|
||||||
|
pub async fn reindex_thread(&self, pool: &PgPool, query: &Query) -> Result<(), ServerError> {
|
||||||
|
let uids: Vec<_> = query
|
||||||
|
.uids
|
||||||
|
.iter()
|
||||||
|
.filter(|uid| is_newsreader_thread(uid))
|
||||||
|
.map(|uid| extract_thread_id(uid).to_string())
|
||||||
|
.collect();
|
||||||
|
Ok(self.reindex_uids(pool, &uids).await?)
|
||||||
|
}
|
||||||
|
#[instrument(name = "tantivy::reindex_all", skip_all)]
|
||||||
|
pub async fn reindex_all(&self, pool: &PgPool) -> Result<(), ServerError> {
|
||||||
|
let rows = sqlx::query_file!("sql/all-posts.sql")
|
||||||
|
.fetch_all(pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let uids: Vec<String> = rows.into_iter().map(|r| r.uid).collect();
|
||||||
|
self.reindex_uids(pool, &uids).await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
    fn searcher_and_query(
        &self,
        query: &Query,
    ) -> Result<(Searcher, Box<dyn query::Query>), ServerError> {
        // TODO: only create one reader
        // From https://tantivy-search.github.io/examples/basic_search.html
        // "For a search server you will typically create one reader for the entire lifetime of
        // your program, and acquire a new searcher for every single request."
        //
        // I think there's some challenge in making the reader work if we reindex, so the reader
        // may need to be stored indirectly, and be recreated on reindex.
        // I think creating a reader takes 200-300 ms.
        let schema = self.index.schema();
        let searcher = self.reader.searcher();
        let title = schema.get_field("title")?;
        let summary = schema.get_field("summary")?;
        let query_parser = QueryParser::for_index(&self.index, vec![title, summary]);
        // Tantivy uses '*' to match all docs, not empty string
        let term = &query.remainder.join(" ");
        let term = if term.is_empty() { "*" } else { term };
        info!("query_parser('{term}')");

        let tantivy_query = query_parser.parse_query(term)?;

        let tag = schema.get_field("tag")?;
        let is_read = schema.get_field("is_read")?;
        let mut terms = vec![(Occur::Must, tantivy_query)];
        for t in &query.tags {
            let facet = Facet::from(&format!("/{t}"));
            let facet_term = Term::from_facet(tag, &facet);
            let facet_term_query = Box::new(TermQuery::new(facet_term, IndexRecordOption::Basic));
            terms.push((Occur::Must, facet_term_query));
        }
        if query.unread_only {
            info!("searching for unread only");
            let term = Term::from_field_bool(is_read, false);
            terms.push((
                Occur::Must,
                Box::new(TermQuery::new(term, IndexRecordOption::Basic)),
            ));
        }
        let search_query = BooleanQuery::new(terms);
        Ok((searcher, Box::new(search_query)))
    }

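One way to follow the quoted "one reader for the lifetime of the program" advice while still surviving a full `drop_and_load_index` is to make the reader replaceable behind a lock. A hypothetical sketch, not what this commit does:

use std::sync::RwLock;
use tantivy::{Index, IndexReader, TantivyError};

// Hypothetical: swap in a fresh reader after a full reindex instead
// of paying the ~200-300 ms reader construction cost per request.
struct ReloadableReader {
    index: Index,
    reader: RwLock<IndexReader>,
}

impl ReloadableReader {
    fn recreate(&self) -> Result<(), TantivyError> {
        *self.reader.write().unwrap() = self.index.reader()?;
        Ok(())
    }
}
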
|
||||||
|
#[instrument(name="tantivy::count", skip_all, fields(query=%query))]
|
||||||
|
pub async fn count(&self, query: &Query) -> Result<usize, ServerError> {
|
||||||
|
if !is_tantivy_query(query) {
|
||||||
|
return Ok(0);
|
||||||
|
}
|
||||||
|
info!("tantivy::count {query:?}");
|
||||||
|
use tantivy::collector::Count;
|
||||||
|
let (searcher, query) = self.searcher_and_query(&query)?;
|
||||||
|
Ok(searcher.search(&query, &Count)?)
|
||||||
|
}
|
||||||
|
#[instrument(name="tantivy::search", skip_all, fields(query=%query))]
|
||||||
|
pub async fn search(
|
||||||
|
&self,
|
||||||
|
pool: &PgPool,
|
||||||
|
after: Option<i32>,
|
||||||
|
before: Option<i32>,
|
||||||
|
first: Option<i32>,
|
||||||
|
last: Option<i32>,
|
||||||
|
query: &Query,
|
||||||
|
) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
|
||||||
|
if !is_tantivy_query(query) {
|
||||||
|
return Ok(Vec::new());
|
||||||
|
}
|
||||||
|
let (offset, mut limit) = compute_offset_limit(after, before, first, last);
|
||||||
|
if before.is_none() {
|
||||||
|
// When searching forward, the +1 is to see if there are more pages of data available.
|
||||||
|
// Searching backwards implies there's more pages forward, because the value represented by
|
||||||
|
// `before` is on the next page.
|
||||||
|
limit = limit + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
let (searcher, search_query) = self.searcher_and_query(&query)?;
|
||||||
|
info!("Tantivy::search(query '{query:?}', off {offset}, lim {limit}, search_query {search_query:?})");
|
||||||
|
let top_docs = searcher.search(
|
||||||
|
&search_query,
|
||||||
|
&TopDocs::with_limit(limit as usize)
|
||||||
|
.and_offset(offset as usize)
|
||||||
|
.order_by_u64_field("date", tantivy::index::Order::Desc),
|
||||||
|
)?;
|
||||||
|
info!("search found {} docs", top_docs.len());
|
||||||
|
let uid = self.index.schema().get_field("uid")?;
|
||||||
|
let uids = top_docs
|
||||||
|
.into_iter()
|
||||||
|
.map(|(_, doc_address): (u64, DocAddress)| {
|
||||||
|
searcher.doc(doc_address).map(|doc: TantivyDocument| {
|
||||||
|
debug!("doc: {doc:#?}");
|
||||||
|
doc.get_first(uid)
|
||||||
|
.expect("doc missing uid")
|
||||||
|
.as_str()
|
||||||
|
.expect("doc str missing")
|
||||||
|
.to_string()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<String>, TantivyError>>()?;
|
||||||
|
|
||||||
|
//let uids = format!("'{}'", uids.join("','"));
|
||||||
|
info!("uids {uids:?}");
|
||||||
|
let rows = sqlx::query_file!("sql/threads-from-uid.sql", &uids as &[String])
|
||||||
|
.fetch_all(pool)
|
||||||
|
.await?;
|
||||||
|
let mut res = Vec::new();
|
||||||
|
info!("found {} hits joining w/ tantivy", rows.len());
|
||||||
|
for (i, r) in rows.into_iter().enumerate() {
|
||||||
|
res.push((
|
||||||
|
i as i32 + offset,
|
||||||
|
thread_summary_from_row(ThreadSummaryRecord {
|
||||||
|
site: r.site,
|
||||||
|
date: r.date,
|
||||||
|
is_read: r.is_read,
|
||||||
|
title: r.title,
|
||||||
|
uid: r.uid,
|
||||||
|
name: r.name,
|
||||||
|
corpus: Corpus::Tantivy,
|
||||||
|
})
|
||||||
|
.await,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
pub fn drop_and_load_index(&self) -> Result<(), TantivyError> {
|
||||||
|
create_news_db(&self.db_path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_news_db(tantivy_db_path: &str) -> Result<(), TantivyError> {
    info!("create_news_db");
    // Don't care if the directory didn't exist
    let _ = std::fs::remove_dir_all(tantivy_db_path);
    std::fs::create_dir_all(tantivy_db_path)?;
    use tantivy::schema::*;
    let mut schema_builder = Schema::builder();
    schema_builder.add_text_field("site", STRING | STORED);
    schema_builder.add_text_field("title", TEXT | STORED);
    schema_builder.add_text_field("summary", TEXT);
    schema_builder.add_text_field("link", STRING | STORED);
    schema_builder.add_date_field("date", FAST | INDEXED | STORED);
    schema_builder.add_bool_field("is_read", FAST | INDEXED | STORED);
    schema_builder.add_text_field("uid", STRING | STORED);
    schema_builder.add_u64_field("id", FAST);
    schema_builder.add_facet_field("tag", FacetOptions::default());

    let schema = schema_builder.build();
    Index::create_in_dir(tantivy_db_path, schema)?;
    Ok(())
}

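Field options matter here: `uid` and `link` are `STRING` (indexed as a single untokenized term), which is what lets `reindex_uids` delete a document by exact uid, while `title` and `summary` are `TEXT` (tokenized) for full-text queries. A small helper illustrating the exact-match side:

use tantivy::{schema::Schema, TantivyError, Term};

// Because "uid" is a STRING field, the whole value is one token and
// this Term matches exactly the document carrying that uid (which is
// why delete_term() in reindex_uids is reliable).
fn uid_term(schema: &Schema, uid: &str) -> Result<Term, TantivyError> {
    Ok(Term::from_field_text(schema.get_field("uid")?, uid))
}
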
struct PostgresDoc {
    site: String,
    title: String,
    summary: String,
    link: String,
    date: PrimitiveDateTime,
    is_read: bool,
    uid: String,
    id: i32,
}
35 server/src/ws.rs Normal file
@@ -0,0 +1,35 @@
use std::{collections::HashMap, net::SocketAddr};

use axum::extract::ws::{Message, WebSocket};
use letterbox_shared::WebsocketMessage;
use tracing::{info, warn};

#[derive(Default)]
pub struct ConnectionTracker {
    peers: HashMap<SocketAddr, WebSocket>,
}

impl ConnectionTracker {
    pub async fn add_peer(&mut self, socket: WebSocket, who: SocketAddr) {
        warn!("adding {who:?} to connection tracker");
        self.peers.insert(who, socket);
        self.send_message_all(WebsocketMessage::RefreshMessages)
            .await;
    }

    pub async fn send_message_all(&mut self, msg: WebsocketMessage) {
        info!("send_message_all {msg}");
        let m = serde_json::to_string(&msg).expect("failed to json encode WebsocketMessage");
        let mut bad_peers = Vec::new();
        for (who, socket) in self.peers.iter_mut() {
            if let Err(e) = socket.send(Message::Text(m.clone().into())).await {
                warn!("{:?} is bad, scheduling for removal: {e}", who);
                bad_peers.push(*who);
            }
        }

        for b in bad_peers {
            info!("removing bad peer {b:?}");
            self.peers.remove(&b);
        }
    }
}
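Nothing in this file accepts connections; presumably an axum route upgrades the HTTP request and hands the socket to the tracker. A hypothetical wiring sketch (the handler name and the `Arc<Mutex<_>>` state shape are assumptions):

use std::{net::SocketAddr, sync::Arc};

use axum::{
    extract::{ws::WebSocketUpgrade, ConnectInfo, State},
    response::IntoResponse,
};
use tokio::sync::Mutex;

// Hypothetical handler: upgrade, then register the socket with the
// shared ConnectionTracker so broadcasts reach this peer.
async fn ws_handler(
    ws: WebSocketUpgrade,
    ConnectInfo(who): ConnectInfo<SocketAddr>,
    State(tracker): State<Arc<Mutex<ConnectionTracker>>>,
) -> impl IntoResponse {
    ws.on_upgrade(move |socket| async move {
        tracker.lock().await.add_peer(socket, who).await;
    })
}
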
59 server/static/graphql-playground.html Normal file
@@ -0,0 +1,59 @@
<!DOCTYPE html>
<html>

<head>
  <meta charset=utf-8 />
  <meta name="viewport" content="user-scalable=no, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, minimal-ui">
  <title>GraphQL Playground</title>
  <link rel="stylesheet" href="//cdn.jsdelivr.net/npm/graphql-playground-react/build/static/css/index.css" />
  <link rel="shortcut icon" href="//cdn.jsdelivr.net/npm/graphql-playground-react/build/favicon.png" />
  <script src="//cdn.jsdelivr.net/npm/graphql-playground-react/build/static/js/middleware.js"></script>
</head>

<body>
  <div id="root">
    <style>
      body {
        background-color: rgb(23, 42, 58);
        font-family: Open Sans, sans-serif;
        height: 90vh;
      }

      #root {
        height: 100%;
        width: 100%;
        display: flex;
        align-items: center;
        justify-content: center;
      }

      .loading {
        font-size: 32px;
        font-weight: 200;
        color: rgba(255, 255, 255, .6);
        margin-left: 20px;
      }

      img {
        width: 78px;
        height: 78px;
      }

      .title {
        font-weight: 400;
      }
    </style>
    <img src='//cdn.jsdelivr.net/npm/graphql-playground-react/build/logo.png' alt=''>
    <div class="loading"> Loading
      <span class="title">GraphQL Playground</span>
    </div>
  </div>
  <script>window.addEventListener('load', function (event) {
      GraphQLPlayground.init(document.getElementById('root'), {
        // options as 'endpoint' belong here
        endpoint: "/api/graphql",
      })
    })</script>
</body>

</html>
42 server/static/vars.css Normal file
@@ -0,0 +1,42 @@
:root {
    --active-brightness: 0.85;
    --border-radius: 5px;
    --box-shadow: 2px 2px 10px;
    --color-accent: #118bee15;
    --color-bg: #fff;
    --color-bg-secondary: #e9e9e9;
    --color-link: #118bee;
    --color-secondary: #920de9;
    --color-secondary-accent: #920de90b;
    --color-shadow: #f4f4f4;
    --color-table: #118bee;
    --color-text: #000;
    --color-text-secondary: #999;
    --color-scrollbar: #cacae8;
    --font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
    --hover-brightness: 1.2;
    --justify-important: center;
    --justify-normal: left;
    --line-height: 1.5;
    /*
    --width-card: 285px;
    --width-card-medium: 460px;
    --width-card-wide: 800px;
    */
    --width-content: 1080px;
}

@media (prefers-color-scheme: dark) {
    :root[color-mode="user"] {
        --color-accent: #0097fc4f;
        --color-bg: #333;
        --color-bg-secondary: #555;
        --color-link: #0097fc;
        --color-secondary: #e20de9;
        --color-secondary-accent: #e20de94f;
        --color-shadow: #bbbbbb20;
        --color-table: #0097fc;
        --color-text: #f7f7f7;
        --color-text-secondary: #aaa;
    }
}
@@ -1,10 +1,20 @@
 [package]
-name = "shared"
-version = "0.1.0"
-edition = "2021"
+name = "letterbox-shared"
+description = "Shared module for letterbox"
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+repository.workspace = true
+version.workspace = true
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-notmuch = { path = "../notmuch" }
-serde = { version = "1.0.147", features = ["derive"] }
+build-info = "0.0.40"
+letterbox-notmuch = { path = "../notmuch", version = "0.17.22", registry = "xinu" }
+regex = "1.11.1"
+serde = { version = "1.0.219", features = ["derive"] }
+sqlx = "0.8.5"
+strum_macros = "0.27.1"
+tracing = "0.1.41"
@@ -1,5 +1,14 @@
|
|||||||
use notmuch::SearchSummary;
|
use std::{
|
||||||
|
convert::Infallible,
|
||||||
|
hash::{DefaultHasher, Hash, Hasher},
|
||||||
|
str::FromStr,
|
||||||
|
};
|
||||||
|
|
||||||
|
use build_info::{BuildInfo, VersionControl};
|
||||||
|
use letterbox_notmuch::SearchSummary;
|
||||||
|
use regex::{RegexBuilder, RegexSetBuilder};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use tracing::debug;
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
#[derive(Serialize, Deserialize, Debug)]
|
||||||
pub struct SearchResult {
|
pub struct SearchResult {
|
||||||
@@ -10,26 +19,250 @@ pub struct SearchResult {
|
|||||||
pub total: usize,
|
pub total: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
#[derive(Serialize, Deserialize, Debug, strum_macros::Display)]
|
||||||
pub struct ShowResult {
|
pub enum WebsocketMessage {
|
||||||
messages: Vec<Message>,
|
RefreshMessages,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type AttachementId = String;
|
pub mod urls {
|
||||||
|
pub const MOUNT_POINT: &'static str = "/api";
|
||||||
/// # Number of seconds since the Epoch
|
pub fn view_original(host: Option<&str>, id: &str) -> String {
|
||||||
pub type UnixTime = isize;
|
if let Some(host) = host {
|
||||||
|
format!("//{host}/api/original/{id}")
|
||||||
#[derive(Serialize, Deserialize, Debug, Default)]
|
} else {
|
||||||
pub struct Message {
|
format!("/api/original/{id}")
|
||||||
pub from: String,
|
}
|
||||||
pub to: Option<String>,
|
}
|
||||||
pub cc: Option<String>,
|
pub fn cid_prefix(host: Option<&str>, cid: &str) -> String {
|
||||||
pub timestamp: UnixTime, // date header as unix time
|
if let Some(host) = host {
|
||||||
pub date_relative: String, // user-friendly timestamp
|
format!("//{host}/api/cid/{cid}/")
|
||||||
pub tags: Vec<String>,
|
} else {
|
||||||
|
format!("/api/cid/{cid}/")
|
||||||
// HTML formatted body
|
}
|
||||||
pub body: String,
|
}
|
||||||
pub attachment: Vec<AttachementId>,
|
pub fn download_attachment(host: Option<&str>, id: &str, idx: &str, filename: &str) -> String {
|
||||||
|
if let Some(host) = host {
|
||||||
|
format!(
|
||||||
|
"//{host}/api/download/attachment/{}/{}/{}",
|
||||||
|
id, idx, filename
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
format!("/api/download/attachment/{}/{}/{}", id, idx, filename)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn build_version(bi: fn() -> &'static BuildInfo) -> String {
|
||||||
|
fn commit(git: &Option<VersionControl>) -> String {
|
||||||
|
let Some(VersionControl::Git(git)) = git else {
|
||||||
|
return String::new();
|
||||||
|
};
|
||||||
|
let mut s = vec!["-".to_string(), git.commit_short_id.clone()];
|
||||||
|
if let Some(branch) = &git.branch {
|
||||||
|
s.push(format!(" ({branch})"));
|
||||||
|
}
|
||||||
|
s.join("")
|
||||||
|
}
|
||||||
|
let bi = bi();
|
||||||
|
|
||||||
|
format!("v{}{}", bi.crate_info.version, commit(&bi.version_control)).to_string()
|
||||||
|
}
|
||||||
|
pub fn compute_color(data: &str) -> String {
|
||||||
|
let mut hasher = DefaultHasher::new();
|
||||||
|
data.hash(&mut hasher);
|
||||||
|
format!("#{:06x}", hasher.finish() % (1 << 24))
|
||||||
|
}
|
||||||
|
|
||||||
+#[derive(
+    Copy, Clone, Debug, Default, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize,
+)]
+pub enum MatchType {
+    From,
+    Sender,
+    To,
+    Cc,
+    Subject,
+    ListId,
+    DeliveredTo,
+    XForwardedTo,
+    ReplyTo,
+    XOriginalTo,
+    XSpam,
+    Body,
+    #[default]
+    Unknown,
+}
+
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct Match {
+    pub match_type: MatchType,
+    pub needle: String,
+}
+
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct Rule {
+    pub stop_on_match: bool,
+    pub matches: Vec<Match>,
+    pub tag: String,
+}
+
+impl Rule {
+    pub fn is_match(&self, header_key: &str, header_value: &str) -> bool {
+        let pats: Vec<_> = self
+            .matches
+            .iter()
+            .filter_map(|m| match m.match_type {
+                MatchType::To => Some("^(to|cc|bcc|x-original-to)$"),
+                MatchType::From => Some("^from$"),
+                MatchType::Sender => Some("^sender$"),
+                MatchType::Subject => Some("^subject$"),
+                MatchType::ListId => Some("^list-id$"),
+                MatchType::XOriginalTo => Some("^x-original-to$"),
+                MatchType::ReplyTo => Some("^reply-to$"),
+                MatchType::XSpam => Some("^x-spam$"),
+                MatchType::Body => None,
+                c => panic!("TODO handle '{c:?}' match type"),
+            })
+            .collect();
+
+        let set = RegexSetBuilder::new(&pats)
+            .case_insensitive(true)
+            .build()
+            .expect("failed to compile regex for matches");
+        let matches: Vec<_> = set.matches(header_key).into_iter().collect();
+        if !matches.is_empty() {
+            //info!("matched key '{header_key}' '{header_value}'");
+            for m_idx in matches {
+                let needle = regex::escape(&self.matches[m_idx].needle);
+                let pat = RegexBuilder::new(&needle)
+                    .case_insensitive(true)
+                    .build()
+                    .expect("failed to compile regex for needle");
+                if pat.is_match(header_value) {
+                    debug!("{header_key} matched {header_value} against {needle}");
+                    return true;
+                }
+            }
+        }
+        false
+    }
+}
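`Rule::is_match` narrows by header name with a `RegexSet`, then tests the escaped, case-insensitive needle against the header value. A minimal usage sketch under the types above (addresses are made up). One thing to watch: because `filter_map` drops `MatchType::Body` entries, the set index `m_idx` only lines up with `self.matches` when no `Body` match precedes the pattern that fired.

```rust
// Hypothetical rule: tag anything from a newsletter sender.
let rule = Rule {
    stop_on_match: true,
    matches: vec![Match {
        match_type: MatchType::From,
        needle: "news@example.com".to_string(),
    }],
    tag: "newsletters".to_string(),
};

// Header-name matching is case-insensitive ("From" vs "from"), and the
// needle only has to occur somewhere within the header value.
assert!(rule.is_match("From", "Example News <news@example.com>"));
// A From-only rule never fires on a Subject header.
assert!(!rule.is_match("Subject", "news@example.com"));
```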
+
+mod matches {
+    // From https://linux.die.net/man/5/procmailrc
+    // If the regular expression contains '^TO_' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^-a-zA-Z0-9_.])?)'
+    // If the regular expression contains '^TO' it will be substituted by '(^((Original-)?(Resent-)?(To|Cc|Bcc)|(X-Envelope |Apparently(-Resent)?)-To):(.*[^a-zA-Z])?)', which should catch all destination specifications containing a specific word.
+
+    pub const TO: &'static str = "TO";
+    pub const CC: &'static str = "Cc";
+    pub const TOCC: &'static str = "(TO|Cc)";
+    pub const FROM: &'static str = "From";
+    pub const SENDER: &'static str = "Sender";
+    pub const SUBJECT: &'static str = "Subject";
+    pub const DELIVERED_TO: &'static str = "Delivered-To";
+    pub const X_FORWARDED_TO: &'static str = "X-Forwarded-To";
+    pub const REPLY_TO: &'static str = "Reply-To";
+    pub const X_ORIGINAL_TO: &'static str = "X-Original-To";
+    pub const LIST_ID: &'static str = "List-ID";
+    pub const X_SPAM: &'static str = "X-Spam";
+    pub const X_SPAM_FLAG: &'static str = "X-Spam-Flag";
+}
+
+impl FromStr for Match {
+    type Err = Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        // Examples:
+        // "* 1^0 ^TOsonyrewards.com@xinu.tv"
+        // "* ^TOsonyrewards.com@xinu.tv"
+        let mut it = s.split_whitespace().skip(1);
+        let mut needle = it.next().unwrap();
+        if needle == "1^0" {
+            needle = it.next().unwrap();
+        }
+        let mut needle = vec![needle];
+        needle.extend(it);
+        let needle = needle.join(" ");
+        let first = needle.chars().nth(0).unwrap_or(' ');
+        use matches::*;
+        if first == '^' {
+            let needle = &needle[1..];
+            if needle.starts_with(TO) {
+                return Ok(Match {
+                    match_type: MatchType::To,
+                    needle: cleanup_match(TO, needle),
+                });
+            } else if needle.starts_with(FROM) {
+                return Ok(Match {
+                    match_type: MatchType::From,
+                    needle: cleanup_match(FROM, needle),
+                });
+            } else if needle.starts_with(CC) {
+                return Ok(Match {
+                    match_type: MatchType::Cc,
+                    needle: cleanup_match(CC, needle),
+                });
+            } else if needle.starts_with(TOCC) {
+                return Ok(Match {
+                    match_type: MatchType::To,
+                    needle: cleanup_match(TOCC, needle),
+                });
+            } else if needle.starts_with(SENDER) {
+                return Ok(Match {
+                    match_type: MatchType::Sender,
+                    needle: cleanup_match(SENDER, needle),
+                });
+            } else if needle.starts_with(SUBJECT) {
+                return Ok(Match {
+                    match_type: MatchType::Subject,
+                    needle: cleanup_match(SUBJECT, needle),
+                });
+            } else if needle.starts_with(X_ORIGINAL_TO) {
+                return Ok(Match {
+                    match_type: MatchType::XOriginalTo,
+                    needle: cleanup_match(X_ORIGINAL_TO, needle),
+                });
+            } else if needle.starts_with(LIST_ID) {
+                return Ok(Match {
+                    match_type: MatchType::ListId,
+                    needle: cleanup_match(LIST_ID, needle),
+                });
+            } else if needle.starts_with(REPLY_TO) {
+                return Ok(Match {
+                    match_type: MatchType::ReplyTo,
+                    needle: cleanup_match(REPLY_TO, needle),
+                });
+            } else if needle.starts_with(X_SPAM_FLAG) {
+                return Ok(Match {
+                    match_type: MatchType::XSpam,
+                    needle: '*'.to_string(),
+                });
+            } else if needle.starts_with(X_SPAM) {
+                return Ok(Match {
+                    match_type: MatchType::XSpam,
+                    needle: '*'.to_string(),
+                });
+            } else if needle.starts_with(DELIVERED_TO) {
+                return Ok(Match {
+                    match_type: MatchType::DeliveredTo,
+                    needle: cleanup_match(DELIVERED_TO, needle),
+                });
+            } else if needle.starts_with(X_FORWARDED_TO) {
+                return Ok(Match {
+                    match_type: MatchType::XForwardedTo,
+                    needle: cleanup_match(X_FORWARDED_TO, needle),
+                });
+            } else {
+                unreachable!("needle: '{needle}'")
+            }
+        } else {
+            return Ok(Match {
+                match_type: MatchType::Body,
+                needle: cleanup_match("", &needle),
+            });
+        }
+    }
+}
+
+fn unescape(s: &str) -> String {
+    s.replace('\\', "")
+}
+
+pub fn cleanup_match(prefix: &str, s: &str) -> String {
+    unescape(&s[prefix.len()..]).replace(".*", "")
 }
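The `FromStr` impl ingests procmail-style condition lines (the `* 1^0 ^TO...` syntax from a `.procmailrc`), mapping the `^`-prefixed header shorthand onto `MatchType` and stripping regex residue from the needle. A quick sketch of the expected round trip, using the sample line from the comments above:

```rust
use std::str::FromStr;

// "* ^TOsonyrewards.com@xinu.tv" is one of the sample lines in the comments.
let m = Match::from_str("* ^TOsonyrewards.com@xinu.tv").unwrap();
assert_eq!(m.match_type, MatchType::To);
// cleanup_match() drops the "TO" prefix, backslash escapes, and any ".*".
assert_eq!(m.needle, "sonyrewards.com@xinu.tv");

// A condition without a leading '^' is treated as a body match.
let body = Match::from_str("* some phrase").unwrap();
assert_eq!(body.match_type, MatchType::Body);
```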
@@ -1,39 +1,58 @@
 [package]
-version = "0.1.0"
-name = "letterbox"
-repository = "https://github.com/seed-rs/seed-quickstart"
-authors = ["Bill Thiede <git@xinu.tv>"]
-description = "App Description"
-categories = ["category"]
-license = "MIT"
-readme = "./README.md"
-edition = "2018"
+name = "letterbox-web"
+description = "Web frontend for letterbox"
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+repository.workspace = true
+version.workspace = true
 
-[lib]
-crate-type = ["cdylib"]
+[build-dependencies]
+build-info-build = "0.0.40"
 
 [dev-dependencies]
-wasm-bindgen-test = "0.3.33"
+wasm-bindgen-test = "0.3.50"
 
 [dependencies]
 console_error_panic_hook = "0.1.7"
-log = "0.4.17"
-seed = "0.9.2"
-console_log = {git = "http://git-private.h.xinu.tv/wathiede/console_log.git"}
-serde = { version = "1.0.147", features = ["derive"] }
-notmuch = {path = "../notmuch"}
-shared = {path = "../shared"}
-itertools = "0.10.5"
-serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
-wasm-timer = "0.2.5"
-css-inline = "0.8.5"
+log = "0.4.27"
+seed = { version = "0.10.0", features = ["routing"] }
+#seed = "0.9.2"
+console_log = { version = "0.1.4", registry = "xinu" }
+serde = { version = "1.0.219", features = ["derive"] }
+itertools = "0.14.0"
+serde_json = { version = "1.0.140", features = ["unbounded_depth"] }
+chrono = "0.4.40"
+graphql_client = "0.14.0"
+thiserror = "2.0.12"
+gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
+human_format = "1.1.0"
+build-info = "0.0.40"
+wasm-bindgen = "=0.2.100"
+uuid = { version = "1.16.0", features = [
+    "js",
+] } # direct dep to set js feature, prevents Rng issues
+letterbox-shared = { version = "0.17.9", registry = "xinu" }
+seed_hooks = { version = "0.4.1", registry = "xinu" }
+strum_macros = "0.27.1"
+gloo-console = "0.3.0"
+
+[target.'cfg(target_arch = "wasm32")'.dependencies]
+wasm-sockets = "1.0.0"
 
 [package.metadata.wasm-pack.profile.release]
 wasm-opt = ['-Os']
 
 [dependencies.web-sys]
-version = "0.3.58"
+version = "0.3.77"
 features = [
+    "Clipboard",
+    "DomRect",
+    "Element",
+    "History",
     "MediaQueryList",
-    "Window"
+    "Navigator",
+    "Performance",
+    "ScrollRestoration",
+    "Window",
 ]
@@ -1,6 +0,0 @@
-.PHONY: all
-
-# Build in release mode and push to minio for serving.
-all:
-	trunk build --release
-	mc mirror --overwrite --remove dist/ m/letterbox/
web/Trunk.toml (new file, 27 lines)
@@ -0,0 +1,27 @@
+[build]
+release = false
+
+[serve]
+# The address to serve on.
+address = "0.0.0.0"
+port = 6758
+
+[[proxy]]
+ws = true
+backend = "ws://localhost:9345/api/ws"
+
+[[proxy]]
+backend = "http://localhost:9345/api/"
+
+[[proxy]]
+backend = "http://localhost:9345/notification/"
+
+[[hooks]]
+stage = "pre_build"
+command = "printf"
+command_arguments = ["\\033c"]
+
+#[[hooks]]
+#stage = "pre_build"
+#command = "cargo"
+#command_arguments = [ "test" ]
web/build.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
+fn main() {
+    // Calling `build_info_build::build_script` collects all data and makes it available to `build_info::build_info!`
+    // and `build_info::format!` in the main program.
+    build_info_build::build_script();
+}
web/graphql/add_tag.graphql (new file, 3 lines)
@@ -0,0 +1,3 @@
+mutation AddTagMutation($query: String!, $tag: String!) {
+  tagAdd(query:$query, tag:$tag)
+}

web/graphql/catchup.graphql (new file, 3 lines)
@@ -0,0 +1,3 @@
+query CatchupQuery($query: String!) {
+  catchup(query: $query)
+}
web/graphql/front_page.graphql (new file, 27 lines)
@@ -0,0 +1,27 @@
+query FrontPageQuery($query: String!, $after: String, $before: String, $first: Int, $last: Int) {
+  count(query: $query)
+  search(query: $query, after: $after, before: $before, first: $first, last: $last) {
+    pageInfo {
+      hasPreviousPage
+      hasNextPage
+      startCursor
+      endCursor
+    }
+    nodes {
+      thread
+      total
+      timestamp
+      subject
+      authors
+      tags
+      corpus
+    }
+  }
+  tags {
+    name
+    bgColor
+    fgColor
+    unread
+  }
+  version
+}
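`FrontPageQuery` drives Relay-style cursor pagination: `after`/`first` walk forward from `endCursor`, `before`/`last` walk backward from `startCursor`. A sketch of the variables a caller would build for "next page", assuming the `front_page_query` module that `graphql_client` generates from this file (see `web/src/graphql.rs` below); `previous_end_cursor` is a hypothetical value taken from the prior response:

```rust
// Hypothetical "next page" request: resume after the cursor the previous
// response reported in pageInfo.endCursor.
let vars = front_page_query::Variables {
    query: "is:unread".to_string(),
    after: Some(previous_end_cursor), // e.g. page_info.end_cursor from last result
    before: None,
    first: Some(20), // SEARCH_RESULTS_PER_PAGE in web/src/consts.rs
    last: None,
};
let request_body = FrontPageQuery::build_query(vars);
```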
web/graphql/mark_read.graphql (new file, 3 lines)
@@ -0,0 +1,3 @@
+mutation MarkReadMutation($query: String!, $unread: Boolean!) {
+  setReadStatus(query:$query, unread:$unread)
+}

web/graphql/refresh.graphql (new file, 3 lines)
@@ -0,0 +1,3 @@
+mutation RefreshMutation {
+  refresh
+}

web/graphql/remove_tag.graphql (new file, 3 lines)
@@ -0,0 +1,3 @@
+mutation RemoveTagMutation($query: String!, $tag: String!) {
+  tagRemove(query:$query, tag:$tag)
+}
web/graphql/schema.json (new file, 2833 lines)
Diff suppressed because the file is too large.
web/graphql/show_thread.graphql (new file, 77 lines)
@@ -0,0 +1,77 @@
+query ShowThreadQuery($threadId: String!) {
+  thread(threadId: $threadId) {
+    __typename ... on NewsPost {
+      threadId
+      isRead
+      slug
+      site
+      title
+      body
+      url
+      timestamp
+      # TODO: unread
+    }
+    __typename ... on EmailThread {
+      threadId,
+      subject
+      messages {
+        id
+        subject
+        tags
+        from {
+          name
+          addr
+          photoUrl
+        }
+        to {
+          name
+          addr
+        }
+        cc {
+          name
+          addr
+        }
+        xOriginalTo {
+          name
+          addr
+        }
+        deliveredTo {
+          name
+          addr
+        }
+        timestamp
+        body {
+          __typename
+          ... on UnhandledContentType {
+            contents
+            contentTree
+          }
+          ... on PlainText {
+            contents
+            contentTree
+          }
+          ... on Html {
+            contents
+            contentTree
+          }
+        }
+        path
+        attachments {
+          id
+          idx
+          filename
+          contentType
+          contentId
+          size
+        }
+      }
+    }
+  }
+  tags {
+    name
+    bgColor
+    fgColor
+    unread
+  }
+  version
+}
web/graphql/update_schema.sh (new executable file, 4 lines)
@@ -0,0 +1,4 @@
+DEV_HOST=localhost
+DEV_PORT=9345
+graphql-client introspect-schema http://${DEV_HOST:?}:${DEV_PORT:?}/api/graphql --output schema.json
+git diff schema.json
@@ -2,91 +2,24 @@
 <html lang="en">
 
 <head>
     <meta charset="utf-8">
     <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
-    <link rel="modulepreload" href="/pkg/package.js" as="script" type="text/javascript">
-    <link rel="preload" href="/pkg/package_bg.wasm" as="fetch" type="application/wasm" crossorigin="anonymous">
-    <link rel="stylesheet", href="https://jenil.github.io/bulmaswatch/cyborg/bulmaswatch.min.css">
-    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.3.0/css/all.min.css" integrity="sha512-SzlrxWUlpfuzQ+pcUCosxcglQRNAq/DZjVsC0lE40xsADsfeQoEypE+enwcOiGjk/bSuGGKHEyjSoQ1zVisanQ==" crossorigin="anonymous" referrerpolicy="no-referrer" />
-    <style>
-        .message {
-            padding-left: 0.5em;
-        }
-        .body {
-            background: white;
-            color: black;
-            padding-bottom: 1em;
-        }
-        .error {
-            background-color: red;
-        }
-        .view-part-text-plain {
-            white-space: pre-line;
-        }
-        iframe {
-            height: 100%;
-            width: 100%;
-        }
-        .index .from {
-            width: 200px;
-        }
-        .index .subject {
-        }
-        .index .date {
-            white-space: nowrap;
-        }
-        .footer {
-            background-color: #eee;
-            color: #222;
-            position: fixed;
-            bottom: 0;
-            left: 0;
-            right: 0;
-            height: 3em;
-            padding: 1em;
-        }
-        .tag {
-            margin-right: 2px;
-        }
-        .debug ul {
-            padding-left: 2em;
-        }
-        .debug li {
-        }
-        .loading {
-            animation-name: spin;
-            animation-duration: 1000ms;
-            animation-iteration-count: infinite;
-            animation-timing-function: linear;
-        }
-        @keyframes spin {
-            from {
-                transform: rotate(0deg);
-            }
-            to {
-                transform: rotate(360deg);
-            }
-        }
-        @media (max-width: 768px) {
-            .section {
-                padding: 1.5em;
-            }
-        }
-        input, .input {
-            color: #000;
-        }
-        input::placeholder, .input::placeholder {
-            color: #555;
-        }
-    </style>
+    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.7.2/css/all.min.css"
+        integrity="sha512-Evv84Mr4kqVGRNSgIGL/F/aIDqQb7xQ2vcrdIwxfjThSH8CSR7PBEakCr51Ck+w+/U6swU2Im1vVX0SVk9ABhg=="
+        crossorigin="anonymous" referrerpolicy="no-referrer" />
+    <link rel="icon" href="https://static.xinu.tv/favicon/letterbox.svg" />
+    <!-- tall thin font for user icon -->
+    <link rel="preconnect" href="https://fonts.googleapis.com">
+    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
+    <link href="https://fonts.googleapis.com/css2?family=Poppins:wght@700&display=swap" rel="stylesheet">
+    <!-- <link data-trunk rel="css" href="static/site-specific.css" /> -->
+    <link data-trunk rel="css" href="static/vars.css" />
+    <link data-trunk rel="tailwind-css" href="./src/tailwind.css" />
+    <link data-trunk rel="css" href="static/overrides.css" />
 </head>
 
 <body>
     <section id="app"></section>
-    <script type="module">
-        import init from '/pkg/package.js';
-        init('/pkg/package_bg.wasm');
-    </script>
 </body>
 
 </html>
@@ -1,16 +1,14 @@
-use seed::Url;
-
-const BASE_URL: &str = "/api";
-
-pub fn refresh() -> String {
-    format!("{BASE_URL}/refresh")
-}
-
-pub fn search(query: &str, page: usize, results_per_page: usize) -> String {
-    let query = Url::encode_uri_component(query);
-    format!("{BASE_URL}/search/{query}?page={page}&results_per_page={results_per_page}")
-}
-
-pub fn show(tid: &str) -> String {
-    format!("{BASE_URL}/show/{tid}")
-}
-
-pub fn original(message_id: &str) -> String {
-    format!("{BASE_URL}/original/{message_id}")
-}
+pub mod urls {
+    use seed::Url;
+
+    pub fn search(query: &str, page: usize) -> Url {
+        let query = Url::encode_uri_component(query);
+        if page > 0 {
+            Url::new().set_hash_path(["s", &query, &format!("p{page}")])
+        } else {
+            Url::new().set_hash_path(["s", &query])
+        }
+    }
+
+    pub fn thread(tid: &str) -> Url {
+        Url::new().set_hash_path(["t", tid])
+    }
+}
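The reworked `api::urls` module stops building REST query strings and instead produces Seed `Url`s that differ only in the hash fragment, so navigation never leaves the single page. A sketch of the URLs this yields (the thread id is a made-up example; encoding is handled by `Url::encode_uri_component`):

```rust
use crate::api::urls;

// Roughly "#/s/tag%3Ainbox" — the first page omits the page component.
let first = urls::search("tag:inbox", 0);
// Roughly "#/s/tag%3Ainbox/p2" — later pages carry a "p<N>" segment,
// which on_url_changed() in web/src/state.rs parses back via page[1..].
let third = urls::search("tag:inbox", 2);
// Roughly "#/t/<thread-id>" — routes to the thread view.
let thread = urls::thread("0000000000001234");
```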
web/src/consts.rs (new file, 1 line)
@@ -0,0 +1 @@
+pub const SEARCH_RESULTS_PER_PAGE: usize = 20;
web/src/graphql.rs (new file, 77 lines)
@@ -0,0 +1,77 @@
+use gloo_net::{http::Request, Error};
+use graphql_client::GraphQLQuery;
+use serde::{de::DeserializeOwned, Serialize};
+
+// The paths are relative to the directory where your `Cargo.toml` is located.
+// Both json and the GraphQL schema language are supported as sources for the schema
+#[derive(GraphQLQuery)]
+#[graphql(
+    schema_path = "graphql/schema.json",
+    query_path = "graphql/front_page.graphql",
+    response_derives = "Debug"
+)]
+pub struct FrontPageQuery;
+
+#[derive(GraphQLQuery)]
+#[graphql(
+    schema_path = "graphql/schema.json",
+    query_path = "graphql/catchup.graphql",
+    response_derives = "Debug"
+)]
+pub struct CatchupQuery;
+
+#[derive(GraphQLQuery)]
+#[graphql(
+    schema_path = "graphql/schema.json",
+    query_path = "graphql/show_thread.graphql",
+    response_derives = "Debug"
+)]
+pub struct ShowThreadQuery;
+
+#[derive(GraphQLQuery)]
+#[graphql(
+    schema_path = "graphql/schema.json",
+    query_path = "graphql/mark_read.graphql",
+    response_derives = "Debug"
+)]
+pub struct MarkReadMutation;
+
+#[derive(GraphQLQuery)]
+#[graphql(
+    schema_path = "graphql/schema.json",
+    query_path = "graphql/add_tag.graphql",
+    response_derives = "Debug"
+)]
+pub struct AddTagMutation;
+
+#[derive(GraphQLQuery)]
+#[graphql(
+    schema_path = "graphql/schema.json",
+    query_path = "graphql/remove_tag.graphql",
+    response_derives = "Debug"
+)]
+pub struct RemoveTagMutation;
+
+#[derive(GraphQLQuery)]
+#[graphql(
+    schema_path = "graphql/schema.json",
+    query_path = "graphql/refresh.graphql",
+    response_derives = "Debug"
+)]
+pub struct RefreshMutation;
+
+pub async fn send_graphql<Body, Resp>(body: Body) -> Result<graphql_client::Response<Resp>, Error>
+where
+    Body: Serialize,
+    Resp: DeserializeOwned + 'static,
+{
+    use web_sys::RequestMode;
+
+    Request::post("/api/graphql/")
+        .mode(RequestMode::Cors)
+        .json(&body)?
+        .send()
+        .await?
+        .json()
+        .await
+}
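`send_graphql` is the single transport for every query and mutation above: serialize the generated request body, POST it to `/api/graphql/`, and deserialize the typed `graphql_client::Response` envelope. A hedged call-site sketch mirroring how `web/src/state.rs` below drives the refresh mutation:

```rust
use graphql_client::GraphQLQuery;

use crate::graphql::{refresh_mutation, send_graphql, RefreshMutation};

async fn trigger_refresh() {
    let body = RefreshMutation::build_query(refresh_mutation::Variables {});
    match send_graphql::<_, refresh_mutation::ResponseData>(body).await {
        // A transport error (Err) is distinct from GraphQL-level errors,
        // which arrive inside the Ok envelope's `errors` field.
        Ok(resp) => log::info!("refresh accepted: {:?}", resp.data),
        Err(e) => log::error!("refresh failed: {e}"),
    }
}
```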
web/src/lib.rs (deleted, 580 lines)
@@ -1,580 +0,0 @@
-mod api;
-mod nm;
-
-use std::{
-    collections::hash_map::DefaultHasher,
-    hash::{Hash, Hasher},
-};
-
-use itertools::Itertools;
-use log::{debug, error, info, Level};
-use notmuch::ThreadSet;
-use seed::{prelude::*, *};
-use serde::Deserialize;
-use wasm_timer::Instant;
-
-const SEARCH_RESULTS_PER_PAGE: usize = 20;
-
-// ------ ------
-// Init
-// ------ ------
-
-// `init` describes what should happen when your app started.
-fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
-    orders
-        .subscribe(on_url_changed)
-        .notify(subs::UrlChanged(url.clone()));
-
-    Model {
-        context: Context::None,
-        query: "".to_string(),
-        refreshing_state: RefreshingState::None,
-    }
-}
-
-fn on_url_changed(uc: subs::UrlChanged) -> Msg {
-    let mut url = uc.0;
-    info!(
-        "url changed '{}', history {}",
-        url,
-        history().length().unwrap_or(0)
-    );
-    let hpp = url.remaining_hash_path_parts();
-    match hpp.as_slice() {
-        ["t", tid] => Msg::ShowRequest(tid.to_string()),
-        ["s", query] => {
-            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
-            Msg::SearchRequest {
-                query,
-                page: 0,
-                results_per_page: SEARCH_RESULTS_PER_PAGE,
-            }
-        }
-        ["s", query, page] => {
-            let query = Url::decode_uri_component(query).unwrap_or("".to_string());
-            let page = page[1..].parse().unwrap_or(0);
-            Msg::SearchRequest {
-                query,
-                page,
-                results_per_page: SEARCH_RESULTS_PER_PAGE,
-            }
-        }
-        p => {
-            if !p.is_empty() {
-                info!("Unhandled path '{p:?}'");
-            }
-            Msg::SearchRequest {
-                query: "".to_string(),
-                page: 0,
-                results_per_page: SEARCH_RESULTS_PER_PAGE,
-            }
-        }
-    }
-}
-
-mod urls {
-    use seed::Url;
-    pub fn search(query: &str, page: usize) -> Url {
-        let query = Url::encode_uri_component(query);
-        if page > 0 {
-            Url::new().set_hash_path(["s", &query, &format!("p{page}")])
-        } else {
-            Url::new().set_hash_path(["s", &query])
-        }
-    }
-    pub fn thread(tid: &str) -> Url {
-        Url::new().set_hash_path(["t", tid])
-    }
-}
-
-// ------ ------
-// Model
-// ------ ------
-enum Context {
-    None,
-    Search(shared::SearchResult),
-    Thread(Vec<shared::Message>),
-}
-
-// `Model` describes our app state.
-struct Model {
-    query: String,
-    context: Context,
-    refreshing_state: RefreshingState,
-}
-
-#[derive(Debug, PartialEq)]
-enum RefreshingState {
-    None,
-    Loading,
-    Error(String),
-}
-
-// ------ ------
-// Update
-// ------ ------
-
-// (Remove the line below once any of your `Msg` variants doesn't implement `Copy`.)
-// `Msg` describes the different events you can modify state with.
-pub enum Msg {
-    Noop,
-    RefreshStart,
-    RefreshDone(Option<FetchError>),
-    SearchRequest {
-        query: String,
-        page: usize,
-        results_per_page: usize,
-    },
-    SearchResult(fetch::Result<shared::SearchResult>),
-    ShowRequest(String),
-    ShowResult(fetch::Result<Vec<shared::Message>>),
-    NextPage,
-    PreviousPage,
-}
-
-// `update` describes how to handle each `Msg`.
-fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
-    match msg {
-        Msg::Noop => {}
-        Msg::RefreshStart => {
-            model.refreshing_state = RefreshingState::Loading;
-            orders.perform_cmd(async move { Msg::RefreshDone(refresh_request().await.err()) });
-        }
-        Msg::RefreshDone(err) => {
-            model.refreshing_state = if let Some(err) = err {
-                RefreshingState::Error(format!("{:?}", err))
-            } else {
-                // If looking at search page, refresh the search to view update on the server side.
-                if let Context::Search(sr) = &model.context {
-                    let query = sr.query.clone();
-                    let page = sr.page;
-                    let results_per_page = sr.results_per_page;
-                    orders.perform_cmd(async move {
-                        Msg::SearchResult(search_request(&query, page, results_per_page).await)
-                    });
-                }
-
-                RefreshingState::None
-            };
-        }
-
-        Msg::SearchRequest {
-            query,
-            page,
-            results_per_page,
-        } => {
-            info!("searching for '{query}' pg {page} # / pg {results_per_page}");
-            model.query = query.clone();
-            orders.skip().perform_cmd(async move {
-                Msg::SearchResult(search_request(&query, page, results_per_page).await)
-            });
-        }
-        Msg::SearchResult(Ok(response_data)) => {
-            debug!("fetch ok {:#?}", response_data);
-            model.context = Context::Search(response_data);
-        }
-        Msg::SearchResult(Err(fetch_error)) => {
-            error!("fetch failed {:?}", fetch_error);
-        }
-
-        Msg::ShowRequest(tid) => {
-            orders
-                .skip()
-                .perform_cmd(async move { Msg::ShowResult(show_request(&tid).await) });
-        }
-        Msg::ShowResult(Ok(response_data)) => {
-            debug!("fetch ok {:#?}", response_data);
-            model.context = Context::Thread(response_data);
-        }
-        Msg::ShowResult(Err(fetch_error)) => {
-            error!("fetch failed {:?}", fetch_error);
-        }
-        Msg::NextPage => {
-            match &model.context {
-                Context::Search(sr) => {
-                    orders.request_url(urls::search(&sr.query, sr.page + 1));
-                }
-                Context::Thread(_) => (), // do nothing (yet?)
-                Context::None => (),      // do nothing (yet?)
-            };
-        }
-        Msg::PreviousPage => {
-            match &model.context {
-                Context::Search(sr) => {
-                    orders.request_url(urls::search(&sr.query, sr.page.saturating_sub(1)));
-                }
-                Context::Thread(_) => (), // do nothing (yet?)
-                Context::None => (),      // do nothing (yet?)
-            };
-        }
-    }
-}
-
-pub async fn show_request(tid: &str) -> fetch::Result<Vec<shared::Message>> {
-    let b = Request::new(api::show(tid))
-        .method(Method::Get)
-        .fetch()
-        .await?
-        .check_status()?
-        .bytes()
-        .await?;
-    let mut deserializer = serde_json::Deserializer::from_slice(&b);
-    deserializer.disable_recursion_limit();
-    Ok(Vec::<shared::Message>::deserialize(&mut deserializer)
-        .map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
-}
-async fn search_request(
-    query: &str,
-    page: usize,
-    results_per_page: usize,
-) -> fetch::Result<shared::SearchResult> {
-    Request::new(api::search(query, page, results_per_page))
-        .method(Method::Get)
-        .fetch()
-        .await?
-        .check_status()?
-        .json()
-        .await
-}
-
-async fn refresh_request() -> fetch::Result<()> {
-    let t = Request::new(api::refresh())
-        .method(Method::Get)
-        .fetch()
-        .await?
-        .check_status()?
-        .text()
-        .await?;
-    info!("refresh {t}");
-    Ok(())
-}
-
-// ------ ------
-// View
-// ------ ------
-
-fn set_title(title: &str) {
-    seed::document().set_title(&format!("lb: {}", title));
-}
-
-fn tags_chiclet(tags: &[String], is_mobile: bool) -> impl Iterator<Item = Node<Msg>> + '_ {
-    tags.iter().map(move |tag| {
-        let mut hasher = DefaultHasher::new();
-        tag.hash(&mut hasher);
-        let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
-        let style = style! {St::BackgroundColor=>hex};
-        let classes = C!["tag", IF!(is_mobile => "is-small")];
-        let tag = tag.clone();
-        a![
-            attrs! {
-                At::Href => urls::search(&format!("tag:{tag}"), 0)
-            },
-            match tag.as_str() {
-                "attachment" => span![classes, style, "📎"],
-                "replied" => span![classes, style, i![C!["fa-solid", "fa-reply"]]],
-                _ => span![classes, style, &tag],
-            },
-            ev(Ev::Click, move |_| Msg::SearchRequest {
-                query: format!("tag:{tag}"),
-                page: 0,
-                results_per_page: SEARCH_RESULTS_PER_PAGE,
-            })
-        ]
-    })
-}
-
-fn pretty_authors(authors: &str) -> impl Iterator<Item = Node<Msg>> + '_ {
-    let one_person = authors.matches(',').count() == 0;
-    let authors = authors.split(',');
-
-    Itertools::intersperse(
-        authors.filter_map(move |author| {
-            if one_person {
-                return Some(span![
-                    attrs! {
-                        At::Title => author.trim()},
-                    author
-                ]);
-            }
-            author.split_whitespace().nth(0).map(|first| {
-                span![
-                    attrs! {
-                        At::Title => author.trim()},
-                    first
-                ]
-            })
-        }),
-        span![", "],
-    )
-}
-
-fn view_mobile_search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
-    if query.is_empty() {
-        set_title("all mail");
-    } else {
-        set_title(query);
-    }
-    let summaries = &search_results.summary.0;
-    let rows = summaries.iter().map(|r| {
-        /*
-        let tid = r.thread.clone();
-        tr![
-            td![
-                C!["from"],
-                pretty_authors(&r.authors),
-                IF!(r.total>1 => small![" ", r.total.to_string()]),
-            ],
-            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
-            td![C!["date"], &r.date_relative],
-            ev(Ev::Click, move |_| Msg::ShowRequest(tid)),
-        ]
-        */
-        let tid = r.thread.clone();
-        div![
-            div![
-                C!["subject"],
-                &r.subject,
-                ev(Ev::Click, move |_| Msg::ShowRequest(tid)),
-            ],
-            div![
-                span![C!["from"], pretty_authors(&r.authors)],
-                span![C!["tags"], tags_chiclet(&r.tags, true)],
-            ],
-            span![C!["date"], &r.date_relative],
-            hr![],
-        ]
-    });
-    let first = search_results.page * search_results.results_per_page;
-    div![
-        h1!["Search results"],
-        view_search_pager(first, summaries.len(), search_results.total),
-        rows,
-        view_search_pager(first, summaries.len(), search_results.total)
-    ]
-}
-
-fn view_search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
-    if query.is_empty() {
-        set_title("all mail");
-    } else {
-        set_title(query);
-    }
-    let summaries = &search_results.summary.0;
-    let rows = summaries.iter().map(|r| {
-        let tid = r.thread.clone();
-        tr![
-            td![
-                C!["from"],
-                pretty_authors(&r.authors),
-                IF!(r.total>1 => small![" ", r.total.to_string()]),
-            ],
-            td![
-                C!["subject"],
-                tags_chiclet(&r.tags, false),
-                " ",
-                a![
-                    C!["has-text-light"],
-                    attrs! {
-                        At::Href => urls::thread(&tid)
-                    },
-                    &r.subject,
-                ]
-            ],
-            td![C!["date"], &r.date_relative]
-        ]
-    });
-    let first = search_results.page * search_results.results_per_page;
-    div![
-        view_search_pager(first, summaries.len(), search_results.total),
-        table![
-            C![
-                "table",
-                "index",
-                "is-fullwidth",
-                "is-hoverable",
-                "is-narrow",
-                "is-striped",
-            ],
-            thead![tr![
-                th![C!["from"], "From"],
-                th![C!["subject"], "Subject"],
-                th![C!["date"], "Date"]
-            ]],
-            tbody![rows]
-        ],
-        view_search_pager(first, summaries.len(), search_results.total)
-    ]
-}
-
-fn view_search_pager(start: usize, count: usize, total: usize) -> Node<Msg> {
-    let is_first = start <= 0;
-    let is_last = (start + SEARCH_RESULTS_PER_PAGE) >= total;
-    nav![
-        C!["pagination"],
-        a![
-            C![
-                "pagination-previous",
-                "button",
-                IF!(is_first => "is-static"),
-                IF!(is_first => "is-info"),
-            ],
-            "<",
-            ev(Ev::Click, |_| Msg::PreviousPage)
-        ],
-        a![
-            C!["pagination-next", "button", IF!(is_last => "is-static")],
-            IF!(is_last => attrs!{ At::Disabled=>true }),
-            ">",
-            ev(Ev::Click, |_| Msg::NextPage)
-        ],
-        ul![
-            C!["pagination-list"],
-            li![format!("{} - {} of {}", start, start + count, total)],
-        ],
-    ]
-}
-
-fn view_header(query: &str, refresh_request: &RefreshingState) -> Node<Msg> {
-    let is_loading = refresh_request == &RefreshingState::Loading;
-    let is_error = if let RefreshingState::Error(err) = refresh_request {
-        error!("Failed to refresh: {err:?}");
-        true
-    } else {
-        false
-    };
-    let query = Url::decode_uri_component(query).unwrap_or("".to_string());
-    nav![
-        C!["navbar"],
-        attrs! {At::Role=>"navigation"},
-        div![
-            C!["navbar-start"],
-            a![
-                C!["navbar-item", "button", IF![is_error => "is-danger"]],
-                span![i![C![
-                    "fa-solid",
-                    "fa-arrow-rotate-right",
-                    "refresh",
-                    IF![is_loading => "loading"],
-                ]]],
-                ev(Ev::Click, |_| Msg::RefreshStart),
-            ],
-            a![
-                C!["navbar-item", "button"],
-                attrs! {
-                    At::Href => urls::search("is:unread", 0)
-                },
-                "Unread",
-            ],
-            a![
-                C!["navbar-item", "button"],
-                attrs! {
-                    At::Href => urls::search("", 0)
-                },
-                "All",
-            ],
-            input![
-                C!["navbar-item", "input"],
-                attrs! {
-                    At::Placeholder => "Search";
-                    At::AutoFocus => true.as_at_value();
-                    At::Value => query,
-                },
-                input_ev(Ev::Input, |q| Msg::SearchRequest {
-                    query: Url::encode_uri_component(q),
-                    page: 0,
-                    results_per_page: SEARCH_RESULTS_PER_PAGE,
-                }),
-                // Resend search on enter.
-                keyboard_ev(Ev::KeyUp, move |e| if e.key_code() == 0x0d {
-                    Msg::SearchRequest {
-                        query: Url::encode_uri_component(query),
-                        page: 0,
-                        results_per_page: SEARCH_RESULTS_PER_PAGE,
-                    }
-                } else {
-                    Msg::Noop
-                }),
-            ]
-        ]
-    ]
-}
-
-fn view_footer(render_time_ms: u128) -> Node<Msg> {
-    footer![
-        C!["footer"],
-        div![
-            C!["content", "has-text-right", "is-size-7"],
-            format!("Render time {} ms", render_time_ms)
-        ]
-    ]
-}
-
-fn view_thread(messages: &[shared::Message]) -> Node<Msg> {
-    div![
-        "MESSAGES GO HERE",
-        ol![messages.iter().map(|msg| li![format!("{:?}", msg)])]
-    ]
-}
-
-fn view_desktop(model: &Model) -> Node<Msg> {
-    let content = match &model.context {
-        Context::None => div![h1!["Loading"]],
-        Context::Thread(thread_set) => view_thread(thread_set),
-        Context::Search(search_results) => view_search_results(&model.query, search_results),
-    };
-    div![
-        view_header(&model.query, &model.refreshing_state),
-        section![C!["section"], div![C!["container"], content],]
-    ]
-}
-
-fn view_mobile(model: &Model) -> Node<Msg> {
-    let content = match &model.context {
-        Context::None => div![h1!["Loading"]],
-        Context::Thread(thread_set) => view_thread(thread_set),
-        Context::Search(search_results) => view_mobile_search_results(&model.query, search_results),
-    };
-    div![
-        view_header(&model.query, &model.refreshing_state),
-        section![C!["section"], div![C!["content"], content],]
-    ]
-}
-
-// `view` describes what to display.
-fn view(model: &Model) -> Node<Msg> {
-    info!("refreshing {:?}", model.refreshing_state);
-    let is_mobile = seed::window()
-        .match_media("(max-width: 768px)")
-        .expect("failed media query")
-        .map(|mql| mql.matches())
-        .unwrap_or(false);
-
-    let start = Instant::now();
-    info!("view called");
-    div![
-        if is_mobile {
-            view_mobile(model)
-        } else {
-            view_desktop(model)
-        },
-        view_footer(start.elapsed().as_millis())
-    ]
-}
-
-// ------ ------
-// Start
-// ------ ------
-
-// (This function is invoked by `init` function in `index.html`.)
-#[wasm_bindgen(start)]
-pub fn start() {
-    // This provides better error messages in debug mode.
-    // It's disabled in release mode so it doesn't bloat up the file size.
-    #[cfg(debug_assertions)]
-    console_error_panic_hook::set_once();
-
-    let lvl = Level::Info;
-    console_log::init_with_level(lvl).expect("failed to initialize console logging");
-    // Mount the `app` to the element with the `id` "app".
-    App::start("app", init, update, view);
-}
web/src/main.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
+// (Lines like the one below ignore selected Clippy rules
+// - it's useful when you want to check your code with `cargo make verify`
+// but some rules are too "annoying" or are not applicable for your case.)
+#![allow(clippy::wildcard_imports)]
+// Until https://github.com/rust-lang/rust/issues/138762 is addressed in dependencies
+#![allow(wasm_c_abi)]
+
+use log::Level;
+use seed::App;
+
+mod api;
+mod consts;
+mod graphql;
+mod state;
+mod view;
+mod websocket;
+
+fn main() {
+    // This provides better error messages in debug mode.
+    // It's disabled in release mode so it doesn't bloat up the file size.
+    #[cfg(debug_assertions)]
+    console_error_panic_hook::set_once();
+
+    #[cfg(debug_assertions)]
+    let lvl = Level::Debug;
+    #[cfg(not(debug_assertions))]
+    let lvl = Level::Info;
+    console_log::init_with_level(lvl).expect("failed to initialize console logging");
+    // Mount the `app` to the element with the `id` "app".
+    App::start("app", state::init, state::update, view::view);
+}
web/src/nm.rs (deleted, 193 lines)
@@ -1,193 +0,0 @@
-use notmuch::{Content, Part, Thread, ThreadNode, ThreadSet};
-use seed::{prelude::*, *};
-use serde::de::Deserialize;
-
-use crate::{api, set_title, Msg};
-
-pub async fn show_request(tid: &str) -> fetch::Result<ThreadSet> {
-    let b = Request::new(api::show(tid))
-        .method(Method::Get)
-        .fetch()
-        .await?
-        .check_status()?
-        .bytes()
-        .await?;
-    let mut deserializer = serde_json::Deserializer::from_slice(&b);
-    deserializer.disable_recursion_limit();
-    Ok(ThreadSet::deserialize(&mut deserializer)
-        .map_err(|_| FetchError::JsonError(fetch::JsonError::Serde(JsValue::NULL)))?)
-}
-
-pub fn view_thread(thread_set: &ThreadSet) -> Node<Msg> {
-    assert_eq!(thread_set.0.len(), 1);
-    let thread = &thread_set.0[0];
-    assert_eq!(thread.0.len(), 1);
-    let thread_node = &thread.0[0];
-    let subject = first_subject(&thread_node).unwrap_or("<No subject>".to_string());
-    set_title(&subject);
-    div![
-        h1![subject],
-        a![
-            attrs! {At::Href=>api::original(&thread_node.0.as_ref().expect("message missing").id)},
-            "Original"
-        ],
-        view_message(&thread_node),
-        div![
-            C!["debug"],
-            "Add zippy for debug dump",
-            view_debug_thread_set(thread_set)
-        ] /* pre![format!("Thread: {:#?}", thread_set).replace(" ", " ")] */
-    ]
-}
-
-// <subject>
-// <tags>
-//
-// <from1> <date>
-// <to1>
-// <content1>
-// <zippy>
-//    <children1>
-// </zippy>
-//
-// <from2> <date>
-// <to2>
-// <body2>
-fn view_message(thread: &ThreadNode) -> Node<Msg> {
-    let message = thread.0.as_ref().expect("ThreadNode missing Message");
-    let children = &thread.1;
-    div![
-        C!["message"],
-        /* TODO(wathiede): collect all the tags and show them here. */
-        /* TODO(wathiede): collect all the attachments from all the subparts */
-        div![C!["header"], "From: ", &message.headers.from],
-        div![C!["header"], "Date: ", &message.headers.date],
-        div![C!["header"], "To: ", &message.headers.to],
-        hr![],
-        div![
-            C!["body"],
-            match &message.body {
-                Some(body) => view_body(body.as_slice()),
-                None => div!["<no body>"],
-            },
-        ],
-        children.iter().map(view_message)
-    ]
-}
-
-fn view_body(body: &[Part]) -> Node<Msg> {
-    div![body.iter().map(view_part)]
-}
-
-fn view_text_plain(content: &Option<Content>) -> Node<Msg> {
-    match &content {
-        Some(Content::String(content)) => p![C!["view-part-text-plain"], content],
-        _ => div![
-            C!["error"],
-            format!("Unhandled content enum for text/plain"),
-        ],
-    }
-}
-
-fn view_part(part: &Part) -> Node<Msg> {
-    match part.content_type.as_str() {
-        "text/plain" => view_text_plain(&part.content),
-        "text/html" => {
-            if let Some(Content::String(html)) = &part.content {
-                let inliner = css_inline::CSSInliner::options()
-                    .load_remote_stylesheets(false)
-                    .remove_style_tags(true)
-                    .build();
-                let inlined = inliner.inline(html).expect("failed to inline CSS");
-
-                return div![C!["view-part-text-html"], div!["TEST"], raw![&inlined]];
-            } else {
-                div![
-                    C!["error"],
-                    format!("Unhandled content enum for multipart/mixed"),
-                ]
-            }
-        }
-
-        // https://en.wikipedia.org/wiki/MIME#alternative
-        // RFC1341 states: In general, user agents that compose multipart/alternative entities
-        // should place the body parts in increasing order of preference, that is, with the
-        // preferred format last.
-        "multipart/alternative" => {
-            if let Some(Content::Multipart(parts)) = &part.content {
-                for part in parts.iter().rev() {
-                    if part.content_type == "text/html" {
-                        if let Some(Content::String(html)) = &part.content {
-                            let inliner = css_inline::CSSInliner::options()
-                                .load_remote_stylesheets(false)
-                                .remove_style_tags(true)
-                                .build();
-                            let inlined = inliner.inline(html).expect("failed to inline CSS");
-                            return div![Node::from_html(None, &inlined)];
-                        }
-                    }
-                    if part.content_type == "text/plain" {
-                        return view_text_plain(&part.content);
-                    }
-                }
-                div!["No known multipart/alternative parts"]
-            } else {
-                div![
-                    C!["error"],
-                    format!("multipart/alternative with non-multipart content"),
-                ]
-            }
-        }
-        "multipart/mixed" => match &part.content {
-            Some(Content::Multipart(parts)) => div![parts.iter().map(view_part)],
-            _ => div![
-                C!["error"],
-                format!("Unhandled content enum for multipart/mixed"),
-            ],
-        },
-        _ => div![
-            C!["error"],
-            format!("Unhandled content type: {}", part.content_type)
-        ],
-    }
-}
-
-fn first_subject(thread: &ThreadNode) -> Option<String> {
-    if let Some(msg) = &thread.0 {
-        return Some(msg.headers.subject.clone());
-    } else {
-        for tn in &thread.1 {
-            if let Some(s) = first_subject(&tn) {
-                return Some(s);
-            }
-        }
-    }
-    None
-}
-
-fn view_debug_thread_set(thread_set: &ThreadSet) -> Node<Msg> {
-    ul![thread_set
-        .0
-        .iter()
-        .enumerate()
-        .map(|(i, t)| { li!["t", i, ": ", view_debug_thread(t),] })]
-}
-fn view_debug_thread(thread: &Thread) -> Node<Msg> {
-    ul![thread
-        .0
-        .iter()
-        .enumerate()
-        .map(|(i, tn)| { li!["tn", i, ": ", view_debug_thread_node(tn),] })]
-}
-
-fn view_debug_thread_node(thread_node: &ThreadNode) -> Node<Msg> {
-    ul![
-        IF!(thread_node.0.is_some()=>li!["tn id:", &thread_node.0.as_ref().unwrap().id]),
-        thread_node.1.iter().enumerate().map(|(i, tn)| li![
-            "tn",
-            i,
-            ": ",
-            view_debug_thread_node(tn)
-        ])
-    ]
-}
web/src/state.rs (new file, 844 lines)
@@ -0,0 +1,844 @@
+use std::collections::HashSet;
+
+use graphql_client::GraphQLQuery;
+use letterbox_shared::WebsocketMessage;
+use log::{debug, error, info, warn};
+use seed::{prelude::*, *};
+use thiserror::Error;
+use web_sys::HtmlElement;
+
+use crate::{
+    api::urls,
+    consts::SEARCH_RESULTS_PER_PAGE,
+    graphql,
+    graphql::{front_page_query::*, send_graphql, show_thread_query::*},
+    websocket,
+};
+
+/// Used to fake the unread string while in development
+pub fn unread_query() -> &'static str {
+    let host = seed::window()
+        .location()
+        .host()
+        .expect("failed to get host");
+    if host.starts_with("6758.") {
+        return "tag:letterbox";
+    }
+    "is:unread"
+}
+
+// `init` describes what should happen when your app started.
+pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
+    let version = letterbox_shared::build_version(bi);
+    info!("Build Info: {}", version);
+    // Disable restoring to scroll position when navigating
+    window()
+        .history()
+        .expect("couldn't get history")
+        .set_scroll_restoration(web_sys::ScrollRestoration::Manual)
+        .expect("failed to set scroll restoration to manual");
+    if url.hash().is_none() {
+        orders.request_url(urls::search(unread_query(), 0));
+    } else {
+        orders.request_url(url.clone());
+    };
+    // TODO(wathiede): only do this while viewing the index? Or maybe add a new message that force
+    // 'notmuch new' on the server periodically?
+    //orders.stream(streams::interval(30_000, || Msg::RefreshStart));
+    orders.subscribe(Msg::OnUrlChanged);
+    orders.stream(streams::window_event(Ev::Scroll, |_| Msg::WindowScrolled));
+
+    build_info::build_info!(fn bi);
+    Model {
+        context: Context::None,
+        query: "".to_string(),
+        refreshing_state: RefreshingState::None,
+        tags: None,
+        read_completion_ratio: 0.,
+        content_el: ElRef::<HtmlElement>::default(),
+        versions: Version {
+            client: version,
+            server: None,
+        },
+        catchup: None,
+        last_url: Url::current(),
+        websocket: websocket::init("/api/ws", &mut orders.proxy(Msg::WebSocket)),
+    }
+}
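`init` wires the client to the server's `/api/ws` websocket, and the shared crate's `WebsocketMessage` (with its serde and `strum_macros::Display` derives) is the message type both sides agree on. The `websocket` module itself is not part of this diff, so the wire format below is only an assumption; this sketch just demonstrates what the derives guarantee:

```rust
// Hypothetical round trip of the shared websocket control message; the real
// encoding used by web/src/websocket.rs is not shown in this diff.
use letterbox_shared::WebsocketMessage;

let msg = WebsocketMessage::RefreshMessages;
let wire = serde_json::to_string(&msg).expect("serialize");
let back: WebsocketMessage = serde_json::from_str(&wire).expect("deserialize");
// strum_macros::Display gives a readable variant name for logging.
log::info!("got {back}");
```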
|
|
||||||
|
fn on_url_changed(old: &Url, mut new: Url) -> Msg {
|
||||||
|
let did_change = *old != new;
|
||||||
|
let mut messages = Vec::new();
|
||||||
|
if did_change {
|
||||||
|
messages.push(Msg::ScrollToTop)
|
||||||
|
}
|
||||||
|
info!(
|
||||||
|
"url changed\nold '{old}'\nnew '{new}', history {}",
|
||||||
|
history().length().unwrap_or(0)
|
||||||
|
);
|
||||||
|
let hpp = new.remaining_hash_path_parts();
|
||||||
|
let msg = match hpp.as_slice() {
|
||||||
|
["t", tid] => Msg::ShowThreadRequest {
|
||||||
|
thread_id: tid.to_string(),
|
||||||
|
},
|
||||||
|
["s", query] => {
|
||||||
|
let query = Url::decode_uri_component(query).unwrap_or("".to_string());
|
||||||
|
Msg::FrontPageRequest {
|
||||||
|
query,
|
||||||
|
after: None,
|
||||||
|
before: None,
|
||||||
|
first: None,
|
||||||
|
last: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
["s", query, page] => {
|
||||||
|
let query = Url::decode_uri_component(query).unwrap_or("".to_string());
|
||||||
|
let page = page[1..].parse().unwrap_or(0);
|
||||||
|
Msg::FrontPageRequest {
|
||||||
|
query,
|
||||||
|
after: Some(page.to_string()),
|
||||||
|
before: None,
|
||||||
|
first: None,
|
||||||
|
last: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
p => {
|
||||||
|
if !p.is_empty() {
|
||||||
|
info!("Unhandled path '{p:?}'");
|
||||||
|
}
|
||||||
|
Msg::FrontPageRequest {
|
||||||
|
query: "".to_string(),
|
||||||
|
after: None,
|
||||||
|
before: None,
|
||||||
|
first: None,
|
||||||
|
last: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
messages.push(msg);
|
||||||
|
Msg::MultiMsg(messages)
|
||||||
|
}
|
||||||
|
|
||||||
|
// `update` describes how to handle each `Msg`.
|
||||||
|
pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||||
|
info!("update({})", msg);
|
||||||
|
match msg {
|
||||||
|
Msg::Noop => {}
|
||||||
|
Msg::RefreshStart => {
|
||||||
|
model.refreshing_state = RefreshingState::Loading;
|
||||||
|
orders.perform_cmd(async move {
|
||||||
|
Msg::RefreshDone(
|
||||||
|
send_graphql::<_, graphql::refresh_mutation::ResponseData>(
|
||||||
|
graphql::RefreshMutation::build_query(
|
||||||
|
graphql::refresh_mutation::Variables {},
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.err(),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Msg::RefreshDone(err) => {
|
||||||
|
model.refreshing_state = if let Some(err) = err {
|
||||||
|
RefreshingState::Error(format!("{:?}", err))
|
||||||
|
} else {
|
||||||
|
RefreshingState::None
|
||||||
|
};
|
||||||
|
orders.perform_cmd(async move { Msg::Refresh });
|
||||||
|
}
|
||||||
|
Msg::Refresh => {
|
||||||
|
orders.request_url(Url::current());
|
||||||
|
}
|
||||||
|
Msg::Reload => {
|
||||||
|
window()
|
||||||
|
.location()
|
||||||
|
.reload()
|
||||||
|
.expect("failed to reload window");
|
||||||
|
}
|
||||||
|
Msg::OnUrlChanged(new_url) => {
|
||||||
|
orders.send_msg(on_url_changed(&model.last_url, new_url.0.clone()));
|
||||||
|
model.last_url = new_url.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
        Msg::NextPage => {
            match &model.context {
                Context::SearchResult { query, pager, .. } => {
                    let query = query.to_string();
                    let after = pager.end_cursor.clone();
                    orders.perform_cmd(async move {
                        Msg::FrontPageRequest {
                            query,
                            after,
                            before: None,
                            first: Some(SEARCH_RESULTS_PER_PAGE as i64),
                            last: None,
                        }
                    });
                }
                Context::ThreadResult { .. } => (), // do nothing (yet?)
                Context::None => (),                // do nothing (yet?)
            };
        }
        Msg::PreviousPage => {
            match &model.context {
                Context::SearchResult { query, pager, .. } => {
                    let query = query.to_string();
                    let before = pager.start_cursor.clone();
                    orders.perform_cmd(async move {
                        Msg::FrontPageRequest {
                            query,
                            after: None,
                            before,
                            first: None,
                            last: Some(SEARCH_RESULTS_PER_PAGE as i64),
                        }
                    });
                }

                Context::ThreadResult { .. } => (), // do nothing (yet?)
                Context::None => (),                // do nothing (yet?)
            };
        }
        Msg::GoToSearchResults => {
            orders.send_msg(Msg::SearchQuery(model.query.clone()));
        }

        Msg::UpdateQuery(query) => model.query = query,
        Msg::SearchQuery(query) => {
            orders.request_url(urls::search(&query, 0));
        }

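        // The three mutations below are fire-and-forget against the GraphQL
        // API; `orders.skip()` suppresses the re-render for this update, and
        // the Msg returned from the async block (Refresh or GoToSearchResults)
        // refetches fresh state afterwards.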
        Msg::SetUnread(query, unread) => {
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::mark_read_mutation::ResponseData>,
                    gloo_net::Error,
                > = send_graphql(graphql::MarkReadMutation::build_query(
                    graphql::mark_read_mutation::Variables {
                        query: query.clone(),
                        unread,
                    },
                ))
                .await;
                if let Err(e) = res {
                    error!("Failed to set unread for {query} to {unread}: {e}");
                }
                Msg::Refresh
            });
        }
        Msg::AddTag(query, tag) => {
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::add_tag_mutation::ResponseData>,
                    gloo_net::Error,
                > = send_graphql(graphql::AddTagMutation::build_query(
                    graphql::add_tag_mutation::Variables {
                        query: query.clone(),
                        tag: tag.clone(),
                    },
                ))
                .await;
                if let Err(e) = res {
                    error!("Failed to add tag {tag} to {query}: {e}");
                }
                Msg::GoToSearchResults
            });
        }
        Msg::RemoveTag(query, tag) => {
            orders.skip().perform_cmd(async move {
                let res: Result<
                    graphql_client::Response<graphql::remove_tag_mutation::ResponseData>,
                    gloo_net::Error,
                > = send_graphql(graphql::RemoveTagMutation::build_query(
                    graphql::remove_tag_mutation::Variables {
                        query: query.clone(),
                        tag: tag.clone(),
                    },
                ))
                .await;
                if let Err(e) = res {
                    error!("Failed to remove tag {tag} from {query}: {e}");
                }
                // TODO: reconsider this behavior
                Msg::GoToSearchResults
            });
        }

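        // FrontPageRequest is the single entry point for (re)loading search
        // results, used by routing, paging and the search box alike.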
        Msg::FrontPageRequest {
            query,
            after,
            before,
            first,
            last,
        } => {
            let (after, before, first, last) = match (after.as_ref(), before.as_ref(), first, last)
            {
                // If no pagination is set, use reasonable defaults
                (None, None, None, None) => {
                    (None, None, Some(SEARCH_RESULTS_PER_PAGE as i64), None)
                }
                _ => (after, before, first, last),
            };
            model.query = query.clone();
            orders.skip().perform_cmd(async move {
                Msg::FrontPageResult(
                    send_graphql(graphql::FrontPageQuery::build_query(
                        graphql::front_page_query::Variables {
                            query,
                            after,
                            before,
                            first,
                            last,
                        },
                    ))
                    .await,
                )
            });
        }
        Msg::FrontPageResult(Err(e)) => {
            error!("error FrontPageResult: {e:?}");
        }
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: None,
            errors: None,
            ..
        })) => {
            error!("FrontPageResult contains neither data nor errors; this should not happen");
        }
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: None,
            errors: Some(e),
            ..
        })) => {
            error!("FrontPageResult error: {e:?}");
        }
        Msg::FrontPageResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            model.tags = Some(
                data.tags
                    .into_iter()
                    .map(|t| Tag {
                        name: t.name,
                        bg_color: t.bg_color,
                        unread: t.unread,
                    })
                    .collect(),
            );
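            // Keep the current selection only if the refreshed page shows
            // exactly the same set of threads; otherwise clear it.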
            let selected_threads = 'context: {
                if let Context::SearchResult {
                    results,
                    selected_threads,
                    ..
                } = &model.context
                {
                    let old: HashSet<_> = results.iter().map(|n| &n.thread).collect();
                    let new: HashSet<_> = data.search.nodes.iter().map(|n| &n.thread).collect();

                    if old == new {
                        break 'context selected_threads.clone();
                    }
                }
                HashSet::new()
            };
            model.context = Context::SearchResult {
                query: model.query.clone(),
                results: data.search.nodes,
                count: data.count as usize,
                pager: data.search.page_info,
                selected_threads,
            };
            orders.send_msg(Msg::UpdateServerVersion(data.version));
            // Generate a signal so the progress bar is reset
            orders.send_msg(Msg::WindowScrolled);
        }

        Msg::ShowThreadRequest { thread_id } => {
            orders.skip().perform_cmd(async move {
                Msg::ShowThreadResult(
                    send_graphql(graphql::ShowThreadQuery::build_query(
                        graphql::show_thread_query::Variables { thread_id },
                    ))
                    .await,
                )
            });
        }
        Msg::ShowThreadResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            model.tags = Some(
                data.tags
                    .into_iter()
                    .map(|t| Tag {
                        name: t.name,
                        bg_color: t.bg_color,
                        unread: t.unread,
                    })
                    .collect(),
            );
            match &data.thread {
                graphql::show_thread_query::ShowThreadQueryThread::EmailThread(
                    ShowThreadQueryThreadOnEmailThread { messages, .. },
                ) => {
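                    // Expand only the unread messages; if the whole thread is
                    // already read, expand everything instead.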
                    let mut open_messages: HashSet<_> = messages
                        .iter()
                        .filter(|msg| msg.tags.iter().any(|t| t == "unread"))
                        .map(|msg| msg.id.clone())
                        .collect();
                    if open_messages.is_empty() {
                        open_messages = messages.iter().map(|msg| msg.id.clone()).collect();
                    }
                    model.context = Context::ThreadResult {
                        thread: data.thread,
                        open_messages,
                    };
                }
                graphql::show_thread_query::ShowThreadQueryThread::NewsPost(..) => {
                    model.context = Context::ThreadResult {
                        thread: data.thread,
                        open_messages: HashSet::new(),
                    };
                }
            }
            orders.send_msg(Msg::UpdateServerVersion(data.version));
            // Generate a signal so the progress bar is reset
            orders.send_msg(Msg::WindowScrolled);
        }
        Msg::ShowThreadResult(bad) => {
            error!("show_thread_query error: {bad:#?}");
        }
        Msg::CatchupRequest { query } => {
            orders.perform_cmd(async move {
                Msg::CatchupResult(
                    send_graphql::<_, graphql::catchup_query::ResponseData>(
                        graphql::CatchupQuery::build_query(graphql::catchup_query::Variables {
                            query,
                        }),
                    )
                    .await,
                )
            });
        }
        Msg::CatchupResult(Ok(graphql_client::Response {
            data: Some(data), ..
        })) => {
            let items = data.catchup;
            if items.is_empty() {
                orders.send_msg(Msg::GoToSearchResults);
                model.catchup = None;
            } else {
                orders.request_url(urls::thread(&items[0]));
                model.catchup = Some(Catchup {
                    items: items
                        .into_iter()
                        .map(|id| CatchupItem { id, seen: false })
                        .collect(),
                });
            }
        }
        Msg::CatchupResult(bad) => {
            error!("catchup_query error: {bad:#?}");
        }
        Msg::SelectionSetNone => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                *selected_threads = HashSet::new();
            }
        }
        Msg::SelectionSetAll => {
            if let Context::SearchResult {
                results,
                selected_threads,
                ..
            } = &mut model.context
            {
                *selected_threads = results.iter().map(|node| node.thread.clone()).collect();
            }
        }
        Msg::SelectionAddTag(tag) => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                let threads = selected_threads
                    .iter()
                    .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
                    .skip()
                    .perform_cmd(async move { Msg::AddTag(threads, tag) });
            }
        }
        Msg::SelectionRemoveTag(tag) => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                let threads = selected_threads
                    .iter()
                    .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
                    .skip()
                    .perform_cmd(async move { Msg::RemoveTag(threads, tag) });
            }
        }
        Msg::SelectionMarkAsRead => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                let threads = selected_threads
                    .iter()
                    .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
                    .skip()
                    .perform_cmd(async move { Msg::SetUnread(threads, false) });
            }
        }
        Msg::SelectionMarkAsUnread => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                let threads = selected_threads
                    .iter()
                    .map(|tid| tid.to_string())
                    .collect::<Vec<_>>()
                    .join(" ");
                orders
                    .skip()
                    .perform_cmd(async move { Msg::SetUnread(threads, true) });
            }
        }
        Msg::SelectionAddThread(tid) => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                selected_threads.insert(tid);
            }
        }
        Msg::SelectionRemoveThread(tid) => {
            if let Context::SearchResult {
                selected_threads, ..
            } = &mut model.context
            {
                selected_threads.remove(&tid);
            }
        }
        Msg::MessageCollapse(id) => {
            if let Context::ThreadResult { open_messages, .. } = &mut model.context {
                open_messages.remove(&id);
            }
        }
        Msg::MessageExpand(id) => {
            if let Context::ThreadResult { open_messages, .. } = &mut model.context {
                open_messages.insert(id);
            }
        }
        Msg::MultiMsg(msgs) => msgs.into_iter().for_each(|msg| update(msg, model, orders)),
        Msg::CopyToClipboard(text) => {
            let clipboard = seed::window().navigator().clipboard();
            orders.perform_cmd(async move {
                wasm_bindgen_futures::JsFuture::from(clipboard.write_text(&text))
                    .await
                    .expect("failed to copy to clipboard");
            });
        }
        Msg::ScrollToTop => {
            info!("scrolling to the top");
            web_sys::window().unwrap().scroll_to_with_x_and_y(0., 0.);
        }
        Msg::WindowScrolled => {
            // TODO: model.content_el doesn't go back to None like it should when the DOM is
            // recreated and the referenced element goes away
            if let Some(el) = model.content_el.get() {
                let ih = window()
                    .inner_height()
                    .expect("window height")
                    .unchecked_into::<js_sys::Number>()
                    .value_of();

                let r = el.get_bounding_client_rect();
                if r.height() < ih {
                    // The whole content fits in the window, no scrollbar
                    orders.send_msg(Msg::SetProgress(0.));
                    return;
                }
                let end: f64 = r.height() - ih;
                if end < 0. {
                    orders.send_msg(Msg::SetProgress(0.));
                    return;
                }
                // Flip Y: normally it is 0 when the top of the content hits the top of the
                // screen and goes negative from there.
                let y = -r.y();
                let ratio: f64 = (y / end).max(0.);
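                // ratio goes from 0.0 (top of the content at the top of the
                // viewport) towards 1.0 (scrolled to the end); it drives the
                // read-progress bar via SetProgress.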
                debug!(
                    "WindowScrolled ih {ih} end {end} ratio {ratio:.02} {}x{} @ {},{}",
                    r.width(),
                    r.height(),
                    r.x(),
                    r.y()
                );

                orders.send_msg(Msg::SetProgress(ratio));
            } else {
                orders.send_msg(Msg::SetProgress(0.));
            }
        }
        Msg::SetProgress(ratio) => {
            model.read_completion_ratio = ratio;
        }
        Msg::UpdateServerVersion(version) => {
            // Only git builds contain a dash in the version; don't auto-reload for those
            if !version.contains('-') && version != model.versions.client {
                warn!(
                    "Server ({}) and client ({}) version mismatch, reloading",
                    version, model.versions.client
                );
                orders.send_msg(Msg::Reload);
            }
            model.versions.server = Some(version);
        }

        Msg::CatchupStart => {
            let query = if model.query.contains("is:unread") {
                model.query.to_string()
            } else {
                format!("{} is:unread", model.query)
            };
            info!("starting catchup mode w/ {}", query);
            orders.send_msg(Msg::ScrollToTop);
            orders.send_msg(Msg::CatchupRequest { query });
        }
        Msg::CatchupKeepUnread => {
            orders.send_msg(Msg::CatchupNext);
        }
        Msg::CatchupMarkAsRead => {
            if let Some(thread_id) = current_thread_id(&model.context) {
                orders.send_msg(Msg::SetUnread(thread_id, false));
            };
            orders.send_msg(Msg::CatchupNext);
        }
        Msg::CatchupNext => {
            orders.send_msg(Msg::ScrollToTop);
            let Some(catchup) = &mut model.catchup else {
                orders.send_msg(Msg::GoToSearchResults);
                return;
            };
            let Some(thread_id) = current_thread_id(&model.context) else {
                return;
            };
            let Some(idx) = catchup
                .items
                .iter()
                .inspect(|i| info!("i {i:?} thread_id {thread_id}"))
                .position(|i| i.id == thread_id)
            else {
                // All items have been seen
                orders.send_msg(Msg::CatchupExit);
                orders.send_msg(Msg::GoToSearchResults);
                return;
            };
            catchup.items[idx].seen = true;
            if idx < catchup.items.len() - 1 {
                // More items left, go to the next one
                orders.request_url(urls::thread(&catchup.items[idx + 1].id));
            } else {
                // Reached the last item
                orders.send_msg(Msg::CatchupExit);
                orders.send_msg(Msg::GoToSearchResults);
            }
        }
        Msg::CatchupExit => {
            orders.send_msg(Msg::ScrollToTop);
            model.catchup = None;
        }

        Msg::WebSocket(ws) => {
            websocket::update(ws, &mut model.websocket, &mut orders.proxy(Msg::WebSocket));
            while let Some(msg) = model.websocket.updates.pop_front() {
                orders.send_msg(Msg::WebsocketMessage(msg));
            }
        }
        Msg::WebsocketMessage(msg) => {
            match msg {
                WebsocketMessage::RefreshMessages => orders.send_msg(Msg::Refresh),
            };
        }
    }
}
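
// How `update` is typically wired into a Seed app (a sketch, assuming a
// conventional Seed entry point; the real `init` and `view` live elsewhere
// in this crate, so everything below is illustrative rather than this
// codebase's actual wiring):
//
//     #[wasm_bindgen(start)]
//     pub fn start() {
//         App::start("app", init, update, view);
//     }
//
//     fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
//         // Deliver every future URL change as Msg::OnUrlChanged.
//         orders.subscribe(Msg::OnUrlChanged);
//         // Route the URL we were loaded with.
//         orders.send_msg(on_url_changed(&url, url.clone()));
//         /* construct the initial Model; see the sketch after the struct below */
//     }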

fn current_thread_id(context: &Context) -> Option<String> {
    match context {
        Context::ThreadResult {
            thread:
                ShowThreadQueryThread::EmailThread(ShowThreadQueryThreadOnEmailThread {
                    thread_id, ..
                }),
            ..
        } => Some(thread_id.clone()),
        Context::ThreadResult {
            thread:
                ShowThreadQueryThread::NewsPost(ShowThreadQueryThreadOnNewsPost { thread_id, .. }),
            ..
        } => Some(thread_id.clone()),
        _ => None,
    }
}

// `Model` describes our app state.
pub struct Model {
    pub query: String,
    pub context: Context,
    pub refreshing_state: RefreshingState,
    pub tags: Option<Vec<Tag>>,
    pub read_completion_ratio: f64,
    pub content_el: ElRef<HtmlElement>,
    pub versions: Version,
    pub catchup: Option<Catchup>,
    pub last_url: Url,
    pub websocket: websocket::Model,
}
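
// A plausible initial value for `Model` (a sketch: the field list comes from
// the struct above, but `env!("CARGO_PKG_VERSION")` for the client version,
// `websocket::Model::default()` and the `url` binding are assumptions):
//
//     Model {
//         query: String::new(),
//         context: Context::None,
//         refreshing_state: RefreshingState::None,
//         tags: None,
//         read_completion_ratio: 0.0,
//         content_el: ElRef::new(),
//         versions: Version {
//             client: env!("CARGO_PKG_VERSION").to_string(),
//             server: None,
//         },
//         catchup: None,
//         last_url: url,
//         websocket: websocket::Model::default(),
//     }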

#[derive(Debug)]
pub struct Version {
    pub client: String,
    pub server: Option<String>,
}

#[derive(Error, Debug)]
#[allow(dead_code)] // Remove once the UI is showing errors
pub enum UIError {
    #[error("No error, this should never be presented to the user")]
    NoError,
    #[error("failed to fetch {0}: {1:?}")]
    FetchError(&'static str, gloo_net::Error),
    #[error("{0} error decoding: {1:?}")]
    FetchDecodeError(&'static str, Vec<graphql_client::Error>),
    #[error("no data or errors for {0}")]
    NoData(&'static str),
}

pub enum Context {
    None,
    SearchResult {
        query: String,
        results: Vec<FrontPageQuerySearchNodes>,
        count: usize,
        pager: FrontPageQuerySearchPageInfo,
        selected_threads: HashSet<String>,
    },
    ThreadResult {
        thread: ShowThreadQueryThread,
        open_messages: HashSet<String>,
    },
}

pub struct Catchup {
    pub items: Vec<CatchupItem>,
}

#[derive(Debug)]
pub struct CatchupItem {
    pub id: String,
    pub seen: bool,
}

pub struct Tag {
    pub name: String,
    pub bg_color: String,
    pub unread: i64,
}

#[derive(Debug, PartialEq)]
pub enum RefreshingState {
    None,
    Loading,
    Error(String),
}

// `Msg` describes the different events you can modify state with.
#[derive(strum_macros::Display)]
pub enum Msg {
    Noop,
    // Tell the client to refresh its state
    Refresh,
    // Tell the client to reload the whole page from the server
    Reload,
    // TODO: add GoToUrl
    OnUrlChanged(subs::UrlChanged),
    // Tell the server to update its state
    RefreshStart,
    RefreshDone(Option<gloo_net::Error>),
    NextPage,
    PreviousPage,
    GoToSearchResults,

    UpdateQuery(String),
    SearchQuery(String),

    SetUnread(String, bool),
    AddTag(String, String),
    RemoveTag(String, String),

    FrontPageRequest {
        query: String,
        after: Option<String>,
        before: Option<String>,
        first: Option<i64>,
        last: Option<i64>,
    },
    FrontPageResult(
        Result<graphql_client::Response<graphql::front_page_query::ResponseData>, gloo_net::Error>,
    ),
    ShowThreadRequest {
        thread_id: String,
    },
    ShowThreadResult(
        Result<graphql_client::Response<graphql::show_thread_query::ResponseData>, gloo_net::Error>,
    ),
    CatchupRequest {
        query: String,
    },
    CatchupResult(
        Result<graphql_client::Response<graphql::catchup_query::ResponseData>, gloo_net::Error>,
    ),

    SelectionSetNone,
    SelectionSetAll,
    SelectionAddTag(String),
    #[allow(dead_code)]
    SelectionRemoveTag(String),
    SelectionMarkAsRead,
    SelectionMarkAsUnread,
    SelectionAddThread(String),
    SelectionRemoveThread(String),

    MessageCollapse(String),
    MessageExpand(String),
    MultiMsg(Vec<Msg>),

    CopyToClipboard(String),

    ScrollToTop,
    WindowScrolled,
    SetProgress(f64),
    UpdateServerVersion(String),

    CatchupStart,
    CatchupKeepUnread,
    CatchupMarkAsRead,
    CatchupNext,
    CatchupExit,

    WebSocket(websocket::Msg),
    WebsocketMessage(WebsocketMessage),
}
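
// Example: Msg::MultiMsg(vec![Msg::ScrollToTop, Msg::Refresh]) runs both
// messages in order within a single `update` dispatch.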
3 web/src/tailwind.css Normal file
@@ -0,0 +1,3 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
1684 web/src/view/mod.rs Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.