Compare commits
118 Commits
letterbox-
...
letterbox-
| Author | SHA1 | Date | |
|---|---|---|---|
| b8ef753f85 | |||
| 33edd22f8f | |||
| 75e9232095 | |||
| 6daddf11de | |||
| 36d9eda303 | |||
| 4eb2d4c689 | |||
| edc7119fbf | |||
| aa1736a285 | |||
| 6f93aa4f34 | |||
| 0662e6230e | |||
| 30f3f14040 | |||
| f2042f284e | |||
| b2c73ffa15 | |||
| d7217d1b3c | |||
| 638d55a36c | |||
| b11f6b5149 | |||
| d0b5ecf4f2 | |||
| 7a67c30a2c | |||
| 5ea4694eb8 | |||
| e01dabe6ed | |||
| ecaf0dd0fc | |||
| 3d4dcc9e6b | |||
| 28a5d9f219 | |||
| 81876d37ea | |||
| 4a6b159ddb | |||
| d84957cc8c | |||
| d53db5b49a | |||
| 0448368011 | |||
| 36754136fd | |||
| 489acccf77 | |||
| 8ef4db63ad | |||
| 9f63205ff3 | |||
| 5a0378948d | |||
| 2b4c45be74 | |||
| 147896dc80 | |||
| 1ff6ec7653 | |||
| acd590111e | |||
| b5f24ba1f2 | |||
| 79ed24135f | |||
| a4949a25b5 | |||
| f16edef124 | |||
| 2fd6479cb9 | |||
| 85a6b3a9a4 | |||
| 9ac5216d6e | |||
| 82987dbd20 | |||
| 29de7c0727 | |||
| 5f6580fa2f | |||
| 5d4732d75d | |||
| a13bac813a | |||
| 85dcc9f7bd | |||
| b696629ad9 | |||
| b9e3128718 | |||
| 88fac4c2bc | |||
| 1fad5ec536 | |||
| 8e7214d531 | |||
| 333c4a3ebb | |||
| b9ba5a3bea | |||
| 2a0989e74d | |||
| e9319dc491 | |||
| 57481a77cd | |||
| 44915cce54 | |||
| 1225483b57 | |||
| daeb8c88a1 | |||
| 8a6b3ff501 | |||
| a6fffeafdc | |||
| d791b4ce49 | |||
| 8a0e4eb441 | |||
| fc84562419 | |||
| 37ebe1ebb3 | |||
| 2d06f070ea | |||
| 527a62069a | |||
| 40afafe1a8 | |||
| e3acf9ae6d | |||
| a68d067a68 | |||
| 5547c65af0 | |||
| b622bb7d7d | |||
| 43efdf18a0 | |||
| c71ab8e9e8 | |||
| 408d6ed8ba | |||
| 1411961e36 | |||
| dfd7ef466c | |||
| 2aa3dfbd0f | |||
| fba10e27cf | |||
| 5417c74f9c | |||
| eb0b0dbe81 | |||
| 561f522658 | |||
| 32d2ffeb3d | |||
| d41946e0a5 | |||
| 61402858f4 | |||
| 17de318645 | |||
| 3aa0144e8d | |||
| f9eafff4c7 | |||
| 4c6d67901d | |||
| e9aa97a089 | |||
| a82b047f75 | |||
| 9a8b44a8df | |||
| a96693004c | |||
| ed9fe11fbf | |||
| 09fb14a796 | |||
| 58a7936bba | |||
| cd0ee361f5 | |||
| 77bd5abe0d | |||
| 450c5496b3 | |||
| 4411e45a3c | |||
| e7d20896d5 | |||
| 32a1115abd | |||
| 4982057500 | |||
| 8977f8bab5 | |||
| 0962a6b3cf | |||
| 3c72929a4f | |||
| e4eb495a70 | |||
| 00e8b0342e | |||
| b1f9867c06 | |||
| 77943b3570 | |||
| 45e4edb1dd | |||
| 9bf53afebf | |||
| e1a502ac4b | |||
| 9346c46e62 |
@@ -1,4 +1,4 @@
|
||||
on: [push, pull_request]
|
||||
on: [push]
|
||||
|
||||
name: Continuous integration
|
||||
|
||||
|
||||
2050
Cargo.lock
generated
2050
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -8,7 +8,7 @@ authors = ["Bill Thiede <git@xinu.tv>"]
|
||||
edition = "2021"
|
||||
license = "UNLICENSED"
|
||||
publish = ["xinu"]
|
||||
version = "0.9.0"
|
||||
version = "0.15.0"
|
||||
repository = "https://git.z.xinu.tv/wathiede/letterbox"
|
||||
|
||||
[profile.dev]
|
||||
|
||||
3
Justfile
3
Justfile
@@ -1,3 +1,6 @@
|
||||
export CARGO_INCREMENTAL := "0"
|
||||
export RUSTFLAGS := "-D warnings"
|
||||
|
||||
default:
|
||||
@echo "Run: just patch|minor|major"
|
||||
|
||||
|
||||
@@ -470,7 +470,7 @@ pub enum NotmuchError {
|
||||
MailParseError(#[from] mailparse::MailParseError),
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
#[derive(Clone, Default)]
|
||||
pub struct Notmuch {
|
||||
config_path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\nSELECT id\nFROM feed\nWHERE slug = $1\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Int4"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "dabd12987369cb273c0191d46645c376439d246d5a697340574c6afdac93d2cc"
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\nINSERT INTO feed ( name, slug, url, homepage, selector )\nVALUES ( $1, $2, $3, '', '' )\nRETURNING id\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Int4"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text",
|
||||
"Text",
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "e2a448aaf4fe92fc1deda10bf844f6b9225d35758cba7c9f337c1a730aee41bd"
|
||||
}
|
||||
@@ -15,44 +15,44 @@ version.workspace = true
|
||||
ammonia = "4.0.0"
|
||||
anyhow = "1.0.79"
|
||||
async-graphql = { version = "7", features = ["log"] }
|
||||
async-graphql-rocket = "7"
|
||||
async-graphql-axum = "7.0.15"
|
||||
async-trait = "0.1.81"
|
||||
build-info = "0.0.39"
|
||||
cacher = { version = "0.1.0", registry = "xinu" }
|
||||
axum = { version = "0.8.3", features = ["ws"] }
|
||||
axum-macros = "0.5.0"
|
||||
build-info = "0.0.40"
|
||||
cacher = { version = "0.2.0", registry = "xinu" }
|
||||
chrono = "0.4.39"
|
||||
clap = { version = "4.5.23", features = ["derive"] }
|
||||
css-inline = "0.14.0"
|
||||
futures = "0.3.31"
|
||||
headers = "0.4.0"
|
||||
html-escape = "0.2.13"
|
||||
letterbox-notmuch = { version = "0.15.0", path = "../notmuch", registry = "xinu" }
|
||||
letterbox-shared = { version = "0.15.0", path = "../shared", registry = "xinu" }
|
||||
linkify = "0.10.0"
|
||||
log = "0.4.17"
|
||||
lol_html = "2.0.0"
|
||||
mailparse = "0.16.0"
|
||||
maplit = "1.0.2"
|
||||
memmap = "0.7.0"
|
||||
opentelemetry = "0.28.0"
|
||||
regex = "1.11.1"
|
||||
reqwest = { version = "0.12.7", features = ["blocking"] }
|
||||
rocket = { version = "0.5.0-rc.2", features = ["json"] }
|
||||
rocket_cors = "0.6.0"
|
||||
scraper = "0.23.0"
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_json = "1.0.87"
|
||||
sqlx = { version = "0.8.2", features = ["postgres", "runtime-tokio", "time"] }
|
||||
tantivy = { version = "0.22.0", optional = true }
|
||||
tantivy = { version = "0.24.0", optional = true }
|
||||
thiserror = "2.0.0"
|
||||
tokio = "1.26.0"
|
||||
tower-http = { version = "0.6.2", features = ["trace"] }
|
||||
tracing = "0.1.41"
|
||||
url = "2.5.2"
|
||||
urlencoding = "2.1.3"
|
||||
#xtracing = { path = "../../xtracing" }
|
||||
#xtracing = { git = "http://git-private.h.xinu.tv/wathiede/xtracing.git" }
|
||||
#xtracing = { path = "../../xtracing" }
|
||||
xtracing = { version = "0.3.0", registry = "xinu" }
|
||||
letterbox-notmuch = { version = "0.9.0", path = "../notmuch", registry = "xinu" }
|
||||
letterbox-shared = { version = "0.9.0", path = "../shared", registry = "xinu" }
|
||||
|
||||
[build-dependencies]
|
||||
build-info-build = "0.0.39"
|
||||
build-info-build = "0.0.40"
|
||||
|
||||
[features]
|
||||
#default = [ "tantivy" ]
|
||||
|
||||
@@ -5,7 +5,6 @@ newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
|
||||
newsreader_tantivy_db_path = "../target/database/newsreader"
|
||||
|
||||
[debug]
|
||||
address = "0.0.0.0"
|
||||
port = 9345
|
||||
# Uncomment to make it production like.
|
||||
#log_level = "critical"
|
||||
|
||||
20
server/migrations/20250330175930_update-nzfinder-link.sql
Normal file
20
server/migrations/20250330175930_update-nzfinder-link.sql
Normal file
@@ -0,0 +1,20 @@
|
||||
-- Bad examples:
|
||||
-- https://nzbfinder.ws/getnzb/d2c3e5a08abadd985dccc6a574122892030b6a9a.nzb&i=95972&r=b55082d289937c050dedc203c9653850
|
||||
-- https://nzbfinder.ws/getnzb?id=45add174-7da4-4445-bf2b-a67dbbfc07fe.nzb&r=b55082d289937c050dedc203c9653850
|
||||
-- https://nzbfinder.ws/api/v1/getnzb?id=82486020-c192-4fa0-a7e7-798d7d72e973.nzb&r=b55082d289937c050dedc203c9653850
|
||||
UPDATE nzb_posts
|
||||
SET link =
|
||||
regexp_replace(
|
||||
regexp_replace(
|
||||
regexp_replace(
|
||||
link,
|
||||
'https://nzbfinder.ws/getnzb/',
|
||||
'https://nzbfinder.ws/api/v1/getnzb?id='
|
||||
),
|
||||
'https://nzbfinder.ws/getnzb',
|
||||
'https://nzbfinder.ws/api/v1/getnzb'
|
||||
),
|
||||
'&r=',
|
||||
'&apikey='
|
||||
)
|
||||
;
|
||||
@@ -1,22 +0,0 @@
|
||||
use clap::Parser;
|
||||
use letterbox_server::mail::read_mail_to_db;
|
||||
use sqlx::postgres::PgPool;
|
||||
|
||||
/// Add certain emails as posts in newsfeed app.
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(author, version, about, long_about = None)]
|
||||
struct Args {
|
||||
/// DB URL, something like postgres://newsreader@nixos-07.h.xinu.tv/newsreader
|
||||
#[arg(short, long)]
|
||||
db_url: String,
|
||||
/// path to parse
|
||||
path: String,
|
||||
}
|
||||
#[tokio::main]
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
|
||||
let args = Args::parse();
|
||||
let pool = PgPool::connect(&args.db_url).await?;
|
||||
read_mail_to_db(&pool, &args.path).await?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,114 +1,100 @@
|
||||
// Rocket generates a lot of warnings for handlers
|
||||
// TODO: figure out why
|
||||
#![allow(unreachable_patterns)]
|
||||
#[macro_use]
|
||||
extern crate rocket;
|
||||
use std::{error::Error, io::Cursor, str::FromStr};
|
||||
use std::{error::Error, net::SocketAddr, sync::Arc, time::Duration};
|
||||
|
||||
use async_graphql::{extensions, http::GraphiQLSource, EmptySubscription, Schema};
|
||||
use async_graphql_rocket::{GraphQLQuery, GraphQLRequest, GraphQLResponse};
|
||||
use async_graphql::{extensions, http::GraphiQLSource, Schema};
|
||||
use async_graphql_axum::{GraphQL, GraphQLSubscription};
|
||||
//allows to extract the IP of connecting user
|
||||
use axum::extract::connect_info::ConnectInfo;
|
||||
use axum::{
|
||||
extract::{self, ws::WebSocketUpgrade, State},
|
||||
http::{header, StatusCode},
|
||||
response::{self, IntoResponse, Response},
|
||||
routing::{any, get},
|
||||
Router,
|
||||
};
|
||||
use cacher::FilesystemCacher;
|
||||
use letterbox_notmuch::{Notmuch, NotmuchError, ThreadSet};
|
||||
use letterbox_notmuch::Notmuch;
|
||||
#[cfg(feature = "tantivy")]
|
||||
use letterbox_server::tantivy::TantivyConnection;
|
||||
use letterbox_server::{
|
||||
config::Config,
|
||||
error::ServerError,
|
||||
graphql::{Attachment, GraphqlSchema, Mutation, QueryRoot},
|
||||
graphql::{compute_catchup_ids, Attachment, MutationRoot, QueryRoot, SubscriptionRoot},
|
||||
nm::{attachment_bytes, cid_attachment_bytes},
|
||||
ws::ConnectionTracker,
|
||||
};
|
||||
use rocket::{
|
||||
fairing::AdHoc,
|
||||
http::{ContentType, Header},
|
||||
request::Request,
|
||||
response::{content, Debug, Responder},
|
||||
serde::json::Json,
|
||||
Response, State,
|
||||
};
|
||||
use rocket_cors::{AllowedHeaders, AllowedOrigins};
|
||||
use letterbox_shared::WebsocketMessage;
|
||||
use sqlx::postgres::PgPool;
|
||||
use tokio::{net::TcpListener, sync::Mutex};
|
||||
use tower_http::trace::{DefaultMakeSpan, TraceLayer};
|
||||
use tracing::info;
|
||||
|
||||
#[get("/show/<query>/pretty")]
|
||||
async fn show_pretty(
|
||||
nm: &State<Notmuch>,
|
||||
query: &str,
|
||||
) -> Result<Json<ThreadSet>, Debug<ServerError>> {
|
||||
let query = urlencoding::decode(query).map_err(|e| ServerError::from(NotmuchError::from(e)))?;
|
||||
let res = nm.show(&query).map_err(ServerError::from)?;
|
||||
Ok(Json(res))
|
||||
// Make our own error that wraps `anyhow::Error`.
|
||||
struct AppError(letterbox_server::ServerError);
|
||||
|
||||
// Tell axum how to convert `AppError` into a response.
|
||||
impl IntoResponse for AppError {
|
||||
fn into_response(self) -> Response {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
format!("Something went wrong: {}", self.0),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/show/<query>")]
|
||||
async fn show(nm: &State<Notmuch>, query: &str) -> Result<Json<ThreadSet>, Debug<NotmuchError>> {
|
||||
let query = urlencoding::decode(query).map_err(NotmuchError::from)?;
|
||||
let res = nm.show(&query)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
struct InlineAttachmentResponder(Attachment);
|
||||
|
||||
impl<'r, 'o: 'r> Responder<'r, 'o> for InlineAttachmentResponder {
|
||||
fn respond_to(self, _: &'r Request<'_>) -> rocket::response::Result<'o> {
|
||||
let mut resp = Response::build();
|
||||
if let Some(filename) = self.0.filename {
|
||||
resp.header(Header::new(
|
||||
"Content-Disposition",
|
||||
format!(r#"inline; filename="{}""#, filename),
|
||||
));
|
||||
}
|
||||
if let Some(content_type) = self.0.content_type {
|
||||
if let Some(ct) = ContentType::parse_flexible(&content_type) {
|
||||
resp.header(ct);
|
||||
}
|
||||
}
|
||||
resp.sized_body(self.0.bytes.len(), Cursor::new(self.0.bytes))
|
||||
.ok()
|
||||
// This enables using `?` on functions that return `Result<_, letterbox_server::Error>` to turn them into
|
||||
// `Result<_, AppError>`. That way you don't need to do that manually.
|
||||
impl<E> From<E> for AppError
|
||||
where
|
||||
E: Into<letterbox_server::ServerError>,
|
||||
{
|
||||
fn from(err: E) -> Self {
|
||||
Self(err.into())
|
||||
}
|
||||
}
|
||||
|
||||
struct DownloadAttachmentResponder(Attachment);
|
||||
|
||||
impl<'r, 'o: 'r> Responder<'r, 'o> for DownloadAttachmentResponder {
|
||||
fn respond_to(self, _: &'r Request<'_>) -> rocket::response::Result<'o> {
|
||||
let mut resp = Response::build();
|
||||
if let Some(filename) = self.0.filename {
|
||||
resp.header(Header::new(
|
||||
"Content-Disposition",
|
||||
format!(r#"attachment; filename="{}""#, filename),
|
||||
));
|
||||
}
|
||||
if let Some(content_type) = self.0.content_type {
|
||||
if let Some(ct) = ContentType::parse_flexible(&content_type) {
|
||||
resp.header(ct);
|
||||
}
|
||||
}
|
||||
resp.sized_body(self.0.bytes.len(), Cursor::new(self.0.bytes))
|
||||
.ok()
|
||||
fn inline_attachment_response(attachment: Attachment) -> impl IntoResponse {
|
||||
info!("attachment filename {:?}", attachment.filename);
|
||||
let mut hdr_map = headers::HeaderMap::new();
|
||||
if let Some(filename) = attachment.filename {
|
||||
hdr_map.insert(
|
||||
header::CONTENT_DISPOSITION,
|
||||
format!(r#"inline; filename="{}""#, filename)
|
||||
.parse()
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
if let Some(ct) = attachment.content_type {
|
||||
hdr_map.insert(header::CONTENT_TYPE, ct.parse().unwrap());
|
||||
}
|
||||
info!("hdr_map {hdr_map:?}");
|
||||
(hdr_map, attachment.bytes).into_response()
|
||||
}
|
||||
|
||||
#[get("/cid/<id>/<cid>")]
|
||||
async fn view_cid(
|
||||
nm: &State<Notmuch>,
|
||||
id: &str,
|
||||
cid: &str,
|
||||
) -> Result<InlineAttachmentResponder, Debug<ServerError>> {
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
format!("id:{}", id)
|
||||
};
|
||||
info!("view cid attachment {mid} {cid}");
|
||||
let attachment = cid_attachment_bytes(nm, &mid, &cid)?;
|
||||
Ok(InlineAttachmentResponder(attachment))
|
||||
fn download_attachment_response(attachment: Attachment) -> impl IntoResponse {
|
||||
info!("attachment filename {:?}", attachment.filename);
|
||||
let mut hdr_map = headers::HeaderMap::new();
|
||||
if let Some(filename) = attachment.filename {
|
||||
hdr_map.insert(
|
||||
header::CONTENT_DISPOSITION,
|
||||
format!(r#"attachment; filename="{}""#, filename)
|
||||
.parse()
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
if let Some(ct) = attachment.content_type {
|
||||
hdr_map.insert(header::CONTENT_TYPE, ct.parse().unwrap());
|
||||
}
|
||||
info!("hdr_map {hdr_map:?}");
|
||||
(hdr_map, attachment.bytes).into_response()
|
||||
}
|
||||
|
||||
#[get("/view/attachment/<id>/<idx>/<_>")]
|
||||
#[axum_macros::debug_handler]
|
||||
async fn view_attachment(
|
||||
nm: &State<Notmuch>,
|
||||
id: &str,
|
||||
idx: &str,
|
||||
) -> Result<InlineAttachmentResponder, Debug<ServerError>> {
|
||||
State(AppState { nm, .. }): State<AppState>,
|
||||
extract::Path((id, idx, _)): extract::Path<(String, String, String)>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
@@ -119,16 +105,14 @@ async fn view_attachment(
|
||||
.split('.')
|
||||
.map(|s| s.parse().expect("not a usize"))
|
||||
.collect();
|
||||
let attachment = attachment_bytes(nm, &mid, &idx)?;
|
||||
Ok(InlineAttachmentResponder(attachment))
|
||||
let attachment = attachment_bytes(&nm, &mid, &idx)?;
|
||||
Ok(inline_attachment_response(attachment))
|
||||
}
|
||||
|
||||
#[get("/download/attachment/<id>/<idx>/<_>")]
|
||||
async fn download_attachment(
|
||||
nm: &State<Notmuch>,
|
||||
id: &str,
|
||||
idx: &str,
|
||||
) -> Result<DownloadAttachmentResponder, Debug<ServerError>> {
|
||||
State(AppState { nm, .. }): State<AppState>,
|
||||
extract::Path((id, idx, _)): extract::Path<(String, String, String)>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
@@ -139,102 +123,154 @@ async fn download_attachment(
|
||||
.split('.')
|
||||
.map(|s| s.parse().expect("not a usize"))
|
||||
.collect();
|
||||
let attachment = attachment_bytes(nm, &mid, &idx)?;
|
||||
Ok(DownloadAttachmentResponder(attachment))
|
||||
let attachment = attachment_bytes(&nm, &mid, &idx)?;
|
||||
Ok(download_attachment_response(attachment))
|
||||
}
|
||||
|
||||
#[get("/original/<id>")]
|
||||
async fn original(
|
||||
nm: &State<Notmuch>,
|
||||
id: &str,
|
||||
) -> Result<(ContentType, Vec<u8>), Debug<NotmuchError>> {
|
||||
async fn view_cid(
|
||||
State(AppState { nm, .. }): State<AppState>,
|
||||
extract::Path((id, cid)): extract::Path<(String, String)>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
let mid = if id.starts_with("id:") {
|
||||
id.to_string()
|
||||
} else {
|
||||
format!("id:{}", id)
|
||||
};
|
||||
let res = nm.show_original(&mid)?;
|
||||
Ok((ContentType::Plain, res))
|
||||
info!("view cid attachment {mid} {cid}");
|
||||
let attachment = cid_attachment_bytes(&nm, &mid, &cid)?;
|
||||
Ok(inline_attachment_response(attachment))
|
||||
}
|
||||
|
||||
#[rocket::get("/")]
|
||||
fn graphiql() -> content::RawHtml<String> {
|
||||
content::RawHtml(GraphiQLSource::build().endpoint("/api/graphql").finish())
|
||||
async fn graphiql() -> impl IntoResponse {
|
||||
response::Html(
|
||||
GraphiQLSource::build()
|
||||
.endpoint("/api/graphql/")
|
||||
.subscription_endpoint("/api/graphql/ws")
|
||||
.finish(),
|
||||
)
|
||||
}
|
||||
|
||||
#[rocket::get("/graphql?<query..>")]
|
||||
async fn graphql_query(schema: &State<GraphqlSchema>, query: GraphQLQuery) -> GraphQLResponse {
|
||||
query.execute(schema.inner()).await
|
||||
async fn start_ws(
|
||||
ws: WebSocketUpgrade,
|
||||
ConnectInfo(addr): ConnectInfo<SocketAddr>,
|
||||
State(AppState {
|
||||
connection_tracker, ..
|
||||
}): State<AppState>,
|
||||
) -> impl IntoResponse {
|
||||
ws.on_upgrade(async move |socket| connection_tracker.lock().await.add_peer(socket, addr).await)
|
||||
}
|
||||
#[axum_macros::debug_handler]
|
||||
async fn test_handler(
|
||||
State(AppState {
|
||||
connection_tracker, ..
|
||||
}): State<AppState>,
|
||||
) -> impl IntoResponse {
|
||||
connection_tracker
|
||||
.lock()
|
||||
.await
|
||||
.send_message_all(WebsocketMessage::RefreshMessages)
|
||||
.await;
|
||||
"test triggered"
|
||||
}
|
||||
|
||||
#[rocket::post("/graphql", data = "<request>", format = "application/json")]
|
||||
async fn graphql_request(
|
||||
schema: &State<GraphqlSchema>,
|
||||
request: GraphQLRequest,
|
||||
) -> GraphQLResponse {
|
||||
request.execute(schema.inner()).await
|
||||
async fn watch_new(
|
||||
nm: Notmuch,
|
||||
pool: PgPool,
|
||||
conn_tracker: Arc<Mutex<ConnectionTracker>>,
|
||||
poll_time: Duration,
|
||||
) -> Result<(), async_graphql::Error> {
|
||||
let mut old_ids = Vec::new();
|
||||
loop {
|
||||
let ids = compute_catchup_ids(&nm, &pool, "is:unread").await?;
|
||||
if old_ids != ids {
|
||||
info!("old_ids: {old_ids:?}\n ids: {ids:?}");
|
||||
conn_tracker
|
||||
.lock()
|
||||
.await
|
||||
.send_message_all(WebsocketMessage::RefreshMessages)
|
||||
.await
|
||||
}
|
||||
old_ids = ids;
|
||||
tokio::time::sleep(poll_time).await;
|
||||
}
|
||||
}
|
||||
|
||||
#[rocket::main]
|
||||
#[derive(Clone)]
|
||||
struct AppState {
|
||||
nm: Notmuch,
|
||||
connection_tracker: Arc<Mutex<ConnectionTracker>>,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn Error>> {
|
||||
let _guard = xtracing::init(env!("CARGO_BIN_NAME"))?;
|
||||
build_info::build_info!(fn bi);
|
||||
info!("Build Info: {}", letterbox_shared::build_version(bi));
|
||||
let allowed_origins = AllowedOrigins::all();
|
||||
let cors = rocket_cors::CorsOptions {
|
||||
allowed_origins,
|
||||
allowed_methods: vec!["Get"]
|
||||
.into_iter()
|
||||
.map(|s| FromStr::from_str(s).unwrap())
|
||||
.collect(),
|
||||
allowed_headers: AllowedHeaders::some(&["Authorization", "Accept"]),
|
||||
allow_credentials: true,
|
||||
..Default::default()
|
||||
}
|
||||
.to_cors()?;
|
||||
|
||||
let rkt = rocket::build()
|
||||
.mount(
|
||||
letterbox_shared::urls::MOUNT_POINT,
|
||||
routes![
|
||||
original,
|
||||
show_pretty,
|
||||
show,
|
||||
graphql_query,
|
||||
graphql_request,
|
||||
graphiql,
|
||||
view_cid,
|
||||
view_attachment,
|
||||
download_attachment,
|
||||
],
|
||||
)
|
||||
.attach(cors)
|
||||
.attach(AdHoc::config::<Config>());
|
||||
|
||||
let config: Config = rkt.figment().extract()?;
|
||||
// TODO: move these to config
|
||||
let port = 9345;
|
||||
let config = Config {
|
||||
newsreader_database_url: "postgres://newsreader@nixos-07.h.xinu.tv/newsreader".to_string(),
|
||||
newsreader_tantivy_db_path: "../target/database/newsreader".to_string(),
|
||||
slurp_cache_path: "/tmp/letterbox/slurp".to_string(),
|
||||
};
|
||||
if !std::fs::exists(&config.slurp_cache_path)? {
|
||||
info!("Creating slurp cache @ '{}'", &config.slurp_cache_path);
|
||||
std::fs::create_dir_all(&config.slurp_cache_path)?;
|
||||
}
|
||||
let pool = PgPool::connect(&config.newsreader_database_url).await?;
|
||||
let nm = Notmuch::default();
|
||||
sqlx::migrate!("./migrations").run(&pool).await?;
|
||||
#[cfg(feature = "tantivy")]
|
||||
let tantivy_conn = TantivyConnection::new(&config.newsreader_tantivy_db_path)?;
|
||||
|
||||
let cacher = FilesystemCacher::new(&config.slurp_cache_path)?;
|
||||
let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
|
||||
.data(Notmuch::default())
|
||||
let schema = Schema::build(QueryRoot, MutationRoot, SubscriptionRoot)
|
||||
.data(nm.clone())
|
||||
.data(cacher)
|
||||
.data(pool.clone());
|
||||
|
||||
#[cfg(feature = "tantivy")]
|
||||
let schema = schema.data(tantivy_conn);
|
||||
|
||||
let schema = schema.extension(extensions::Logger).finish();
|
||||
|
||||
let rkt = rkt.manage(schema).manage(pool).manage(Notmuch::default());
|
||||
//.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))
|
||||
let connection_tracker = Arc::new(Mutex::new(ConnectionTracker::default()));
|
||||
let ct = Arc::clone(&connection_tracker);
|
||||
let poll_time = Duration::from_secs(10);
|
||||
let _h = tokio::spawn(watch_new(nm.clone(), pool, ct, poll_time));
|
||||
|
||||
rkt.launch().await?;
|
||||
let app = Router::new()
|
||||
.route("/test", get(test_handler))
|
||||
.route(
|
||||
"/api/download/attachment/{id}/{idx}/{*rest}",
|
||||
get(download_attachment),
|
||||
)
|
||||
.route(
|
||||
"/api/view/attachment/{id}/{idx}/{*rest}",
|
||||
get(view_attachment),
|
||||
)
|
||||
.route("/api/cid/{id}/{cid}", get(view_cid))
|
||||
.route("/api/ws", any(start_ws))
|
||||
.route_service("/api/graphql/ws", GraphQLSubscription::new(schema.clone()))
|
||||
.route(
|
||||
"/api/graphql/",
|
||||
get(graphiql).post_service(GraphQL::new(schema.clone())),
|
||||
)
|
||||
.with_state(AppState {
|
||||
nm,
|
||||
connection_tracker,
|
||||
})
|
||||
.layer(
|
||||
TraceLayer::new_for_http()
|
||||
.make_span_with(DefaultMakeSpan::default().include_headers(true)),
|
||||
);
|
||||
|
||||
let listener = TcpListener::bind(SocketAddr::from(([0, 0, 0, 0], port)))
|
||||
.await
|
||||
.unwrap();
|
||||
tracing::info!("listening on {}", listener.local_addr().unwrap());
|
||||
axum::serve(
|
||||
listener,
|
||||
app.into_make_service_with_connect_info::<SocketAddr>(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -2,10 +2,12 @@ use std::{fmt, str::FromStr};
|
||||
|
||||
use async_graphql::{
|
||||
connection::{self, Connection, Edge, OpaqueCursor},
|
||||
Context, EmptySubscription, Enum, Error, FieldResult, InputObject, Object, Schema,
|
||||
SimpleObject, Union,
|
||||
futures_util::Stream,
|
||||
Context, Enum, Error, FieldResult, InputObject, Object, Schema, SimpleObject, Subscription,
|
||||
Union,
|
||||
};
|
||||
use cacher::FilesystemCacher;
|
||||
use futures::stream;
|
||||
use letterbox_notmuch::Notmuch;
|
||||
use log::info;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -289,7 +291,6 @@ impl QueryRoot {
|
||||
build_info::build_info!(fn bi);
|
||||
Ok(letterbox_shared::build_version(bi))
|
||||
}
|
||||
#[instrument(skip_all, fields(query=query))]
|
||||
#[instrument(skip_all, fields(query=query, rid=request_id()))]
|
||||
async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
|
||||
let nm = ctx.data_unchecked::<Notmuch>();
|
||||
@@ -310,6 +311,7 @@ impl QueryRoot {
|
||||
info!("count {newsreader_query:?} newsreader count {newsreader_count} notmuch count {notmuch_count} tantivy count {tantivy_count} total {total}");
|
||||
Ok(total)
|
||||
}
|
||||
#[instrument(skip_all, fields(query=query, rid=request_id()))]
|
||||
async fn catchup<'ctx>(
|
||||
&self,
|
||||
ctx: &Context<'ctx>,
|
||||
@@ -317,32 +319,7 @@ impl QueryRoot {
|
||||
) -> Result<Vec<String>, Error> {
|
||||
let nm = ctx.data_unchecked::<Notmuch>();
|
||||
let pool = ctx.data_unchecked::<PgPool>();
|
||||
let query: Query = query.parse()?;
|
||||
// TODO: implement optimized versions of fetching just IDs
|
||||
let newsreader_fut = newsreader_search(pool, None, None, None, None, &query);
|
||||
let notmuch_fut = notmuch_search(nm, None, None, None, None, &query);
|
||||
let (newsreader_results, notmuch_results) = join!(newsreader_fut, notmuch_fut);
|
||||
|
||||
let newsreader_results = newsreader_results?;
|
||||
let notmuch_results = notmuch_results?;
|
||||
info!(
|
||||
"newsreader_results ({}) notmuch_results ({})",
|
||||
newsreader_results.len(),
|
||||
notmuch_results.len(),
|
||||
);
|
||||
|
||||
let results: Vec<_> = newsreader_results
|
||||
.into_iter()
|
||||
.chain(notmuch_results)
|
||||
.collect();
|
||||
let ids = results
|
||||
.into_iter()
|
||||
.map(|r| match r {
|
||||
ThreadSummaryCursor::Newsreader(_, ts) => ts.thread,
|
||||
ThreadSummaryCursor::Notmuch(_, ts) => ts.thread,
|
||||
})
|
||||
.collect();
|
||||
Ok(ids)
|
||||
compute_catchup_ids(nm, pool, &query).await
|
||||
}
|
||||
|
||||
// TODO: this function doesn't get parallelism, possibly because notmuch is sync and blocks,
|
||||
@@ -588,9 +565,9 @@ async fn tantivy_search(
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub struct Mutation;
|
||||
pub struct MutationRoot;
|
||||
#[Object]
|
||||
impl Mutation {
|
||||
impl MutationRoot {
|
||||
#[instrument(skip_all, fields(query=query, unread=unread, rid=request_id()))]
|
||||
async fn set_read_status<'ctx>(
|
||||
&self,
|
||||
@@ -662,4 +639,51 @@ impl Mutation {
|
||||
}
|
||||
}
|
||||
|
||||
pub type GraphqlSchema = Schema<QueryRoot, Mutation, EmptySubscription>;
|
||||
pub struct SubscriptionRoot;
|
||||
#[Subscription]
|
||||
impl SubscriptionRoot {
|
||||
async fn values(&self, _ctx: &Context<'_>) -> Result<impl Stream<Item = usize>, Error> {
|
||||
Ok(stream::iter(0..10))
|
||||
}
|
||||
}
|
||||
|
||||
pub type GraphqlSchema = Schema<QueryRoot, MutationRoot, SubscriptionRoot>;
|
||||
|
||||
#[instrument(skip_all, fields(query=query))]
|
||||
pub async fn compute_catchup_ids(
|
||||
nm: &Notmuch,
|
||||
pool: &PgPool,
|
||||
query: &str,
|
||||
) -> Result<Vec<String>, Error> {
|
||||
let query: Query = query.parse()?;
|
||||
// TODO: implement optimized versions of fetching just IDs
|
||||
let newsreader_fut = newsreader_search(pool, None, None, None, None, &query);
|
||||
let notmuch_fut = notmuch_search(nm, None, None, None, None, &query);
|
||||
let (newsreader_results, notmuch_results) = join!(newsreader_fut, notmuch_fut);
|
||||
|
||||
let newsreader_results = newsreader_results?;
|
||||
let notmuch_results = notmuch_results?;
|
||||
info!(
|
||||
"newsreader_results ({}) notmuch_results ({})",
|
||||
newsreader_results.len(),
|
||||
notmuch_results.len(),
|
||||
);
|
||||
|
||||
let mut results: Vec<_> = newsreader_results
|
||||
.into_iter()
|
||||
.chain(notmuch_results)
|
||||
.collect();
|
||||
// The leading '-' is to reverse sort
|
||||
results.sort_by_key(|item| match item {
|
||||
ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
|
||||
ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
|
||||
});
|
||||
let ids = results
|
||||
.into_iter()
|
||||
.map(|r| match r {
|
||||
ThreadSummaryCursor::Newsreader(_, ts) => ts.thread,
|
||||
ThreadSummaryCursor::Notmuch(_, ts) => ts.thread,
|
||||
})
|
||||
.collect();
|
||||
Ok(ids)
|
||||
}
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
pub mod config;
|
||||
pub mod error;
|
||||
pub mod graphql;
|
||||
pub mod mail;
|
||||
pub mod newsreader;
|
||||
pub mod nm;
|
||||
pub mod ws;
|
||||
|
||||
#[cfg(feature = "tantivy")]
|
||||
pub mod tantivy;
|
||||
|
||||
@@ -18,6 +19,7 @@ use std::{
|
||||
use async_trait::async_trait;
|
||||
use cacher::{Cacher, FilesystemCacher};
|
||||
use css_inline::{CSSInliner, InlineError, InlineOptions};
|
||||
pub use error::ServerError;
|
||||
use linkify::{LinkFinder, LinkKind};
|
||||
use log::{debug, error, info, warn};
|
||||
use lol_html::{
|
||||
@@ -33,7 +35,6 @@ use thiserror::Error;
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
error::ServerError,
|
||||
graphql::{Corpus, ThreadSummary},
|
||||
newsreader::is_newsreader_thread,
|
||||
nm::is_notmuch_thread_or_id,
|
||||
@@ -318,13 +319,16 @@ impl<'c> Transformer for SlurpContents<'c> {
|
||||
} else {
|
||||
let resp = reqwest::get(link.as_str()).await?;
|
||||
let status = resp.status();
|
||||
if status.is_server_error() || retryable_status.contains(&status) {
|
||||
return Err(TransformError::RetryableHttpStatusError(
|
||||
status,
|
||||
link.to_string(),
|
||||
));
|
||||
if status.is_server_error() {
|
||||
error!("status error for {link}: {status}");
|
||||
return Ok(html.to_string());
|
||||
}
|
||||
if retryable_status.contains(&status) {
|
||||
error!("retryable error for {link}: {status}");
|
||||
return Ok(html.to_string());
|
||||
}
|
||||
if !status.is_success() {
|
||||
error!("unsuccessful for {link}: {status}");
|
||||
return Ok(html.to_string());
|
||||
}
|
||||
let body = resp.text().await?;
|
||||
@@ -438,6 +442,34 @@ pub fn sanitize_html(
|
||||
}
|
||||
};
|
||||
let mut element_content_handlers = vec![
|
||||
// Remove width and height attributes on elements
|
||||
element!("[width],[height]", |el| {
|
||||
el.remove_attribute("width");
|
||||
el.remove_attribute("height");
|
||||
Ok(())
|
||||
}),
|
||||
// Remove width and height values from inline styles
|
||||
element!("[style]", |el| {
|
||||
let style = el.get_attribute("style").unwrap();
|
||||
let style = style
|
||||
.split(";")
|
||||
.filter(|s| {
|
||||
let Some((k, _)) = s.split_once(':') else {
|
||||
return true;
|
||||
};
|
||||
match k {
|
||||
"width" | "max-width" | "min-width" | "height" | "max-height"
|
||||
| "min-height" => false,
|
||||
_ => true,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join(";");
|
||||
if let Err(e) = el.set_attribute("style", &style) {
|
||||
error!("Failed to set style attribute: {e}");
|
||||
}
|
||||
Ok(())
|
||||
}),
|
||||
// Open links in new tab
|
||||
element!("a[href]", |el| {
|
||||
el.set_attribute("target", "_blank").unwrap();
|
||||
@@ -910,3 +942,21 @@ async fn clean_title(title: &str) -> Result<String, ServerError> {
|
||||
}
|
||||
Ok(title)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{SanitizeHtml, Transformer};
|
||||
|
||||
#[tokio::test]
|
||||
async fn strip_sizes() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let ss = SanitizeHtml {
|
||||
cid_prefix: "",
|
||||
base_url: &None,
|
||||
};
|
||||
let input = r#"<p width=16 height=16 style="color:blue;width:16px;height:16px;">This el has width and height attributes and inline styles</p>"#;
|
||||
let want = r#"<p style="color:blue;">This el has width and height attributes and inline styles</p>"#;
|
||||
let got = ss.transform(&None, input).await?;
|
||||
assert_eq!(got, want);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,113 +0,0 @@
|
||||
use std::{fs::File, io::Read};
|
||||
|
||||
use mailparse::{
|
||||
addrparse_header, dateparse, parse_mail, MailHeaderMap, MailParseError, ParsedMail,
|
||||
};
|
||||
use sqlx::postgres::PgPool;
|
||||
use thiserror::Error;
|
||||
use tracing::info;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum MailError {
|
||||
#[error("missing from header")]
|
||||
MissingFrom,
|
||||
#[error("missing from header display name")]
|
||||
MissingFromDisplayName,
|
||||
#[error("missing subject header")]
|
||||
MissingSubject,
|
||||
#[error("missing html part")]
|
||||
MissingHtmlPart,
|
||||
#[error("missing message ID")]
|
||||
MissingMessageId,
|
||||
#[error("missing date")]
|
||||
MissingDate,
|
||||
#[error("DB error {0}")]
|
||||
SqlxError(#[from] sqlx::Error),
|
||||
#[error("IO error {0}")]
|
||||
IOError(#[from] std::io::Error),
|
||||
#[error("mail parse error {0}")]
|
||||
MailParseError(#[from] MailParseError),
|
||||
}
|
||||
|
||||
pub async fn read_mail_to_db(pool: &PgPool, path: &str) -> Result<(), MailError> {
|
||||
let mut file = File::open(path)?;
|
||||
let mut buffer = Vec::new();
|
||||
file.read_to_end(&mut buffer)?;
|
||||
let m = parse_mail(&buffer)?;
|
||||
|
||||
let subject = m
|
||||
.headers
|
||||
.get_first_value("subject")
|
||||
.ok_or(MailError::MissingSubject)?;
|
||||
|
||||
let from = addrparse_header(
|
||||
m.headers
|
||||
.get_first_header("from")
|
||||
.ok_or(MailError::MissingFrom)?,
|
||||
)?;
|
||||
let from = from.extract_single_info().ok_or(MailError::MissingFrom)?;
|
||||
let name = from.display_name.ok_or(MailError::MissingFromDisplayName)?;
|
||||
let slug = name.to_lowercase().replace(' ', "-");
|
||||
let url = from.addr;
|
||||
let message_id = m
|
||||
.headers
|
||||
.get_first_value("Message-ID")
|
||||
.ok_or(MailError::MissingMessageId)?;
|
||||
let uid = &message_id;
|
||||
let feed_id = find_feed(&pool, &name, &slug, &url).await?;
|
||||
let date = dateparse(
|
||||
&m.headers
|
||||
.get_first_value("Date")
|
||||
.ok_or(MailError::MissingDate)?,
|
||||
)?;
|
||||
|
||||
println!("Feed: {feed_id} Subject: {}", subject);
|
||||
|
||||
if let Some(_m) = first_html(&m) {
|
||||
info!("add email {slug} {subject} {message_id} {date} {uid} {url}");
|
||||
} else {
|
||||
return Err(MailError::MissingHtmlPart.into());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
fn first_html<'m>(m: &'m ParsedMail<'m>) -> Option<&'m ParsedMail<'m>> {
|
||||
for ele in m.parts() {
|
||||
if ele.ctype.mimetype == "text/html" {
|
||||
return Some(ele);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
async fn find_feed(pool: &PgPool, name: &str, slug: &str, url: &str) -> Result<i32, MailError> {
|
||||
match sqlx::query!(
|
||||
r#"
|
||||
SELECT id
|
||||
FROM feed
|
||||
WHERE slug = $1
|
||||
"#,
|
||||
slug
|
||||
)
|
||||
.fetch_one(pool)
|
||||
.await
|
||||
{
|
||||
Err(sqlx::Error::RowNotFound) => {
|
||||
let rec = sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO feed ( name, slug, url, homepage, selector )
|
||||
VALUES ( $1, $2, $3, '', '' )
|
||||
RETURNING id
|
||||
"#,
|
||||
name,
|
||||
slug,
|
||||
url
|
||||
)
|
||||
.fetch_one(pool)
|
||||
.await?;
|
||||
|
||||
return Ok(rec.id);
|
||||
}
|
||||
Ok(rec) => return Ok(rec.id),
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
}
|
||||
@@ -338,7 +338,7 @@ pub async fn thread(
|
||||
}
|
||||
|
||||
fn email_addresses(
|
||||
path: &str,
|
||||
_path: &str,
|
||||
m: &ParsedMail,
|
||||
header_name: &str,
|
||||
) -> Result<Vec<Email>, ServerError> {
|
||||
@@ -349,9 +349,7 @@ fn email_addresses(
|
||||
for ma in mal.into_inner() {
|
||||
match ma {
|
||||
mailparse::MailAddr::Group(gi) => {
|
||||
if !gi.group_name.contains("ndisclosed") {
|
||||
println!("[{path}][{header_name}] Group: {gi}");
|
||||
}
|
||||
if !gi.group_name.contains("ndisclosed") {}
|
||||
}
|
||||
mailparse::MailAddr::Single(s) => addrs.push(Email {
|
||||
name: s.display_name,
|
||||
|
||||
35
server/src/ws.rs
Normal file
35
server/src/ws.rs
Normal file
@@ -0,0 +1,35 @@
|
||||
use std::{collections::HashMap, net::SocketAddr};
|
||||
|
||||
use axum::extract::ws::{Message, WebSocket};
|
||||
use letterbox_shared::WebsocketMessage;
|
||||
use tracing::{info, warn};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct ConnectionTracker {
|
||||
peers: HashMap<SocketAddr, WebSocket>,
|
||||
}
|
||||
|
||||
impl ConnectionTracker {
|
||||
pub async fn add_peer(&mut self, socket: WebSocket, who: SocketAddr) {
|
||||
warn!("adding {who:?} to connection tracker");
|
||||
self.peers.insert(who, socket);
|
||||
self.send_message_all(WebsocketMessage::RefreshMessages)
|
||||
.await;
|
||||
}
|
||||
pub async fn send_message_all(&mut self, msg: WebsocketMessage) {
|
||||
info!("send_message_all {msg}");
|
||||
let m = serde_json::to_string(&msg).expect("failed to json encode WebsocketMessage");
|
||||
let mut bad_peers = Vec::new();
|
||||
for (who, socket) in &mut self.peers.iter_mut() {
|
||||
if let Err(e) = socket.send(Message::Text(m.clone().into())).await {
|
||||
warn!("{:?} is bad, scheduling for removal: {e}", who);
|
||||
bad_peers.push(who.clone());
|
||||
}
|
||||
}
|
||||
|
||||
for b in bad_peers {
|
||||
info!("removing bad peer {b:?}");
|
||||
self.peers.remove(&b);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -11,6 +11,7 @@ version.workspace = true
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
build-info = "0.0.39"
|
||||
letterbox-notmuch = { version = "0.9.0", path = "../notmuch", registry = "xinu" }
|
||||
build-info = "0.0.40"
|
||||
letterbox-notmuch = { version = "0.15.0", path = "../notmuch", registry = "xinu" }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
strum_macros = "0.27.1"
|
||||
|
||||
@@ -13,8 +13,10 @@ pub struct SearchResult {
|
||||
pub total: usize,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct Message {}
|
||||
#[derive(Serialize, Deserialize, Debug, strum_macros::Display)]
|
||||
pub enum WebsocketMessage {
|
||||
RefreshMessages,
|
||||
}
|
||||
|
||||
pub mod urls {
|
||||
pub const MOUNT_POINT: &'static str = "/api";
|
||||
|
||||
@@ -9,7 +9,7 @@ repository.workspace = true
|
||||
version.workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
build-info-build = "0.0.39"
|
||||
build-info-build = "0.0.40"
|
||||
|
||||
[dev-dependencies]
|
||||
wasm-bindgen-test = "0.3.33"
|
||||
@@ -28,14 +28,18 @@ graphql_client = "0.14.0"
|
||||
thiserror = "2.0.0"
|
||||
gloo-net = { version = "0.6.0", features = ["json", "serde_json"] }
|
||||
human_format = "1.1.0"
|
||||
build-info = "0.0.39"
|
||||
build-info = "0.0.40"
|
||||
wasm-bindgen = "=0.2.100"
|
||||
uuid = { version = "1.13.1", features = [
|
||||
"js",
|
||||
] } # direct dep to set js feature, prevents Rng issues
|
||||
letterbox-shared = { version = "0.9.0", path = "../shared", registry = "xinu" }
|
||||
letterbox-notmuch = { version = "0.9.0", path = "../notmuch", registry = "xinu" }
|
||||
letterbox-shared = { version = "0.15.0", path = "../shared", registry = "xinu" }
|
||||
letterbox-notmuch = { version = "0.15.0", path = "../notmuch", registry = "xinu" }
|
||||
seed_hooks = { version = "0.4.0", registry = "xinu" }
|
||||
strum_macros = "0.27.1"
|
||||
gloo-console = "0.3.0"
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||
wasm-sockets = "1.0.0"
|
||||
|
||||
[package.metadata.wasm-pack.profile.release]
|
||||
wasm-opt = ['-Os']
|
||||
@@ -49,4 +53,6 @@ features = [
|
||||
"MediaQueryList",
|
||||
"Navigator",
|
||||
"Window",
|
||||
"History",
|
||||
"ScrollRestoration",
|
||||
]
|
||||
|
||||
@@ -6,6 +6,10 @@ release = false
|
||||
address = "0.0.0.0"
|
||||
port = 6758
|
||||
|
||||
[[proxy]]
|
||||
ws = true
|
||||
backend = "ws://localhost:9345/api/ws"
|
||||
|
||||
[[proxy]]
|
||||
backend = "http://localhost:9345/api/"
|
||||
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
// - it's useful when you want to check your code with `cargo make verify`
|
||||
// but some rules are too "annoying" or are not applicable for your case.)
|
||||
#![allow(clippy::wildcard_imports)]
|
||||
// Until https://github.com/rust-lang/rust/issues/138762 is addressed in dependencies
|
||||
#![allow(wasm_c_abi)]
|
||||
|
||||
use log::Level;
|
||||
use seed::App;
|
||||
@@ -11,6 +13,7 @@ mod consts;
|
||||
mod graphql;
|
||||
mod state;
|
||||
mod view;
|
||||
mod websocket;
|
||||
|
||||
fn main() {
|
||||
// This provides better error messages in debug mode.
|
||||
@@ -18,6 +21,9 @@ fn main() {
|
||||
#[cfg(debug_assertions)]
|
||||
console_error_panic_hook::set_once();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
let lvl = Level::Debug;
|
||||
#[cfg(not(debug_assertions))]
|
||||
let lvl = Level::Info;
|
||||
console_log::init_with_level(lvl).expect("failed to initialize console logging");
|
||||
// Mount the `app` to the element with the `id` "app".
|
||||
|
||||
115
web/src/state.rs
115
web/src/state.rs
@@ -1,6 +1,7 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use graphql_client::GraphQLQuery;
|
||||
use letterbox_shared::WebsocketMessage;
|
||||
use log::{debug, error, info, warn};
|
||||
use seed::{prelude::*, *};
|
||||
use thiserror::Error;
|
||||
@@ -11,6 +12,7 @@ use crate::{
|
||||
consts::SEARCH_RESULTS_PER_PAGE,
|
||||
graphql,
|
||||
graphql::{front_page_query::*, send_graphql, show_thread_query::*},
|
||||
websocket,
|
||||
};
|
||||
|
||||
/// Used to fake the unread string while in development
|
||||
@@ -29,16 +31,21 @@ pub fn unread_query() -> &'static str {
|
||||
pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
|
||||
let version = letterbox_shared::build_version(bi);
|
||||
info!("Build Info: {}", version);
|
||||
// Disable restoring to scroll position when navigating
|
||||
window()
|
||||
.history()
|
||||
.expect("couldn't get history")
|
||||
.set_scroll_restoration(web_sys::ScrollRestoration::Manual)
|
||||
.expect("failed to set scroll restoration to manual");
|
||||
if url.hash().is_none() {
|
||||
orders.request_url(urls::search(unread_query(), 0));
|
||||
} else {
|
||||
orders.notify(subs::UrlRequested::new(url));
|
||||
orders.request_url(url.clone());
|
||||
};
|
||||
orders.stream(streams::window_event(Ev::Resize, |_| Msg::OnResize));
|
||||
// TODO(wathiede): only do this while viewing the index? Or maybe add a new message that force
|
||||
// 'notmuch new' on the server periodically?
|
||||
orders.stream(streams::interval(30_000, || Msg::RefreshStart));
|
||||
orders.subscribe(on_url_changed);
|
||||
//orders.stream(streams::interval(30_000, || Msg::RefreshStart));
|
||||
orders.subscribe(Msg::OnUrlChanged);
|
||||
orders.stream(streams::window_event(Ev::Scroll, |_| Msg::WindowScrolled));
|
||||
|
||||
build_info::build_info!(fn bi);
|
||||
@@ -54,18 +61,23 @@ pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
|
||||
server: None,
|
||||
},
|
||||
catchup: None,
|
||||
last_url: Url::current(),
|
||||
websocket: websocket::init(&mut orders.proxy(Msg::WebSocket)),
|
||||
}
|
||||
}
|
||||
|
||||
fn on_url_changed(uc: subs::UrlChanged) -> Msg {
|
||||
let mut url = uc.0;
|
||||
fn on_url_changed(old: &Url, mut new: Url) -> Msg {
|
||||
let did_change = *old != new;
|
||||
let mut messages = Vec::new();
|
||||
if did_change {
|
||||
messages.push(Msg::ScrollToTop)
|
||||
}
|
||||
info!(
|
||||
"url changed '{}', history {}",
|
||||
url,
|
||||
"url changed\nold '{old}'\nnew '{new}', history {}",
|
||||
history().length().unwrap_or(0)
|
||||
);
|
||||
let hpp = url.remaining_hash_path_parts();
|
||||
match hpp.as_slice() {
|
||||
let hpp = new.remaining_hash_path_parts();
|
||||
let msg = match hpp.as_slice() {
|
||||
["t", tid] => Msg::ShowThreadRequest {
|
||||
thread_id: tid.to_string(),
|
||||
},
|
||||
@@ -102,11 +114,14 @@ fn on_url_changed(uc: subs::UrlChanged) -> Msg {
|
||||
last: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
messages.push(msg);
|
||||
Msg::MultiMsg(messages)
|
||||
}
|
||||
|
||||
// `update` describes how to handle each `Msg`.
|
||||
pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
info!("update({})", msg);
|
||||
match msg {
|
||||
Msg::Noop => {}
|
||||
Msg::RefreshStart => {
|
||||
@@ -132,7 +147,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
orders.perform_cmd(async move { Msg::Refresh });
|
||||
}
|
||||
Msg::Refresh => {
|
||||
orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
|
||||
orders.request_url(Url::current());
|
||||
}
|
||||
Msg::Reload => {
|
||||
window()
|
||||
@@ -140,7 +155,10 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
.reload()
|
||||
.expect("failed to reload window");
|
||||
}
|
||||
Msg::OnResize => (),
|
||||
Msg::OnUrlChanged(new_url) => {
|
||||
orders.send_msg(on_url_changed(&model.last_url, new_url.0.clone()));
|
||||
model.last_url = new_url.0;
|
||||
}
|
||||
|
||||
Msg::NextPage => {
|
||||
match &model.context {
|
||||
@@ -182,10 +200,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
};
|
||||
}
|
||||
Msg::GoToSearchResults => {
|
||||
let url = urls::search(&model.query, 0);
|
||||
info!("GoToSearchResults Start");
|
||||
orders.request_url(url);
|
||||
info!("GoToSearchResults End");
|
||||
orders.send_msg(Msg::SearchQuery(model.query.clone()));
|
||||
}
|
||||
|
||||
Msg::UpdateQuery(query) => model.query = query,
|
||||
@@ -280,7 +295,9 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
)
|
||||
});
|
||||
}
|
||||
Msg::FrontPageResult(Err(e)) => error!("error FrontPageResult: {e:?}"),
|
||||
Msg::FrontPageResult(Err(e)) => {
|
||||
error!("error FrontPageResult: {e:?}");
|
||||
}
|
||||
Msg::FrontPageResult(Ok(graphql_client::Response {
|
||||
data: None,
|
||||
errors: None,
|
||||
@@ -308,7 +325,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
info!("pager {:#?}", data.search.page_info);
|
||||
let selected_threads = 'context: {
|
||||
if let Context::SearchResult {
|
||||
results,
|
||||
@@ -536,7 +552,12 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
.expect("failed to copy to clipboard");
|
||||
});
|
||||
}
|
||||
Msg::ScrollToTop => {
|
||||
info!("scrolling to the top");
|
||||
web_sys::window().unwrap().scroll_to_with_x_and_y(0., 0.);
|
||||
}
|
||||
Msg::WindowScrolled => {
|
||||
// TODO: model.content_el doesn't go to None like it should when a DOM is recreated and the refrenced element goes away
|
||||
if let Some(el) = model.content_el.get() {
|
||||
let ih = window()
|
||||
.inner_height()
|
||||
@@ -576,7 +597,8 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
model.read_completion_ratio = ratio;
|
||||
}
|
||||
Msg::UpdateServerVersion(version) => {
|
||||
if version != model.versions.client {
|
||||
// Only git versions contain dash, don't autoreload there
|
||||
if !version.contains('-') && version != model.versions.client {
|
||||
warn!(
|
||||
"Server ({}) and client ({}) version mismatch, reloading",
|
||||
version, model.versions.client
|
||||
@@ -586,19 +608,17 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
model.versions.server = Some(version);
|
||||
}
|
||||
|
||||
Msg::StartCatchup => {
|
||||
Msg::CatchupStart => {
|
||||
let query = if model.query.contains("is:unread") {
|
||||
model.query.to_string()
|
||||
} else {
|
||||
format!("{} is:unread", model.query)
|
||||
};
|
||||
info!("starting catchup mode w/ {}", query);
|
||||
orders.send_msg(Msg::ScrollToTop);
|
||||
orders.send_msg(Msg::CatchupRequest { query });
|
||||
}
|
||||
Msg::CatchupKeepUnread => {
|
||||
if let Some(thread_id) = current_thread_id(&model.context) {
|
||||
orders.send_msg(Msg::SetUnread(thread_id, true));
|
||||
};
|
||||
orders.send_msg(Msg::CatchupNext);
|
||||
}
|
||||
Msg::CatchupMarkAsRead => {
|
||||
@@ -608,23 +628,52 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
orders.send_msg(Msg::CatchupNext);
|
||||
}
|
||||
Msg::CatchupNext => {
|
||||
orders.send_msg(Msg::ScrollToTop);
|
||||
let Some(catchup) = &mut model.catchup else {
|
||||
orders.send_msg(Msg::GoToSearchResults);
|
||||
return;
|
||||
};
|
||||
let Some(idx) = catchup.items.iter().position(|i| !i.seen) else {
|
||||
let Some(thread_id) = current_thread_id(&model.context) else {
|
||||
return;
|
||||
};
|
||||
let Some(idx) = catchup
|
||||
.items
|
||||
.iter()
|
||||
.inspect(|i| info!("i {i:?} thread_id {thread_id}"))
|
||||
.position(|i| i.id == thread_id)
|
||||
else {
|
||||
// All items have been seen
|
||||
orders.send_msg(Msg::CatchupExit);
|
||||
orders.send_msg(Msg::GoToSearchResults);
|
||||
return;
|
||||
};
|
||||
catchup.items[idx].seen = true;
|
||||
if idx < catchup.items.len() - 1 {
|
||||
// Reached last item
|
||||
orders.request_url(urls::thread(&catchup.items[idx + 1].id));
|
||||
return;
|
||||
} else {
|
||||
orders.send_msg(Msg::CatchupExit);
|
||||
orders.send_msg(Msg::GoToSearchResults);
|
||||
return;
|
||||
};
|
||||
}
|
||||
Msg::CatchupExit => {
|
||||
orders.send_msg(Msg::ScrollToTop);
|
||||
model.catchup = None;
|
||||
}
|
||||
|
||||
Msg::WebSocket(ws) => {
|
||||
websocket::update(ws, &mut model.websocket, &mut orders.proxy(Msg::WebSocket));
|
||||
while let Some(msg) = model.websocket.updates.pop_front() {
|
||||
orders.send_msg(Msg::WebsocketMessage(msg));
|
||||
}
|
||||
}
|
||||
Msg::WebsocketMessage(msg) => {
|
||||
match msg {
|
||||
WebsocketMessage::RefreshMessages => orders.send_msg(Msg::Refresh),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -656,6 +705,8 @@ pub struct Model {
|
||||
pub content_el: ElRef<HtmlElement>,
|
||||
pub versions: Version,
|
||||
pub catchup: Option<Catchup>,
|
||||
pub last_url: Url,
|
||||
pub websocket: websocket::Model,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -696,6 +747,7 @@ pub struct Catchup {
|
||||
pub items: Vec<CatchupItem>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CatchupItem {
|
||||
pub id: String,
|
||||
pub seen: bool,
|
||||
@@ -714,20 +766,22 @@ pub enum RefreshingState {
|
||||
Error(String),
|
||||
}
|
||||
// `Msg` describes the different events you can modify state with.
|
||||
#[derive(strum_macros::Display)]
|
||||
pub enum Msg {
|
||||
Noop,
|
||||
// Tell the client to refresh its state
|
||||
Refresh,
|
||||
// Tell the client to reload whole page from server
|
||||
Reload,
|
||||
// Window has changed size
|
||||
OnResize,
|
||||
// TODO: add GoToUrl
|
||||
OnUrlChanged(subs::UrlChanged),
|
||||
// Tell the server to update state
|
||||
RefreshStart,
|
||||
RefreshDone(Option<gloo_net::Error>),
|
||||
NextPage,
|
||||
PreviousPage,
|
||||
GoToSearchResults,
|
||||
|
||||
UpdateQuery(String),
|
||||
SearchQuery(String),
|
||||
|
||||
@@ -774,12 +828,17 @@ pub enum Msg {
|
||||
|
||||
CopyToClipboard(String),
|
||||
|
||||
ScrollToTop,
|
||||
WindowScrolled,
|
||||
SetProgress(f64),
|
||||
UpdateServerVersion(String),
|
||||
|
||||
StartCatchup,
|
||||
CatchupStart,
|
||||
CatchupKeepUnread,
|
||||
CatchupMarkAsRead,
|
||||
CatchupNext,
|
||||
CatchupExit,
|
||||
|
||||
WebSocket(websocket::Msg),
|
||||
WebsocketMessage(WebsocketMessage),
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ use chrono::{DateTime, Datelike, Duration, Local, Utc};
|
||||
use human_format::{Formatter, Scales};
|
||||
use itertools::Itertools;
|
||||
use letterbox_shared::compute_color;
|
||||
use log::{debug, error, info};
|
||||
use log::error;
|
||||
use seed::{prelude::*, *};
|
||||
use seed_hooks::{state_access::CloneState, topo, use_state, StateAccessEventHandlers};
|
||||
use web_sys::{HtmlElement, HtmlInputElement};
|
||||
@@ -21,6 +21,8 @@ use crate::{
|
||||
const MAX_RAW_MESSAGE_SIZE: usize = 100_000;
|
||||
|
||||
mod tw_classes {
|
||||
use seed::{prelude::*, *};
|
||||
|
||||
pub const TAG: &[&str] = &[
|
||||
"rounded-md",
|
||||
"px-2",
|
||||
@@ -37,22 +39,30 @@ mod tw_classes {
|
||||
"text-xs",
|
||||
"[text-shadow:_0_1px_0_rgb(0_0_0_/_40%)]",
|
||||
];
|
||||
pub const BUTTON: &[&str] = &[
|
||||
"bg-neutral-900",
|
||||
"rounded-md",
|
||||
"p-2",
|
||||
"border",
|
||||
"border-neutral-700",
|
||||
"text-center",
|
||||
"text-sm",
|
||||
"transition-all",
|
||||
"shadow-md",
|
||||
"hover:shadow-lg",
|
||||
"hover:bg-neutral-700",
|
||||
"disabled:pointer-events-none",
|
||||
"disabled:opacity-50",
|
||||
"disabled:shadow-none",
|
||||
];
|
||||
|
||||
// TODO: should this be a builder pattern?
|
||||
pub fn button() -> seed::Attrs {
|
||||
button_with_color("bg-neutral-900", "hover:bg-neutral-700")
|
||||
}
|
||||
|
||||
pub fn button_with_color<T: ToClasses>(bg: T, hover: T) -> seed::Attrs {
|
||||
C![
|
||||
"rounded-md",
|
||||
"p-2",
|
||||
"border",
|
||||
"border-neutral-700",
|
||||
"text-center",
|
||||
"text-sm",
|
||||
"transition-all",
|
||||
"shadow-md",
|
||||
"hover:shadow-lg",
|
||||
"disabled:pointer-events-none",
|
||||
"disabled:opacity-50",
|
||||
"disabled:shadow-none",
|
||||
bg,
|
||||
hover,
|
||||
]
|
||||
}
|
||||
|
||||
pub const CHECKBOX: &[&str] = &[
|
||||
"w-8",
|
||||
@@ -83,13 +93,13 @@ pub fn view(model: &Model) -> Node<Msg> {
|
||||
} => {
|
||||
if let Some(catchup) = &model.catchup {
|
||||
catchup_view(
|
||||
thread(thread_data, open_messages, &model.content_el),
|
||||
thread(thread_data, open_messages, &model.content_el, true),
|
||||
&catchup.items,
|
||||
model.read_completion_ratio,
|
||||
)
|
||||
} else {
|
||||
normal_view(
|
||||
thread(thread_data, open_messages, &model.content_el),
|
||||
thread(thread_data, open_messages, &model.content_el, false),
|
||||
&model.versions,
|
||||
&model.query,
|
||||
&model.refreshing_state,
|
||||
@@ -104,13 +114,13 @@ pub fn view(model: &Model) -> Node<Msg> {
|
||||
} => {
|
||||
if let Some(catchup) = &model.catchup {
|
||||
catchup_view(
|
||||
news_post(post, &model.content_el),
|
||||
news_post(post, &model.content_el, true),
|
||||
&catchup.items,
|
||||
model.read_completion_ratio,
|
||||
)
|
||||
} else {
|
||||
normal_view(
|
||||
news_post(post, &model.content_el),
|
||||
news_post(post, &model.content_el, false),
|
||||
&model.versions,
|
||||
&model.query,
|
||||
&model.refreshing_state,
|
||||
@@ -188,19 +198,19 @@ fn catchup_view(
|
||||
"p-4",
|
||||
"border-b",
|
||||
"border-gray-500",
|
||||
"bg-black",
|
||||
"bg-black/50",
|
||||
],
|
||||
div![
|
||||
C!["absolute", "right-4", "text-gray-500"],
|
||||
C!["absolute", "top-0", "right-4", "text-gray-500", "p-4"],
|
||||
span![i![C!["fas", "fa-x"]]],
|
||||
ev(Ev::Click, move |_| Msg::GoToSearchResults)
|
||||
ev(Ev::Click, move |_| Msg::CatchupExit)
|
||||
],
|
||||
h1![
|
||||
C!["text-center"],
|
||||
format!("{} left ", items.iter().filter(|i| !i.seen).count(),)
|
||||
]
|
||||
],
|
||||
div![C!["mt-12", "mb-4"], content],
|
||||
div![C!["mt-12", "mb-20"], content],
|
||||
div![
|
||||
C![
|
||||
"fixed",
|
||||
@@ -213,17 +223,28 @@ fn catchup_view(
|
||||
"p-4",
|
||||
"border-t",
|
||||
"border-gray-500",
|
||||
"bg-black",
|
||||
"bg-black/50",
|
||||
],
|
||||
button![
|
||||
C![&tw_classes::BUTTON],
|
||||
"Keep unread",
|
||||
ev(Ev::Click, move |_| Msg::CatchupKeepUnread)
|
||||
tw_classes::button(),
|
||||
span![i![C!["far", "fa-envelope"]]],
|
||||
span![C!["pl-2"], "Keep unread"],
|
||||
ev(Ev::Click, |_| Msg::CatchupKeepUnread)
|
||||
],
|
||||
button![
|
||||
C![&tw_classes::BUTTON, "bg-green-500"],
|
||||
"Mark as read",
|
||||
ev(Ev::Click, move |_| Msg::CatchupMarkAsRead)
|
||||
tw_classes::button(),
|
||||
span![i![C!["fas", "fa-house"]]],
|
||||
span![C!["pl-2"], "Go home"],
|
||||
ev(Ev::Click, |_| Msg::MultiMsg(vec![
|
||||
Msg::CatchupExit,
|
||||
Msg::GoToSearchResults
|
||||
]))
|
||||
],
|
||||
button![
|
||||
tw_classes::button_with_color("bg-green-800", "hover:bg-green-700"),
|
||||
span![i![C!["far", "fa-envelope-open"]]],
|
||||
span![C!["pl-2"], "Mark as read"],
|
||||
ev(Ev::Click, |_| Msg::CatchupMarkAsRead)
|
||||
]
|
||||
],
|
||||
reading_progress(read_completion_ratio)
|
||||
@@ -252,6 +273,13 @@ fn search_results(
|
||||
tags.remove(idx);
|
||||
};
|
||||
let is_unread = unread_idx.is_some();
|
||||
let mut title_break = None;
|
||||
const TITLE_LENGTH_WRAP_LIMIT: usize = 40;
|
||||
for w in r.subject.split_whitespace() {
|
||||
if w.len() > TITLE_LENGTH_WRAP_LIMIT {
|
||||
title_break = Some(C!["break-all", "text-pretty"]);
|
||||
}
|
||||
}
|
||||
div![
|
||||
C![
|
||||
"flex",
|
||||
@@ -294,7 +322,7 @@ fn search_results(
|
||||
attrs! {
|
||||
At::Href => urls::thread(&tid)
|
||||
},
|
||||
div![&r.subject],
|
||||
div![title_break, &r.subject],
|
||||
span![C!["text-xs"], pretty_authors(&r.authors)],
|
||||
div![
|
||||
C!["flex", "flex-wrap", "justify-between"],
|
||||
@@ -440,48 +468,54 @@ fn search_toolbar(
|
||||
if let Some(tri) = tri.get() {
|
||||
tri.set_indeterminate(indeterminate);
|
||||
}
|
||||
let catchup = div![button![
|
||||
tw_classes::button(),
|
||||
attrs! {At::Title => "Catch up"},
|
||||
span![i![C!["far", "fa-eye"]]],
|
||||
span![C!["pl-2", "hidden", "md:inline"], "Catch-up"],
|
||||
ev(Ev::Click, |_| Msg::CatchupStart)
|
||||
]];
|
||||
let tristate_input = div![
|
||||
C!["flex", "items-center", "mr-4"],
|
||||
input![
|
||||
&tri,
|
||||
C![&tw_classes::CHECKBOX],
|
||||
attrs! {
|
||||
At::Type=>"checkbox",
|
||||
},
|
||||
IF!(all_selected=>attrs!{At::Checked=>true})
|
||||
],
|
||||
ev(Ev::Input, move |_| {
|
||||
if all_selected {
|
||||
Msg::SelectionSetNone
|
||||
} else {
|
||||
Msg::SelectionSetAll
|
||||
}
|
||||
}),
|
||||
];
|
||||
nav![
|
||||
C!["py-4", "flex", "w-full", "justify-between"],
|
||||
div![
|
||||
C!["gap-2", "flex", IF!(show_bulk_edit => "hidden")],
|
||||
div![button![
|
||||
C![&tw_classes::BUTTON],
|
||||
attrs! {At::Title => "Mark as read"},
|
||||
span![i![C!["far", "fa-eye"]]],
|
||||
span![C!["pl-2", "hidden", "md:inline"], "Catch-up"],
|
||||
ev(Ev::Click, |_| Msg::StartCatchup)
|
||||
]],
|
||||
&tristate_input,
|
||||
&catchup
|
||||
],
|
||||
div![
|
||||
C!["gap-2", "flex", IF!(!show_bulk_edit => "hidden")],
|
||||
div![
|
||||
C!["flex", "items-center", "mr-4"],
|
||||
input![
|
||||
tri,
|
||||
C![&tw_classes::CHECKBOX],
|
||||
attrs! {
|
||||
At::Type=>"checkbox",
|
||||
At::Checked=>all_selected,
|
||||
}
|
||||
],
|
||||
ev(Ev::Input, move |_| {
|
||||
if all_selected {
|
||||
Msg::SelectionSetNone
|
||||
} else {
|
||||
Msg::SelectionSetAll
|
||||
}
|
||||
}),
|
||||
],
|
||||
&tristate_input,
|
||||
&catchup,
|
||||
div![
|
||||
button![
|
||||
C![&tw_classes::BUTTON, "rounded-r-none"],
|
||||
tw_classes::button(),
|
||||
C!["rounded-r-none"],
|
||||
attrs! {At::Title => "Mark as read"},
|
||||
span![i![C!["far", "fa-envelope-open"]]],
|
||||
span![C!["pl-2", "hidden", "md:inline"], "Read"],
|
||||
ev(Ev::Click, |_| Msg::SelectionMarkAsRead)
|
||||
],
|
||||
button![
|
||||
C![&tw_classes::BUTTON, "rounded-l-none"],
|
||||
tw_classes::button(),
|
||||
C!["rounded-l-none"],
|
||||
attrs! {At::Title => "Mark as unread"},
|
||||
span![i![C!["far", "fa-envelope"]]],
|
||||
span![C!["pl-2", "hidden", "md:inline"], "Unread"],
|
||||
@@ -489,7 +523,8 @@ fn search_toolbar(
|
||||
]
|
||||
],
|
||||
div![button![
|
||||
C![&tw_classes::BUTTON, "text-red-500"],
|
||||
tw_classes::button(),
|
||||
C!["text-red-500"],
|
||||
attrs! {At::Title => "Mark as spam"},
|
||||
span![i![C!["far", "fa-hand"]]],
|
||||
span![C!["pl-2", "hidden", "md:inline"], "Spam"],
|
||||
@@ -503,13 +538,13 @@ fn search_toolbar(
|
||||
C!["flex", "gap-2", "items-center"],
|
||||
p![format!("{count} results")],
|
||||
button![
|
||||
C![&tw_classes::BUTTON],
|
||||
tw_classes::button(),
|
||||
IF!(!pager.has_previous_page => attrs!{ At::Disabled=>true }),
|
||||
"<",
|
||||
IF!(pager.has_previous_page => ev(Ev::Click, |_| Msg::PreviousPage)),
|
||||
],
|
||||
button![
|
||||
C![&tw_classes::BUTTON],
|
||||
tw_classes::button(),
|
||||
IF!(!pager.has_next_page => attrs!{ At::Disabled=>true }),
|
||||
">",
|
||||
IF!(pager.has_next_page => ev(Ev::Click, |_| Msg::NextPage))
|
||||
@@ -910,6 +945,13 @@ fn render_closed_header(msg: &ShowThreadQueryThreadOnEmailThreadMessages) -> Nod
|
||||
|
||||
fn message_render(msg: &ShowThreadQueryThreadOnEmailThreadMessages, open: bool) -> Node<Msg> {
|
||||
let expand_id = msg.id.clone();
|
||||
let from = match &msg.from {
|
||||
Some(ShowThreadQueryThreadOnEmailThreadMessagesFrom {
|
||||
addr: Some(addr), ..
|
||||
}) => Some(addr.to_string()),
|
||||
_ => None,
|
||||
};
|
||||
let from = from.map(|f| f.replace('.', "-").replace('@', "-"));
|
||||
div![
|
||||
C!["lg:mb-4"],
|
||||
div![
|
||||
@@ -929,7 +971,7 @@ fn message_render(msg: &ShowThreadQueryThreadOnEmailThreadMessages, open: bool)
|
||||
],
|
||||
IF!(open =>
|
||||
div![
|
||||
C!["bg-white", "text-black", "p-4", "min-w-full", "w-0","overflow-x-auto"],
|
||||
C!["bg-white", "text-black", "p-4", "min-w-full", "w-0","overflow-x-auto", from],
|
||||
match &msg.body {
|
||||
ShowThreadQueryThreadOnEmailThreadMessagesBody::UnhandledContentType(
|
||||
ShowThreadQueryThreadOnEmailThreadMessagesBodyOnUnhandledContentType { contents ,content_tree},
|
||||
@@ -1033,11 +1075,13 @@ fn render_attachements(
|
||||
]
|
||||
}
|
||||
|
||||
// TODO: add cathup_mode:bool and hide elements when true
|
||||
#[topo::nested]
|
||||
fn thread(
|
||||
thread: &ShowThreadQueryThreadOnEmailThread,
|
||||
open_messages: &HashSet<String>,
|
||||
content_el: &ElRef<HtmlElement>,
|
||||
catchup_mode: bool,
|
||||
) -> Node<Msg> {
|
||||
// TODO(wathiede): show per-message subject if it changes significantly from top-level subject
|
||||
let subject = if thread.subject.is_empty() {
|
||||
@@ -1064,17 +1108,25 @@ fn thread(
|
||||
let unread_thread_id = thread.thread_id.clone();
|
||||
let spam_add_thread_id = thread.thread_id.clone();
|
||||
let spam_unread_thread_id = thread.thread_id.clone();
|
||||
let mut title_break = None;
|
||||
const TITLE_LENGTH_WRAP_LIMIT: usize = 40;
|
||||
for w in subject.split_whitespace() {
|
||||
if w.len() > TITLE_LENGTH_WRAP_LIMIT {
|
||||
title_break = Some(C!["break-all", "text-pretty"]);
|
||||
}
|
||||
}
|
||||
div![
|
||||
C!["lg:p-4"],
|
||||
C!["lg:p-4", "max-w-4xl"],
|
||||
div![
|
||||
C!["p-4", "lg:p-0"],
|
||||
h3![C!["text-xl"], subject],
|
||||
h3![C!["text-xl"], title_break, subject],
|
||||
span![removable_tags_chiclet(&thread.thread_id, &tags)],
|
||||
div![
|
||||
IF!(!catchup_mode => div![
|
||||
C!["pt-4", "gap-2", "flex", "justify-around"],
|
||||
div![
|
||||
button![
|
||||
C![&tw_classes::BUTTON, "rounded-r-none"],
|
||||
tw_classes::button(),
|
||||
C!["rounded-r-none"],
|
||||
attrs! {At::Title => "Mark as read"},
|
||||
span![i![C!["far", "fa-envelope-open"]]],
|
||||
span![C!["pl-2"], "Read"],
|
||||
@@ -1084,7 +1136,8 @@ fn thread(
|
||||
])),
|
||||
],
|
||||
button![
|
||||
C![&tw_classes::BUTTON, "rounded-l-none"],
|
||||
tw_classes::button(),
|
||||
C!["rounded-l-none"],
|
||||
attrs! {At::Title => "Mark as unread"},
|
||||
span![i![C!["far", "fa-envelope"]]],
|
||||
span![C!["pl-2"], "Unread"],
|
||||
@@ -1095,7 +1148,8 @@ fn thread(
|
||||
],
|
||||
],
|
||||
div![button![
|
||||
C![&tw_classes::BUTTON, "text-red-500"],
|
||||
tw_classes::button(),
|
||||
C!["text-red-500"],
|
||||
attrs! {At::Title => "Spam"},
|
||||
span![i![C!["far", "fa-hand"]]],
|
||||
span![C!["pl-2"], "Spam"],
|
||||
@@ -1105,13 +1159,13 @@ fn thread(
|
||||
Msg::GoToSearchResults
|
||||
])),
|
||||
]]
|
||||
],
|
||||
]),
|
||||
],
|
||||
div![
|
||||
C!["lg:mt-4", "mail-thread"],
|
||||
el_ref(content_el),
|
||||
messages,
|
||||
click_to_top()
|
||||
IF!(!catchup_mode => click_to_top())
|
||||
],
|
||||
/* TODO(wathiede): plumb in orignal id
|
||||
a![
|
||||
@@ -1156,49 +1210,75 @@ fn view_header(
|
||||
false
|
||||
};
|
||||
let query = Url::decode_uri_component(query).unwrap_or("".to_string());
|
||||
let filter_all = String::new();
|
||||
let filter_unread = unread_query().to_string();
|
||||
let filter_news = "is:unread is:news".to_string();
|
||||
let filter_mail = "is:unread is:mail".to_string();
|
||||
let highlight_color = "bg-sky-800";
|
||||
|
||||
nav![
|
||||
C!["flex", "px-4", "pt-4", "overflow-hidden"],
|
||||
a![
|
||||
C![IF![is_error => "bg-red-500"], "rounded-r-none"],
|
||||
C![&tw_classes::BUTTON],
|
||||
span![i![C![
|
||||
"fa-solid",
|
||||
"fa-arrow-rotate-right",
|
||||
IF![is_loading => "animate-spin"],
|
||||
]]],
|
||||
ev(Ev::Click, |_| Msg::RefreshStart),
|
||||
C!["flex", "flex-col"],
|
||||
div![
|
||||
C!["flex-auto", "flex"],
|
||||
button![
|
||||
C![IF!(is_error => "bg-red-500"), "rounded-none"],
|
||||
tw_classes::button(),
|
||||
span![i![C![
|
||||
"fa-solid",
|
||||
"fa-arrow-rotate-right",
|
||||
IF![is_loading => "animate-spin"],
|
||||
]]],
|
||||
ev(Ev::Click, |_| Msg::RefreshStart),
|
||||
],
|
||||
button![
|
||||
IF!(query == filter_all => C![highlight_color]),
|
||||
tw_classes::button(),
|
||||
C!["grow", "rounded-none"],
|
||||
"All",
|
||||
ev(Ev::Click, |_| Msg::SearchQuery(filter_all)),
|
||||
],
|
||||
button![
|
||||
IF!(query == filter_unread => C![highlight_color]),
|
||||
tw_classes::button(),
|
||||
C!["grow", "rounded-none"],
|
||||
span![i![C!["far", "fa-envelope"]]],
|
||||
" Unread",
|
||||
ev(Ev::Click, |_| Msg::SearchQuery(filter_unread)),
|
||||
],
|
||||
button![
|
||||
IF!(query == filter_news => C![highlight_color]),
|
||||
tw_classes::button(),
|
||||
C!["grow", "rounded-none"],
|
||||
span![i![C!["far", "fa-envelope"]]],
|
||||
" News",
|
||||
ev(Ev::Click, |_| Msg::SearchQuery(filter_news)),
|
||||
],
|
||||
button![
|
||||
IF!(query == filter_mail => C![highlight_color]),
|
||||
tw_classes::button(),
|
||||
C!["grow", "rounded-none"],
|
||||
span![i![C!["far", "fa-envelope"]]],
|
||||
" Mail",
|
||||
ev(Ev::Click, |_| Msg::SearchQuery(filter_mail)),
|
||||
],
|
||||
],
|
||||
a![
|
||||
C![&tw_classes::BUTTON],
|
||||
C!["px-4", "rounded-none"],
|
||||
attrs! {
|
||||
At::Href => urls::search(unread_query(), 0)
|
||||
},
|
||||
"Unread",
|
||||
],
|
||||
a![
|
||||
C![&tw_classes::BUTTON],
|
||||
C!["px-4", "rounded-none"],
|
||||
attrs! {
|
||||
At::Href => urls::search("", 0)
|
||||
},
|
||||
"All",
|
||||
],
|
||||
input![
|
||||
C!["grow", "pl-2", "text-black", "rounded-r"],
|
||||
attrs! {
|
||||
At::Placeholder => "Search";
|
||||
At::AutoFocus => auto_focus_search.as_at_value();
|
||||
At::Value => query,
|
||||
},
|
||||
input_ev(Ev::Input, |q| Msg::UpdateQuery(q)),
|
||||
// Send search on enter.
|
||||
keyboard_ev(Ev::KeyUp, move |e| if e.key_code() == 0x0d {
|
||||
Msg::SearchQuery(query)
|
||||
} else {
|
||||
Msg::Noop
|
||||
}),
|
||||
div![
|
||||
C!["flex-auto", "flex"],
|
||||
input![
|
||||
C!["grow", "text-black", "p-2", "focus-visible:outline-0"],
|
||||
attrs! {
|
||||
At::Placeholder => "Search";
|
||||
At::AutoFocus => auto_focus_search.as_at_value();
|
||||
At::Value => query,
|
||||
},
|
||||
input_ev(Ev::Input, |q| Msg::UpdateQuery(q)),
|
||||
// Send search on enter.
|
||||
keyboard_ev(Ev::KeyUp, move |e| if e.key_code() == 0x0d {
|
||||
Msg::SearchQuery(query)
|
||||
} else {
|
||||
Msg::Noop
|
||||
}),
|
||||
]
|
||||
]
|
||||
]
|
||||
}
|
||||
@@ -1250,11 +1330,7 @@ pub fn view_tags(tags: &Option<Vec<Tag>>) -> Node<Msg> {
|
||||
],
|
||||
IF!(t.unread>0 => format!("{}", t.unread))
|
||||
],
|
||||
ev(Ev::Click, |_| {
|
||||
// Scroll window to the top when searching for a tag.
|
||||
info!("scrolling to the top because you clicked a tag");
|
||||
web_sys::window().unwrap().scroll_to_with_x_and_y(0., 0.);
|
||||
})
|
||||
ev(Ev::Click, |_| { Msg::ScrollToTop })
|
||||
]
|
||||
}
|
||||
fn matches(a: &[&str], b: &[&str]) -> usize {
|
||||
@@ -1334,7 +1410,13 @@ pub fn view_tags(tags: &Option<Vec<Tag>>) -> Node<Msg> {
|
||||
]
|
||||
]
|
||||
}
|
||||
fn news_post(post: &ShowThreadQueryThreadOnNewsPost, content_el: &ElRef<HtmlElement>) -> Node<Msg> {
|
||||
|
||||
// TODO: add cathup_mode:bool and hide elements when true
|
||||
fn news_post(
|
||||
post: &ShowThreadQueryThreadOnNewsPost,
|
||||
content_el: &ElRef<HtmlElement>,
|
||||
catchup_mode: bool,
|
||||
) -> Node<Msg> {
|
||||
let subject = &post.title;
|
||||
set_title(subject);
|
||||
let read_thread_id = post.thread_id.clone();
|
||||
@@ -1359,17 +1441,25 @@ fn news_post(post: &ShowThreadQueryThreadOnNewsPost, content_el: &ElRef<HtmlElem
|
||||
]
|
||||
}
|
||||
|
||||
let mut title_break = None;
|
||||
const TITLE_LENGTH_WRAP_LIMIT: usize = 40;
|
||||
for w in subject.split_whitespace() {
|
||||
if w.len() > TITLE_LENGTH_WRAP_LIMIT {
|
||||
title_break = Some(C!["break-all", "text-pretty"]);
|
||||
}
|
||||
}
|
||||
div![
|
||||
C!["lg:p-4", "max-w-4xl"],
|
||||
div![
|
||||
C!["p-4", "lg:p-0"],
|
||||
h3![C!["text-xl"], subject],
|
||||
h3![C!["text-xl"], title_break, subject],
|
||||
span![tag(format!("News/{}", post.slug))],
|
||||
div![
|
||||
IF!(!catchup_mode => div![
|
||||
C!["pt-4", "gap-2", "flex", "justify-around"],
|
||||
div![
|
||||
button![
|
||||
C![&tw_classes::BUTTON, "rounded-r-none"],
|
||||
tw_classes::button(),
|
||||
C!["rounded-r-none"],
|
||||
attrs! {At::Title => "Mark as read"},
|
||||
span![i![C!["far", "fa-envelope-open"]]],
|
||||
span![C!["pl-2"], "Read"],
|
||||
@@ -1379,7 +1469,8 @@ fn news_post(post: &ShowThreadQueryThreadOnNewsPost, content_el: &ElRef<HtmlElem
|
||||
])),
|
||||
],
|
||||
button![
|
||||
C![&tw_classes::BUTTON, "rounded-l-none"],
|
||||
tw_classes::button(),
|
||||
C!["rounded-l-none"],
|
||||
attrs! {At::Title => "Mark as unread"},
|
||||
span![i![C!["far", "fa-envelope"]]],
|
||||
span![C!["pl-2"], "Unread"],
|
||||
@@ -1391,7 +1482,7 @@ fn news_post(post: &ShowThreadQueryThreadOnNewsPost, content_el: &ElRef<HtmlElem
|
||||
],
|
||||
// Placeholder for symmetry with email view that has Spam button
|
||||
div![],
|
||||
],
|
||||
]),
|
||||
],
|
||||
div![
|
||||
C!["lg:mt-4"],
|
||||
@@ -1411,12 +1502,11 @@ fn news_post(post: &ShowThreadQueryThreadOnNewsPost, content_el: &ElRef<HtmlElem
|
||||
raw![&post.body]
|
||||
]
|
||||
],
|
||||
click_to_top(),
|
||||
IF!(!catchup_mode => click_to_top()),
|
||||
]
|
||||
}
|
||||
fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg> {
|
||||
let from = &post.site;
|
||||
// TODO: move avatar/favicon stuff to the server side and and come up with a solution for emails
|
||||
let id = post.thread_id.clone();
|
||||
let is_unread = !post.is_read;
|
||||
let url = &post.url;
|
||||
@@ -1454,7 +1544,7 @@ fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg>
|
||||
div![
|
||||
C!["flex", "gap-2", "pt-2", "text-sm"],
|
||||
a![
|
||||
C![&tw_classes::BUTTON],
|
||||
tw_classes::button(),
|
||||
attrs! {
|
||||
At::Href => post.url,
|
||||
At::Target => "_blank",
|
||||
@@ -1463,7 +1553,7 @@ fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg>
|
||||
i![C!["fas", "fa-up-right-from-square"]],
|
||||
],
|
||||
a![
|
||||
C![&tw_classes::BUTTON],
|
||||
tw_classes::button(),
|
||||
attrs! {
|
||||
At::Href => add_archive_url,
|
||||
At::Target => "_blank",
|
||||
@@ -1472,7 +1562,7 @@ fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg>
|
||||
i![C!["fas", "fa-plus"]],
|
||||
],
|
||||
a![
|
||||
C![&tw_classes::BUTTON],
|
||||
tw_classes::button(),
|
||||
attrs! {
|
||||
At::Href => view_archive_url,
|
||||
At::Target => "_blank",
|
||||
@@ -1495,12 +1585,12 @@ fn render_news_post_header(post: &ShowThreadQueryThreadOnNewsPost) -> Node<Msg>
|
||||
} else {
|
||||
"fa-envelope-open"
|
||||
},
|
||||
]]
|
||||
],
|
||||
ev(Ev::Click, move |e| {
|
||||
e.stop_propagation();
|
||||
Msg::SetUnread(id, !is_unread)
|
||||
})
|
||||
]],
|
||||
ev(Ev::Click, move |e| {
|
||||
e.stop_propagation();
|
||||
Msg::SetUnread(id, !is_unread)
|
||||
})
|
||||
]
|
||||
]
|
||||
}
|
||||
fn reading_progress(ratio: f64) -> Node<Msg> {
|
||||
@@ -1524,7 +1614,6 @@ fn reading_progress(ratio: f64) -> Node<Msg> {
|
||||
]
|
||||
}
|
||||
pub fn view_versions(versions: &Version) -> Node<Msg> {
|
||||
debug!("versions {versions:?}");
|
||||
aside![
|
||||
C!["p-2"],
|
||||
p![C!["uppercase", "font-bold"], "Versions"],
|
||||
@@ -1539,11 +1628,10 @@ pub fn view_versions(versions: &Version) -> Node<Msg> {
|
||||
|
||||
fn click_to_top() -> Node<Msg> {
|
||||
button![
|
||||
C![&tw_classes::BUTTON, "bg-red-500", "lg:m-0", "m-4"],
|
||||
tw_classes::button_with_color("bg-red-500", "hover:bg-neutral-700"),
|
||||
C!["lg:m-0", "m-4"],
|
||||
span!["Top"],
|
||||
span![i![C!["fas", "fa-arrow-turn-up"]]],
|
||||
ev(Ev::Click, |_| web_sys::window()
|
||||
.unwrap()
|
||||
.scroll_to_with_x_and_y(0., 0.))
|
||||
ev(Ev::Click, |_| Msg::ScrollToTop)
|
||||
]
|
||||
}
|
||||
|
||||
218
web/src/websocket.rs
Normal file
218
web/src/websocket.rs
Normal file
@@ -0,0 +1,218 @@
|
||||
use std::{collections::VecDeque, rc::Rc};
|
||||
|
||||
use letterbox_shared::WebsocketMessage;
|
||||
use log::{error, info};
|
||||
use seed::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[allow(dead_code)]
|
||||
mod wasm_sockets {
|
||||
use std::{cell::RefCell, rc::Rc};
|
||||
|
||||
use thiserror::Error;
|
||||
use web_sys::{CloseEvent, ErrorEvent};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct JsValue;
|
||||
#[derive(Debug)]
|
||||
pub enum ConnectionStatus {
|
||||
/// Connecting to a server
|
||||
Connecting,
|
||||
/// Connected to a server
|
||||
Connected,
|
||||
/// Disconnected from a server due to an error
|
||||
Error,
|
||||
/// Disconnected from a server without an error
|
||||
Disconnected,
|
||||
}
|
||||
#[derive(Debug)]
|
||||
pub struct EventClient {
|
||||
pub status: Rc<RefCell<ConnectionStatus>>,
|
||||
}
|
||||
impl EventClient {
|
||||
pub fn new(_: &str) -> Result<Self, WebSocketError> {
|
||||
todo!("this is a mock")
|
||||
}
|
||||
pub fn send_string(&self, _essage: &str) -> Result<(), JsValue> {
|
||||
todo!("this is a mock")
|
||||
}
|
||||
pub fn set_on_error(&mut self, _: Option<Box<dyn Fn(ErrorEvent)>>) {
|
||||
todo!("this is a mock")
|
||||
}
|
||||
pub fn set_on_connection(&mut self, _: Option<Box<dyn Fn(&EventClient)>>) {
|
||||
todo!("this is a mock")
|
||||
}
|
||||
pub fn set_on_close(&mut self, _: Option<Box<dyn Fn(CloseEvent)>>) {
|
||||
todo!("this is a mock")
|
||||
}
|
||||
pub fn set_on_message(&mut self, _: Option<Box<dyn Fn(&EventClient, Message)>>) {
|
||||
todo!("this is a mock")
|
||||
}
|
||||
}
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Message {
|
||||
Text(String),
|
||||
Binary(Vec<u8>),
|
||||
}
|
||||
#[derive(Debug, Clone, Error)]
|
||||
pub enum WebSocketError {}
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use wasm_sockets::{ConnectionStatus, EventClient, Message, WebSocketError};
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
use wasm_sockets::{ConnectionStatus, EventClient, Message, WebSocketError};
|
||||
use web_sys::CloseEvent;
|
||||
|
||||
/// Message from the server to the client.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct ServerMessage {
|
||||
pub id: usize,
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
/// Message from the client to the server.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct ClientMessage {
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
// Alternate endpoints kept for reference (older deployments):
//const WS_URL: &str = "wss://9000.z.xinu.tv/api/ws";
//const WS_URL: &str = "wss://9345.z.xinu.tv/api/graphql/ws";
// Websocket endpoint the client connects to.
// NOTE(review): hard-coded host/port — presumably deployment-specific; consider
// deriving from the page origin instead. TODO confirm.
const WS_URL: &str = "wss://6758.z.xinu.tv/api/ws";
|
||||
|
||||
// ------ ------
|
||||
// Model
|
||||
// ------ ------
|
||||
|
||||
/// State for the websocket connection to the server.
pub struct Model {
    // The live client. Stored so it outlives `init`; replaced with a fresh
    // client on reconnect (see `update`'s `ReconnectWebSocket` arm).
    web_socket: EventClient,
    // Handle keeping the backoff/reconnect stream alive; `None` while the
    // connection is healthy.
    web_socket_reconnector: Option<StreamHandle>,
    /// Messages received from the server, queued for the owner to drain.
    pub updates: VecDeque<WebsocketMessage>,
}
|
||||
|
||||
// ------ ------
|
||||
// Init
|
||||
// ------ ------
|
||||
|
||||
pub fn init(orders: &mut impl Orders<Msg>) -> Model {
|
||||
Model {
|
||||
web_socket: create_websocket(orders).unwrap(),
|
||||
web_socket_reconnector: None,
|
||||
updates: VecDeque::new(),
|
||||
}
|
||||
}
|
||||
|
||||
// ------ ------
|
||||
// Update
|
||||
// ------ ------
|
||||
|
||||
/// Websocket lifecycle events plus the one outbound command (`SendMessage`).
pub enum Msg {
    /// The connection finished opening.
    WebSocketOpened,
    /// A text frame arrived and was parsed into a `WebsocketMessage`.
    TextMessageReceived(WebsocketMessage),
    /// The connection closed; carries the browser `CloseEvent`.
    WebSocketClosed(CloseEvent),
    /// The connection failed with an error.
    WebSocketFailed,
    /// Emitted by the backoff stream; payload is the retry attempt count.
    ReconnectWebSocket(usize),
    /// Serialize and send a `ClientMessage` to the server.
    #[allow(dead_code)]
    SendMessage(ClientMessage),
}
|
||||
|
||||
pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
|
||||
match msg {
|
||||
Msg::WebSocketOpened => {
|
||||
model.web_socket_reconnector = None;
|
||||
info!("WebSocket connection is open now");
|
||||
}
|
||||
Msg::TextMessageReceived(msg) => {
|
||||
model.updates.push_back(msg);
|
||||
}
|
||||
Msg::WebSocketClosed(close_event) => {
|
||||
info!(
|
||||
r#"==================
|
||||
WebSocket connection was closed:
|
||||
Clean: {0}
|
||||
Code: {1}
|
||||
Reason: {2}
|
||||
=================="#,
|
||||
close_event.was_clean(),
|
||||
close_event.code(),
|
||||
close_event.reason()
|
||||
);
|
||||
|
||||
// Chrome doesn't invoke `on_error` when the connection is lost.
|
||||
if !close_event.was_clean() && model.web_socket_reconnector.is_none() {
|
||||
model.web_socket_reconnector = Some(
|
||||
orders.stream_with_handle(streams::backoff(None, Msg::ReconnectWebSocket)),
|
||||
);
|
||||
}
|
||||
}
|
||||
Msg::WebSocketFailed => {
|
||||
info!("WebSocket failed");
|
||||
if model.web_socket_reconnector.is_none() {
|
||||
model.web_socket_reconnector = Some(
|
||||
orders.stream_with_handle(streams::backoff(None, Msg::ReconnectWebSocket)),
|
||||
);
|
||||
}
|
||||
}
|
||||
Msg::ReconnectWebSocket(retries) => {
|
||||
info!("Reconnect attempt: {}", retries);
|
||||
model.web_socket = create_websocket(orders).unwrap();
|
||||
}
|
||||
Msg::SendMessage(msg) => {
|
||||
let txt = serde_json::to_string(&msg).unwrap();
|
||||
model.web_socket.send_string(&txt).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn create_websocket(orders: &impl Orders<Msg>) -> Result<EventClient, WebSocketError> {
|
||||
let msg_sender = orders.msg_sender();
|
||||
|
||||
let mut client = EventClient::new(WS_URL)?;
|
||||
|
||||
client.set_on_error(Some(Box::new(|error| {
|
||||
gloo_console::error!("WS: ", error);
|
||||
})));
|
||||
|
||||
let send = msg_sender.clone();
|
||||
client.set_on_connection(Some(Box::new(move |client: &EventClient| {
|
||||
info!("{:#?}", client.status);
|
||||
let msg = match *client.status.borrow() {
|
||||
ConnectionStatus::Connecting => {
|
||||
info!("Connecting...");
|
||||
None
|
||||
}
|
||||
ConnectionStatus::Connected => Some(Msg::WebSocketOpened),
|
||||
ConnectionStatus::Error => Some(Msg::WebSocketFailed),
|
||||
ConnectionStatus::Disconnected => {
|
||||
info!("Disconnected");
|
||||
None
|
||||
}
|
||||
};
|
||||
send(msg);
|
||||
})));
|
||||
|
||||
let send = msg_sender.clone();
|
||||
client.set_on_close(Some(Box::new(move |ev| {
|
||||
info!("WS: Connection closed");
|
||||
send(Some(Msg::WebSocketClosed(ev)));
|
||||
})));
|
||||
|
||||
let send = msg_sender.clone();
|
||||
client.set_on_message(Some(Box::new(move |_: &EventClient, msg: Message| {
|
||||
decode_message(msg, Rc::clone(&send))
|
||||
})));
|
||||
|
||||
Ok(client)
|
||||
}
|
||||
|
||||
fn decode_message(message: Message, msg_sender: Rc<dyn Fn(Option<Msg>)>) {
|
||||
match message {
|
||||
Message::Text(txt) => {
|
||||
let msg: WebsocketMessage = serde_json::from_str(&txt).unwrap_or_else(|e| {
|
||||
panic!("failed to parse json into WebsocketMessage: {e}\n'{txt}'")
|
||||
});
|
||||
msg_sender(Some(Msg::TextMessageReceived(msg)));
|
||||
}
|
||||
m => error!("unexpected message type received of {m:?}"),
|
||||
}
|
||||
}
|
||||
@@ -1,18 +1,18 @@
|
||||
html {
|
||||
background-color: black;
|
||||
background-color: black;
|
||||
}
|
||||
|
||||
.mail-thread a,
|
||||
.news-post a {
|
||||
color: var(--color-link) !important;
|
||||
text-decoration: underline;
|
||||
color: var(--color-link) !important;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.mail-thread br,
|
||||
.news-post br {
|
||||
display: block;
|
||||
margin-top: 1em;
|
||||
content: " ";
|
||||
display: block;
|
||||
margin-top: 1em;
|
||||
content: " ";
|
||||
}
|
||||
|
||||
.mail-thread h1,
|
||||
@@ -23,47 +23,62 @@ html {
|
||||
.news-post h2,
|
||||
.news-post h3,
|
||||
.news-post h4 {
|
||||
margin-top: 1em !important;
|
||||
margin-bottom: 1em !important;
|
||||
margin-top: 1em !important;
|
||||
margin-bottom: 1em !important;
|
||||
}
|
||||
|
||||
.mail-thread p,
|
||||
.news-post p {
|
||||
margin-bottom: 1em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.mail-thread pre,
|
||||
.news-post pre {
|
||||
font-family: monospace;
|
||||
background-color: #eee !important;
|
||||
padding: 0.5em;
|
||||
white-space: break-spaces;
|
||||
}
|
||||
|
||||
.mail-thread code,
|
||||
.news-post pre,
|
||||
.news-post code {
|
||||
font-family: monospace;
|
||||
background-color: #eee !important;
|
||||
padding: 0.5em !important;
|
||||
font-family: monospace;
|
||||
white-space: break-spaces;
|
||||
background-color: #eee !important;
|
||||
}
|
||||
|
||||
.mail-thread blockquote {
|
||||
padding-left: 1em;
|
||||
border-left: 2px solid #ddd;
|
||||
padding-left: 1em;
|
||||
border-left: 2px solid #ddd;
|
||||
}
|
||||
|
||||
.mail-thread ol,
|
||||
.mail-thread ul {
|
||||
margin-left: 2em;
|
||||
margin-left: 2em;
|
||||
}
|
||||
|
||||
.mail-thread .noreply-news-bloomberg-com a {
|
||||
background-color: initial !important;
|
||||
}
|
||||
|
||||
.mail-thread .noreply-news-bloomberg-com h2 {
|
||||
margin: 0 !important;
|
||||
padding: 0 !important;
|
||||
}
|
||||
|
||||
/* Hackaday figures have unreadable black on dark grey */
|
||||
.news-post figcaption.wp-caption-text {
|
||||
background-color: initial !important;
|
||||
background-color: initial !important;
|
||||
}
|
||||
|
||||
.news-post.site-nautilus .article-ad,
|
||||
.news-post.site-nautilus .primis-ad {
|
||||
display: none !important;
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
.news-post.site-slashdot .story-byline {
|
||||
display: block !important;
|
||||
height: initial !important;
|
||||
overflow: auto !important;
|
||||
position: static !important;
|
||||
}
|
||||
display: block !important;
|
||||
height: initial !important;
|
||||
overflow: auto !important;
|
||||
position: static !important;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user