Compare commits
36 Commits
48f66c7096...news
| SHA1 |
|---|
| 1106377550 |
| b5468bced2 |
| 01cbe6c037 |
| d0a02c2f61 |
| c499672dde |
| 3aa0b94db4 |
| cdb64ed952 |
| 834efc5c94 |
| 79db94f67f |
| ec41f840d5 |
| d9d57c66f8 |
| 9746c9912b |
| abaaddae3a |
| 0bf64004ff |
| 6fae9cd018 |
| 65fcbd4b77 |
| dd09bc3168 |
| 0bf865fdef |
| 5c0c45b99f |
| 221f046664 |
| 2a9d5b393e |
| 90860e5511 |
| 0b1f806276 |
| 0482713241 |
| bb3e18519f |
| 3a4d08facc |
| 30064d5904 |
| c288b7fd67 |
| b4d1528612 |
| 5fc272054c |
| 714e73aeb1 |
| 3dfd2d48b3 |
| 3a5a9bd66a |
| 55d7aec516 |
| 96d3e4a7d6 |
| beb96aba14 |
10 .envrc Normal file
@@ -0,0 +1,10 @@
source_up

export DATABASE_USER="newsreader";
export DATABASE_NAME="newsreader";
export DATABASE_HOST="nixos-07.h.xinu.tv";
export DATABASE_URL="postgres://${DATABASE_USER}@${DATABASE_HOST}/${DATABASE_NAME}";
export PROD_DATABASE_USER="newsreader";
export PROD_DATABASE_NAME="newsreader";
export PROD_DATABASE_HOST="postgres.h.xinu.tv";
export PROD_DATABASE_URL="postgres://${PROD_DATABASE_USER}@${PROD_DATABASE_HOST}/${PROD_DATABASE_NAME}";
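These exports compose libpq-style connection strings from their parts. A minimal, hypothetical sketch of consuming `DATABASE_URL` from Rust with the same `sqlx` pool type the server uses (assumes direnv has exported the variable and a tokio runtime is available):

```rust
use sqlx::postgres::PgPool;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    // .envrc (via direnv) exports DATABASE_URL before the server starts.
    let url = std::env::var("DATABASE_URL").expect("DATABASE_URL not set");
    let pool = PgPool::connect(&url).await?;
    // Cheap connectivity check.
    let one: i64 = sqlx::query_scalar("SELECT 1").fetch_one(&pool).await?;
    assert_eq!(one, 1);
    Ok(())
}
```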
2036 Cargo.lock generated
File diff suppressed because it is too large
@@ -21,16 +21,13 @@ async-graphql = { version = "6.0.11", features = ["log"] }
async-graphql-rocket = "6.0.11"
rocket_cors = "0.6.0"
memmap = "0.7.0"
mailparse = { git = "https://github.com/wathiede/mailparse" }
#mailparse = "0.14.0"
mailparse = "0.15.0"
ammonia = "3.3.0"
lol_html = "1.2.0"
css-inline = "0.13.0"
anyhow = "1.0.79"
maplit = "1.0.2"
linkify = "0.10.0"
sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio", "time"] }
url = "2.5.2"

[dependencies.rocket_contrib]
version = "0.4.11"
default-features = false
features = ["json"]
@@ -7,3 +7,4 @@ address = "0.0.0.0"
port = 9345
# Uncomment to make it production like.
#log_level = "critical"
newsreader_database_url = "postgres://newsreader@nixos-07.h.xinu.tv/newsreader"
10 server/sql/count.sql Normal file
@@ -0,0 +1,10 @@
SELECT
    COUNT(*) count
FROM
    post
WHERE
    site = $1
    AND (
        NOT $2
        OR NOT is_read
    )
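`$1` is the site slug and `$2` is an unread-only toggle: when `$2` is false, `NOT $2` is true for every row and the `OR` never consults `is_read`; when `$2` is true, only unread rows survive. A sketch of binding both parameters, using the runtime-checked API rather than the `sqlx::query_file!` macro the server code uses:

```rust
use sqlx::postgres::PgPool;

// Hypothetical helper mirroring newsreader::count().
async fn count_posts(pool: &PgPool, site: &str, unread_only: bool) -> Result<i64, sqlx::Error> {
    sqlx::query_scalar::<_, i64>(
        "SELECT COUNT(*) FROM post WHERE site = $1 AND (NOT $2 OR NOT is_read)",
    )
    .bind(site)
    .bind(unread_only)
    .fetch_one(pool)
    .await
}
```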
21 server/sql/tags.sql Normal file
@@ -0,0 +1,21 @@
SELECT
    site,
    name,
    count (
        NOT is_read
        OR NULL
    ) unread
FROM
    post AS p
    JOIN feed AS f ON p.site = f.slug --
    -- TODO: figure this out to make the query faster when only looking for unread
    --WHERE
    --  (
    --    NOT $1
    --    OR NOT is_read
    --  )
GROUP BY
    1,
    2
ORDER BY
    site
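The `count(NOT is_read OR NULL) unread` column leans on `COUNT` skipping NULLs: the expression evaluates to TRUE for unread rows and to NULL (not FALSE) for read ones, so each `(site, name)` group reports only its unread posts while every feed still appears in the result.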
14 server/sql/thread.sql Normal file
@@ -0,0 +1,14 @@
SELECT
    date,
    is_read,
    link,
    site,
    summary,
    title,
    name,
    homepage
FROM
    post p
    JOIN feed f ON p.site = f.slug
WHERE
    uid = $1
20 server/sql/threads.sql Normal file
@@ -0,0 +1,20 @@
SELECT
    date,
    is_read,
    title,
    uid,
    name
FROM
    post p
    JOIN feed f ON p.site = f.slug
WHERE
    site = $1
    AND (
        NOT $2
        OR NOT is_read
    )
ORDER BY
    date DESC,
    title OFFSET $3
LIMIT
    $4
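Pagination happens in SQL: `$3`/`$4` map to `OFFSET`/`LIMIT`, and (as `newsreader::search` later in this diff shows) the caller asks for one row more than the page size to learn whether another page exists. A hedged sketch of the binding, inlining the query instead of the `sqlx::query_file!` macro used by the server:

```rust
use sqlx::postgres::{PgPool, PgRow};

// Hypothetical paging helper; `limit + 1` detects a following page.
async fn page_threads(
    pool: &PgPool,
    site: &str,
    unread_only: bool,
    offset: i64,
    limit: i64,
) -> Result<Vec<PgRow>, sqlx::Error> {
    sqlx::query(
        "SELECT date, is_read, title, uid, name FROM post p \
         JOIN feed f ON p.site = f.slug \
         WHERE site = $1 AND (NOT $2 OR NOT is_read) \
         ORDER BY date DESC, title OFFSET $3 LIMIT $4",
    )
    .bind(site)
    .bind(unread_only)
    .bind(offset)
    .bind(limit + 1)
    .fetch_all(pool)
    .await
}
```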
@@ -9,7 +9,7 @@ fn main() -> anyhow::Result<()> {
    println!("Sanitizing {src} into {dst}");
    let bytes = fs::read(src)?;
    let html = String::from_utf8_lossy(&bytes);
    let html = sanitize_html(&html)?;
    let html = sanitize_html(&html, "")?;
    fs::write(dst, html)?;

    Ok(())
@@ -7,6 +7,7 @@ use async_graphql_rocket::{GraphQLQuery, GraphQLRequest, GraphQLResponse};
use glog::Flags;
use notmuch::{Notmuch, NotmuchError, ThreadSet};
use rocket::{
    fairing::AdHoc,
    http::{ContentType, Header},
    request::Request,
    response::{content, Debug, Responder},
@@ -14,44 +15,24 @@ use rocket::{
    Response, State,
};
use rocket_cors::{AllowedHeaders, AllowedOrigins};
use serde::Deserialize;
use server::{
    error::ServerError,
    graphql::{attachment_bytes, Attachment, GraphqlSchema, Mutation, QueryRoot},
    graphql::{Attachment, GraphqlSchema, Mutation, QueryRoot},
    nm::{attachment_bytes, cid_attachment_bytes},
};
use sqlx::postgres::PgPool;

#[derive(Deserialize)]
struct Config {
    newsreader_database_url: String,
}

#[get("/refresh")]
async fn refresh(nm: &State<Notmuch>) -> Result<Json<String>, Debug<NotmuchError>> {
    Ok(Json(String::from_utf8_lossy(&nm.new()?).to_string()))
}

#[get("/search")]
async fn search_all(
    nm: &State<Notmuch>,
) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
    search(nm, "*", None, None).await
}

#[get("/search/<query>?<page>&<results_per_page>")]
async fn search(
    nm: &State<Notmuch>,
    query: &str,
    page: Option<usize>,
    results_per_page: Option<usize>,
) -> Result<Json<shared::SearchResult>, Debug<NotmuchError>> {
    let page = page.unwrap_or(0);
    let results_per_page = results_per_page.unwrap_or(20);
    let query = urlencoding::decode(query).map_err(NotmuchError::from)?;
    info!(" search '{query}'");
    let res = shared::SearchResult {
        summary: nm.search(&query, page * results_per_page, results_per_page)?,
        query: query.to_string(),
        page,
        results_per_page,
        total: nm.count(&query)?,
    };
    Ok(Json(res))
}

#[get("/show/<query>/pretty")]
async fn show_pretty(
    nm: &State<Notmuch>,
@@ -111,6 +92,22 @@ impl<'r, 'o: 'r> Responder<'r, 'o> for DownloadAttachmentResponder {
    }
}

#[get("/cid/<id>/<cid>")]
async fn view_cid(
    nm: &State<Notmuch>,
    id: &str,
    cid: &str,
) -> Result<InlineAttachmentResponder, Debug<ServerError>> {
    let mid = if id.starts_with("id:") {
        id.to_string()
    } else {
        format!("id:{}", id)
    };
    info!("view cid attachment {mid} {cid}");
    let attachment = cid_attachment_bytes(nm, &mid, &cid)?;
    Ok(InlineAttachmentResponder(attachment))
}

#[get("/view/attachment/<id>/<idx>/<_>")]
async fn view_attachment(
    nm: &State<Notmuch>,
@@ -206,34 +203,36 @@ async fn main() -> Result<(), Box<dyn Error>> {
    }
    .to_cors()?;

    let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
        .data(Notmuch::default())
        .extension(async_graphql::extensions::Logger)
        .finish();

    let _ = rocket::build()
    let rkt = rocket::build()
        .mount(
            "/",
            shared::urls::MOUNT_POINT,
            routes![
                original,
                refresh,
                search_all,
                search,
                show_pretty,
                show,
                graphql_query,
                graphql_request,
                graphiql,
                view_cid,
                view_attachment,
                download_attachment,
            ],
        )
        .attach(cors)
        .manage(schema)
        .manage(Notmuch::default())
        //.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))
        .launch()
        .await?;
        .attach(AdHoc::config::<Config>());

    let config: Config = rkt.figment().extract()?;
    let pool = PgPool::connect(&config.newsreader_database_url).await?;
    let schema = Schema::build(QueryRoot, Mutation, EmptySubscription)
        .data(Notmuch::default())
        .data(pool.clone())
        .extension(async_graphql::extensions::Logger)
        .finish();

    let rkt = rkt.manage(schema).manage(pool).manage(Notmuch::default());
    //.manage(Notmuch::with_config("../notmuch/testdata/notmuch.config"))

    rkt.launch().await?;
    Ok(())
}
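The startup rewrite builds the `Rocket` instance first so its figment (Rocket.toml plus `ROCKET_*` overrides) can supply `newsreader_database_url`, then constructs the pool and GraphQL schema from that value before launching. A stripped-down sketch of the same two-phase pattern:

```rust
use rocket::fairing::AdHoc;
use serde::Deserialize;

#[derive(Deserialize)]
struct Config {
    newsreader_database_url: String,
}

#[rocket::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Phase 1: build; AdHoc::config re-validates Config at ignition.
    let rkt = rocket::build().attach(AdHoc::config::<Config>());
    // Phase 2: read the same figment eagerly to set up dependent state.
    let config: Config = rkt.figment().extract()?;
    println!("would connect to {}", config.newsreader_database_url);
    rkt.launch().await?;
    Ok(())
}
```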
@@ -1,16 +1,34 @@
use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error};

use mailparse::MailParseError;
use thiserror::Error;

use crate::SanitizeError;

#[derive(Error, Debug)]
pub enum ServerError {
    #[error("notmuch")]
    #[error("notmuch: {0}")]
    NotmuchError(#[from] notmuch::NotmuchError),
    #[error("flatten")]
    FlattenError,
    #[error("mail parse error")]
    #[error("mail parse error: {0}")]
    MailParseError(#[from] MailParseError),
    #[error("IO error")]
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    #[error("attachment not found")]
    PartNotFound,
    #[error("sqlx error: {0}")]
    SQLXError(#[from] sqlx::Error),
    #[error("html sanitize error: {0}")]
    SanitizeError(#[from] SanitizeError),
    #[error("UTF8 error: {0}")]
    Utf8Error(#[from] Utf8Error),
    #[error("FromUTF8 error: {0}")]
    FromUtf8Error(#[from] FromUtf8Error),
    #[error("error: {0}")]
    StringError(String),
    #[error("invalid url: {0}")]
    UrlParseError(#[from] url::ParseError),
    #[error("impossible: {0}")]
    InfaillibleError(#[from] Infallible),
}
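The recurring change here is appending `: {0}` to the `#[error(...)]` strings: thiserror interpolates the wrapped value's `Display` output, so variants converted via `#[from]` now surface the underlying cause instead of a bare label.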
@@ -1,21 +1,13 @@
const MAX_RAW_MESSAGE_SIZE: usize = 100_000;
use std::{
    collections::HashMap,
    fs::File,
    hash::{DefaultHasher, Hash, Hasher},
};

use async_graphql::{
    connection::{self, Connection, Edge},
    connection::{Connection},
    Context, EmptySubscription, Enum, Error, FieldResult, Object, Schema, SimpleObject, Union,
};
use log::{error, info, warn};
use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
use memmap::MmapOptions;
use log::info;
use notmuch::Notmuch;
use rocket::time::Instant;
use sqlx::postgres::PgPool;

use crate::{error::ServerError, linkify_html, sanitize_html};
use crate::{newsreader, nm};

/// # Number of seconds since the Epoch
pub type UnixTime = isize;
@@ -23,14 +15,6 @@ pub type UnixTime = isize;
/// # Thread ID, sans "thread:"
pub type ThreadId = String;

const TEXT_PLAIN: &'static str = "text/plain";
const TEXT_HTML: &'static str = "text/html";
const IMAGE_JPEG: &'static str = "image/jpeg";
const IMAGE_PNG: &'static str = "image/png";
const MULTIPART_ALTERNATIVE: &'static str = "multipart/alternative";
const MULTIPART_MIXED: &'static str = "multipart/mixed";
const MULTIPART_RELATED: &'static str = "multipart/related";

#[derive(Debug, SimpleObject)]
pub struct ThreadSummary {
    pub thread: ThreadId,
@@ -49,9 +33,9 @@ pub struct ThreadSummary {

#[derive(Debug, SimpleObject)]
pub struct Thread {
    thread_id: String,
    subject: String,
    messages: Vec<Message>,
    pub thread_id: String,
    pub subject: String,
    pub messages: Vec<Message>,
}

#[derive(Debug, SimpleObject)]
@@ -95,16 +79,45 @@ pub struct Attachment {
    pub bytes: Vec<u8>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Disposition {
    pub r#type: DispositionType,
    pub filename: Option<String>,
    pub size: Option<usize>,
}

#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
pub enum DispositionType {
    Inline,
    Attachment,
}

impl From<mailparse::DispositionType> for DispositionType {
    fn from(value: mailparse::DispositionType) -> Self {
        match value {
            mailparse::DispositionType::Inline => DispositionType::Inline,
            mailparse::DispositionType::Attachment => DispositionType::Attachment,
            dt => panic!("unhandled DispositionType {dt:?}"),
        }
    }
}

impl Default for DispositionType {
    fn default() -> Self {
        DispositionType::Attachment
    }
}

#[derive(Debug, SimpleObject)]
pub struct Header {
    key: String,
    value: String,
    pub key: String,
    pub value: String,
}

#[derive(Debug)]
pub struct UnhandledContentType {
    text: String,
    content_tree: String,
    pub text: String,
    pub content_tree: String,
}

#[Object]
@@ -119,8 +132,8 @@ impl UnhandledContentType {

#[derive(Debug)]
pub struct PlainText {
    text: String,
    content_tree: String,
    pub text: String,
    pub content_tree: String,
}

#[Object]
@@ -135,8 +148,8 @@ impl PlainText {

#[derive(Debug)]
pub struct Html {
    html: String,
    content_tree: String,
    pub html: String,
    pub content_tree: String,
}

#[Object]
@@ -160,13 +173,13 @@ pub enum Body {
}

impl Body {
    fn html(html: String) -> Body {
    pub fn html(html: String) -> Body {
        Body::Html(Html {
            html,
            content_tree: "".to_string(),
        })
    }
    fn text(text: String) -> Body {
    pub fn text(text: String) -> Body {
        Body::PlainText(PlainText {
            text,
            content_tree: "".to_string(),
@@ -181,11 +194,11 @@ pub struct Email {
}

#[derive(SimpleObject)]
struct Tag {
    name: String,
    fg_color: String,
    bg_color: String,
    unread: usize,
pub(crate) struct Tag {
    pub name: String,
    pub fg_color: String,
    pub bg_color: String,
    pub unread: usize,
}

pub struct QueryRoot;
@@ -193,7 +206,14 @@ pub struct QueryRoot;
impl QueryRoot {
    async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        Ok(nm.count(&query)?)
        let pool = ctx.data_unchecked::<PgPool>();

        // TODO: make this search both corpora and merge results
        if newsreader::is_newsreader_search(&query) {
            Ok(newsreader::count(pool, &query).await?)
        } else {
            Ok(nm::count(nm, &query).await?)
        }
    }

    async fn search<'ctx>(
@@ -205,230 +225,41 @@ impl QueryRoot {
        last: Option<i32>,
        query: String,
    ) -> Result<Connection<usize, ThreadSummary>, Error> {
        info!("search({after:?} {before:?} {first:?} {last:?} {query:?})");
        let nm = ctx.data_unchecked::<Notmuch>();
        connection::query(
            after,
            before,
            first,
            last,
            |after, before, first, last| async move {
                let total = nm.count(&query)?;
                let (first, last) = if let (None, None) = (first, last) {
                    info!("neither first nor last set, defaulting first to 20");
                    (Some(20), None)
                } else {
                    (first, last)
                };
        let pool = ctx.data_unchecked::<PgPool>();

                let mut start = after.map(|after| after + 1).unwrap_or(0);
                let mut end = before.unwrap_or(total);
                if let Some(first) = first {
                    end = (start + first).min(end);
                }
                if let Some(last) = last {
                    start = if last > end - start { end } else { end - last };
                }

                let count = end - start;
                let slice: Vec<ThreadSummary> = nm
                    .search(&query, start, count)?
                    .0
                    .into_iter()
                    .map(|ts| ThreadSummary {
                        thread: ts.thread,
                        timestamp: ts.timestamp,
                        date_relative: ts.date_relative,
                        matched: ts.matched,
                        total: ts.total,
                        authors: ts.authors,
                        subject: ts.subject,
                        tags: ts.tags,
                    })
                    .collect();

                let mut connection = Connection::new(start > 0, end < total);
                connection.edges.extend(
                    slice
                        .into_iter()
                        .enumerate()
                        .map(|(idx, item)| Edge::new(start + idx, item)),
                );
                Ok::<_, Error>(connection)
            },
        )
        .await
        // TODO: make this search both corpora and merge results
        if newsreader::is_newsreader_search(&query) {
            Ok(newsreader::search(pool, after, before, first, last, query).await?)
        } else {
            Ok(nm::search(nm, after, before, first, last, query).await?)
        }
    }

    async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let now = Instant::now();
        let pool = ctx.data_unchecked::<PgPool>();
        let needs_unread = ctx.look_ahead().field("unread").exists();
        let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
            // 10000 is an arbitrary number, if there's more than 10k unread messages, we'll
            // get an inaccurate count.
            nm.search("is:unread", 0, 10000)?
                .0
                .iter()
                .fold(HashMap::new(), |mut m, ts| {
                    ts.tags.iter().for_each(|t| {
                        m.entry(t.clone()).and_modify(|c| *c += 1).or_insert(1);
                    });
                    m
                })
        } else {
            HashMap::new()
        };
        let tags = nm
            .tags()?
            .into_iter()
            .map(|tag| {
                let mut hasher = DefaultHasher::new();
                tag.hash(&mut hasher);
                let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
                let unread = if needs_unread {
                    *unread_msg_cnt.get(&tag).unwrap_or(&0)
                } else {
                    0
                };
                Tag {
                    name: tag,
                    fg_color: "white".to_string(),
                    bg_color: hex,
                    unread,
                }
            })
            .collect();
        info!("Fetching tags took {}", now.elapsed());
        let mut tags = newsreader::tags(pool, needs_unread).await?;
        tags.append(&mut nm::tags(nm, needs_unread)?);
        Ok(tags)
    }
    async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
        // TODO(wathiede): normalize all email addresses through an address book with preferred
        // display names (that default to the most commonly seen name).
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        let debug_content_tree = ctx
            .look_ahead()
            .field("messages")
            .field("body")
            .field("contentTree")
            .exists();
        let mut messages = Vec::new();
        for (path, id) in std::iter::zip(nm.files(&thread_id)?, nm.message_ids(&thread_id)?) {
            let tags = nm.tags_for_query(&format!("id:{id}"))?;
            let file = File::open(&path)?;
            let mmap = unsafe { MmapOptions::new().map(&file)? };
            let m = parse_mail(&mmap)?;
            let from = email_addresses(&path, &m, "from")?;
            let from = match from.len() {
                0 => None,
                1 => from.into_iter().next(),
                _ => {
                    warn!(
                        "Got {} from addresses in message, truncating: {:?}",
                        from.len(),
                        from
                    );
                    from.into_iter().next()
                }
            };
            let to = email_addresses(&path, &m, "to")?;
            let cc = email_addresses(&path, &m, "cc")?;
            let subject = m.headers.get_first_value("subject");
            let timestamp = m
                .headers
                .get_first_value("date")
                .and_then(|d| mailparse::dateparse(&d).ok());
            let body = match extract_body(&m, &id)? {
                Body::PlainText(PlainText { text, content_tree }) => {
                    let text = if text.len() > MAX_RAW_MESSAGE_SIZE {
                        format!(
                            "{}...\n\nMESSAGE WAS TRUNCATED @ {} bytes",
                            &text[..MAX_RAW_MESSAGE_SIZE],
                            MAX_RAW_MESSAGE_SIZE
                        )
                    } else {
                        text
                    };

                    Body::Html(Html {
                        html: format!(
                            r#"<p class="view-part-text-plain">{}</p>"#,
                            // Trim newlines to prevent excessive white space at the beginning/end of
                            // presentation. Leave tabs and spaces in case plain text attempts to center a
                            // header on the first line.
                            sanitize_html(&linkify_html(&text.trim_matches('\n')))?
                        ),
                        content_tree: if debug_content_tree {
                            render_content_type_tree(&m)
                        } else {
                            content_tree
                        },
                    })
                }
                Body::Html(Html { html, content_tree }) => Body::Html(Html {
                    html: sanitize_html(&html)?,
                    content_tree: if debug_content_tree {
                        render_content_type_tree(&m)
                    } else {
                        content_tree
                    },
                }),

                Body::UnhandledContentType(UnhandledContentType { content_tree, .. }) => {
                    let body_start = mmap
                        .windows(2)
                        .take(20_000)
                        .position(|w| w == b"\n\n")
                        .unwrap_or(0);
                    let body = mmap[body_start + 2..].to_vec();
                    Body::UnhandledContentType(UnhandledContentType {
                        text: String::from_utf8(body)?,
                        content_tree: if debug_content_tree {
                            render_content_type_tree(&m)
                        } else {
                            content_tree
                        },
                    })
                }
                b => b,
            };
            let headers = m
                .headers
                .iter()
                .map(|h| Header {
                    key: h.get_key(),
                    value: h.get_value(),
                })
                .collect();
            // TODO(wathiede): parse message and fill out attachments
            let attachments = extract_attachments(&m, &id)?;
            messages.push(Message {
                id,
                from,
                to,
                cc,
                subject,
                tags,
                timestamp,
                headers,
                body,
                path,
                attachments,
            });
        // TODO: look at thread_id and conditionally load newsreader
        if newsreader::is_newsreader_thread(&thread_id) {
            Ok(newsreader::thread(pool, thread_id).await?)
        } else {
            Ok(nm::thread(nm, thread_id, debug_content_tree).await?)
        }
        messages.reverse();
        // Find the first subject that's set. After reversing the vec, this should be the oldest
        // message.
        let subject: String = messages
            .iter()
            .skip_while(|m| m.subject.is_none())
            .next()
            .and_then(|m| m.subject.clone())
            .unwrap_or("(NO SUBJECT)".to_string());
        Ok(Thread {
            thread_id,
            subject,
            messages,
        })
    }
}
@@ -475,448 +306,3 @@ impl Mutation {
}

pub type GraphqlSchema = Schema<QueryRoot, Mutation, EmptySubscription>;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Disposition {
    pub r#type: DispositionType,
    pub filename: Option<String>,
    pub size: Option<usize>,
}

#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
pub enum DispositionType {
    Inline,
    Attachment,
}

impl From<mailparse::DispositionType> for DispositionType {
    fn from(value: mailparse::DispositionType) -> Self {
        match value {
            mailparse::DispositionType::Inline => DispositionType::Inline,
            mailparse::DispositionType::Attachment => DispositionType::Attachment,
            dt => panic!("unhandled DispositionType {dt:?}"),
        }
    }
}

impl Default for DispositionType {
    fn default() -> Self {
        DispositionType::Attachment
    }
}

fn extract_body(m: &ParsedMail, id: &str) -> Result<Body, Error> {
    let mut part_addr = Vec::new();
    part_addr.push(id.to_string());
    let body = m.get_body()?;
    let ret = match m.ctype.mimetype.as_str() {
        TEXT_PLAIN => return Ok(Body::text(body)),
        TEXT_HTML => return Ok(Body::html(body)),
        MULTIPART_MIXED => extract_mixed(m, &mut part_addr),
        MULTIPART_ALTERNATIVE => extract_alternative(m, &mut part_addr),
        _ => extract_unhandled(m),
    };
    if let Err(err) = ret {
        error!("Failed to extract body: {err:?}");
        return Ok(extract_unhandled(m)?);
    }
    ret
}

fn extract_unhandled(m: &ParsedMail) -> Result<Body, Error> {
    let msg = format!(
        "Unhandled body content type:\n{}",
        render_content_type_tree(m)
    );
    Ok(Body::UnhandledContentType(UnhandledContentType {
        text: m.get_body()?,
        content_tree: render_content_type_tree(m),
    }))
}

// multipart/alternative defines multiple representations of the same message, and clients should
// show the fanciest they can display. For this program, the priority is text/html, text/plain,
// then give up.
fn extract_alternative(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, Error> {
    let handled_types = vec![
        MULTIPART_ALTERNATIVE,
        MULTIPART_MIXED,
        MULTIPART_RELATED,
        TEXT_HTML,
        TEXT_PLAIN,
    ];
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == MULTIPART_ALTERNATIVE {
            return extract_alternative(sp, part_addr);
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == MULTIPART_MIXED {
            return extract_related(sp, part_addr);
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == MULTIPART_RELATED {
            return extract_related(sp, part_addr);
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == TEXT_HTML {
            let body = sp.get_body()?;
            return Ok(Body::html(body));
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == TEXT_PLAIN {
            let body = sp.get_body()?;
            return Ok(Body::text(body));
        }
    }
    Err(format!(
        "extract_alternative failed to find suitable subpart, searched: {:?}",
        handled_types
    )
    .into())
}

// multipart/mixed defines multiple types of content all of which should be presented to the user
// 'serially'.
fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, Error> {
    let handled_types = vec![
        MULTIPART_ALTERNATIVE,
        MULTIPART_RELATED,
        TEXT_HTML,
        TEXT_PLAIN,
        IMAGE_JPEG,
        IMAGE_PNG,
    ];
    let mut unhandled_types: Vec<_> = m
        .subparts
        .iter()
        .map(|sp| sp.ctype.mimetype.as_str())
        .filter(|mt| !handled_types.contains(&mt))
        .collect();
    unhandled_types.sort();
    if !unhandled_types.is_empty() {
        warn!("{MULTIPART_MIXED} contains the following unhandled mimetypes {unhandled_types:?}");
    }
    let mut parts = Vec::new();
    for (idx, sp) in m.subparts.iter().enumerate() {
        part_addr.push(idx.to_string());
        match sp.ctype.mimetype.as_str() {
            MULTIPART_RELATED => parts.push(extract_related(sp, part_addr)?),
            MULTIPART_ALTERNATIVE => parts.push(extract_alternative(sp, part_addr)?),
            TEXT_PLAIN => parts.push(Body::text(sp.get_body()?)),
            TEXT_HTML => parts.push(Body::html(sp.get_body()?)),
            IMAGE_JPEG | IMAGE_PNG => {
                let pcd = sp.get_content_disposition();
                let filename = pcd
                    .params
                    .get("filename")
                    .map(|s| s.clone())
                    .unwrap_or("".to_string());
                // Only add inline images, attachments are handled as an attribute of the top level Message and rendered separate client-side.
                if pcd.disposition == mailparse::DispositionType::Inline {
                    parts.push(Body::html(format!(
                        r#"<img src="/view/attachment/{}/{}/{filename}">"#,
                        part_addr[0],
                        part_addr
                            .iter()
                            .skip(1)
                            .map(|i| i.to_string())
                            .collect::<Vec<_>>()
                            .join(".")
                    )));
                }
            }
            _ => (),
        }
        part_addr.pop();
    }
    Ok(flatten_body_parts(&parts))
}

fn flatten_body_parts(parts: &[Body]) -> Body {
    let html = parts
        .iter()
        .map(|p| match p {
            Body::PlainText(PlainText { text, .. }) => {
                format!(
                    r#"<p class="view-part-text-plain">{}</p>"#,
                    // Trim newlines to prevent excessive white space at the beginning/end of
                    // presentation. Leave tabs and spaces in case plain text attempts to center a
                    // header on the first line.
                    linkify_html(&text.trim_matches('\n'))
                )
            }
            Body::Html(Html { html, .. }) => html.clone(),
            Body::UnhandledContentType(UnhandledContentType { text, .. }) => {
                error!("text len {}", text.len());
                format!(
                    r#"<p class="view-part-unhandled">{}</p>"#,
                    // Trim newlines to prevent excessive white space at the beginning/end of
                    // presentation. Leave tabs and spaces in case plain text attempts to center a
                    // header on the first line.
                    linkify_html(&text.trim_matches('\n'))
                )
            }
        })
        .collect::<Vec<_>>()
        .join("\n");

    info!("flatten_body_parts {} {html}", parts.len());
    Body::html(html)
}

fn extract_related(m: &ParsedMail, _part_addr: &mut Vec<String>) -> Result<Body, Error> {
    // TODO(wathiede): collect related things and change return type to new Body arm.
    let handled_types = vec![TEXT_HTML, TEXT_PLAIN];
    let mut unhandled_types: Vec<_> = m
        .subparts
        .iter()
        .map(|sp| sp.ctype.mimetype.as_str())
        .filter(|mt| !handled_types.contains(&mt))
        .collect();
    unhandled_types.sort();
    warn!("{MULTIPART_RELATED} contains the following unhandled mimetypes {unhandled_types:?}");

    for sp in &m.subparts {
        if sp.ctype.mimetype == TEXT_HTML {
            let body = sp.get_body()?;
            return Ok(Body::html(body));
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype == TEXT_PLAIN {
            let body = sp.get_body()?;
            return Ok(Body::text(body));
        }
    }
    Err(format!(
        "extract_related failed to find suitable subpart, searched: {:?}",
        handled_types
    )
    .into())
}

fn walk_attachments<T, F: Fn(&ParsedMail, &[usize]) -> Option<T>>(
    m: &ParsedMail,
    visitor: F,
) -> Option<T> {
    let mut cur_addr = Vec::new();
    for (idx, sp) in m.subparts.iter().enumerate() {
        cur_addr.push(idx);
        let val = visitor(sp, &cur_addr);
        if val.is_some() {
            return val;
        }
        cur_addr.pop();
    }
    None
}

// TODO(wathiede): make this walk_attachments that takes a closure.
// Then implement one closure for building `Attachment` and implement another that can be used to
// get the bytes for serving attachments over HTTP
fn extract_attachments(m: &ParsedMail, id: &str) -> Result<Vec<Attachment>, Error> {
    let mut attachments = Vec::new();
    for (idx, sp) in m.subparts.iter().enumerate() {
        if let Some(attachment) = extract_attachment(sp, id, &[idx]) {
            // Filter out inline attachments, they're flattened into the body of the message.
            if attachment.disposition == DispositionType::Attachment {
                attachments.push(attachment);
            }
        }
    }
    Ok(attachments)
}

fn extract_attachment(m: &ParsedMail, id: &str, idx: &[usize]) -> Option<Attachment> {
    let pcd = m.get_content_disposition();
    // TODO: do we need to handle empty filename attachments, or should we change the definition of
    // Attachment::filename?
    let Some(filename) = pcd.params.get("filename").map(|f| f.clone()) else {
        return None;
    };

    // TODO: grab this from somewhere
    let content_id = None;
    let bytes = match m.get_body_raw() {
        Ok(bytes) => bytes,
        Err(err) => {
            error!("failed to get body for attachment: {err}");
            return None;
        }
    };
    return Some(Attachment {
        id: id.to_string(),
        idx: idx
            .iter()
            .map(|i| i.to_string())
            .collect::<Vec<_>>()
            .join("."),
        disposition: pcd.disposition.into(),
        filename: Some(filename),
        size: bytes.len(),
        // TODO: what is the default for ctype?
        // TODO: do we want to use m.ctype.params for anything?
        content_type: Some(m.ctype.mimetype.clone()),
        content_id,
        bytes,
    });
}

fn get_attachment_filename(header_value: &str) -> &str {
    info!("get_attachment_filename {header_value}");
    // Strip last "
    let v = &header_value[..header_value.len() - 1];
    if let Some(idx) = v.rfind('"') {
        &v[idx + 1..]
    } else {
        ""
    }
}

fn get_content_type<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
    for h in headers {
        if h.get_key() == "Content-Type" {
            let v = h.get_value();
            if let Some(idx) = v.find(';') {
                return Some(v[..idx].to_string());
            } else {
                return Some(v);
            }
        }
    }
    None
}

fn get_content_id<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
    for h in headers {
        if h.get_key() == "Content-ID" {
            return Some(h.get_value());
        }
    }
    None
}

fn render_content_type_tree(m: &ParsedMail) -> String {
    const WIDTH: usize = 4;
    const SKIP_HEADERS: [&str; 4] = [
        "Authentication-Results",
        "DKIM-Signature",
        "Received",
        "Received-SPF",
    ];
    fn render_ct_rec(m: &ParsedMail, depth: usize) -> String {
        let mut parts = Vec::new();
        let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
        parts.push(msg);
        for sp in &m.subparts {
            parts.push(render_ct_rec(sp, depth + 1))
        }
        parts.join("\n")
    }
    fn render_rec(m: &ParsedMail, depth: usize) -> String {
        let mut parts = Vec::new();
        let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
        parts.push(msg);
        let indent = " ".repeat(depth * WIDTH);
        if !m.ctype.charset.is_empty() {
            parts.push(format!("{indent} Character Set: {}", m.ctype.charset));
        }
        for (k, v) in m.ctype.params.iter() {
            parts.push(format!("{indent} {k}: {v}"));
        }
        if !m.headers.is_empty() {
            parts.push(format!("{indent} == headers =="));
            for h in &m.headers {
                if h.get_key().starts_with('X') {
                    continue;
                }
                if SKIP_HEADERS.contains(&h.get_key().as_str()) {
                    continue;
                }

                parts.push(format!("{indent} {}: {}", h.get_key_ref(), h.get_value()));
            }
        }
        for sp in &m.subparts {
            parts.push(render_rec(sp, depth + 1))
        }
        parts.join("\n")
    }
    format!(
        "Outline:\n{}\n\nDetailed:\n{}\n\nNot showing headers:\n {}\n X.*",
        render_ct_rec(m, 1),
        render_rec(m, 1),
        SKIP_HEADERS.join("\n ")
    )
}

fn email_addresses(path: &str, m: &ParsedMail, header_name: &str) -> Result<Vec<Email>, Error> {
    let mut addrs = Vec::new();
    for header_value in m.headers.get_all_values(header_name) {
        match mailparse::addrparse(&header_value) {
            Ok(mal) => {
                for ma in mal.into_inner() {
                    match ma {
                        mailparse::MailAddr::Group(gi) => {
                            if !gi.group_name.contains("ndisclosed") {
                                println!("[{path}][{header_name}] Group: {gi}");
                            }
                        }
                        mailparse::MailAddr::Single(s) => addrs.push(Email {
                            name: s.display_name,
                            addr: Some(s.addr),
                        }), //println!("Single: {s}"),
                    }
                }
            }
            Err(_) => {
                let v = header_value;
                if v.matches('@').count() == 1 {
                    if v.matches('<').count() == 1 && v.ends_with('>') {
                        let idx = v.find('<').unwrap();
                        let addr = &v[idx + 1..v.len() - 1].trim();
                        let name = &v[..idx].trim();
                        addrs.push(Email {
                            name: Some(name.to_string()),
                            addr: Some(addr.to_string()),
                        });
                    }
                } else {
                    addrs.push(Email {
                        name: Some(v),
                        addr: None,
                    });
                }
            }
        }
    }
    Ok(addrs)
}

pub fn attachment_bytes(nm: &Notmuch, id: &str, idx: &[usize]) -> Result<Attachment, ServerError> {
    let files = nm.files(id)?;
    let Some(path) = files.first() else {
        warn!("failed to find files for message {id}");
        return Err(ServerError::PartNotFound);
    };
    let file = File::open(&path)?;
    let mmap = unsafe { MmapOptions::new().map(&file)? };
    let m = parse_mail(&mmap)?;
    if let Some(attachment) = walk_attachments(&m, |sp, cur_idx| {
        if cur_idx == idx {
            let attachment = extract_attachment(&sp, id, idx).unwrap_or(Attachment {
                ..Attachment::default()
            });
            return Some(attachment);
        }
        None
    }) {
        return Ok(attachment);
    }

    Err(ServerError::PartNotFound)
}
@@ -1,5 +1,6 @@
pub mod error;
pub mod graphql;
pub mod newsreader;
pub mod nm;

use css_inline::{CSSInliner, InlineError, InlineOptions};
@@ -8,6 +9,7 @@ use log::error;
use lol_html::{element, errors::RewritingError, rewrite_str, RewriteStrSettings};
use maplit::{hashmap, hashset};
use thiserror::Error;
use url::Url;

#[derive(Error, Debug)]
pub enum SanitizeError {
@@ -43,7 +45,13 @@ pub fn linkify_html(text: &str) -> String {
    parts.join("")
}

pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
// html contains the content to be cleaned, and cid_prefix is used to resolve mixed part image
// references
pub fn sanitize_html(
    html: &str,
    cid_prefix: &str,
    base_url: &Url,
) -> Result<String, SanitizeError> {
    let element_content_handlers = vec![
        // Open links in new tab
        element!("a[href]", |el| {
@@ -51,6 +59,33 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {

            Ok(())
        }),
        // Make links with relative URLs absolute
        element!("a[href]", |el| {
            if let Some(Ok(href)) = el.get_attribute("href").map(|href| base_url.join(&href)) {
                el.set_attribute("href", &href.as_str()).unwrap();
            }

            Ok(())
        }),
        // Make images with relative srcs absolute
        element!("img[src]", |el| {
            if let Some(Ok(src)) = el.get_attribute("src").map(|src| base_url.join(&src)) {
                el.set_attribute("src", &src.as_str()).unwrap();
            }

            Ok(())
        }),
        // Replace mixed part CID images with URL
        element!("img[src]", |el| {
            let src = el
                .get_attribute("src")
                .expect("src was required")
                .replace("cid:", cid_prefix);

            el.set_attribute("src", &src)?;

            Ok(())
        }),
        // Only secure image URLs
        element!("img[src]", |el| {
            let src = el
@@ -170,7 +205,7 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
    ];
    let tag_attributes = hashmap![
        "a" => hashset![
            "href", "hreflang"
            "href", "hreflang", "target",
        ],
        "bdo" => hashset![
            "dir"
@@ -225,19 +260,19 @@ pub fn sanitize_html(html: &str) -> Result<String, SanitizeError> {
        ],
    ];

    let clean_html = ammonia::Builder::default()
        .tags(tags)
        .tag_attributes(tag_attributes)
        .generic_attributes(attributes)
        .clean(&inlined_html)
        .to_string();
    //let clean_html = inlined_html;

    Ok(rewrite_str(
        &clean_html,
    let rewritten_html = rewrite_str(
        &inlined_html,
        RewriteStrSettings {
            element_content_handlers,
            ..RewriteStrSettings::default()
        },
    )?)
    )?;
    let clean_html = ammonia::Builder::default()
        .tags(tags)
        .tag_attributes(tag_attributes)
        .generic_attributes(attributes)
        .clean(&rewritten_html)
        .to_string();

    Ok(clean_html)
}
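The widened signature threads two pieces of context through sanitization: `cid_prefix` rewrites `cid:` image references into attachment URLs, and `base_url` absolutizes relative links and image sources before ammonia strips unsafe markup. A hedged usage sketch with illustrative values (the crate is assumed to be importable as `server`):

```rust
use server::sanitize_html;
use url::Url;

fn main() -> anyhow::Result<()> {
    let base = Url::parse("https://blog.example.com/post/1")?;
    let dirty = r#"<a href="/about">about</a> <img src="cid:logo@example">"#;
    // "/about" becomes absolute against `base`; "cid:" is swapped for the prefix.
    let clean = sanitize_html(dirty, "/cid/id:123/", &base)?;
    println!("{clean}");
    Ok(())
}
```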
281 server/src/newsreader.rs Normal file
@@ -0,0 +1,281 @@
use std::{
    convert::Infallible,
    hash::{DefaultHasher, Hash, Hasher},
    str::FromStr,
};

use async_graphql::connection::{self, Connection, Edge};
use log::info;
use sqlx::postgres::PgPool;
use url::Url;

const TAG_PREFIX: &'static str = "News/";
const THREAD_PREFIX: &'static str = "news:";

use crate::{
    error::ServerError,
    graphql::{Body, Email, Html, Message, Tag, Thread, ThreadSummary},
    sanitize_html,
};

pub fn is_newsreader_search(query: &str) -> bool {
    query.contains(TAG_PREFIX)
}

pub fn is_newsreader_thread(query: &str) -> bool {
    query.starts_with(THREAD_PREFIX)
}

pub async fn count(pool: &PgPool, query: &str) -> Result<usize, ServerError> {
    let query: Query = query.parse()?;
    let site = query.site.expect("search has no site");
    let row = sqlx::query_file!("sql/count.sql", site, query.unread_only)
        .fetch_one(pool)
        .await?;
    Ok(row.count.unwrap_or(0).try_into().unwrap_or(0))
}

pub async fn search(
    pool: &PgPool,
    after: Option<String>,
    before: Option<String>,
    first: Option<i32>,
    last: Option<i32>,
    query: String,
) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> {
    let query: Query = query.parse()?;
    info!("news search query {query:?}");
    let site = query.site.expect("search has no site");
    connection::query(
        after,
        before,
        first,
        last,
        |after: Option<usize>, before: Option<usize>, first, last| async move {
            info!("search page info {after:#?}, {before:#?}, {first:#?}, {last:#?}");
            let default_page_size = 100;
            let (offset, limit) = match (after, before, first, last) {
                // Reasonable defaults
                (None, None, None, None) => (0, default_page_size),
                (None, None, Some(first), None) => (0, first),
                (Some(after), None, None, None) => (after, default_page_size),
                (Some(after), None, Some(first), None) => (after, first),
                (None, Some(before), None, None) => {
                    (before.saturating_sub(default_page_size), default_page_size)
                }
                (None, Some(before), None, Some(last)) => (before.saturating_sub(last), last),
                (None, None, None, Some(_)) => {
                    panic!("specifying last and no before doesn't make sense")
                }
                (None, None, Some(_), Some(_)) => {
                    panic!("specifying first and last doesn't make sense")
                }
                (None, Some(_), Some(_), _) => {
                    panic!("specifying before and first doesn't make sense")
                }
                (Some(_), Some(_), _, _) => {
                    panic!("specifying after and before doesn't make sense")
                }
                (Some(_), None, None, Some(_)) => {
                    panic!("specifying after and last doesn't make sense")
                }
                (Some(_), None, Some(_), Some(_)) => {
                    panic!("specifying after, first and last doesn't make sense")
                }
            };
            // The +1 is to see if there are more pages of data available.
            let limit = limit + 1;

            info!("search page offset {offset} limit {limit}");
            let rows = sqlx::query_file!(
                "sql/threads.sql",
                site,
                query.unread_only,
                offset as i64,
                limit as i64
            )
            .fetch_all(pool)
            .await?;

            let mut slice = rows
                .into_iter()
                .map(|r| {
                    let tags = if r.is_read.unwrap_or(false) {
                        vec![site.clone()]
                    } else {
                        vec!["unread".to_string(), site.clone()]
                    };
                    ThreadSummary {
                        thread: format!("{THREAD_PREFIX}{}", r.uid),
                        timestamp: r
                            .date
                            .expect("post missing date")
                            .assume_utc()
                            .unix_timestamp() as isize,
                        date_relative: "TODO date_relative".to_string(),
                        matched: 0,
                        total: 1,
                        authors: r.name.unwrap_or_else(|| site.clone()),
                        subject: r.title.unwrap_or("NO TITLE".to_string()),
                        tags,
                    }
                })
                .collect::<Vec<_>>();
            let has_more = slice.len() == limit;
            let mut connection = Connection::new(offset > 0, has_more);
            if has_more {
                slice.pop();
            };
            connection.edges.extend(
                slice
                    .into_iter()
                    .enumerate()
                    .map(|(idx, item)| Edge::new(offset + idx, item)),
            );
            Ok::<_, async_graphql::Error>(connection)
        },
    )
    .await
}

pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
    // TODO: optimize query by using needs_unread
    let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?;
    let tags = tags
        .into_iter()
        .map(|tag| {
            let mut hasher = DefaultHasher::new();
            tag.site.hash(&mut hasher);
            let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
            let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
            let name = format!("{TAG_PREFIX}{}", tag.site.expect("tag must have site"));
            Tag {
                name,
                fg_color: "white".to_string(),
                bg_color: hex,
                unread,
            }
        })
        .collect();
    Ok(tags)
}

pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerError> {
    let id = thread_id
        .strip_prefix(THREAD_PREFIX)
        .expect("news thread doesn't start with '{THREAD_PREFIX}'")
        .to_string();

    let r = sqlx::query_file!("sql/thread.sql", id)
        .fetch_one(pool)
        .await?;

    let site = r.site.unwrap_or("NO SITE".to_string());
    let tags = if r.is_read.unwrap_or(false) {
        vec![site.clone()]
    } else {
        vec!["unread".to_string(), site.clone()]
    };
    let default_homepage = "http://no-homepage";
    let homepage = Url::parse(
        &r.homepage
            .map(|h| {
                if h.is_empty() {
                    default_homepage.to_string()
                } else {
                    h
                }
            })
            .unwrap_or(default_homepage.to_string()),
    )?;
    let link = Url::parse(
        &r.link
            .as_ref()
            .map(|h| {
                if h.is_empty() {
                    default_homepage.to_string()
                } else {
                    h.to_string()
                }
            })
            .unwrap_or(default_homepage.to_string()),
    )?;
    let addr = r.link.as_ref().map(|link| {
        if link.contains('@') {
            link.clone()
        } else {
            if let Ok(url) = homepage.join(&link) {
                url.to_string()
            } else {
                link.clone()
            }
        }
    });
    let html = r.summary.unwrap_or("NO SUMMARY".to_string());
    // TODO: add site specific cleanups. For example:
    // * Grafana does <div class="image-wrapp"><img class="lazyload>"<img src="/media/...>"</img></div>
    // * Some sites appear to be HTML encoded, unencode them, i.e. imperialviolet
    let html = sanitize_html(&html, "", &link)?;
    let body = Body::Html(Html {
        html,
        content_tree: "".to_string(),
    });
    let title = r.title.unwrap_or("NO TITLE".to_string());
    let from = Some(Email {
        name: r.name,
        addr: addr.map(|a| a.to_string()),
    });
    Ok(Thread {
        thread_id,
        subject: title.clone(),
        messages: vec![Message {
            id,
            from,
            to: Vec::new(),
            cc: Vec::new(),
            subject: Some(title),
            timestamp: Some(
                r.date
                    .expect("post missing date")
                    .assume_utc()
                    .unix_timestamp(),
            ),
            headers: Vec::new(),
            body,
            path: "".to_string(),
            attachments: Vec::new(),
            tags,
        }],
    })
}

#[derive(Debug)]
struct Query {
    unread_only: bool,
    site: Option<String>,
    remainder: Vec<String>,
}

impl FromStr for Query {
    type Err = Infallible;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut unread_only = false;
        let mut site = None;
        let mut remainder = Vec::new();
        let site_prefix = format!("tag:{TAG_PREFIX}");
        for word in s.split_whitespace() {
            if word == "is:unread" {
                unread_only = true
            } else if word.starts_with(&site_prefix) {
                site = Some(word[site_prefix.len()..].to_string())
            } else {
                remainder.push(word.to_string());
            }
        }
        Ok(Query {
            unread_only,
            site,
            remainder,
        })
    }
}
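`Query`'s `FromStr` is infallible by construction, so `.parse()` cannot fail at runtime; it peels `is:unread` and one `tag:News/<site>` token out of the search string and keeps the rest as `remainder`. A sketch of the decomposition (site name is illustrative), written as it would appear in a `#[cfg(test)]` module inside `server/src/newsreader.rs` since `Query` is private:

```rust
#[test]
fn parses_site_and_unread_flag() {
    let q: Query = "tag:News/example is:unread rust".parse().unwrap();
    assert!(q.unread_only);
    assert_eq!(q.site.as_deref(), Some("example"));
    assert_eq!(q.remainder, vec!["rust".to_string()]);
}
```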
751 server/src/nm.rs
@@ -1,13 +1,754 @@
|
||||
use shared::Message;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
fs::File,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
time::Instant,
|
||||
};
|
||||
|
||||
use crate::error;
|
||||
use async_graphql::connection::{self, Connection, Edge};
|
||||
use log::{error, info, warn};
|
||||
use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
|
||||
use memmap::MmapOptions;
|
||||
use notmuch::Notmuch;
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
error::ServerError,
|
||||
graphql::{
|
||||
Attachment, Body, DispositionType, Email, Header, Html, Message, PlainText, Tag, Thread,
|
||||
ThreadSummary, UnhandledContentType,
|
||||
},
|
||||
linkify_html, sanitize_html,
|
||||
};
|
||||
|
||||
const TEXT_PLAIN: &'static str = "text/plain";
|
||||
const TEXT_HTML: &'static str = "text/html";
|
||||
const IMAGE_JPEG: &'static str = "image/jpeg";
|
||||
const IMAGE_PNG: &'static str = "image/png";
|
||||
const MULTIPART_ALTERNATIVE: &'static str = "multipart/alternative";
|
||||
const MULTIPART_MIXED: &'static str = "multipart/mixed";
|
||||
const MULTIPART_RELATED: &'static str = "multipart/related";
|
||||
|
||||
const MAX_RAW_MESSAGE_SIZE: usize = 100_000;
|
||||
|
||||
// TODO(wathiede): decide good error type
|
||||
pub fn threadset_to_messages(
|
||||
thread_set: notmuch::ThreadSet,
|
||||
) -> Result<Vec<Message>, error::ServerError> {
|
||||
pub fn threadset_to_messages(thread_set: notmuch::ThreadSet) -> Result<Vec<Message>, ServerError> {
|
||||
for t in thread_set.0 {
|
||||
for _tn in t.0 {}
|
||||
}
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
pub async fn count(nm: &Notmuch, query: &str) -> Result<usize, ServerError> {
|
||||
Ok(nm.count(query)?)
|
||||
}
|
||||
|
||||
pub async fn search(
|
||||
nm: &Notmuch,
|
||||
after: Option<String>,
|
||||
before: Option<String>,
|
||||
first: Option<i32>,
|
||||
last: Option<i32>,
|
||||
query: String,
|
||||
) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> {
|
||||
connection::query(
|
||||
after,
|
||||
before,
|
||||
first,
|
||||
last,
|
||||
|after, before, first, last| async move {
|
||||
let total = nm.count(&query)?;
|
||||
let (first, last) = if let (None, None) = (first, last) {
|
||||
info!("neither first nor last set, defaulting first to 20");
|
||||
(Some(20), None)
|
||||
} else {
|
||||
(first, last)
|
||||
};
|
||||
|
||||
let mut start = after.map(|after| after + 1).unwrap_or(0);
|
||||
let mut end = before.unwrap_or(total);
|
||||
if let Some(first) = first {
|
||||
end = (start + first).min(end);
|
||||
}
|
||||
if let Some(last) = last {
|
||||
start = if last > end - start { end } else { end - last };
|
||||
}
|
||||
|
||||
let count = end - start;
|
||||
let slice: Vec<ThreadSummary> = nm
|
||||
.search(&query, start, count)?
|
||||
.0
|
||||
.into_iter()
|
||||
.map(|ts| ThreadSummary {
|
||||
thread: ts.thread,
|
||||
timestamp: ts.timestamp,
|
||||
date_relative: ts.date_relative,
|
||||
matched: ts.matched,
|
||||
total: ts.total,
|
||||
authors: ts.authors,
|
||||
subject: ts.subject,
|
||||
tags: ts.tags,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut connection = Connection::new(start > 0, end < total);
|
||||
connection.edges.extend(
|
||||
slice
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(idx, item)| Edge::new(start + idx, item)),
|
||||
);
|
||||
Ok::<_, async_graphql::Error>(connection)
|
||||
},
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
|
||||
let now = Instant::now();
|
||||
let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
|
||||
// 10000 is an arbitrary number, if there's more than 10k unread messages, we'll
|
||||
// get an inaccurate count.
|
||||
nm.search("is:unread", 0, 10000)?
|
||||
.0
|
||||
.iter()
|
||||
.fold(HashMap::new(), |mut m, ts| {
|
||||
ts.tags.iter().for_each(|t| {
|
||||
m.entry(t.clone()).and_modify(|c| *c += 1).or_insert(1);
|
||||
});
|
||||
m
|
||||
})
|
||||
} else {
|
||||
HashMap::new()
|
||||
};
|
||||
let tags = nm
|
||||
.tags()?
|
||||
.into_iter()
|
||||
.map(|tag| {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
tag.hash(&mut hasher);
|
||||
let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
|
||||
let unread = if needs_unread {
|
||||
*unread_msg_cnt.get(&tag).unwrap_or(&0)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
Tag {
|
||||
name: tag,
|
||||
fg_color: "white".to_string(),
|
||||
bg_color: hex,
|
||||
unread,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
info!("Fetching tags took {} seconds", now.elapsed().as_secs_f32());
|
||||
Ok(tags)
|
||||
}
|
||||
|
||||
pub async fn thread(
    nm: &Notmuch,
    thread_id: String,
    debug_content_tree: bool,
) -> Result<Thread, ServerError> {
    // TODO(wathiede): normalize all email addresses through an address book with preferred
    // display names (that default to the most commonly seen name).
    let mut messages = Vec::new();
    for (path, id) in std::iter::zip(nm.files(&thread_id)?, nm.message_ids(&thread_id)?) {
        let tags = nm.tags_for_query(&format!("id:{id}"))?;
        let file = File::open(&path)?;
        let mmap = unsafe { MmapOptions::new().map(&file)? };
        let m = parse_mail(&mmap)?;
        let from = email_addresses(&path, &m, "from")?;
        let from = match from.len() {
            0 => None,
            1 => from.into_iter().next(),
            _ => {
                warn!(
                    "Got {} from addresses in message, truncating: {:?}",
                    from.len(),
                    from
                );
                from.into_iter().next()
            }
        };
        let to = email_addresses(&path, &m, "to")?;
        let cc = email_addresses(&path, &m, "cc")?;
        let subject = m.headers.get_first_value("subject");
        let timestamp = m
            .headers
            .get_first_value("date")
            .and_then(|d| mailparse::dateparse(&d).ok());
        let cid_prefix = shared::urls::cid_prefix(None, &id);
        let base_url = Url::parse("https://there-should-be-no-relative-urls-in-email").unwrap();
        let body = match extract_body(&m, &id)? {
            Body::PlainText(PlainText { text, content_tree }) => {
                let text = if text.len() > MAX_RAW_MESSAGE_SIZE {
                    // Back the cut off to a char boundary so the slice below can't
                    // panic when the limit lands inside a multi-byte character.
                    let mut cut = MAX_RAW_MESSAGE_SIZE;
                    while !text.is_char_boundary(cut) {
                        cut -= 1;
                    }
                    format!(
                        "{}...\n\nMESSAGE WAS TRUNCATED @ {} bytes",
                        &text[..cut],
                        MAX_RAW_MESSAGE_SIZE
                    )
                } else {
                    text
                };

                Body::Html(Html {
                    html: format!(
                        r#"<p class="view-part-text-plain">{}</p>"#,
                        // Trim newlines to prevent excessive white space at the
                        // beginning/end of presentation. Leave tabs and spaces in case
                        // plain text attempts to center a header on the first line.
                        sanitize_html(
                            &linkify_html(text.trim_matches('\n')),
                            &cid_prefix,
                            &base_url
                        )?
                    ),
                    content_tree: if debug_content_tree {
                        render_content_type_tree(&m)
                    } else {
                        content_tree
                    },
                })
            }
            Body::Html(Html { html, content_tree }) => Body::Html(Html {
                html: sanitize_html(&html, &cid_prefix, &base_url)?,
                content_tree: if debug_content_tree {
                    render_content_type_tree(&m)
                } else {
                    content_tree
                },
            }),

            Body::UnhandledContentType(UnhandledContentType { content_tree, .. }) => {
                // Show the raw bytes after the header block (the first blank line);
                // if no blank line turns up in the first 20kB, show the whole file.
                let body_start = mmap
                    .windows(2)
                    .take(20_000)
                    .position(|w| w == b"\n\n")
                    .map(|p| p + 2)
                    .unwrap_or(0);
                let body = mmap[body_start..].to_vec();
                Body::UnhandledContentType(UnhandledContentType {
                    text: String::from_utf8(body)?,
                    content_tree: if debug_content_tree {
                        render_content_type_tree(&m)
                    } else {
                        content_tree
                    },
                })
            }
        };
        let headers = m
            .headers
            .iter()
            .map(|h| Header {
                key: h.get_key(),
                value: h.get_value(),
            })
            .collect();
        let attachments = extract_attachments(&m, &id)?;
        messages.push(Message {
            id,
            from,
            to,
            cc,
            subject,
            tags,
            timestamp,
            headers,
            body,
            path,
            attachments,
        });
    }
    messages.reverse();
    // Find the first message with a subject set. After reversing the vec, this should be
    // the oldest message.
    let subject: String = messages
        .iter()
        .find(|m| m.subject.is_some())
        .and_then(|m| m.subject.clone())
        .unwrap_or_else(|| "(NO SUBJECT)".to_string());
    Ok(Thread {
        thread_id,
        subject,
        messages,
    })
}

fn email_addresses(
    path: &str,
    m: &ParsedMail,
    header_name: &str,
) -> Result<Vec<Email>, ServerError> {
    let mut addrs = Vec::new();
    for header_value in m.headers.get_all_values(header_name) {
        match mailparse::addrparse(&header_value) {
            Ok(mal) => {
                for ma in mal.into_inner() {
                    match ma {
                        mailparse::MailAddr::Group(gi) => {
                            if !gi.group_name.contains("ndisclosed") {
                                println!("[{path}][{header_name}] Group: {gi}");
                            }
                        }
                        mailparse::MailAddr::Single(s) => addrs.push(Email {
                            name: s.display_name,
                            addr: Some(s.addr),
                        }),
                    }
                }
            }
            Err(_) => {
                // Fallback for header values addrparse rejects: handle the common
                // `Display Name <user@host>` shape by hand; otherwise keep the raw
                // value as a display name with no address.
                let v = header_value;
                if v.matches('@').count() == 1 {
                    if v.matches('<').count() == 1 && v.ends_with('>') {
                        let idx = v.find('<').unwrap();
                        let addr = v[idx + 1..v.len() - 1].trim();
                        let name = v[..idx].trim();
                        addrs.push(Email {
                            name: Some(name.to_string()),
                            addr: Some(addr.to_string()),
                        });
                    }
                } else {
                    addrs.push(Email {
                        name: Some(v),
                        addr: None,
                    });
                }
            }
        }
    }
    Ok(addrs)
}

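For reference, a sketch of what mailparse::addrparse yields on the happy path (assuming mailparse 0.15; the header value here is illustrative):

let list = mailparse::addrparse("Alice <alice@example.com>").unwrap();
for addr in list.into_inner() {
    match addr {
        mailparse::MailAddr::Single(s) => {
            // s.display_name: Option<String>, s.addr: String
            assert_eq!(s.addr, "alice@example.com");
        }
        mailparse::MailAddr::Group(g) => println!("group: {}", g.group_name),
    }
}
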
pub fn cid_attachment_bytes(nm: &Notmuch, id: &str, cid: &str) -> Result<Attachment, ServerError> {
    let files = nm.files(id)?;
    let Some(path) = files.first() else {
        warn!("failed to find files for message {id}");
        return Err(ServerError::PartNotFound);
    };
    let file = File::open(path)?;
    let mmap = unsafe { MmapOptions::new().map(&file)? };
    let m = parse_mail(&mmap)?;
    if let Some(attachment) = walk_attachments(&m, |sp, _cur_idx| {
        info!("{cid} {:?}", get_content_id(&sp.headers));
        if let Some(h_cid) = get_content_id(&sp.headers) {
            // Content-Id values are wrapped in angle brackets; strip them before
            // comparing against the requested cid.
            let h_cid = &h_cid[1..h_cid.len() - 1];
            if h_cid == cid {
                let attachment = extract_attachment(sp, id, &[]).unwrap_or_default();
                return Some(attachment);
            }
        }
        None
    }) {
        return Ok(attachment);
    }

    Err(ServerError::PartNotFound)
}

pub fn attachment_bytes(nm: &Notmuch, id: &str, idx: &[usize]) -> Result<Attachment, ServerError> {
    let files = nm.files(id)?;
    let Some(path) = files.first() else {
        warn!("failed to find files for message {id}");
        return Err(ServerError::PartNotFound);
    };
    let file = File::open(path)?;
    let mmap = unsafe { MmapOptions::new().map(&file)? };
    let m = parse_mail(&mmap)?;
    if let Some(attachment) = walk_attachments(&m, |sp, cur_idx| {
        if cur_idx == idx {
            let attachment = extract_attachment(sp, id, idx).unwrap_or_default();
            return Some(attachment);
        }
        None
    }) {
        return Ok(attachment);
    }

    Err(ServerError::PartNotFound)
}

fn extract_body(m: &ParsedMail, id: &str) -> Result<Body, ServerError> {
    let mut part_addr = vec![id.to_string()];
    let body = m.get_body()?;
    let ret = match m.ctype.mimetype.as_str() {
        TEXT_PLAIN => return Ok(Body::text(body)),
        TEXT_HTML => return Ok(Body::html(body)),
        MULTIPART_MIXED => extract_mixed(m, &mut part_addr),
        MULTIPART_ALTERNATIVE => extract_alternative(m, &mut part_addr),
        MULTIPART_RELATED => extract_related(m, &mut part_addr),
        _ => extract_unhandled(m),
    };
    // If a multipart extractor failed, fall back to the unhandled rendering rather
    // than surfacing the error to the caller.
    if let Err(err) = ret {
        error!("Failed to extract body: {err:?}");
        return extract_unhandled(m);
    }
    ret
}

fn extract_unhandled(m: &ParsedMail) -> Result<Body, ServerError> {
    let msg = format!(
        "Unhandled body content type:\n{}\n{}",
        render_content_type_tree(m),
        m.get_body()?,
    );
    Ok(Body::UnhandledContentType(UnhandledContentType {
        text: msg,
        content_tree: render_content_type_tree(m),
    }))
}

// multipart/alternative defines multiple representations of the same message; clients
// should show the fanciest one they can display. For this program the priority is
// nested multiparts first, then text/html, then text/plain, then give up.
fn extract_alternative(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    let handled_types = vec![
        MULTIPART_ALTERNATIVE,
        MULTIPART_MIXED,
        MULTIPART_RELATED,
        TEXT_HTML,
        TEXT_PLAIN,
    ];
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == MULTIPART_ALTERNATIVE {
            return extract_alternative(sp, part_addr);
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == MULTIPART_MIXED {
            return extract_mixed(sp, part_addr);
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == MULTIPART_RELATED {
            return extract_related(sp, part_addr);
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == TEXT_HTML {
            let body = sp.get_body()?;
            return Ok(Body::html(body));
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype.as_str() == TEXT_PLAIN {
            let body = sp.get_body()?;
            return Ok(Body::text(body));
        }
    }
    Err(ServerError::StringError(format!(
        "extract_alternative failed to find suitable subpart, searched: {:?}",
        handled_types
    )))
}

// multipart/mixed defines multiple types of content, all of which should be presented
// to the user 'serially'.
fn extract_mixed(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    let handled_types = vec![
        MULTIPART_ALTERNATIVE,
        MULTIPART_RELATED,
        TEXT_HTML,
        TEXT_PLAIN,
        IMAGE_JPEG,
        IMAGE_PNG,
    ];
    let mut unhandled_types: Vec<_> = m
        .subparts
        .iter()
        .map(|sp| sp.ctype.mimetype.as_str())
        .filter(|mt| !handled_types.contains(mt))
        .collect();
    unhandled_types.sort();
    if !unhandled_types.is_empty() {
        warn!("{MULTIPART_MIXED} contains the following unhandled mimetypes {unhandled_types:?}");
    }
    let mut parts = Vec::new();
    for (idx, sp) in m.subparts.iter().enumerate() {
        part_addr.push(idx.to_string());
        match sp.ctype.mimetype.as_str() {
            MULTIPART_RELATED => parts.push(extract_related(sp, part_addr)?),
            MULTIPART_ALTERNATIVE => parts.push(extract_alternative(sp, part_addr)?),
            TEXT_PLAIN => parts.push(Body::text(sp.get_body()?)),
            TEXT_HTML => parts.push(Body::html(sp.get_body()?)),
            IMAGE_JPEG | IMAGE_PNG => {
                let pcd = sp.get_content_disposition();
                let filename = pcd.params.get("filename").cloned().unwrap_or_default();
                // Only add inline images; attachments are handled as an attribute of
                // the top-level Message and rendered separately client-side.
                if pcd.disposition == mailparse::DispositionType::Inline {
                    parts.push(Body::html(format!(
                        r#"<img src="/view/attachment/{}/{}/{filename}">"#,
                        part_addr[0],
                        part_addr[1..].join(".")
                    )));
                }
            }
            _ => (),
        }
        part_addr.pop();
    }
    Ok(flatten_body_parts(&parts))
}

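A sketch of the inline-image URL shape built above, with hypothetical values (message id `abc@mail`, nested subpart path 1.2, filename `photo.jpg`):

let part_addr = vec!["abc@mail".to_string(), "1".to_string(), "2".to_string()];
let url = format!(
    "/view/attachment/{}/{}/photo.jpg",
    part_addr[0],
    part_addr[1..].join(".")
);
assert_eq!(url, "/view/attachment/abc@mail/1.2/photo.jpg");
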
fn flatten_body_parts(parts: &[Body]) -> Body {
    let html = parts
        .iter()
        .map(|p| match p {
            Body::PlainText(PlainText { text, .. }) => {
                format!(
                    r#"<p class="view-part-text-plain">{}</p>"#,
                    // Trim newlines to prevent excessive white space at the
                    // beginning/end of presentation. Leave tabs and spaces in case
                    // plain text attempts to center a header on the first line.
                    linkify_html(text.trim_matches('\n'))
                )
            }
            Body::Html(Html { html, .. }) => html.clone(),
            Body::UnhandledContentType(UnhandledContentType { text, .. }) => {
                error!("text len {}", text.len());
                format!(
                    r#"<p class="view-part-unhandled">{}</p>"#,
                    linkify_html(text.trim_matches('\n'))
                )
            }
        })
        .collect::<Vec<_>>()
        .join("\n");

    info!("flatten_body_parts {} {html}", parts.len());
    Body::html(html)
}

fn extract_related(m: &ParsedMail, part_addr: &mut Vec<String>) -> Result<Body, ServerError> {
    // TODO(wathiede): collect related things and change return type to new Body arm.
    let handled_types = vec![
        MULTIPART_ALTERNATIVE,
        TEXT_HTML,
        TEXT_PLAIN,
        IMAGE_JPEG,
        IMAGE_PNG,
    ];
    let mut unhandled_types: Vec<_> = m
        .subparts
        .iter()
        .map(|sp| sp.ctype.mimetype.as_str())
        .filter(|mt| !handled_types.contains(mt))
        .collect();
    unhandled_types.sort();
    if !unhandled_types.is_empty() {
        warn!("{MULTIPART_RELATED} contains the following unhandled mimetypes {unhandled_types:?}");
    }

    for (i, sp) in m.subparts.iter().enumerate() {
        if sp.ctype.mimetype == IMAGE_PNG || sp.ctype.mimetype == IMAGE_JPEG {
            info!("sp.ctype {:#?}", sp.ctype);
            if let Some(cid) = sp.headers.get_first_value("Content-Id") {
                let mut part_id = part_addr.clone();
                part_id.push(i.to_string());
                info!("cid: {cid} part_id {part_id:?}");
            }
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype == MULTIPART_ALTERNATIVE {
            return extract_alternative(sp, part_addr);
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype == TEXT_HTML {
            let body = sp.get_body()?;
            return Ok(Body::html(body));
        }
    }
    for sp in &m.subparts {
        if sp.ctype.mimetype == TEXT_PLAIN {
            let body = sp.get_body()?;
            return Ok(Body::text(body));
        }
    }
    Err(ServerError::StringError(format!(
        "extract_related failed to find suitable subpart, searched: {:?}",
        handled_types
    )))
}

fn walk_attachments<T, F: Fn(&ParsedMail, &[usize]) -> Option<T> + Copy>(
    m: &ParsedMail,
    visitor: F,
) -> Option<T> {
    let mut cur_addr = Vec::new();
    walk_attachments_inner(m, visitor, &mut cur_addr)
}

fn walk_attachments_inner<T, F: Fn(&ParsedMail, &[usize]) -> Option<T> + Copy>(
    m: &ParsedMail,
    visitor: F,
    cur_addr: &mut Vec<usize>,
) -> Option<T> {
    // Depth-first walk over all subparts; `cur_addr` tracks the index path of the
    // part currently being visited. The walk short-circuits on the first Some.
    for (idx, sp) in m.subparts.iter().enumerate() {
        cur_addr.push(idx);
        let val = visitor(sp, &cur_addr);
        if val.is_some() {
            return val;
        }
        let val = walk_attachments_inner(sp, visitor, cur_addr);
        if val.is_some() {
            return val;
        }
        cur_addr.pop();
    }
    None
}

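A hedged usage sketch of the visitor: find the index path of the first part carrying a Content-Id (hypothetical call site; `m` is any ParsedMail):

let first_cid = walk_attachments(&m, |part, addr| {
    get_content_id(&part.headers).map(|cid| (addr.to_vec(), cid))
});
// first_cid: Option<(Vec<usize>, String)>; None when no part has a Content-Id.
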
// TODO(wathiede): make this use walk_attachments with a closure: one closure for
// building `Attachment`, and another that can be used to get the bytes for serving
// attachments over HTTP.
fn extract_attachments(m: &ParsedMail, id: &str) -> Result<Vec<Attachment>, ServerError> {
    let mut attachments = Vec::new();
    // Note: only direct subparts are scanned here; nested parts are reached via
    // walk_attachments when an individual attachment is served.
    for (idx, sp) in m.subparts.iter().enumerate() {
        if let Some(attachment) = extract_attachment(sp, id, &[idx]) {
            // Filter out inline attachments; they're flattened into the body of the message.
            if attachment.disposition == DispositionType::Attachment {
                attachments.push(attachment);
            }
        }
    }
    Ok(attachments)
}

fn extract_attachment(m: &ParsedMail, id: &str, idx: &[usize]) -> Option<Attachment> {
    let pcd = m.get_content_disposition();
    // TODO: do we need to handle empty filename attachments, or should we change the
    // definition of Attachment::filename?
    let filename = pcd.params.get("filename").cloned()?;

    // TODO: grab this from somewhere
    let content_id = None;
    let bytes = match m.get_body_raw() {
        Ok(bytes) => bytes,
        Err(err) => {
            error!("failed to get body for attachment: {err}");
            return None;
        }
    };
    Some(Attachment {
        id: id.to_string(),
        idx: idx
            .iter()
            .map(|i| i.to_string())
            .collect::<Vec<_>>()
            .join("."),
        disposition: pcd.disposition.into(),
        filename: Some(filename),
        size: bytes.len(),
        // TODO: what is the default for ctype?
        // TODO: do we want to use m.ctype.params for anything?
        content_type: Some(m.ctype.mimetype.clone()),
        content_id,
        bytes,
    })
}

pub fn get_attachment_filename(header_value: &str) -> &str {
    info!("get_attachment_filename {header_value}");
    // Assumes the header value ends with a double-quoted filename: strip the
    // trailing quote, then take everything after the last remaining quote.
    let v = &header_value[..header_value.len() - 1];
    if let Some(idx) = v.rfind('"') {
        &v[idx + 1..]
    } else {
        ""
    }
}

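A quick check of the assumed input shape (illustrative; an empty header value would panic on the slice above):

assert_eq!(
    get_attachment_filename(r#"attachment; filename="report.pdf""#),
    "report.pdf"
);
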
pub fn get_content_type<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
    let v = headers.get_first_value("Content-Type")?;
    // Drop any parameters, e.g. `text/html; charset=utf-8` -> `text/html`.
    match v.find(';') {
        Some(idx) => Some(v[..idx].to_string()),
        None => Some(v),
    }
}

fn get_content_id<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
    headers.get_first_value("Content-Id")
}

fn render_content_type_tree(m: &ParsedMail) -> String {
    const WIDTH: usize = 4;
    const SKIP_HEADERS: [&str; 4] = [
        "Authentication-Results",
        "DKIM-Signature",
        "Received",
        "Received-SPF",
    ];
    fn render_ct_rec(m: &ParsedMail, depth: usize) -> String {
        let mut parts = Vec::new();
        let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
        parts.push(msg);
        for sp in &m.subparts {
            parts.push(render_ct_rec(sp, depth + 1))
        }
        parts.join("\n")
    }
    fn render_rec(m: &ParsedMail, depth: usize) -> String {
        let mut parts = Vec::new();
        let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
        parts.push(msg);
        let indent = " ".repeat(depth * WIDTH);
        if !m.ctype.charset.is_empty() {
            parts.push(format!("{indent} Character Set: {}", m.ctype.charset));
        }
        for (k, v) in m.ctype.params.iter() {
            parts.push(format!("{indent} {k}: {v}"));
        }
        if !m.headers.is_empty() {
            parts.push(format!("{indent} == headers =="));
            for h in &m.headers {
                if h.get_key().starts_with('X') {
                    continue;
                }
                if SKIP_HEADERS.contains(&h.get_key().as_str()) {
                    continue;
                }

                parts.push(format!("{indent} {}: {}", h.get_key_ref(), h.get_value()));
            }
        }
        for sp in &m.subparts {
            parts.push(render_rec(sp, depth + 1))
        }
        parts.join("\n")
    }
    format!(
        "Outline:\n{}\n\nDetailed:\n{}\n\nNot showing headers:\n {}\n X.*",
        render_ct_rec(m, 1),
        render_rec(m, 1),
        SKIP_HEADERS.join("\n ")
    )
}

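As an illustration, for a hypothetical message with plain and HTML alternatives the outline half of the output comes out roughly like:

Outline:
---- multipart/alternative
-------- text/plain
-------- text/html
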
@@ -12,3 +12,24 @@ pub struct SearchResult {

#[derive(Serialize, Deserialize, Debug)]
pub struct Message {}

pub mod urls {
    pub const MOUNT_POINT: &'static str = "/api";
    pub fn cid_prefix(host: Option<&str>, cid: &str) -> String {
        if let Some(host) = host {
            format!("//{host}/api/cid/{cid}/")
        } else {
            format!("/api/cid/{cid}/")
        }
    }
    pub fn download_attachment(host: Option<&str>, id: &str, idx: &str, filename: &str) -> String {
        if let Some(host) = host {
            format!(
                "//{host}/api/download/attachment/{}/{}/{}",
                id, idx, filename
            )
        } else {
            format!("/api/download/attachment/{}/{}/{}", id, idx, filename)
        }
    }
}

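The URL helpers above are pure string builders; a couple of hedged examples of their output (values illustrative):

assert_eq!(shared::urls::cid_prefix(None, "abc@mail"), "/api/cid/abc@mail/");
assert_eq!(
    shared::urls::download_attachment(Some("example.com"), "id1", "1.2", "a.pdf"),
    "//example.com/api/download/attachment/id1/1.2/a.pdf"
);
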
@@ -9,9 +9,6 @@ license = "MIT"
readme = "./README.md"
edition = "2018"

[lib]
crate-type = ["cdylib"]

[dev-dependencies]
wasm-bindgen-test = "0.3.33"

@@ -26,7 +23,6 @@ notmuch = {path = "../notmuch"}
shared = {path = "../shared"}
itertools = "0.10.5"
serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
wasm-timer = "0.2.5"
chrono = "0.4.31"
graphql_client = "0.13.0"
thiserror = "1.0.50"
@@ -43,8 +39,3 @@ features = [
    "MediaQueryList",
    "Window"
]


[profile.release]
debug = true


@@ -7,20 +7,9 @@ address = "0.0.0.0"
port = 6758

[[proxy]]
backend = "http://localhost:9345/"
rewrite= "/api/"
[[proxy]]
backend="http://localhost:9345/original"
[[proxy]]
backend="http://localhost:9345/graphiql"
[[proxy]]
backend="http://localhost:9345/graphql"
[[proxy]]
backend="http://localhost:9345/download"
[[proxy]]
backend="http://localhost:9345/view"
backend = "http://localhost:9345/api/"

[[hooks]]
stage = "pre_build"
command = "cargo"
command_arguments = [ "test" ]
#[[hooks]]
#stage = "pre_build"
#command = "cargo"
#command_arguments = [ "test" ]

@@ -51,7 +51,7 @@ where
{
    use web_sys::RequestMode;

    Request::post("/graphql/")
    Request::post("/api/graphql/")
        .mode(RequestMode::Cors)
        .json(&body)?
        .send()

@@ -4,7 +4,7 @@
#![allow(clippy::wildcard_imports)]

use log::Level;
use seed::{prelude::wasm_bindgen, App};
use seed::App;

mod api;
mod consts;
@@ -12,9 +12,7 @@ mod graphql;
mod state;
mod view;

// (This function is invoked by `init` function in `index.html`.)
#[wasm_bindgen(start)]
pub fn start() {
fn main() {
    // This provides better error messages in debug mode.
    // It's disabled in release mode so it doesn't bloat up the file size.
    #[cfg(debug_assertions)]
@@ -238,6 +238,14 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            first,
            last,
        } => {
            let (after, before, first, last) = match (after.as_ref(), before.as_ref(), first, last)
            {
                // If no pagination set, set reasonable defaults
                (None, None, None, None) => {
                    (None, None, Some(SEARCH_RESULTS_PER_PAGE as i64), None)
                }
                _ => (after, before, first, last),
            };
            model.query = query.clone();
            orders.skip().perform_cmd(async move {
                Msg::FrontPageResult(

@@ -12,7 +12,6 @@ use seed_hooks::{state_access::CloneState, topo, use_state};

use crate::{
    api::urls,
    consts::SEARCH_RESULTS_PER_PAGE,
    graphql::{front_page_query::*, show_thread_query::*},
    state::{unread_query, Model, Msg, RefreshingState},
};
@@ -308,7 +307,14 @@ fn search_toolbar(
        .start_cursor
        .as_ref()
        .map(|i| i.parse().unwrap_or(0))
        .unwrap_or(0);
        .unwrap_or(0)
        + 1;
    let end = pager
        .end_cursor
        .as_ref()
        .map(|i| i.parse().unwrap_or(count))
        .unwrap_or(count)
        + 1;
    nav![
        C!["level", "is-mobile"],
        IF!(show_bulk_edit =>
@@ -377,12 +383,7 @@ fn search_toolbar(
            ],
            ul![
                C!["pagination-list"],
                li![format!(
                    "{} - {} of {}",
                    start,
                    count.min(start + SEARCH_RESULTS_PER_PAGE),
                    count
                )],
                li![format!("{} - {} of {}", start, end, count)],
            ],
        ]
    ]
@@ -732,7 +733,7 @@ fn message_render(msg: &ShowThreadQueryThreadMessages, open: bool) -> Node<Msg>
    let default = "UNKNOWN_FILE".to_string();
    let filename = a.filename.as_ref().unwrap_or(&default);
    let host = seed::window().location().host().expect("couldn't get host");
    let url = format!("//{host}/download/attachment/{}/{}/{}", a.id, a.idx, filename);
    let url = shared::urls::download_attachment(Some(&host), &a.id, &a.idx, filename);
    let mut fmtr = Formatter::new();
    fmtr.with_separator(" ");
    fmtr.with_scales(Scales::Binary());
@@ -854,7 +855,7 @@ fn thread(
    IF!(show_icon_text=>span!["Spam"]),
    ev(Ev::Click, move |_| Msg::MultiMsg(vec![
        Msg::AddTag(format!("thread:{spam_thread_id}"), "Spam".to_string()),
        Msg::SetUnread(format!("thread:{spam_thread_id}"), true)
        Msg::SetUnread(format!("thread:{spam_thread_id}"), false)
    ])),
],
],

@@ -30,7 +30,7 @@
    white-space: nowrap;
}

.body {
.message .body {
    background: white;
    color: black;
    margin-top: 0.5em;
@@ -48,6 +48,18 @@
    margin: 0.5rem 0;
}

.message .body blockquote {
    padding-left: 1em;
    border-left: 2px solid #ddd;
}

.message .body ul,
.message .body ol,
.message .body li {
    margin: revert;
    padding: revert;
}

.error {
    background-color: red;
}
@@ -126,13 +138,13 @@ iframe {
}

@keyframes spin {
    from {
        transform: rotate(0deg);
    }
    from {
        transform: rotate(0deg);
    }

    to {
        transform: rotate(360deg);
    }
    to {
        transform: rotate(360deg);
    }
}

@media (max-width: 768px) {
@@ -190,8 +202,7 @@ input::placeholder,
    white-space: nowrap;
}

.search-results td.subject {
}
.search-results td.subject {}

.search-results .subject .tag {}

@@ -281,8 +292,9 @@ display: none;
    margin: .25em;
    display: inline-block;
}

.attachment .card-content {
    padding: 0.5rem 1.5rem;
    padding: 0.5rem 1.5rem;
}

.button.spam {