Compare commits

16 commits: 28d5562491 ... 94f7ad109a

94f7ad109a
f6bdf302fe
b76c535738
29949c703d
f5f9eb175d
488c3b86f8
be8fd59703
071fe2e206
ac5660a6d0
99a104517d
c3692cadec
b14000952c
7a32d5c630
714b057fdb
4c2526c70b
a8f4aa03bd

Cargo.lock (generated)
@@ -3113,7 +3113,6 @@ dependencies = [
 "mailparse",
 "memmap",
 "notmuch",
 "rayon",
 "rocket 0.5.0",
 "rocket_contrib",
 "rocket_cors",
@@ -208,9 +208,9 @@

use std::{
    ffi::OsStr,
    io::{self, BufRead, BufReader, Lines},
    io::{self},
    path::{Path, PathBuf},
    process::{Child, ChildStdout, Command, Stdio},
    process::Command,
};

use log::info;

@@ -518,7 +518,7 @@ impl Notmuch {
            "--format=json",
            query,
        ])?;
        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here an
        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
        // use that for parsing in rust.
        let s = String::from_utf8_lossy(&slice);
        let mut deserializer = serde_json::Deserializer::from_str(&s);

@@ -537,7 +537,7 @@ impl Notmuch {
            &format!("--part={}", part),
            query,
        ])?;
        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here an
        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
        // use that for parsing in rust.
        let s = String::from_utf8_lossy(&slice);
        let mut deserializer = serde_json::Deserializer::from_str(&s);

@@ -556,14 +556,14 @@ impl Notmuch {
        Ok(res)
    }

    pub fn message_ids(&self, query: &str) -> Result<Lines<BufReader<ChildStdout>>, NotmuchError> {
        let mut child = self.run_notmuch_pipe(["search", "--output=messages", query])?;
        Ok(BufReader::new(child.stdout.take().unwrap()).lines())
    pub fn message_ids(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
        let res = self.run_notmuch(["search", "--output=messages", "--format=json", query])?;
        Ok(serde_json::from_slice(&res)?)
    }

    pub fn files(&self, query: &str) -> Result<Lines<BufReader<ChildStdout>>, NotmuchError> {
        let mut child = self.run_notmuch_pipe(["search", "--output=files", query])?;
        Ok(BufReader::new(child.stdout.take().unwrap()).lines())
    pub fn files(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
        let res = self.run_notmuch(["search", "--output=files", "--format=json", query])?;
        Ok(serde_json::from_slice(&res)?)
    }

    fn run_notmuch<I, S>(&self, args: I) -> Result<Vec<u8>, NotmuchError>

@@ -580,21 +580,6 @@ impl Notmuch {
        let out = cmd.output()?;
        Ok(out.stdout)
    }

    fn run_notmuch_pipe<I, S>(&self, args: I) -> Result<Child, NotmuchError>
    where
        I: IntoIterator<Item = S>,
        S: AsRef<OsStr>,
    {
        let mut cmd = Command::new("notmuch");
        if let Some(config_path) = &self.config_path {
            cmd.arg("--config").arg(config_path);
        }
        cmd.args(args);
        info!("{:?}", &cmd);
        let child = cmd.stdout(Stdio::piped()).spawn()?;
        Ok(child)
    }
}

#[cfg(test)]
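The two comments above describe the pattern used by both query helpers: notmuch can emit JSON containing invalid UTF-8, so the output bytes are lossy-decoded before being handed to serde_json. A minimal standalone sketch of that pattern (not part of this diff; it assumes only a serde_json dependency, and the input bytes are made up):

```rust
fn main() {
    // Hypothetical output bytes from an external command; 0xFF is not valid UTF-8.
    let raw: &[u8] = b"[\"id:one\", \"id:tw\xFFo\"]";

    // Lossy conversion replaces invalid sequences with U+FFFD, producing a valid
    // &str that serde_json can parse normally.
    let s = String::from_utf8_lossy(raw);
    let ids: Vec<String> = serde_json::from_str(&s).expect("valid JSON after lossy decode");

    assert_eq!(ids.len(), 2);
    println!("{ids:?}");
}
```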
@@ -20,7 +20,6 @@ urlencoding = "2.1.3"
async-graphql = { version = "6.0.11", features = ["log"] }
async-graphql-rocket = "6.0.11"
rocket_cors = "0.6.0"
rayon = "1.8.0"
memmap = "0.7.0"
mailparse = "0.14.0"
ammonia = "3.3.0"
@@ -89,6 +89,22 @@ impl<'r, 'o: 'r> Responder<'r, 'o> for PartResponder {
            .ok()
    }
}
#[get("/attachment/<id>/<idx>")]
async fn attachment(
    nm: &State<Notmuch>,
    id: &str,
    idx: usize,
) -> Result<PartResponder, Debug<NotmuchError>> {
    let mid = if id.starts_with("id:") {
        id.to_string()
    } else {
        format!("id:{}", id)
    };
    let bytes = Vec::new();
    let filename = None;
    // TODO(wathiede): use walk_attachments from graphql to fill this out
    Ok(PartResponder { bytes, filename })
}

#[get("/original/<id>/part/<part>")]
async fn original_part(
@@ -1,18 +1,20 @@
use std::{
    collections::HashMap,
    fs::File,
    hash::{DefaultHasher, Hash, Hasher},
    str::FromStr,
};

use async_graphql::{
    connection::{self, Connection, Edge},
    Context, EmptyMutation, EmptySubscription, Error, FieldResult, Object, Schema, SimpleObject,
    Union,
    Context, EmptyMutation, EmptySubscription, Enum, Error, FieldResult, Object, Schema,
    SimpleObject, Union,
};
use log::{error, info, warn};
use mailparse::{parse_mail, MailHeaderMap, ParsedMail};
use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
use memmap::MmapOptions;
use notmuch::Notmuch;
use rayon::prelude::*;
use rocket::time::Instant;

pub struct QueryRoot;
@@ -46,6 +48,8 @@ pub struct Thread {

#[derive(Debug, SimpleObject)]
pub struct Message {
    // Message-ID for message, prepend `id:<id>` to search in notmuch
    pub id: String,
    // First From header found in email
    pub from: Option<Email>,
    // All To headers found in email

@@ -56,10 +60,50 @@ pub struct Message {
    pub subject: Option<String>,
    // Parsed Date header, if found and valid
    pub timestamp: Option<i64>,
    // Headers
    pub headers: Vec<Header>,
    // The body contents
    pub body: Body,
    // On disk location of message
    pub path: String,
    pub attachments: Vec<Attachment>,
}

// Content-Type: image/jpeg; name="PXL_20231125_204826860.jpg"
// Content-Disposition: attachment; filename="PXL_20231125_204826860.jpg"
// Content-Transfer-Encoding: base64
// Content-ID: <f_lponoluo1>
// X-Attachment-Id: f_lponoluo1
#[derive(Debug, SimpleObject)]
pub struct Attachment {
    filename: String,
    content_type: Option<String>,
    content_id: Option<String>,
}

#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
enum DispositionType {
    Inline,
    Attachment,
}

impl FromStr for DispositionType {
    type Err = String;

    // Required method
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "inline" => DispositionType::Inline,
            "attachment" => DispositionType::Attachment,
            c => return Err(format!("unknown disposition type: {c}")),
        })
    }
}

#[derive(Debug, SimpleObject)]
pub struct Header {
    key: String,
    value: String,
}

#[derive(Debug)]
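A standalone check of the Content-Disposition type parsing added above (a sketch, not part of this diff): the enum and `FromStr` impl are repeated here without the async-graphql `Enum` derive so the example compiles with only the standard library, and the header value is an illustrative one taken from the comments above.

```rust
use std::str::FromStr;

#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum DispositionType {
    Inline,
    Attachment,
}

impl FromStr for DispositionType {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "inline" => DispositionType::Inline,
            "attachment" => DispositionType::Attachment,
            c => return Err(format!("unknown disposition type: {c}")),
        })
    }
}

fn main() {
    // The token before the first ';' of a Content-Disposition header value is the type.
    let value = r#"attachment; filename="PXL_20231125_204826860.jpg""#;
    let dt = value.split(';').next().unwrap().trim();

    assert_eq!(dt.parse::<DispositionType>(), Ok(DispositionType::Attachment));
    assert!("form-data".parse::<DispositionType>().is_err());
}
```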
@@ -104,6 +148,9 @@ impl Html {
    async fn content_tree(&self) -> &str {
        &self.content_tree
    }
    async fn headers(&self) -> Vec<Header> {
        Vec::new()
    }
}

#[derive(Debug, Union)]
@@ -214,15 +261,32 @@ impl QueryRoot {

    async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
        let nm = ctx.data_unchecked::<Notmuch>();
        Ok(nm
        let now = Instant::now();
        let needs_unread = ctx.look_ahead().field("unread").exists();
        let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
            // 10000 is an arbitrary number, if there's more than 10k unread messages, we'll
            // get an inaccurate count.
            nm.search("is:unread", 0, 10000)?
                .0
                .iter()
                .fold(HashMap::new(), |mut m, ts| {
                    ts.tags.iter().for_each(|t| {
                        m.entry(t.clone()).and_modify(|c| *c += 1).or_insert(1);
                    });
                    m
                })
        } else {
            HashMap::new()
        };
        let tags = nm
            .tags()?
            .into_par_iter()
            .into_iter()
            .map(|tag| {
                let mut hasher = DefaultHasher::new();
                tag.hash(&mut hasher);
                let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
                let unread = if ctx.look_ahead().field("unread").exists() {
                    nm.count(&format!("tag:{tag} is:unread")).unwrap_or(0)
                let unread = if needs_unread {
                    *unread_msg_cnt.get(&tag).unwrap_or(&0)
                } else {
                    0
                };

@@ -233,7 +297,9 @@ impl QueryRoot {
                unread,
            }
        })
        .collect())
            .collect();
        info!("Fetching tags took {}", now.elapsed());
        Ok(tags)
    }
    async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
        // TODO(wathiede): normalize all email addresses through an address book with preferred
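The unread count in `tags` above is now computed from a single `is:unread` search folded into a per-tag map, instead of running one `notmuch count` per tag. A standalone sketch of that fold (the `ThreadSummary` stand-in is hypothetical; the fold body mirrors the diff):

```rust
use std::collections::HashMap;

// Hypothetical stand-in for one thread summary returned by the unread search.
struct ThreadSummary {
    tags: Vec<String>,
}

// Count how many unread threads carry each tag, mirroring the fold above.
fn unread_counts(threads: &[ThreadSummary]) -> HashMap<String, usize> {
    threads.iter().fold(HashMap::new(), |mut m, ts| {
        ts.tags.iter().for_each(|t| {
            m.entry(t.clone()).and_modify(|c| *c += 1).or_insert(1);
        });
        m
    })
}

fn main() {
    let threads = vec![
        ThreadSummary { tags: vec!["inbox".into(), "work".into()] },
        ThreadSummary { tags: vec!["inbox".into()] },
    ];
    let counts = unread_counts(&threads);
    assert_eq!(counts.get("inbox"), Some(&2));
    assert_eq!(counts.get("work"), Some(&1));
    assert_eq!(counts.get("spam"), None);
}
```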
@@ -246,8 +312,8 @@ impl QueryRoot {
            .field("contentTree")
            .exists();
        let mut messages = Vec::new();
        for path in nm.files(&thread_id)? {
            let path = path?;
        for (path, id) in std::iter::zip(nm.files(&thread_id)?, nm.message_ids(&thread_id)?) {
            info!("{id}\nfile: {path}");
            let file = File::open(&path)?;
            let mmap = unsafe { MmapOptions::new().map(&file)? };
            let m = parse_mail(&mmap)?;
@@ -290,14 +356,27 @@ impl QueryRoot {
            }),
            b => b,
        };
        let headers = m
            .headers
            .iter()
            .map(|h| Header {
                key: h.get_key(),
                value: h.get_value(),
            })
            .collect();
        // TODO(wathiede): parse message and fill out attachments
        let attachments = extract_attachments(&m)?;
        messages.push(Message {
            id,
            from,
            to,
            cc,
            subject,
            timestamp,
            headers,
            body,
            path,
            attachments,
        });
    }
    messages.reverse();
@@ -339,6 +418,10 @@ fn extract_unhandled(m: &ParsedMail) -> Result<Body, Error> {
        text: msg,
    }))
}

// multipart/alternative defines multiple representations of the same message, and clients should
// show the fanciest they can display. For this program, the priority is text/html, text/plain,
// then give up.
fn extract_alternative(m: &ParsedMail) -> Result<Body, Error> {
    for sp in &m.subparts {
        if sp.ctype.mimetype == "text/html" {

@@ -355,6 +438,8 @@ fn extract_alternative(m: &ParsedMail) -> Result<Body, Error> {
    Err("extract_alternative".into())
}

// multipart/mixed defines multiple types of context all of which should be presented to the user
// 'serially'.
fn extract_mixed(m: &ParsedMail) -> Result<Body, Error> {
    for sp in &m.subparts {
        if sp.ctype.mimetype == "multipart/alternative" {
@@ -394,21 +479,92 @@ fn extract_related(m: &ParsedMail) -> Result<Body, Error> {
    Err("extract_related".into())
}

// TODO(wathiede): make this walk_attachments that takes a closure.
// Then implement one closure for building `Attachment` and implement another that can be used to
// get the bytes for serving attachments over HTTP
fn extract_attachments(m: &ParsedMail) -> Result<Vec<Attachment>, Error> {
    let mut attachements = Vec::new();
    for sp in &m.subparts {
        for h in &sp.headers {
            if h.get_key() == "Content-Disposition" {
                let v = h.get_value();
                if let Some(idx) = v.find(";") {
                    let dt = &v[..idx];
                    match DispositionType::from_str(dt) {
                        Ok(DispositionType::Attachment) => {
                            attachements.push(Attachment {
                                filename: get_attachment_filename(&v).to_string(),
                                content_type: get_content_type(&sp.headers),
                                content_id: get_content_id(&sp.headers),
                            });
                        }
                        Ok(DispositionType::Inline) => continue,
                        Err(e) => {
                            warn!("failed to parse Content-Disposition type '{}'", e);
                            continue;
                        }
                    };
                } else {
                    warn!("header has Content-Disposition missing ';'");
                    continue;
                }
            }
        }
    }
    Ok(attachements)
}

fn get_attachment_filename(header_value: &str) -> &str {
    // Strip last "
    let v = &header_value[..header_value.len() - 1];
    if let Some(idx) = v.rfind('"') {
        &v[idx + 1..]
    } else {
        ""
    }
}

fn get_content_type<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
    for h in headers {
        if h.get_key() == "Content-Type" {
            let v = h.get_value();
            if let Some(idx) = v.find(';') {
                return Some(v[..idx].to_string());
            } else {
                return Some(v);
            }
        }
    }
    None
}

fn get_content_id<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
    for h in headers {
        if h.get_key() == "Content-ID" {
            return Some(h.get_value());
        }
    }
    None
}

fn render_content_type_tree(m: &ParsedMail) -> String {
    const WIDTH: usize = 4;
    fn render_rec(m: &ParsedMail, depth: usize) -> String {
        let mut parts = Vec::new();
        let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
        parts.push(msg);
        let indent = " ".repeat(depth * WIDTH);
        if !m.ctype.charset.is_empty() {
            parts.push(format!(
                "{} Character Set: {}",
                " ".repeat(depth * WIDTH),
                m.ctype.charset
            ));
            parts.push(format!("{indent} Character Set: {}", m.ctype.charset));
        }
        for (k, v) in m.ctype.params.iter() {
            parts.push(format!("{} {k}: {v}", " ".repeat(depth * WIDTH),));
            parts.push(format!("{indent} {k}: {v}"));
        }
        if !m.headers.is_empty() {
            parts.push(format!("{indent} == headers =="));
            for h in &m.headers {
                parts.push(format!("{indent} {}: {}", h.get_key(), h.get_value()));
            }
        }
        for sp in &m.subparts {
            parts.push(render_rec(sp, depth + 1))
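`get_attachment_filename` above assumes the header value ends with a closing quote and returns whatever follows the last remaining `"`. A standalone sketch of that slicing with the assumption made explicit (not part of this diff; the helper name and input are illustrative only):

```rust
// Mirror of the slicing in get_attachment_filename; it assumes the value ends
// with a closing '"', e.g. `attachment; filename="photo.jpg"`.
fn attachment_filename(header_value: &str) -> &str {
    // Drop the trailing '"' so rfind locates the quote that opens the filename.
    let v = &header_value[..header_value.len() - 1];
    match v.rfind('"') {
        Some(idx) => &v[idx + 1..],
        None => "",
    }
}

fn main() {
    let value = r#"attachment; filename="PXL_20231125_204826860.jpg""#;
    assert_eq!(attachment_filename(value), "PXL_20231125_204826860.jpg");
}
```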
@@ -3,4 +3,5 @@
# Build in release mode and push to minio for serving.
all:
    trunk build --release
    mc mirror m/letterbox/ /tmp/letterbox-$(shell date +%s)
    mc mirror --overwrite --remove dist/ m/letterbox/
@@ -10,6 +10,8 @@ port = 6758
backend = "http://localhost:9345/"
rewrite= "/api/"
[[proxy]]
backend="http://localhost:9345/original"
[[proxy]]
backend="http://localhost:9345/graphiql"
[[proxy]]
backend="http://localhost:9345/graphql"
@@ -59,6 +59,57 @@
},
"subscriptionType": null,
"types": [
{
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "filename",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "contentType",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "contentId",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
],
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"name": "Attachment",
"possibleTypes": null
},
{
"description": null,
"enumValues": null,

@@ -140,6 +191,49 @@
"name": "Float",
"possibleTypes": null
},
{
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "key",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "value",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
],
"inputFields": null,
"interfaces": [],
"kind": "OBJECT",
"name": "Header",
"possibleTypes": null
},
{
"description": null,
"enumValues": null,

@@ -175,6 +269,30 @@
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "headers",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "Header",
"ofType": null
}
}
}
}
}
],
"inputFields": null,

@@ -207,6 +325,22 @@
"description": null,
"enumValues": null,
"fields": [
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "id",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,

@@ -291,6 +425,30 @@
"ofType": null
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "headers",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "Header",
"ofType": null
}
}
}
}
},
{
"args": [],
"deprecationReason": null,

@@ -322,6 +480,30 @@
"ofType": null
}
}
},
{
"args": [],
"deprecationReason": null,
"description": null,
"isDeprecated": false,
"name": "attachments",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "Attachment",
"ofType": null
}
}
}
}
}
],
"inputFields": null,
@@ -2,6 +2,7 @@ query ShowThreadQuery($threadId: String!) {
  thread(threadId: $threadId) {
    subject
    messages {
      id
      subject
      from {
        name

@@ -31,6 +32,11 @@ query ShowThreadQuery($threadId: String!) {
        }
      }
      path
      attachments {
        filename
        contentType
        contentId
      }
    }
  }
  tags {
@@ -133,7 +133,7 @@ blockquote[type="cite"],
}
*/

.desktop-main-content {
.desktop .main-content {
  display: grid;
  grid-template-columns: 12rem 1fr;
}

@@ -153,6 +153,11 @@ blockquote[type="cite"],
.navbar {
  border: none;
}
.desktop nav.pagination,
.tablet nav.pagination {
  margin-left: .5em;
  margin-bottom: 0 !important;
}
</style>
</head>
@@ -1,5 +1,5 @@
use graphql_client::GraphQLQuery;
use log::{debug, info};
use log::{debug, error, info};
use notmuch::ThreadSet;
use seed::{prelude::*, *};
use thiserror::Error;

@@ -19,6 +19,7 @@ pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
    } else {
        orders.notify(subs::UrlRequested::new(url));
    };
    orders.stream(streams::window_event(Ev::Resize, |_| Msg::OnResize));
    orders.subscribe(on_url_changed);

    Model {

@@ -127,6 +128,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
        Msg::Reload => {
            orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
        }
        Msg::OnResize => (),

        Msg::SearchRequest {
            query,

@@ -300,7 +302,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
            model.context = Context::ThreadResult(data.thread);
        }
        Msg::ShowThreadResult(bad) => {
            error!("show_thread_query error: {bad:?}");
            error!("show_thread_query error: {bad:#?}");
        }
    }
}

@@ -357,6 +359,8 @@ pub enum Msg {
    Noop,
    // Tell the client to refresh its state
    Reload,
    // Window has changed size
    OnResize,
    // Tell the server to update state
    RefreshStart,
    RefreshDone(Option<FetchError>),
@@ -5,10 +5,7 @@ use seed_hooks::{state_access::CloneState, topo, use_state};
use crate::{
    api::urls,
    state::{Context, Model, Msg, Tag},
    view::{
        legacy::{view_search_results_legacy, view_thread_legacy},
        view_header, view_search_results, view_thread,
    },
    view::{self, legacy, view_header, view_search_results},
};

#[topo::nested]

@@ -16,9 +13,9 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
    // Do two queries, one without `unread` so it loads fast, then a second with unread.
    let content = match &model.context {
        Context::None => div![h1!["Loading"]],
        Context::Thread(thread_set) => view_thread_legacy(thread_set),
        Context::ThreadResult(thread) => view_thread(thread),
        Context::Search(search_results) => view_search_results_legacy(&model.query, search_results),
        Context::Thread(thread_set) => legacy::thread(thread_set),
        Context::ThreadResult(thread) => view::thread(thread),
        Context::Search(search_results) => legacy::search_results(&model.query, search_results),
        Context::SearchResult {
            query,
            results,

@@ -89,7 +86,7 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
    let tags_open = use_state(|| false);
    let force_tags_open = unread.is_empty();
    div![
        C!["desktop-main-content"],
        C!["main-content"],
        aside![
            C!["tags-menu", "menu"],
            IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),
@@ -9,10 +9,7 @@ use crate::{
    view::{human_age, pretty_authors, set_title, tags_chiclet},
};

pub(super) fn view_search_results_legacy(
    query: &str,
    search_results: &shared::SearchResult,
) -> Node<Msg> {
pub(super) fn search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
    if query.is_empty() {
        set_title("all mail");
    } else {

@@ -45,7 +42,7 @@ pub(super) fn view_search_results_legacy(
    });
    let first = search_results.page * search_results.results_per_page;
    div![
        view_search_pager_legacy(first, summaries.len(), search_results.total),
        search_pager(first, summaries.len(), search_results.total),
        table![
            C![
                "table",

@@ -62,11 +59,11 @@ pub(super) fn view_search_results_legacy(
            ]],
            tbody![rows]
        ],
        view_search_pager_legacy(first, summaries.len(), search_results.total)
        search_pager(first, summaries.len(), search_results.total)
    ]
}

pub(super) fn view_search_pager_legacy(start: usize, count: usize, total: usize) -> Node<Msg> {
fn search_pager(start: usize, count: usize, total: usize) -> Node<Msg> {
    let is_first = start <= 0;
    let is_last = (start + SEARCH_RESULTS_PER_PAGE) >= total;
    nav![

@@ -90,7 +87,7 @@ pub(super) fn view_search_pager_legacy(start: usize, count: usize, total: usize)
    ]
}

pub(super) fn view_thread_legacy(thread_set: &ThreadSet) -> Node<Msg> {
pub(super) fn thread(thread_set: &ThreadSet) -> Node<Msg> {
    assert_eq!(thread_set.0.len(), 1);
    let thread = &thread_set.0[0];
    assert_eq!(thread.0.len(), 1);

@@ -107,6 +104,56 @@ pub(super) fn view_thread_legacy(thread_set: &ThreadSet) -> Node<Msg> {
        ],
    ]
}
pub(super) fn mobile_search_results(
    query: &str,
    search_results: &shared::SearchResult,
) -> Node<Msg> {
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let summaries = &search_results.summary.0;
    let rows = summaries.iter().map(|r| {
        /*
        let tid = r.thread.clone();
        tr![
            td![
                C!["from"],
                pretty_authors(&r.authors),
                IF!(r.total>1 => small![" ", r.total.to_string()]),
            ],
            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
            td![C!["date"], &r.date_relative],
            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
        ]
        */
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        a![
            C!["has-text-light"],
            attrs! {
                At::Href => urls::thread(&tid)
            },
            div![
                C!["row"],
                div![C!["subject"], &r.subject],
                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
                div![
                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
                    span![C!["is-size-7", "float-right", "date"], datetime]
                ]
            ]
        ]
    });
    let first = search_results.page * search_results.results_per_page;
    div![
        C!["search-results"],
        search_pager(first, summaries.len(), search_results.total),
        rows,
        search_pager(first, summaries.len(), search_results.total)
    ]
}
fn view_message(thread: &ThreadNode) -> Node<Msg> {
    let message = thread.0.as_ref().expect("ThreadNode missing Message");
    let children = &thread.1;
@@ -5,26 +5,25 @@ use crate::{
    graphql::front_page_query::*,
    state::{Context, Model, Msg},
    view::{
        human_age,
        legacy::{view_search_pager_legacy, view_thread_legacy},
        pretty_authors, set_title, tags_chiclet, view_header, view_search_pager, view_thread,
        self, human_age, legacy, pretty_authors, set_title, tags_chiclet, view_header,
        view_search_pager,
    },
};

pub(super) fn view(model: &Model) -> Node<Msg> {
    let content = match &model.context {
        Context::None => div![h1!["Loading"]],
        Context::Thread(thread_set) => view_thread_legacy(thread_set),
        Context::ThreadResult(thread) => view_thread(thread),
        Context::Thread(thread_set) => legacy::thread(thread_set),
        Context::ThreadResult(thread) => view::thread(thread),
        Context::Search(search_results) => {
            view_mobile_search_results_legacy(&model.query, search_results)
            legacy::mobile_search_results(&model.query, search_results)
        }
        Context::SearchResult {
            query,
            results,
            count,
            pager,
        } => view_mobile_search_results(&query, results.as_slice(), *count, pager),
        } => search_results(&query, results.as_slice(), *count, pager),
    };
    div![
        view_header(&model.query, &model.refreshing_state),

@@ -33,7 +32,7 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
    ]
}

fn view_mobile_search_results(
fn search_results(
    query: &str,
    results: &[FrontPageQuerySearchNodes],
    count: usize,

@@ -70,54 +69,3 @@ fn view_mobile_search_results(
        view_search_pager(count, pager),
    ]
}

fn view_mobile_search_results_legacy(
    query: &str,
    search_results: &shared::SearchResult,
) -> Node<Msg> {
    if query.is_empty() {
        set_title("all mail");
    } else {
        set_title(query);
    }
    let summaries = &search_results.summary.0;
    let rows = summaries.iter().map(|r| {
        /*
        let tid = r.thread.clone();
        tr![
            td![
                C!["from"],
                pretty_authors(&r.authors),
                IF!(r.total>1 => small![" ", r.total.to_string()]),
            ],
            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
            td![C!["date"], &r.date_relative],
            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
        ]
        */
        let tid = r.thread.clone();
        let datetime = human_age(r.timestamp as i64);
        a![
            C!["has-text-light"],
            attrs! {
                At::Href => urls::thread(&tid)
            },
            div![
                C!["row"],
                div![C!["subject"], &r.subject],
                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
                div![
                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
                    span![C!["is-size-7", "float-right", "date"], datetime]
                ]
            ]
        ]
    });
    let first = search_results.page * search_results.results_per_page;
    div![
        C!["search-results"],
        view_search_pager_legacy(first, summaries.len(), search_results.total),
        rows,
        view_search_pager_legacy(first, summaries.len(), search_results.total)
    ]
}
@@ -19,7 +19,9 @@ use crate::{
mod desktop;
mod legacy;
mod mobile;
mod tablet;

const MAX_RAW_MESSAGE_SIZE: usize = 100_000;
fn set_title(title: &str) {
    seed::document().set_title(&format!("lb: {}", title));
}

@@ -249,19 +251,31 @@ fn view_addresses(addrs: &[impl Email]) -> Vec<Node<Msg>> {
    addrs.into_iter().map(view_address).collect::<Vec<_>>()
}

fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
fn raw_text_message(contents: &str) -> Node<Msg> {
    let (contents, truncated_msg) = if contents.len() > MAX_RAW_MESSAGE_SIZE {
        (
            &contents[..MAX_RAW_MESSAGE_SIZE],
            Some(div!["... contents truncated"]),
        )
    } else {
        (contents, None)
    };
    div![C!["view-part-text-plain"], contents, truncated_msg,]
}

fn thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
    // TODO(wathiede): show per-message subject if it changes significantly from top-level subject
    set_title(&thread.subject);
    let messages = thread.messages.iter().map(|msg| {
        div![
            C!["message"],
            /* TODO(wathiede): collect all the tags and show them here. */
            /* TODO(wathiede): collect all the attachments from all the subparts */
            msg.from
                .as_ref()
                .map(|from| div![C!["header"], "From: ", view_address(&from)]),
            msg.timestamp
                .map(|ts| div![C!["header"], "Date: ", human_age(ts)]),
            div!["Message-ID: ", &msg.id],
            div![
                C!["header"],
                IF!(!msg.to.is_empty() => span!["To: ", view_addresses(&msg.to)]),

@@ -278,7 +292,7 @@ fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
                    contents,
                    content_tree,
                },
            ) => div![C!["view-part-text-plain"], contents, pre![content_tree]],
            ) => div![raw_text_message(&contents), pre![content_tree]],
            ShowThreadQueryThreadMessagesBody::Html(
                ShowThreadQueryThreadMessagesBodyOnHtml {
                    contents,

@@ -287,6 +301,15 @@ fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
            ) => div![
                C!["view-part-text-html"],
                raw![contents],
                IF!(!msg.attachments.is_empty() =>
                    div![
                        C!["attachments"],
                        br![],
                        h2!["Attachments"],
                        msg.attachments
                            .iter()
                            .map(|a| div!["Filename: ", &a.filename, " ", &a.content_type])
                    ]),
                pre![content_tree]
            ],
        }

@@ -395,26 +418,28 @@ fn view_footer(render_time_ms: u128) -> Node<Msg> {

// `view` describes what to display.
pub fn view(model: &Model) -> Node<Msg> {
    info!("refreshing {:?}", model.refreshing_state);
    let is_mobile = seed::window()
        .match_media("(max-width: 768px)")
        .expect("failed media query")
        .map(|mql| mql.matches())
        .unwrap_or(false);

    let start = Instant::now();
    info!("refreshing {:?}", model.refreshing_state);
    let win = seed::window();
    let w = win
        .inner_width()
        .expect("window width")
        .as_f64()
        .expect("window width f64");
    let h = win
        .inner_height()
        .expect("window height")
        .as_f64()
        .expect("window height f64");
    info!("win: {w}x{h}");

    info!("view called");
    div![
        if is_mobile {
            C!["mobile"]
        } else {
            C!["desktop"]
        match w {
            w if w < 800. => div![C!["mobile"], mobile::view(model)],
            w if w < 1024. => div![C!["tablet"], tablet::view(model)],
            _ => div![C!["desktop"], desktop::view(model)],
        },
        if is_mobile {
            mobile::view(model)
        } else {
            desktop::view(model)
        },
        view_footer(start.elapsed().as_millis())
        view_footer(start.elapsed().as_millis()),
    ]
}
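The `view` function above replaces the single `match_media("(max-width: 768px)")` check with window-width breakpoints at 800 and 1024 pixels. A small standalone sketch of that selection factored into a testable function (not part of this diff; the `Layout` enum is hypothetical, the thresholds match the code above):

```rust
#[derive(Debug, PartialEq)]
enum Layout {
    Mobile,
    Tablet,
    Desktop,
}

// Same thresholds as the match on `w` above: <800 mobile, <1024 tablet, else desktop.
fn layout_for_width(w: f64) -> Layout {
    match w {
        w if w < 800.0 => Layout::Mobile,
        w if w < 1024.0 => Layout::Tablet,
        _ => Layout::Desktop,
    }
}

fn main() {
    assert_eq!(layout_for_width(375.0), Layout::Mobile);
    assert_eq!(layout_for_width(900.0), Layout::Tablet);
    assert_eq!(layout_for_width(1440.0), Layout::Desktop);
}
```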
web/src/view/tablet.rs (new file, 30 lines)
@@ -0,0 +1,30 @@
use seed::{prelude::*, *};

use crate::{
    state::{Context, Model, Msg},
    view::{self, view_header, view_search_results},
};

pub(super) fn view(model: &Model) -> Node<Msg> {
    // Do two queries, one without `unread` so it loads fast, then a second with unread.
    let content = match &model.context {
        Context::None => div![h1!["Loading"]],
        Context::Thread(_) => unimplemented!("tablet legacy thread view"),
        Context::ThreadResult(thread) => view::thread(thread),
        Context::Search(_) => unimplemented!("tablet legacy search results view"),
        Context::SearchResult {
            query,
            results,
            count,
            pager,
        } => view_search_results(&query, results.as_slice(), *count, pager),
    };
    div![
        C!["main-content"],
        div![
            view_header(&model.query, &model.refreshing_state),
            content,
            view_header(&model.query, &model.refreshing_state),
        ]
    ]
}