Compare commits: 28d5562491...94f7ad109a (16 commits)

94f7ad109a
f6bdf302fe
b76c535738
29949c703d
f5f9eb175d
488c3b86f8
be8fd59703
071fe2e206
ac5660a6d0
99a104517d
c3692cadec
b14000952c
7a32d5c630
714b057fdb
4c2526c70b
a8f4aa03bd
Cargo.lock (generated, 1 change)
@@ -3113,7 +3113,6 @@ dependencies = [
  "mailparse",
  "memmap",
  "notmuch",
- "rayon",
  "rocket 0.5.0",
  "rocket_contrib",
  "rocket_cors",

@@ -208,9 +208,9 @@

 use std::{
     ffi::OsStr,
-    io::{self, BufRead, BufReader, Lines},
+    io::{self},
     path::{Path, PathBuf},
-    process::{Child, ChildStdout, Command, Stdio},
+    process::Command,
 };

 use log::info;
@@ -518,7 +518,7 @@ impl Notmuch {
             "--format=json",
             query,
         ])?;
-        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here an
+        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
         // use that for parsing in rust.
         let s = String::from_utf8_lossy(&slice);
         let mut deserializer = serde_json::Deserializer::from_str(&s);
@@ -537,7 +537,7 @@ impl Notmuch {
             &format!("--part={}", part),
             query,
         ])?;
-        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here an
+        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
         // use that for parsing in rust.
         let s = String::from_utf8_lossy(&slice);
         let mut deserializer = serde_json::Deserializer::from_str(&s);
@@ -556,14 +556,14 @@ impl Notmuch {
         Ok(res)
     }

-    pub fn message_ids(&self, query: &str) -> Result<Lines<BufReader<ChildStdout>>, NotmuchError> {
-        let mut child = self.run_notmuch_pipe(["search", "--output=messages", query])?;
-        Ok(BufReader::new(child.stdout.take().unwrap()).lines())
+    pub fn message_ids(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
+        let res = self.run_notmuch(["search", "--output=messages", "--format=json", query])?;
+        Ok(serde_json::from_slice(&res)?)
     }

-    pub fn files(&self, query: &str) -> Result<Lines<BufReader<ChildStdout>>, NotmuchError> {
-        let mut child = self.run_notmuch_pipe(["search", "--output=files", query])?;
-        Ok(BufReader::new(child.stdout.take().unwrap()).lines())
+    pub fn files(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
+        let res = self.run_notmuch(["search", "--output=files", "--format=json", query])?;
+        Ok(serde_json::from_slice(&res)?)
     }

     fn run_notmuch<I, S>(&self, args: I) -> Result<Vec<u8>, NotmuchError>
@@ -580,21 +580,6 @@ impl Notmuch {
         let out = cmd.output()?;
         Ok(out.stdout)
     }
-
-    fn run_notmuch_pipe<I, S>(&self, args: I) -> Result<Child, NotmuchError>
-    where
-        I: IntoIterator<Item = S>,
-        S: AsRef<OsStr>,
-    {
-        let mut cmd = Command::new("notmuch");
-        if let Some(config_path) = &self.config_path {
-            cmd.arg("--config").arg(config_path);
-        }
-        cmd.args(args);
-        info!("{:?}", &cmd);
-        let child = cmd.stdout(Stdio::piped()).spawn()?;
-        Ok(child)
-    }
 }

 #[cfg(test)]

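The message_ids and files helpers above now rely on notmuch's JSON output: "notmuch search --format=json --output=messages <query>" prints a single JSON array of strings, so the whole stdout can be handed to serde_json. A minimal standalone sketch of that pattern, outside the diff (the function name is illustrative, not part of the change):

use std::process::Command;

fn ids_for_query(query: &str) -> Result<Vec<String>, Box<dyn std::error::Error>> {
    let out = Command::new("notmuch")
        .args(["search", "--output=messages", "--format=json", query])
        .output()?;
    // stdout is one JSON document, e.g. ["id:abc@example.com", "id:def@example.com"]
    let ids: Vec<String> = serde_json::from_slice(&out.stdout)?;
    Ok(ids)
}
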
@@ -20,7 +20,6 @@ urlencoding = "2.1.3"
 async-graphql = { version = "6.0.11", features = ["log"] }
 async-graphql-rocket = "6.0.11"
 rocket_cors = "0.6.0"
-rayon = "1.8.0"
 memmap = "0.7.0"
 mailparse = "0.14.0"
 ammonia = "3.3.0"

@@ -89,6 +89,22 @@ impl<'r, 'o: 'r> Responder<'r, 'o> for PartResponder {
             .ok()
     }
 }
+#[get("/attachment/<id>/<idx>")]
+async fn attachment(
+    nm: &State<Notmuch>,
+    id: &str,
+    idx: usize,
+) -> Result<PartResponder, Debug<NotmuchError>> {
+    let mid = if id.starts_with("id:") {
+        id.to_string()
+    } else {
+        format!("id:{}", id)
+    };
+    let bytes = Vec::new();
+    let filename = None;
+    // TODO(wathiede): use walk_attachments from graphql to fill this out
+    Ok(PartResponder { bytes, filename })
+}

 #[get("/original/<id>/part/<part>")]
 async fn original_part(

@@ -1,18 +1,20 @@
 use std::{
+    collections::HashMap,
     fs::File,
     hash::{DefaultHasher, Hash, Hasher},
+    str::FromStr,
 };

 use async_graphql::{
     connection::{self, Connection, Edge},
-    Context, EmptyMutation, EmptySubscription, Error, FieldResult, Object, Schema, SimpleObject,
-    Union,
+    Context, EmptyMutation, EmptySubscription, Enum, Error, FieldResult, Object, Schema,
+    SimpleObject, Union,
 };
 use log::{error, info, warn};
-use mailparse::{parse_mail, MailHeaderMap, ParsedMail};
+use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
 use memmap::MmapOptions;
 use notmuch::Notmuch;
-use rayon::prelude::*;
+use rocket::time::Instant;

 pub struct QueryRoot;

@@ -46,6 +48,8 @@ pub struct Thread {

 #[derive(Debug, SimpleObject)]
 pub struct Message {
+    // Message-ID for message, prepend `id:<id>` to search in notmuch
+    pub id: String,
     // First From header found in email
     pub from: Option<Email>,
     // All To headers found in email
@@ -56,10 +60,50 @@ pub struct Message {
     pub subject: Option<String>,
     // Parsed Date header, if found and valid
     pub timestamp: Option<i64>,
+    // Headers
+    pub headers: Vec<Header>,
     // The body contents
     pub body: Body,
     // On disk location of message
     pub path: String,
+    pub attachments: Vec<Attachment>,
+}
+
+// Content-Type: image/jpeg; name="PXL_20231125_204826860.jpg"
+// Content-Disposition: attachment; filename="PXL_20231125_204826860.jpg"
+// Content-Transfer-Encoding: base64
+// Content-ID: <f_lponoluo1>
+// X-Attachment-Id: f_lponoluo1
+#[derive(Debug, SimpleObject)]
+pub struct Attachment {
+    filename: String,
+    content_type: Option<String>,
+    content_id: Option<String>,
+}
+
+#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
+enum DispositionType {
+    Inline,
+    Attachment,
+}
+
+impl FromStr for DispositionType {
+    type Err = String;
+
+    // Required method
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(match s {
+            "inline" => DispositionType::Inline,
+            "attachment" => DispositionType::Attachment,
+            c => return Err(format!("unknown disposition type: {c}")),
+        })
+    }
+}
+
+#[derive(Debug, SimpleObject)]
+pub struct Header {
+    key: String,
+    value: String,
 }

 #[derive(Debug)]
@@ -104,6 +148,9 @@ impl Html {
     async fn content_tree(&self) -> &str {
         &self.content_tree
     }
+    async fn headers(&self) -> Vec<Header> {
+        Vec::new()
+    }
 }

 #[derive(Debug, Union)]
@@ -214,15 +261,32 @@ impl QueryRoot {

     async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
         let nm = ctx.data_unchecked::<Notmuch>();
-        Ok(nm
+        let now = Instant::now();
+        let needs_unread = ctx.look_ahead().field("unread").exists();
+        let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
+            // 10000 is an arbitrary number, if there's more than 10k unread messages, we'll
+            // get an inaccurate count.
+            nm.search("is:unread", 0, 10000)?
+                .0
+                .iter()
+                .fold(HashMap::new(), |mut m, ts| {
+                    ts.tags.iter().for_each(|t| {
+                        m.entry(t.clone()).and_modify(|c| *c += 1).or_insert(1);
+                    });
+                    m
+                })
+        } else {
+            HashMap::new()
+        };
+        let tags = nm
             .tags()?
-            .into_par_iter()
+            .into_iter()
             .map(|tag| {
                 let mut hasher = DefaultHasher::new();
                 tag.hash(&mut hasher);
                 let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
-                let unread = if ctx.look_ahead().field("unread").exists() {
-                    nm.count(&format!("tag:{tag} is:unread")).unwrap_or(0)
+                let unread = if needs_unread {
+                    *unread_msg_cnt.get(&tag).unwrap_or(&0)
                 } else {
                     0
                 };
@@ -233,7 +297,9 @@ impl QueryRoot {
                     unread,
                 }
             })
-            .collect())
+            .collect();
+        info!("Fetching tags took {}", now.elapsed());
+        Ok(tags)
     }
     async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
         // TODO(wathiede): normalize all email addresses through an address book with preferred
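The reworked tags resolver above replaces one notmuch count call per tag with a single is:unread search whose thread tags are folded into a per-tag HashMap of counts. A minimal sketch of that counting pattern on plain data, outside the diff (the function name is illustrative):

use std::collections::HashMap;

// Count how often each tag occurs across a set of unread-thread tag lists.
fn unread_per_tag(tag_lists: &[Vec<String>]) -> HashMap<String, usize> {
    tag_lists.iter().fold(HashMap::new(), |mut m, tags| {
        for t in tags {
            *m.entry(t.clone()).or_insert(0) += 1;
        }
        m
    })
}
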
@@ -246,8 +312,8 @@ impl QueryRoot {
             .field("contentTree")
             .exists();
         let mut messages = Vec::new();
-        for path in nm.files(&thread_id)? {
-            let path = path?;
+        for (path, id) in std::iter::zip(nm.files(&thread_id)?, nm.message_ids(&thread_id)?) {
+            info!("{id}\nfile: {path}");
             let file = File::open(&path)?;
             let mmap = unsafe { MmapOptions::new().map(&file)? };
             let m = parse_mail(&mmap)?;
@@ -290,14 +356,27 @@ impl QueryRoot {
                 }),
                 b => b,
             };
+            let headers = m
+                .headers
+                .iter()
+                .map(|h| Header {
+                    key: h.get_key(),
+                    value: h.get_value(),
+                })
+                .collect();
+            // TODO(wathiede): parse message and fill out attachments
+            let attachments = extract_attachments(&m)?;
             messages.push(Message {
+                id,
                 from,
                 to,
                 cc,
                 subject,
                 timestamp,
+                headers,
                 body,
                 path,
+                attachments,
             });
         }
         messages.reverse();
@@ -339,6 +418,10 @@ fn extract_unhandled(m: &ParsedMail) -> Result<Body, Error> {
         text: msg,
     }))
 }
+
+// multipart/alternative defines multiple representations of the same message, and clients should
+// show the fanciest they can display. For this program, the priority is text/html, text/plain,
+// then give up.
 fn extract_alternative(m: &ParsedMail) -> Result<Body, Error> {
     for sp in &m.subparts {
         if sp.ctype.mimetype == "text/html" {
@@ -355,6 +438,8 @@ fn extract_alternative(m: &ParsedMail) -> Result<Body, Error> {
     Err("extract_alternative".into())
 }

+// multipart/mixed defines multiple types of context all of which should be presented to the user
+// 'serially'.
 fn extract_mixed(m: &ParsedMail) -> Result<Body, Error> {
     for sp in &m.subparts {
         if sp.ctype.mimetype == "multipart/alternative" {
@@ -394,21 +479,92 @@ fn extract_related(m: &ParsedMail) -> Result<Body, Error> {
     Err("extract_related".into())
 }

+// TODO(wathiede): make this walk_attachments that takes a closure.
+// Then implement one closure for building `Attachment` and imlement another that can be used to
+// get the bytes for serving attachments of HTTP
+fn extract_attachments(m: &ParsedMail) -> Result<Vec<Attachment>, Error> {
+    let mut attachements = Vec::new();
+    for sp in &m.subparts {
+        for h in &sp.headers {
+            if h.get_key() == "Content-Disposition" {
+                let v = h.get_value();
+                if let Some(idx) = v.find(";") {
+                    let dt = &v[..idx];
+                    match DispositionType::from_str(dt) {
+                        Ok(DispositionType::Attachment) => {
+                            attachements.push(Attachment {
+                                filename: get_attachment_filename(&v).to_string(),
+                                content_type: get_content_type(&sp.headers),
+                                content_id: get_content_id(&sp.headers),
+                            });
+                        }
+                        Ok(DispositionType::Inline) => continue,
+                        Err(e) => {
+                            warn!("failed to parse Content-Disposition type '{}'", e);
+                            continue;
+                        }
+                    };
+                } else {
+                    warn!("header has Content-Disposition missing ';'");
+                    continue;
+                }
+            }
+        }
+    }
+    Ok(attachements)
+}
+
+fn get_attachment_filename(header_value: &str) -> &str {
+    // Strip last "
+    let v = &header_value[..header_value.len() - 1];
+    if let Some(idx) = v.rfind('"') {
+        &v[idx + 1..]
+    } else {
+        ""
+    }
+}
+
+fn get_content_type<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
+    for h in headers {
+        if h.get_key() == "Content-Type" {
+            let v = h.get_value();
+            if let Some(idx) = v.find(';') {
+                return Some(v[..idx].to_string());
+            } else {
+                return Some(v);
+            }
+        }
+    }
+    None
+}
+
+fn get_content_id<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
+    for h in headers {
+        if h.get_key() == "Content-ID" {
+            return Some(h.get_value());
+        }
+    }
+    None
+}
+
 fn render_content_type_tree(m: &ParsedMail) -> String {
     const WIDTH: usize = 4;
     fn render_rec(m: &ParsedMail, depth: usize) -> String {
         let mut parts = Vec::new();
         let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
         parts.push(msg);
+        let indent = " ".repeat(depth * WIDTH);
         if !m.ctype.charset.is_empty() {
-            parts.push(format!(
-                "{} Character Set: {}",
-                " ".repeat(depth * WIDTH),
-                m.ctype.charset
-            ));
+            parts.push(format!("{indent} Character Set: {}", m.ctype.charset));
         }
         for (k, v) in m.ctype.params.iter() {
-            parts.push(format!("{} {k}: {v}", " ".repeat(depth * WIDTH),));
+            parts.push(format!("{indent} {k}: {v}"));
+        }
+        if !m.headers.is_empty() {
+            parts.push(format!("{indent} == headers =="));
+            for h in &m.headers {
+                parts.push(format!("{indent} {}: {}", h.get_key(), h.get_value()));
+            }
         }
         for sp in &m.subparts {
             parts.push(render_rec(sp, depth + 1))

@@ -3,4 +3,5 @@
 # Build in release mode and push to minio for serving.
 all:
 	trunk build --release
+	mc mirror m/letterbox/ /tmp/letterbox-$(shell date +%s)
 	mc mirror --overwrite --remove dist/ m/letterbox/

@@ -10,6 +10,8 @@ port = 6758
 backend = "http://localhost:9345/"
 rewrite= "/api/"
 [[proxy]]
+backend="http://localhost:9345/original"
+[[proxy]]
 backend="http://localhost:9345/graphiql"
 [[proxy]]
 backend="http://localhost:9345/graphql"

@@ -59,6 +59,57 @@
     },
     "subscriptionType": null,
     "types": [
+      {
+        "description": null,
+        "enumValues": null,
+        "fields": [
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "filename",
+            "type": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "SCALAR",
+                "name": "String",
+                "ofType": null
+              }
+            }
+          },
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "contentType",
+            "type": {
+              "kind": "SCALAR",
+              "name": "String",
+              "ofType": null
+            }
+          },
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "contentId",
+            "type": {
+              "kind": "SCALAR",
+              "name": "String",
+              "ofType": null
+            }
+          }
+        ],
+        "inputFields": null,
+        "interfaces": [],
+        "kind": "OBJECT",
+        "name": "Attachment",
+        "possibleTypes": null
+      },
       {
         "description": null,
         "enumValues": null,
@@ -140,6 +191,49 @@
         "name": "Float",
         "possibleTypes": null
       },
+      {
+        "description": null,
+        "enumValues": null,
+        "fields": [
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "key",
+            "type": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "SCALAR",
+                "name": "String",
+                "ofType": null
+              }
+            }
+          },
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "value",
+            "type": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "SCALAR",
+                "name": "String",
+                "ofType": null
+              }
+            }
+          }
+        ],
+        "inputFields": null,
+        "interfaces": [],
+        "kind": "OBJECT",
+        "name": "Header",
+        "possibleTypes": null
+      },
       {
         "description": null,
         "enumValues": null,
@@ -175,6 +269,30 @@
                 "ofType": null
               }
             }
+          },
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "headers",
+            "type": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "LIST",
+                "name": null,
+                "ofType": {
+                  "kind": "NON_NULL",
+                  "name": null,
+                  "ofType": {
+                    "kind": "OBJECT",
+                    "name": "Header",
+                    "ofType": null
+                  }
+                }
+              }
+            }
           }
         ],
         "inputFields": null,
@@ -207,6 +325,22 @@
         "description": null,
         "enumValues": null,
         "fields": [
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "id",
+            "type": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "SCALAR",
+                "name": "String",
+                "ofType": null
+              }
+            }
+          },
           {
             "args": [],
             "deprecationReason": null,
@@ -291,6 +425,30 @@
               "ofType": null
             }
           },
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "headers",
+            "type": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "LIST",
+                "name": null,
+                "ofType": {
+                  "kind": "NON_NULL",
+                  "name": null,
+                  "ofType": {
+                    "kind": "OBJECT",
+                    "name": "Header",
+                    "ofType": null
+                  }
+                }
+              }
+            }
+          },
           {
             "args": [],
             "deprecationReason": null,
@@ -322,6 +480,30 @@
                 "ofType": null
               }
             }
+          },
+          {
+            "args": [],
+            "deprecationReason": null,
+            "description": null,
+            "isDeprecated": false,
+            "name": "attachments",
+            "type": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "LIST",
+                "name": null,
+                "ofType": {
+                  "kind": "NON_NULL",
+                  "name": null,
+                  "ofType": {
+                    "kind": "OBJECT",
+                    "name": "Attachment",
+                    "ofType": null
+                  }
+                }
+              }
+            }
           }
         ],
         "inputFields": null,

@@ -2,6 +2,7 @@ query ShowThreadQuery($threadId: String!) {
   thread(threadId: $threadId) {
     subject
     messages {
+      id
       subject
       from {
         name
@@ -31,6 +32,11 @@ query ShowThreadQuery($threadId: String!) {
         }
       }
       path
+      attachments {
+        filename
+        contentType
+        contentId
+      }
     }
   }
   tags {

@@ -133,7 +133,7 @@ blockquote[type="cite"],
      }
      */

-     .desktop-main-content {
+     .desktop .main-content {
        display: grid;
        grid-template-columns: 12rem 1fr;
      }
@@ -153,6 +153,11 @@ blockquote[type="cite"],
      .navbar {
        border: none;
      }
+     .desktop nav.pagination,
+     .tablet nav.pagination {
+       margin-left: .5em;
+       margin-bottom: 0 !important;
+     }
     </style>
   </head>


@@ -1,5 +1,5 @@
 use graphql_client::GraphQLQuery;
-use log::{debug, info};
+use log::{debug, error, info};
 use notmuch::ThreadSet;
 use seed::{prelude::*, *};
 use thiserror::Error;
@@ -19,6 +19,7 @@ pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
     } else {
         orders.notify(subs::UrlRequested::new(url));
     };
+    orders.stream(streams::window_event(Ev::Resize, |_| Msg::OnResize));
     orders.subscribe(on_url_changed);

     Model {
@@ -127,6 +128,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
         Msg::Reload => {
             orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
         }
+        Msg::OnResize => (),

         Msg::SearchRequest {
             query,
@@ -300,7 +302,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
                 model.context = Context::ThreadResult(data.thread);
             }
             Msg::ShowThreadResult(bad) => {
-                error!("show_thread_query error: {bad:?}");
+                error!("show_thread_query error: {bad:#?}");
             }
         }
     }
@@ -357,6 +359,8 @@ pub enum Msg {
     Noop,
     // Tell the client to refresh its state
     Reload,
+    // Window has changed size
+    OnResize,
     // Tell the server to update state
     RefreshStart,
     RefreshDone(Option<FetchError>),

@@ -5,10 +5,7 @@ use seed_hooks::{state_access::CloneState, topo, use_state};
 use crate::{
     api::urls,
     state::{Context, Model, Msg, Tag},
-    view::{
-        legacy::{view_search_results_legacy, view_thread_legacy},
-        view_header, view_search_results, view_thread,
-    },
+    view::{self, legacy, view_header, view_search_results},
 };

 #[topo::nested]
@@ -16,9 +13,9 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
     // Do two queries, one without `unread` so it loads fast, then a second with unread.
     let content = match &model.context {
         Context::None => div![h1!["Loading"]],
-        Context::Thread(thread_set) => view_thread_legacy(thread_set),
-        Context::ThreadResult(thread) => view_thread(thread),
-        Context::Search(search_results) => view_search_results_legacy(&model.query, search_results),
+        Context::Thread(thread_set) => legacy::thread(thread_set),
+        Context::ThreadResult(thread) => view::thread(thread),
+        Context::Search(search_results) => legacy::search_results(&model.query, search_results),
         Context::SearchResult {
             query,
             results,
@@ -89,7 +86,7 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
     let tags_open = use_state(|| false);
     let force_tags_open = unread.is_empty();
     div![
-        C!["desktop-main-content"],
+        C!["main-content"],
         aside![
             C!["tags-menu", "menu"],
             IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),

@@ -9,10 +9,7 @@ use crate::{
     view::{human_age, pretty_authors, set_title, tags_chiclet},
 };

-pub(super) fn view_search_results_legacy(
-    query: &str,
-    search_results: &shared::SearchResult,
-) -> Node<Msg> {
+pub(super) fn search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
     if query.is_empty() {
         set_title("all mail");
     } else {
@@ -45,7 +42,7 @@ pub(super) fn view_search_results_legacy(
     });
     let first = search_results.page * search_results.results_per_page;
     div![
-        view_search_pager_legacy(first, summaries.len(), search_results.total),
+        search_pager(first, summaries.len(), search_results.total),
         table![
             C![
                 "table",
@@ -62,11 +59,11 @@ pub(super) fn view_search_results_legacy(
             ]],
             tbody![rows]
         ],
-        view_search_pager_legacy(first, summaries.len(), search_results.total)
+        search_pager(first, summaries.len(), search_results.total)
     ]
 }

-pub(super) fn view_search_pager_legacy(start: usize, count: usize, total: usize) -> Node<Msg> {
+fn search_pager(start: usize, count: usize, total: usize) -> Node<Msg> {
     let is_first = start <= 0;
     let is_last = (start + SEARCH_RESULTS_PER_PAGE) >= total;
     nav![
@@ -90,7 +87,7 @@ pub(super) fn view_search_pager_legacy(start: usize, count: usize, total: usize)
     ]
 }

-pub(super) fn view_thread_legacy(thread_set: &ThreadSet) -> Node<Msg> {
+pub(super) fn thread(thread_set: &ThreadSet) -> Node<Msg> {
     assert_eq!(thread_set.0.len(), 1);
     let thread = &thread_set.0[0];
     assert_eq!(thread.0.len(), 1);
@@ -107,6 +104,56 @@ pub(super) fn view_thread_legacy(thread_set: &ThreadSet) -> Node<Msg> {
         ],
     ]
 }
+pub(super) fn mobile_search_results(
+    query: &str,
+    search_results: &shared::SearchResult,
+) -> Node<Msg> {
+    if query.is_empty() {
+        set_title("all mail");
+    } else {
+        set_title(query);
+    }
+    let summaries = &search_results.summary.0;
+    let rows = summaries.iter().map(|r| {
+        /*
+        let tid = r.thread.clone();
+        tr![
+            td![
+                C!["from"],
+                pretty_authors(&r.authors),
+                IF!(r.total>1 => small![" ", r.total.to_string()]),
+            ],
+            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
+            td![C!["date"], &r.date_relative],
+            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
+        ]
+        */
+        let tid = r.thread.clone();
+        let datetime = human_age(r.timestamp as i64);
+        a![
+            C!["has-text-light"],
+            attrs! {
+                At::Href => urls::thread(&tid)
+            },
+            div![
+                C!["row"],
+                div![C!["subject"], &r.subject],
+                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
+                div![
+                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
+                    span![C!["is-size-7", "float-right", "date"], datetime]
+                ]
+            ]
+        ]
+    });
+    let first = search_results.page * search_results.results_per_page;
+    div![
+        C!["search-results"],
+        search_pager(first, summaries.len(), search_results.total),
+        rows,
+        search_pager(first, summaries.len(), search_results.total)
+    ]
+}
 fn view_message(thread: &ThreadNode) -> Node<Msg> {
     let message = thread.0.as_ref().expect("ThreadNode missing Message");
     let children = &thread.1;

@@ -5,26 +5,25 @@ use crate::{
     graphql::front_page_query::*,
     state::{Context, Model, Msg},
     view::{
-        human_age,
-        legacy::{view_search_pager_legacy, view_thread_legacy},
-        pretty_authors, set_title, tags_chiclet, view_header, view_search_pager, view_thread,
+        self, human_age, legacy, pretty_authors, set_title, tags_chiclet, view_header,
+        view_search_pager,
     },
 };

 pub(super) fn view(model: &Model) -> Node<Msg> {
     let content = match &model.context {
         Context::None => div![h1!["Loading"]],
-        Context::Thread(thread_set) => view_thread_legacy(thread_set),
-        Context::ThreadResult(thread) => view_thread(thread),
+        Context::Thread(thread_set) => legacy::thread(thread_set),
+        Context::ThreadResult(thread) => view::thread(thread),
         Context::Search(search_results) => {
-            view_mobile_search_results_legacy(&model.query, search_results)
+            legacy::mobile_search_results(&model.query, search_results)
         }
         Context::SearchResult {
             query,
             results,
             count,
             pager,
-        } => view_mobile_search_results(&query, results.as_slice(), *count, pager),
+        } => search_results(&query, results.as_slice(), *count, pager),
     };
     div![
         view_header(&model.query, &model.refreshing_state),
@@ -33,7 +32,7 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
     ]
 }

-fn view_mobile_search_results(
+fn search_results(
     query: &str,
     results: &[FrontPageQuerySearchNodes],
     count: usize,
@@ -70,54 +69,3 @@ fn view_mobile_search_results(
         view_search_pager(count, pager),
     ]
 }
-
-fn view_mobile_search_results_legacy(
-    query: &str,
-    search_results: &shared::SearchResult,
-) -> Node<Msg> {
-    if query.is_empty() {
-        set_title("all mail");
-    } else {
-        set_title(query);
-    }
-    let summaries = &search_results.summary.0;
-    let rows = summaries.iter().map(|r| {
-        /*
-        let tid = r.thread.clone();
-        tr![
-            td![
-                C!["from"],
-                pretty_authors(&r.authors),
-                IF!(r.total>1 => small![" ", r.total.to_string()]),
-            ],
-            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
-            td![C!["date"], &r.date_relative],
-            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
-        ]
-        */
-        let tid = r.thread.clone();
-        let datetime = human_age(r.timestamp as i64);
-        a![
-            C!["has-text-light"],
-            attrs! {
-                At::Href => urls::thread(&tid)
-            },
-            div![
-                C!["row"],
-                div![C!["subject"], &r.subject],
-                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
-                div![
-                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
-                    span![C!["is-size-7", "float-right", "date"], datetime]
-                ]
-            ]
-        ]
-    });
-    let first = search_results.page * search_results.results_per_page;
-    div![
-        C!["search-results"],
-        view_search_pager_legacy(first, summaries.len(), search_results.total),
-        rows,
-        view_search_pager_legacy(first, summaries.len(), search_results.total)
-    ]
-}

@@ -19,7 +19,9 @@ use crate::{
 mod desktop;
 mod legacy;
 mod mobile;
+mod tablet;

+const MAX_RAW_MESSAGE_SIZE: usize = 100_000;
 fn set_title(title: &str) {
     seed::document().set_title(&format!("lb: {}", title));
 }
@@ -249,19 +251,31 @@ fn view_addresses(addrs: &[impl Email]) -> Vec<Node<Msg>> {
     addrs.into_iter().map(view_address).collect::<Vec<_>>()
 }

-fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
+fn raw_text_message(contents: &str) -> Node<Msg> {
+    let (contents, truncated_msg) = if contents.len() > MAX_RAW_MESSAGE_SIZE {
+        (
+            &contents[..MAX_RAW_MESSAGE_SIZE],
+            Some(div!["... contents truncated"]),
+        )
+    } else {
+        (contents, None)
+    };
+    div![C!["view-part-text-plain"], contents, truncated_msg,]
+}
+
+fn thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
     // TODO(wathiede): show per-message subject if it changes significantly from top-level subject
     set_title(&thread.subject);
     let messages = thread.messages.iter().map(|msg| {
         div![
             C!["message"],
             /* TODO(wathiede): collect all the tags and show them here. */
-            /* TODO(wathiede): collect all the attachments from all the subparts */
             msg.from
                 .as_ref()
                 .map(|from| div![C!["header"], "From: ", view_address(&from)]),
             msg.timestamp
                 .map(|ts| div![C!["header"], "Date: ", human_age(ts)]),
+            div!["Message-ID: ", &msg.id],
             div![
                 C!["header"],
                 IF!(!msg.to.is_empty() => span!["To: ", view_addresses(&msg.to)]),
@@ -278,7 +292,7 @@ fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
                         contents,
                         content_tree,
                     },
-                ) => div![C!["view-part-text-plain"], contents, pre![content_tree]],
+                ) => div![raw_text_message(&contents), pre![content_tree]],
                 ShowThreadQueryThreadMessagesBody::Html(
                     ShowThreadQueryThreadMessagesBodyOnHtml {
                         contents,
@@ -287,6 +301,15 @@ fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
                 ) => div![
                     C!["view-part-text-html"],
                     raw![contents],
+                    IF!(!msg.attachments.is_empty() =>
+                    div![
+                        C!["attachments"],
+                        br![],
+                        h2!["Attachments"],
+                        msg.attachments
+                            .iter()
+                            .map(|a| div!["Filename: ", &a.filename, " ", &a.content_type])
+                    ]),
                     pre![content_tree]
                 ],
             }
@@ -395,26 +418,28 @@ fn view_footer(render_time_ms: u128) -> Node<Msg> {

 // `view` describes what to display.
 pub fn view(model: &Model) -> Node<Msg> {
-    info!("refreshing {:?}", model.refreshing_state);
-    let is_mobile = seed::window()
-        .match_media("(max-width: 768px)")
-        .expect("failed media query")
-        .map(|mql| mql.matches())
-        .unwrap_or(false);
-
     let start = Instant::now();
+    info!("refreshing {:?}", model.refreshing_state);
+    let win = seed::window();
+    let w = win
+        .inner_width()
+        .expect("window width")
+        .as_f64()
+        .expect("window width f64");
+    let h = win
+        .inner_height()
+        .expect("window height")
+        .as_f64()
+        .expect("window height f64");
+    info!("win: {w}x{h}");
+
     info!("view called");
     div![
-        if is_mobile {
-            C!["mobile"]
-        } else {
-            C!["desktop"]
+        match w {
+            w if w < 800. => div![C!["mobile"], mobile::view(model)],
+            w if w < 1024. => div![C!["tablet"], tablet::view(model)],
+            _ => div![C!["desktop"], desktop::view(model)],
         },
-        if is_mobile {
-            mobile::view(model)
-        } else {
-            desktop::view(model)
-        },
-        view_footer(start.elapsed().as_millis())
+        view_footer(start.elapsed().as_millis()),
     ]
 }

web/src/view/tablet.rs (new file, 30 lines)
@@ -0,0 +1,30 @@
+use seed::{prelude::*, *};
+
+use crate::{
+    state::{Context, Model, Msg},
+    view::{self, view_header, view_search_results},
+};
+
+pub(super) fn view(model: &Model) -> Node<Msg> {
+    // Do two queries, one without `unread` so it loads fast, then a second with unread.
+    let content = match &model.context {
+        Context::None => div![h1!["Loading"]],
+        Context::Thread(_) => unimplemented!("tablet legacy thread view"),
+        Context::ThreadResult(thread) => view::thread(thread),
+        Context::Search(_) => unimplemented!("tablet legacy search results view"),
+        Context::SearchResult {
+            query,
+            results,
+            count,
+            pager,
+        } => view_search_results(&query, results.as_slice(), *count, pager),
+    };
+    div![
+        C!["main-content"],
+        div![
+            view_header(&model.query, &model.refreshing_state),
+            content,
+            view_header(&model.query, &model.refreshing_state),
+        ]
+    ]
+}