Compare commits
No commits in common. "94f7ad109a67a1fb119215c3287aded012e4f19a" and "28d55624918d939b11b2a851821ddaf3d1a974c4" have entirely different histories.
94f7ad109a ... 28d5562491

Cargo.lock (generated)
@@ -3113,6 +3113,7 @@ dependencies = [
 "mailparse",
 "memmap",
 "notmuch",
+"rayon",
 "rocket 0.5.0",
 "rocket_contrib",
 "rocket_cors",
@@ -208,9 +208,9 @@

 use std::{
     ffi::OsStr,
-    io::{self},
+    io::{self, BufRead, BufReader, Lines},
     path::{Path, PathBuf},
-    process::Command,
+    process::{Child, ChildStdout, Command, Stdio},
 };

 use log::info;
@@ -518,7 +518,7 @@ impl Notmuch {
             "--format=json",
             query,
         ])?;
-        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
+        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here an
         // use that for parsing in rust.
         let s = String::from_utf8_lossy(&slice);
         let mut deserializer = serde_json::Deserializer::from_str(&s);
@@ -537,7 +537,7 @@ impl Notmuch {
             &format!("--part={}", part),
             query,
         ])?;
-        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here and
+        // Notmuch returns JSON with invalid unicode. So we lossy convert it to a string here an
         // use that for parsing in rust.
         let s = String::from_utf8_lossy(&slice);
         let mut deserializer = serde_json::Deserializer::from_str(&s);
@@ -556,14 +556,14 @@ impl Notmuch {
         Ok(res)
     }

-    pub fn message_ids(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
-        let res = self.run_notmuch(["search", "--output=messages", "--format=json", query])?;
-        Ok(serde_json::from_slice(&res)?)
+    pub fn message_ids(&self, query: &str) -> Result<Lines<BufReader<ChildStdout>>, NotmuchError> {
+        let mut child = self.run_notmuch_pipe(["search", "--output=messages", query])?;
+        Ok(BufReader::new(child.stdout.take().unwrap()).lines())
     }

-    pub fn files(&self, query: &str) -> Result<Vec<String>, NotmuchError> {
-        let res = self.run_notmuch(["search", "--output=files", "--format=json", query])?;
-        Ok(serde_json::from_slice(&res)?)
+    pub fn files(&self, query: &str) -> Result<Lines<BufReader<ChildStdout>>, NotmuchError> {
+        let mut child = self.run_notmuch_pipe(["search", "--output=files", query])?;
+        Ok(BufReader::new(child.stdout.take().unwrap()).lines())
     }

     fn run_notmuch<I, S>(&self, args: I) -> Result<Vec<u8>, NotmuchError>
@@ -580,6 +580,21 @@ impl Notmuch {
         let out = cmd.output()?;
         Ok(out.stdout)
     }
+
+    fn run_notmuch_pipe<I, S>(&self, args: I) -> Result<Child, NotmuchError>
+    where
+        I: IntoIterator<Item = S>,
+        S: AsRef<OsStr>,
+    {
+        let mut cmd = Command::new("notmuch");
+        if let Some(config_path) = &self.config_path {
+            cmd.arg("--config").arg(config_path);
+        }
+        cmd.args(args);
+        info!("{:?}", &cmd);
+        let child = cmd.stdout(Stdio::piped()).spawn()?;
+        Ok(child)
+    }
 }

 #[cfg(test)]
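
Note on the change above: `message_ids` and `files` used to buffer all of notmuch's JSON output and deserialize it with serde; they now return `Lines<BufReader<ChildStdout>>`, so callers can consume results as notmuch prints them. A minimal sketch of the same pattern in plain std (not the crate's actual API; the `is:unread` query is just a placeholder):

    use std::{
        io::{BufRead, BufReader},
        process::{Command, Stdio},
    };

    fn main() -> std::io::Result<()> {
        // Spawn `notmuch search --output=files <query>` with a piped stdout,
        // mirroring what run_notmuch_pipe does above.
        let mut child = Command::new("notmuch")
            .args(["search", "--output=files", "is:unread"])
            .stdout(Stdio::piped())
            .spawn()?;

        // Each line is one file path; work can start on the first result
        // before notmuch has finished printing the rest.
        let stdout = child.stdout.take().expect("stdout was piped");
        for line in BufReader::new(stdout).lines() {
            println!("{}", line?);
        }
        child.wait()?;
        Ok(())
    }
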
@@ -20,6 +20,7 @@ urlencoding = "2.1.3"
 async-graphql = { version = "6.0.11", features = ["log"] }
 async-graphql-rocket = "6.0.11"
 rocket_cors = "0.6.0"
+rayon = "1.8.0"
 memmap = "0.7.0"
 mailparse = "0.14.0"
 ammonia = "3.3.0"
@@ -89,22 +89,6 @@ impl<'r, 'o: 'r> Responder<'r, 'o> for PartResponder {
             .ok()
     }
 }
-#[get("/attachment/<id>/<idx>")]
-async fn attachment(
-    nm: &State<Notmuch>,
-    id: &str,
-    idx: usize,
-) -> Result<PartResponder, Debug<NotmuchError>> {
-    let mid = if id.starts_with("id:") {
-        id.to_string()
-    } else {
-        format!("id:{}", id)
-    };
-    let bytes = Vec::new();
-    let filename = None;
-    // TODO(wathiede): use walk_attachments from graphql to fill this out
-    Ok(PartResponder { bytes, filename })
-}

 #[get("/original/<id>/part/<part>")]
 async fn original_part(
@@ -1,20 +1,18 @@
 use std::{
-    collections::HashMap,
     fs::File,
     hash::{DefaultHasher, Hash, Hasher},
-    str::FromStr,
 };

 use async_graphql::{
     connection::{self, Connection, Edge},
-    Context, EmptyMutation, EmptySubscription, Enum, Error, FieldResult, Object, Schema,
-    SimpleObject, Union,
+    Context, EmptyMutation, EmptySubscription, Error, FieldResult, Object, Schema, SimpleObject,
+    Union,
 };
 use log::{error, info, warn};
-use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
+use mailparse::{parse_mail, MailHeaderMap, ParsedMail};
 use memmap::MmapOptions;
 use notmuch::Notmuch;
-use rocket::time::Instant;
+use rayon::prelude::*;

 pub struct QueryRoot;

@@ -48,8 +46,6 @@ pub struct Thread {

 #[derive(Debug, SimpleObject)]
 pub struct Message {
-    // Message-ID for message, prepend `id:<id>` to search in notmuch
-    pub id: String,
     // First From header found in email
     pub from: Option<Email>,
     // All To headers found in email
@@ -60,50 +56,10 @@ pub struct Message {
     pub subject: Option<String>,
     // Parsed Date header, if found and valid
     pub timestamp: Option<i64>,
-    // Headers
-    pub headers: Vec<Header>,
     // The body contents
     pub body: Body,
     // On disk location of message
     pub path: String,
-    pub attachments: Vec<Attachment>,
-}
-
-// Content-Type: image/jpeg; name="PXL_20231125_204826860.jpg"
-// Content-Disposition: attachment; filename="PXL_20231125_204826860.jpg"
-// Content-Transfer-Encoding: base64
-// Content-ID: <f_lponoluo1>
-// X-Attachment-Id: f_lponoluo1
-#[derive(Debug, SimpleObject)]
-pub struct Attachment {
-    filename: String,
-    content_type: Option<String>,
-    content_id: Option<String>,
-}
-
-#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
-enum DispositionType {
-    Inline,
-    Attachment,
-}
-
-impl FromStr for DispositionType {
-    type Err = String;
-
-    // Required method
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        Ok(match s {
-            "inline" => DispositionType::Inline,
-            "attachment" => DispositionType::Attachment,
-            c => return Err(format!("unknown disposition type: {c}")),
-        })
-    }
-}
-
-#[derive(Debug, SimpleObject)]
-pub struct Header {
-    key: String,
-    value: String,
 }

 #[derive(Debug)]
@@ -148,9 +104,6 @@ impl Html {
     async fn content_tree(&self) -> &str {
         &self.content_tree
     }
-    async fn headers(&self) -> Vec<Header> {
-        Vec::new()
-    }
 }

 #[derive(Debug, Union)]
@@ -261,32 +214,15 @@ impl QueryRoot {

     async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
         let nm = ctx.data_unchecked::<Notmuch>();
-        let now = Instant::now();
-        let needs_unread = ctx.look_ahead().field("unread").exists();
-        let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
-            // 10000 is an arbitrary number, if there's more than 10k unread messages, we'll
-            // get an inaccurate count.
-            nm.search("is:unread", 0, 10000)?
-                .0
-                .iter()
-                .fold(HashMap::new(), |mut m, ts| {
-                    ts.tags.iter().for_each(|t| {
-                        m.entry(t.clone()).and_modify(|c| *c += 1).or_insert(1);
-                    });
-                    m
-                })
-        } else {
-            HashMap::new()
-        };
-        let tags = nm
+        Ok(nm
             .tags()?
-            .into_iter()
+            .into_par_iter()
             .map(|tag| {
                 let mut hasher = DefaultHasher::new();
                 tag.hash(&mut hasher);
                 let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
-                let unread = if needs_unread {
-                    *unread_msg_cnt.get(&tag).unwrap_or(&0)
+                let unread = if ctx.look_ahead().field("unread").exists() {
+                    nm.count(&format!("tag:{tag} is:unread")).unwrap_or(0)
                 } else {
                     0
                 };
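
Note on the change above: the old code ran a single `is:unread` search capped at 10,000 results and folded per-tag counts into a `HashMap`; the new code asks notmuch for a count per tag and uses rayon's `into_par_iter` so the per-tag queries run in parallel. A rough sketch of that pattern, assuming a `count` helper that shells out to `notmuch count` (the repo's `Notmuch::count` may be implemented differently):

    use std::process::Command;

    use rayon::prelude::*;

    // Hypothetical helper: run `notmuch count <query>` and parse the single
    // integer it prints on stdout.
    fn count(query: &str) -> Option<usize> {
        let out = Command::new("notmuch").args(["count", query]).output().ok()?;
        String::from_utf8_lossy(&out.stdout).trim().parse().ok()
    }

    fn main() {
        let tags = vec!["inbox".to_string(), "sent".to_string(), "spam".to_string()];
        // One count query per tag, issued across the rayon thread pool.
        let unread: Vec<(String, usize)> = tags
            .into_par_iter()
            .map(|tag| {
                let n = count(&format!("tag:{tag} is:unread")).unwrap_or(0);
                (tag, n)
            })
            .collect();
        println!("{unread:?}");
    }
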
@@ -297,9 +233,7 @@ impl QueryRoot {
                     unread,
                 }
             })
-            .collect();
-        info!("Fetching tags took {}", now.elapsed());
-        Ok(tags)
+            .collect())
     }
     async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
         // TODO(wathiede): normalize all email addresses through an address book with preferred
@@ -312,8 +246,8 @@ impl QueryRoot {
             .field("contentTree")
             .exists();
         let mut messages = Vec::new();
-        for (path, id) in std::iter::zip(nm.files(&thread_id)?, nm.message_ids(&thread_id)?) {
-            info!("{id}\nfile: {path}");
+        for path in nm.files(&thread_id)? {
+            let path = path?;
             let file = File::open(&path)?;
             let mmap = unsafe { MmapOptions::new().map(&file)? };
             let m = parse_mail(&mmap)?;
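
Note on the loop above: each streamed path is opened, memory-mapped with memmap, and handed to mailparse. The same read path as a standalone sketch (the message path is a placeholder; in the server it comes from `nm.files(&thread_id)?`):

    use std::fs::File;

    use mailparse::{parse_mail, MailHeaderMap};
    use memmap::MmapOptions;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let file = File::open("/path/to/message.eml")?;
        // Safety: the mapping is only read while `file` stays open and unmodified.
        let mmap = unsafe { MmapOptions::new().map(&file)? };
        let m = parse_mail(&mmap)?;
        println!("subject: {:?}", m.headers.get_first_value("Subject"));
        Ok(())
    }
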
@@ -356,27 +290,14 @@ impl QueryRoot {
                 }),
                 b => b,
             };
-            let headers = m
-                .headers
-                .iter()
-                .map(|h| Header {
-                    key: h.get_key(),
-                    value: h.get_value(),
-                })
-                .collect();
-            // TODO(wathiede): parse message and fill out attachments
-            let attachments = extract_attachments(&m)?;
             messages.push(Message {
-                id,
                 from,
                 to,
                 cc,
                 subject,
                 timestamp,
-                headers,
                 body,
                 path,
-                attachments,
             });
         }
         messages.reverse();
@@ -418,10 +339,6 @@ fn extract_unhandled(m: &ParsedMail) -> Result<Body, Error> {
         text: msg,
     }))
 }
-
-// multipart/alternative defines multiple representations of the same message, and clients should
-// show the fanciest they can display. For this program, the priority is text/html, text/plain,
-// then give up.
 fn extract_alternative(m: &ParsedMail) -> Result<Body, Error> {
     for sp in &m.subparts {
         if sp.ctype.mimetype == "text/html" {
@@ -438,8 +355,6 @@ fn extract_alternative(m: &ParsedMail) -> Result<Body, Error> {
     Err("extract_alternative".into())
 }

-// multipart/mixed defines multiple types of context all of which should be presented to the user
-// 'serially'.
 fn extract_mixed(m: &ParsedMail) -> Result<Body, Error> {
     for sp in &m.subparts {
         if sp.ctype.mimetype == "multipart/alternative" {
@@ -479,92 +394,21 @@ fn extract_related(m: &ParsedMail) -> Result<Body, Error> {
     Err("extract_related".into())
 }

-// TODO(wathiede): make this walk_attachments that takes a closure.
-// Then implement one closure for building `Attachment` and imlement another that can be used to
-// get the bytes for serving attachments of HTTP
-fn extract_attachments(m: &ParsedMail) -> Result<Vec<Attachment>, Error> {
-    let mut attachements = Vec::new();
-    for sp in &m.subparts {
-        for h in &sp.headers {
-            if h.get_key() == "Content-Disposition" {
-                let v = h.get_value();
-                if let Some(idx) = v.find(";") {
-                    let dt = &v[..idx];
-                    match DispositionType::from_str(dt) {
-                        Ok(DispositionType::Attachment) => {
-                            attachements.push(Attachment {
-                                filename: get_attachment_filename(&v).to_string(),
-                                content_type: get_content_type(&sp.headers),
-                                content_id: get_content_id(&sp.headers),
-                            });
-                        }
-                        Ok(DispositionType::Inline) => continue,
-                        Err(e) => {
-                            warn!("failed to parse Content-Disposition type '{}'", e);
-                            continue;
-                        }
-                    };
-                } else {
-                    warn!("header has Content-Disposition missing ';'");
-                    continue;
-                }
-            }
-        }
-    }
-    Ok(attachements)
-}
-
-fn get_attachment_filename(header_value: &str) -> &str {
-    // Strip last "
-    let v = &header_value[..header_value.len() - 1];
-    if let Some(idx) = v.rfind('"') {
-        &v[idx + 1..]
-    } else {
-        ""
-    }
-}
-
-fn get_content_type<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
-    for h in headers {
-        if h.get_key() == "Content-Type" {
-            let v = h.get_value();
-            if let Some(idx) = v.find(';') {
-                return Some(v[..idx].to_string());
-            } else {
-                return Some(v);
-            }
-        }
-    }
-    None
-}
-
-fn get_content_id<'a>(headers: &[MailHeader<'a>]) -> Option<String> {
-    for h in headers {
-        if h.get_key() == "Content-ID" {
-            return Some(h.get_value());
-        }
-    }
-    None
-}
-
 fn render_content_type_tree(m: &ParsedMail) -> String {
     const WIDTH: usize = 4;
     fn render_rec(m: &ParsedMail, depth: usize) -> String {
         let mut parts = Vec::new();
         let msg = format!("{} {}", "-".repeat(depth * WIDTH), m.ctype.mimetype);
         parts.push(msg);
-        let indent = " ".repeat(depth * WIDTH);
         if !m.ctype.charset.is_empty() {
-            parts.push(format!("{indent} Character Set: {}", m.ctype.charset));
+            parts.push(format!(
+                "{} Character Set: {}",
+                " ".repeat(depth * WIDTH),
+                m.ctype.charset
+            ));
         }
         for (k, v) in m.ctype.params.iter() {
-            parts.push(format!("{indent} {k}: {v}"));
-        }
-        if !m.headers.is_empty() {
-            parts.push(format!("{indent} == headers =="));
-            for h in &m.headers {
-                parts.push(format!("{indent} {}: {}", h.get_key(), h.get_value()));
-            }
+            parts.push(format!("{} {k}: {v}", " ".repeat(depth * WIDTH),));
         }
         for sp in &m.subparts {
             parts.push(render_rec(sp, depth + 1))
@@ -3,5 +3,4 @@
 # Build in release mode and push to minio for serving.
 all:
 	trunk build --release
-	mc mirror m/letterbox/ /tmp/letterbox-$(shell date +%s)
 	mc mirror --overwrite --remove dist/ m/letterbox/
@@ -10,8 +10,6 @@ port = 6758
 backend = "http://localhost:9345/"
 rewrite= "/api/"
 [[proxy]]
-backend="http://localhost:9345/original"
-[[proxy]]
 backend="http://localhost:9345/graphiql"
 [[proxy]]
 backend="http://localhost:9345/graphql"
@@ -59,57 +59,6 @@
   },
   "subscriptionType": null,
   "types": [
-    {
-      "description": null,
-      "enumValues": null,
-      "fields": [
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "filename",
-          "type": {
-            "kind": "NON_NULL",
-            "name": null,
-            "ofType": {
-              "kind": "SCALAR",
-              "name": "String",
-              "ofType": null
-            }
-          }
-        },
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "contentType",
-          "type": {
-            "kind": "SCALAR",
-            "name": "String",
-            "ofType": null
-          }
-        },
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "contentId",
-          "type": {
-            "kind": "SCALAR",
-            "name": "String",
-            "ofType": null
-          }
-        }
-      ],
-      "inputFields": null,
-      "interfaces": [],
-      "kind": "OBJECT",
-      "name": "Attachment",
-      "possibleTypes": null
-    },
     {
       "description": null,
       "enumValues": null,
@@ -191,49 +140,6 @@
       "name": "Float",
       "possibleTypes": null
     },
-    {
-      "description": null,
-      "enumValues": null,
-      "fields": [
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "key",
-          "type": {
-            "kind": "NON_NULL",
-            "name": null,
-            "ofType": {
-              "kind": "SCALAR",
-              "name": "String",
-              "ofType": null
-            }
-          }
-        },
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "value",
-          "type": {
-            "kind": "NON_NULL",
-            "name": null,
-            "ofType": {
-              "kind": "SCALAR",
-              "name": "String",
-              "ofType": null
-            }
-          }
-        }
-      ],
-      "inputFields": null,
-      "interfaces": [],
-      "kind": "OBJECT",
-      "name": "Header",
-      "possibleTypes": null
-    },
     {
       "description": null,
       "enumValues": null,
@@ -269,30 +175,6 @@
               "ofType": null
             }
           }
-        },
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "headers",
-          "type": {
-            "kind": "NON_NULL",
-            "name": null,
-            "ofType": {
-              "kind": "LIST",
-              "name": null,
-              "ofType": {
-                "kind": "NON_NULL",
-                "name": null,
-                "ofType": {
-                  "kind": "OBJECT",
-                  "name": "Header",
-                  "ofType": null
-                }
-              }
-            }
-          }
         }
       ],
       "inputFields": null,
@@ -325,22 +207,6 @@
       "description": null,
       "enumValues": null,
       "fields": [
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "id",
-          "type": {
-            "kind": "NON_NULL",
-            "name": null,
-            "ofType": {
-              "kind": "SCALAR",
-              "name": "String",
-              "ofType": null
-            }
-          }
-        },
         {
           "args": [],
           "deprecationReason": null,
@@ -425,30 +291,6 @@
               "ofType": null
             }
           },
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "headers",
-          "type": {
-            "kind": "NON_NULL",
-            "name": null,
-            "ofType": {
-              "kind": "LIST",
-              "name": null,
-              "ofType": {
-                "kind": "NON_NULL",
-                "name": null,
-                "ofType": {
-                  "kind": "OBJECT",
-                  "name": "Header",
-                  "ofType": null
-                }
-              }
-            }
-          }
-        },
         {
           "args": [],
           "deprecationReason": null,
@@ -480,30 +322,6 @@
               "ofType": null
             }
           }
-        },
-        {
-          "args": [],
-          "deprecationReason": null,
-          "description": null,
-          "isDeprecated": false,
-          "name": "attachments",
-          "type": {
-            "kind": "NON_NULL",
-            "name": null,
-            "ofType": {
-              "kind": "LIST",
-              "name": null,
-              "ofType": {
-                "kind": "NON_NULL",
-                "name": null,
-                "ofType": {
-                  "kind": "OBJECT",
-                  "name": "Attachment",
-                  "ofType": null
-                }
-              }
-            }
-          }
         }
       ],
       "inputFields": null,
@@ -2,7 +2,6 @@ query ShowThreadQuery($threadId: String!) {
   thread(threadId: $threadId) {
     subject
     messages {
-      id
       subject
       from {
         name
@@ -32,11 +31,6 @@ query ShowThreadQuery($threadId: String!) {
         }
       }
       path
-      attachments {
-        filename
-        contentType
-        contentId
-      }
     }
   }
   tags {
@@ -133,7 +133,7 @@ blockquote[type="cite"],
 }
 */

-.desktop .main-content {
+.desktop-main-content {
   display: grid;
   grid-template-columns: 12rem 1fr;
 }
@@ -153,11 +153,6 @@ blockquote[type="cite"],
 .navbar {
   border: none;
 }
-.desktop nav.pagination,
-.tablet nav.pagination {
-  margin-left: .5em;
-  margin-bottom: 0 !important;
-}
 </style>
 </head>

@@ -1,5 +1,5 @@
 use graphql_client::GraphQLQuery;
-use log::{debug, error, info};
+use log::{debug, info};
 use notmuch::ThreadSet;
 use seed::{prelude::*, *};
 use thiserror::Error;
@@ -19,7 +19,6 @@ pub fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
     } else {
         orders.notify(subs::UrlRequested::new(url));
     };
-    orders.stream(streams::window_event(Ev::Resize, |_| Msg::OnResize));
     orders.subscribe(on_url_changed);

     Model {
@@ -128,7 +127,6 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
         Msg::Reload => {
             orders.perform_cmd(async move { on_url_changed(subs::UrlChanged(Url::current())) });
         }
-        Msg::OnResize => (),

         Msg::SearchRequest {
             query,
@@ -302,7 +300,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
                 model.context = Context::ThreadResult(data.thread);
             }
             Msg::ShowThreadResult(bad) => {
-                error!("show_thread_query error: {bad:#?}");
+                error!("show_thread_query error: {bad:?}");
             }
         }
     }
@@ -359,8 +357,6 @@ pub enum Msg {
     Noop,
     // Tell the client to refresh its state
     Reload,
-    // Window has changed size
-    OnResize,
    // Tell the server to update state
     RefreshStart,
     RefreshDone(Option<FetchError>),
@@ -5,7 +5,10 @@ use seed_hooks::{state_access::CloneState, topo, use_state};
 use crate::{
     api::urls,
     state::{Context, Model, Msg, Tag},
-    view::{self, legacy, view_header, view_search_results},
+    view::{
+        legacy::{view_search_results_legacy, view_thread_legacy},
+        view_header, view_search_results, view_thread,
+    },
 };

 #[topo::nested]
@@ -13,9 +16,9 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
     // Do two queries, one without `unread` so it loads fast, then a second with unread.
     let content = match &model.context {
         Context::None => div![h1!["Loading"]],
-        Context::Thread(thread_set) => legacy::thread(thread_set),
-        Context::ThreadResult(thread) => view::thread(thread),
-        Context::Search(search_results) => legacy::search_results(&model.query, search_results),
+        Context::Thread(thread_set) => view_thread_legacy(thread_set),
+        Context::ThreadResult(thread) => view_thread(thread),
+        Context::Search(search_results) => view_search_results_legacy(&model.query, search_results),
         Context::SearchResult {
             query,
             results,
@@ -86,7 +89,7 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
     let tags_open = use_state(|| false);
     let force_tags_open = unread.is_empty();
     div![
-        C!["main-content"],
+        C!["desktop-main-content"],
         aside![
             C!["tags-menu", "menu"],
             IF!(!unread.is_empty() => p![C!["menu-label"], "Unread"]),
@@ -9,7 +9,10 @@ use crate::{
     view::{human_age, pretty_authors, set_title, tags_chiclet},
 };

-pub(super) fn search_results(query: &str, search_results: &shared::SearchResult) -> Node<Msg> {
+pub(super) fn view_search_results_legacy(
+    query: &str,
+    search_results: &shared::SearchResult,
+) -> Node<Msg> {
     if query.is_empty() {
         set_title("all mail");
     } else {
@@ -42,7 +45,7 @@ pub(super) fn search_results(query: &str, search_results: &shared::SearchResult)
     });
     let first = search_results.page * search_results.results_per_page;
     div![
-        search_pager(first, summaries.len(), search_results.total),
+        view_search_pager_legacy(first, summaries.len(), search_results.total),
         table![
             C![
                 "table",
@@ -59,11 +62,11 @@ pub(super) fn search_results(query: &str, search_results: &shared::SearchResult)
             ]],
             tbody![rows]
         ],
-        search_pager(first, summaries.len(), search_results.total)
+        view_search_pager_legacy(first, summaries.len(), search_results.total)
     ]
 }

-fn search_pager(start: usize, count: usize, total: usize) -> Node<Msg> {
+pub(super) fn view_search_pager_legacy(start: usize, count: usize, total: usize) -> Node<Msg> {
     let is_first = start <= 0;
     let is_last = (start + SEARCH_RESULTS_PER_PAGE) >= total;
     nav![
@@ -87,7 +90,7 @@ fn search_pager(start: usize, count: usize, total: usize) -> Node<Msg> {
     ]
 }

-pub(super) fn thread(thread_set: &ThreadSet) -> Node<Msg> {
+pub(super) fn view_thread_legacy(thread_set: &ThreadSet) -> Node<Msg> {
     assert_eq!(thread_set.0.len(), 1);
     let thread = &thread_set.0[0];
     assert_eq!(thread.0.len(), 1);
@@ -104,56 +107,6 @@ pub(super) fn thread(thread_set: &ThreadSet) -> Node<Msg> {
         ],
     ]
 }
-pub(super) fn mobile_search_results(
-    query: &str,
-    search_results: &shared::SearchResult,
-) -> Node<Msg> {
-    if query.is_empty() {
-        set_title("all mail");
-    } else {
-        set_title(query);
-    }
-    let summaries = &search_results.summary.0;
-    let rows = summaries.iter().map(|r| {
-        /*
-        let tid = r.thread.clone();
-        tr![
-            td![
-                C!["from"],
-                pretty_authors(&r.authors),
-                IF!(r.total>1 => small![" ", r.total.to_string()]),
-            ],
-            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
-            td![C!["date"], &r.date_relative],
-            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
-        ]
-        */
-        let tid = r.thread.clone();
-        let datetime = human_age(r.timestamp as i64);
-        a![
-            C!["has-text-light"],
-            attrs! {
-                At::Href => urls::thread(&tid)
-            },
-            div![
-                C!["row"],
-                div![C!["subject"], &r.subject],
-                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
-                div![
-                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
-                    span![C!["is-size-7", "float-right", "date"], datetime]
-                ]
-            ]
-        ]
-    });
-    let first = search_results.page * search_results.results_per_page;
-    div![
-        C!["search-results"],
-        search_pager(first, summaries.len(), search_results.total),
-        rows,
-        search_pager(first, summaries.len(), search_results.total)
-    ]
-}
 fn view_message(thread: &ThreadNode) -> Node<Msg> {
     let message = thread.0.as_ref().expect("ThreadNode missing Message");
     let children = &thread.1;
@@ -5,25 +5,26 @@ use crate::{
     graphql::front_page_query::*,
     state::{Context, Model, Msg},
     view::{
-        self, human_age, legacy, pretty_authors, set_title, tags_chiclet, view_header,
-        view_search_pager,
+        human_age,
+        legacy::{view_search_pager_legacy, view_thread_legacy},
+        pretty_authors, set_title, tags_chiclet, view_header, view_search_pager, view_thread,
     },
 };

 pub(super) fn view(model: &Model) -> Node<Msg> {
     let content = match &model.context {
         Context::None => div![h1!["Loading"]],
-        Context::Thread(thread_set) => legacy::thread(thread_set),
-        Context::ThreadResult(thread) => view::thread(thread),
+        Context::Thread(thread_set) => view_thread_legacy(thread_set),
+        Context::ThreadResult(thread) => view_thread(thread),
         Context::Search(search_results) => {
-            legacy::mobile_search_results(&model.query, search_results)
+            view_mobile_search_results_legacy(&model.query, search_results)
         }
         Context::SearchResult {
             query,
             results,
             count,
             pager,
-        } => search_results(&query, results.as_slice(), *count, pager),
+        } => view_mobile_search_results(&query, results.as_slice(), *count, pager),
     };
     div![
         view_header(&model.query, &model.refreshing_state),
@@ -32,7 +33,7 @@ pub(super) fn view(model: &Model) -> Node<Msg> {
     ]
 }

-fn search_results(
+fn view_mobile_search_results(
     query: &str,
     results: &[FrontPageQuerySearchNodes],
     count: usize,
@@ -69,3 +70,54 @@ fn search_results(
         view_search_pager(count, pager),
     ]
 }
+
+fn view_mobile_search_results_legacy(
+    query: &str,
+    search_results: &shared::SearchResult,
+) -> Node<Msg> {
+    if query.is_empty() {
+        set_title("all mail");
+    } else {
+        set_title(query);
+    }
+    let summaries = &search_results.summary.0;
+    let rows = summaries.iter().map(|r| {
+        /*
+        let tid = r.thread.clone();
+        tr![
+            td![
+                C!["from"],
+                pretty_authors(&r.authors),
+                IF!(r.total>1 => small![" ", r.total.to_string()]),
+            ],
+            td![C!["subject"], tags_chiclet(&r.tags), " ", &r.subject],
+            td![C!["date"], &r.date_relative],
+            ev(Ev::Click, move |_| Msg::ShowPrettyRequest(tid)),
+        ]
+        */
+        let tid = r.thread.clone();
+        let datetime = human_age(r.timestamp as i64);
+        a![
+            C!["has-text-light"],
+            attrs! {
+                At::Href => urls::thread(&tid)
+            },
+            div![
+                C!["row"],
+                div![C!["subject"], &r.subject],
+                span![C!["from", "is-size-7"], pretty_authors(&r.authors)],
+                div![
+                    span![C!["is-size-7"], tags_chiclet(&r.tags, true)],
+                    span![C!["is-size-7", "float-right", "date"], datetime]
+                ]
+            ]
+        ]
+    });
+    let first = search_results.page * search_results.results_per_page;
+    div![
+        C!["search-results"],
+        view_search_pager_legacy(first, summaries.len(), search_results.total),
+        rows,
+        view_search_pager_legacy(first, summaries.len(), search_results.total)
+    ]
+}
@@ -19,9 +19,7 @@ use crate::{
 mod desktop;
 mod legacy;
 mod mobile;
-mod tablet;

-const MAX_RAW_MESSAGE_SIZE: usize = 100_000;
 fn set_title(title: &str) {
     seed::document().set_title(&format!("lb: {}", title));
 }
@@ -251,31 +249,19 @@ fn view_addresses(addrs: &[impl Email]) -> Vec<Node<Msg>> {
     addrs.into_iter().map(view_address).collect::<Vec<_>>()
 }

-fn raw_text_message(contents: &str) -> Node<Msg> {
-    let (contents, truncated_msg) = if contents.len() > MAX_RAW_MESSAGE_SIZE {
-        (
-            &contents[..MAX_RAW_MESSAGE_SIZE],
-            Some(div!["... contents truncated"]),
-        )
-    } else {
-        (contents, None)
-    };
-    div![C!["view-part-text-plain"], contents, truncated_msg,]
-}
-
-fn thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
+fn view_thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
     // TODO(wathiede): show per-message subject if it changes significantly from top-level subject
     set_title(&thread.subject);
     let messages = thread.messages.iter().map(|msg| {
         div![
             C!["message"],
             /* TODO(wathiede): collect all the tags and show them here. */
+            /* TODO(wathiede): collect all the attachments from all the subparts */
             msg.from
                 .as_ref()
                 .map(|from| div![C!["header"], "From: ", view_address(&from)]),
             msg.timestamp
                 .map(|ts| div![C!["header"], "Date: ", human_age(ts)]),
-            div!["Message-ID: ", &msg.id],
             div![
                 C!["header"],
                 IF!(!msg.to.is_empty() => span!["To: ", view_addresses(&msg.to)]),
@@ -292,7 +278,7 @@ fn thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
                     contents,
                     content_tree,
                 },
-            ) => div![raw_text_message(&contents), pre![content_tree]],
+            ) => div![C!["view-part-text-plain"], contents, pre![content_tree]],
             ShowThreadQueryThreadMessagesBody::Html(
                 ShowThreadQueryThreadMessagesBodyOnHtml {
                     contents,
@@ -301,15 +287,6 @@ fn thread(thread: &ShowThreadQueryThread) -> Node<Msg> {
             ) => div![
                 C!["view-part-text-html"],
                 raw![contents],
-                IF!(!msg.attachments.is_empty() =>
-                    div![
-                        C!["attachments"],
-                        br![],
-                        h2!["Attachments"],
-                        msg.attachments
-                            .iter()
-                            .map(|a| div!["Filename: ", &a.filename, " ", &a.content_type])
-                ]),
                 pre![content_tree]
             ],
         }
@@ -418,28 +395,26 @@ fn view_footer(render_time_ms: u128) -> Node<Msg> {

 // `view` describes what to display.
 pub fn view(model: &Model) -> Node<Msg> {
-    let start = Instant::now();
     info!("refreshing {:?}", model.refreshing_state);
-    let win = seed::window();
-    let w = win
-        .inner_width()
-        .expect("window width")
-        .as_f64()
-        .expect("window width f64");
-    let h = win
-        .inner_height()
-        .expect("window height")
-        .as_f64()
-        .expect("window height f64");
-    info!("win: {w}x{h}");
+    let is_mobile = seed::window()
+        .match_media("(max-width: 768px)")
+        .expect("failed media query")
+        .map(|mql| mql.matches())
+        .unwrap_or(false);

+    let start = Instant::now();
     info!("view called");
     div![
-        match w {
-            w if w < 800. => div![C!["mobile"], mobile::view(model)],
-            w if w < 1024. => div![C!["tablet"], tablet::view(model)],
-            _ => div![C!["desktop"], desktop::view(model)],
+        if is_mobile {
+            C!["mobile"]
+        } else {
+            C!["desktop"]
         },
-        view_footer(start.elapsed().as_millis()),
+        if is_mobile {
+            mobile::view(model)
+        } else {
+            desktop::view(model)
+        },
+        view_footer(start.elapsed().as_millis())
     ]
 }
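
Note on the view change above: the old `view` measured `inner_width()` and picked mobile/tablet/desktop by pixel thresholds; the new one asks the browser for a single `(max-width: 768px)` media query and keeps only the mobile/desktop split. A sketch of that check written directly against web-sys (`seed::window()` returns the same `web_sys::Window`, so the app's version differs only in how it obtains the window; the `MediaQueryList` web-sys feature must be enabled):

    use wasm_bindgen::JsValue;

    /// True when the viewport matches the mobile breakpoint.
    fn is_mobile() -> Result<bool, JsValue> {
        let window = web_sys::window().expect("no global window");
        Ok(window
            .match_media("(max-width: 768px)")?
            .map(|mql| mql.matches())
            .unwrap_or(false))
    }
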
@@ -1,30 +0,0 @@
-use seed::{prelude::*, *};
-
-use crate::{
-    state::{Context, Model, Msg},
-    view::{self, view_header, view_search_results},
-};
-
-pub(super) fn view(model: &Model) -> Node<Msg> {
-    // Do two queries, one without `unread` so it loads fast, then a second with unread.
-    let content = match &model.context {
-        Context::None => div![h1!["Loading"]],
-        Context::Thread(_) => unimplemented!("tablet legacy thread view"),
-        Context::ThreadResult(thread) => view::thread(thread),
-        Context::Search(_) => unimplemented!("tablet legacy search results view"),
-        Context::SearchResult {
-            query,
-            results,
-            count,
-            pager,
-        } => view_search_results(&query, results.as_slice(), *count, pager),
-    };
-    div![
-        C!["main-content"],
-        div![
-            view_header(&model.query, &model.refreshing_state),
-            content,
-            view_header(&model.query, &model.refreshing_state),
-        ]
-    ]
-}