//! GraphQL schema: query/mutation roots and the domain types they expose.
use std::str::FromStr;
|
|
|
|
use async_graphql::{
|
|
connection::{self, Connection, Edge, OpaqueCursor},
|
|
Context, EmptySubscription, Enum, Error, FieldResult, InputObject, Object, Schema,
|
|
SimpleObject, Union,
|
|
};
|
|
use log::info;
|
|
use notmuch::Notmuch;
|
|
use serde::{Deserialize, Serialize};
|
|
use sqlx::postgres::PgPool;
|
|
|
|
use crate::{config::Config, newsreader, nm, tantivy::TantivyConnection, Query};
|
|
|
|
/// # Number of seconds since the Epoch
///
/// NOTE(review): this is `isize` while [`NewsPost::timestamp`] uses `i64` —
/// consider unifying; confirm no 32-bit target is in play.
pub type UnixTime = isize;

/// # Thread ID, sans "thread:"
pub type ThreadId = String;
|
|
|
|
#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
|
|
pub enum Corpus {
|
|
Notmuch,
|
|
Newsreader,
|
|
Tantivy,
|
|
}
|
|
|
|
impl FromStr for Corpus {
|
|
type Err = String;
|
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
Ok(match s {
|
|
"notmuch" => Corpus::Notmuch,
|
|
"newsreader" => Corpus::Newsreader,
|
|
"tantivy" => Corpus::Tantivy,
|
|
s => return Err(format!("unknown corpus: '{s}'")),
|
|
})
|
|
}
|
|
}
|
|
|
|
// One row in the search-results list, merged from any of the three corpora.
// TODO: add is_read field and remove all use of 'tag:unread'
#[derive(Debug, SimpleObject)]
pub struct ThreadSummary {
    pub thread: ThreadId,
    // Sort key for merged search results (seconds since the Epoch).
    pub timestamp: UnixTime,
    /// user-friendly timestamp
    pub date_relative: String,
    /// number of matched messages
    pub matched: isize,
    /// total messages in thread
    pub total: isize,
    /// comma-separated names with | between matched and unmatched
    pub authors: String,
    pub subject: String,
    pub tags: Vec<String>,
    // Which backend produced this summary.
    pub corpus: Corpus,
}
|
|
|
|
// A thread is either an email conversation (notmuch) or a single news post
// (newsreader) — see QueryRoot::thread for the dispatch.
#[derive(Debug, Union)]
pub enum Thread {
    Email(EmailThread),
    News(NewsPost),
}
|
|
|
|
// A single newsreader article, presented as its own thread.
#[derive(Debug, SimpleObject)]
pub struct NewsPost {
    pub thread_id: String,
    pub is_read: bool,
    // URL-friendly article identifier — presumably unique per site; confirm.
    pub slug: String,
    pub site: String,
    pub title: String,
    pub body: String,
    pub url: String,
    // Presumably seconds since the Epoch; NOTE(review): i64 here vs
    // `UnixTime` (isize) elsewhere — consider unifying.
    pub timestamp: i64,
}
|
|
|
|
// A notmuch mail thread: subject plus every message in the conversation.
#[derive(Debug, SimpleObject)]
pub struct EmailThread {
    pub thread_id: String,
    pub subject: String,
    pub messages: Vec<Message>,
}
|
|
|
|
// A single parsed email message within a thread.
#[derive(Debug, SimpleObject)]
pub struct Message {
    // Message-ID for message, prepend `id:<id>` to search in notmuch
    pub id: String,
    // First From header found in email
    pub from: Option<Email>,
    // All To headers found in email
    pub to: Vec<Email>,
    // All CC headers found in email
    pub cc: Vec<Email>,
    // First Subject header found in email
    pub subject: Option<String>,
    // Parsed Date header, if found and valid
    pub timestamp: Option<i64>,
    // Headers
    pub headers: Vec<Header>,
    // The body contents
    pub body: Body,
    // On disk location of message
    pub path: String,
    pub attachments: Vec<Attachment>,
    // notmuch tags attached to this message.
    pub tags: Vec<String>,
}
|
|
|
|
// Example of the MIME headers an attachment is parsed from:
// Content-Type: image/jpeg; name="PXL_20231125_204826860.jpg"
// Content-Disposition: attachment; filename="PXL_20231125_204826860.jpg"
// Content-Transfer-Encoding: base64
// Content-ID: <f_lponoluo1>
// X-Attachment-Id: f_lponoluo1
#[derive(Default, Debug, SimpleObject)]
pub struct Attachment {
    pub id: String,
    // Position of this part within the message — TODO confirm exact format.
    pub idx: String,
    pub filename: Option<String>,
    pub size: usize,
    pub content_type: Option<String>,
    pub content_id: Option<String>,
    pub disposition: DispositionType,
    // Attachment payload bytes.
    pub bytes: Vec<u8>,
}
|
|
|
|
/// Parsed Content-Disposition information for a MIME part.
/// (Internal — not exposed through the GraphQL schema.)
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Disposition {
    pub r#type: DispositionType,
    pub filename: Option<String>,
    pub size: Option<usize>,
}
|
|
|
|
// How a MIME part asks to be presented (from Content-Disposition).
#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
pub enum DispositionType {
    Inline,
    Attachment,
}
|
|
|
|
impl From<mailparse::DispositionType> for DispositionType {
    /// Maps mailparse's disposition into our GraphQL-facing enum.
    fn from(value: mailparse::DispositionType) -> Self {
        match value {
            mailparse::DispositionType::Inline => DispositionType::Inline,
            mailparse::DispositionType::Attachment => DispositionType::Attachment,
            // NOTE(review): mailparse declares further variants (form-data /
            // extension tokens); this panics on them. Confirm they can never
            // reach here, or map them to a fallback instead of panicking.
            dt => panic!("unhandled DispositionType {dt:?}"),
        }
    }
}
|
|
|
|
impl Default for DispositionType {
|
|
fn default() -> Self {
|
|
DispositionType::Attachment
|
|
}
|
|
}
|
|
|
|
// A single raw email header as a key/value pair.
#[derive(Debug, SimpleObject)]
pub struct Header {
    pub key: String,
    pub value: String,
}
|
|
|
|
#[derive(Debug)]
|
|
pub struct UnhandledContentType {
|
|
pub text: String,
|
|
pub content_tree: String,
|
|
}
|
|
|
|
#[Object]
|
|
impl UnhandledContentType {
|
|
async fn contents(&self) -> &str {
|
|
&self.text
|
|
}
|
|
async fn content_tree(&self) -> &str {
|
|
&self.content_tree
|
|
}
|
|
}
|
|
|
|
#[derive(Debug)]
|
|
pub struct PlainText {
|
|
pub text: String,
|
|
pub content_tree: String,
|
|
}
|
|
|
|
#[Object]
|
|
impl PlainText {
|
|
async fn contents(&self) -> &str {
|
|
&self.text
|
|
}
|
|
async fn content_tree(&self) -> &str {
|
|
&self.content_tree
|
|
}
|
|
}
|
|
|
|
#[derive(Debug)]
|
|
pub struct Html {
|
|
pub html: String,
|
|
pub content_tree: String,
|
|
}
|
|
|
|
#[Object]
|
|
impl Html {
|
|
async fn contents(&self) -> &str {
|
|
&self.html
|
|
}
|
|
async fn content_tree(&self) -> &str {
|
|
&self.content_tree
|
|
}
|
|
async fn headers(&self) -> Vec<Header> {
|
|
Vec::new()
|
|
}
|
|
}
|
|
|
|
// The renderable body of a message, discriminated by how it should be shown.
#[derive(Debug, Union)]
pub enum Body {
    UnhandledContentType(UnhandledContentType),
    PlainText(PlainText),
    Html(Html),
}
|
|
|
|
impl Body {
|
|
pub fn html(html: String) -> Body {
|
|
Body::Html(Html {
|
|
html,
|
|
content_tree: "".to_string(),
|
|
})
|
|
}
|
|
pub fn text(text: String) -> Body {
|
|
Body::PlainText(PlainText {
|
|
text,
|
|
content_tree: "".to_string(),
|
|
})
|
|
}
|
|
}
|
|
|
|
// A parsed mailbox; either the display name or the address may be absent.
#[derive(Debug, SimpleObject)]
pub struct Email {
    pub name: Option<String>,
    pub addr: Option<String>,
}
|
|
|
|
// A tag with its display colors and unread count, for the tag list.
#[derive(SimpleObject)]
pub struct Tag {
    pub name: String,
    pub fg_color: String,
    pub bg_color: String,
    // Number of unread items carrying this tag.
    pub unread: usize,
}
|
|
|
|
// Opaque pagination cursor: one offset per corpus, so a merged search can
// resume each backend independently from where the previous page stopped.
#[derive(Serialize, Deserialize, Debug, InputObject)]
struct SearchCursor {
    newsreader_offset: i32,
    notmuch_offset: i32,
    tantivy_offset: i32,
}
|
|
|
|
/// GraphQL query root: version, count, merged search, tags, and thread
/// resolvers over the notmuch / newsreader / tantivy corpora.
pub struct QueryRoot;

#[Object]
impl QueryRoot {
    /// Reports the server's build version string.
    async fn version<'ctx>(&self, _ctx: &Context<'ctx>) -> Result<String, Error> {
        build_info::build_info!(fn bi);
        Ok(shared::build_version(bi))
    }

    /// Total hit count for `query`, summed across all three corpora.
    async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> Result<usize, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        let tantivy = ctx.data_unchecked::<TantivyConnection>();

        let newsreader_query: Query = query.parse()?;

        let newsreader_count = newsreader::count(pool, &newsreader_query).await?;
        let notmuch_count = nm::count(nm, &newsreader_query).await?;
        let tantivy_count = tantivy.count(&newsreader_query).await?;
        let total = newsreader_count + notmuch_count + tantivy_count;
        info!("count {newsreader_query:?} newsreader count {newsreader_count} notmuch count {notmuch_count} tantivy count {tantivy_count} total {total}");
        Ok(total)
    }

    /// Relay-style cursor search merged across newsreader, notmuch, and
    /// tantivy. Each opaque cursor stores a per-corpus offset so paging can
    /// resume every backend from where the previous page left off.
    async fn search<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        after: Option<String>,
        before: Option<String>,
        first: Option<i32>,
        last: Option<i32>,
        query: String,
    ) -> Result<Connection<OpaqueCursor<SearchCursor>, ThreadSummary>, Error> {
        info!("search({after:?} {before:?} {first:?} {last:?} {query:?})",);
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        let tantivy = ctx.data_unchecked::<TantivyConnection>();

        Ok(connection::query(
            after,
            before,
            first,
            last,
            |after: Option<OpaqueCursor<SearchCursor>>,
             before: Option<OpaqueCursor<SearchCursor>>,
             first: Option<usize>,
             last: Option<usize>| async move {
                info!(
                    "search(after {:?} before {:?} first {first:?} last {last:?} query: {query:?})",
                    after.as_ref().map(|v| &v.0),
                    before.as_ref().map(|v| &v.0)
                );
                // Split the combined cursor back into per-corpus offsets.
                let newsreader_after = after.as_ref().map(|sc| sc.newsreader_offset);
                let notmuch_after = after.as_ref().map(|sc| sc.notmuch_offset);
                let tantivy_after = after.as_ref().map(|sc| sc.tantivy_offset);

                let newsreader_before = before.as_ref().map(|sc| sc.newsreader_offset);
                let notmuch_before = before.as_ref().map(|sc| sc.notmuch_offset);
                let tantivy_before = before.as_ref().map(|sc| sc.tantivy_offset);
                let first = first.map(|v| v as i32);
                let last = last.map(|v| v as i32);

                let query: Query = query.parse()?;
                info!("newsreader_query {query:?}");

                // Query each corpus; each helper returns (offset, summary)
                // pairs tagged with their source.
                let newsreader_results = newsreader_search(
                    pool,
                    newsreader_after,
                    newsreader_before,
                    first,
                    last,
                    &query,
                )
                .await?;
                let notmuch_results =
                    notmuch_search(nm, notmuch_after, notmuch_before, first, last, &query).await?;
                let tantivy_results = tantivy_search(
                    tantivy,
                    pool,
                    tantivy_after,
                    tantivy_before,
                    first,
                    last,
                    &query,
                )
                .await?;
                info!(
                    "newsreader_results ({}) notmuch_results ({}) tantivy_results ({})",
                    newsreader_results.len(),
                    notmuch_results.len(),
                    tantivy_results.len()
                );

                // Merge all corpora into one list before sorting/paging.
                let mut results: Vec<_> = newsreader_results
                    .into_iter()
                    .chain(notmuch_results)
                    .chain(tantivy_results)
                    .collect();

                // The leading '-' is to reverse sort (newest first).
                results.sort_by_key(|item| match item {
                    ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
                    ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
                    ThreadSummaryCursor::Tantivy(_, ts) => -ts.timestamp,
                });

                // A `before` cursor implies results exist past this page.
                let mut has_next_page = before.is_some();
                if let Some(first) = first {
                    let first = first as usize;
                    if results.len() > first {
                        has_next_page = true;
                        results.truncate(first);
                    }
                }

                // Likewise an `after` cursor implies a previous page.
                let mut has_previous_page = after.is_some();
                if let Some(last) = last {
                    let last = last as usize;
                    if results.len() > last {
                        has_previous_page = true;
                        // NOTE(review): truncate keeps the FIRST `last` items;
                        // Relay `last` semantics normally keep the tail of the
                        // result set — confirm this is intentional.
                        results.truncate(last);
                    }
                }

                let mut connection = Connection::new(has_previous_page, has_next_page);
                // Offsets are carried forward cumulatively: each edge's cursor
                // records the most recent offset seen so far for every corpus.
                let mut newsreader_offset = 0;
                let mut notmuch_offset = 0;
                let mut tantivy_offset = 0;

                connection.edges.extend(results.into_iter().map(|item| {
                    let thread_summary;
                    match item {
                        ThreadSummaryCursor::Newsreader(offset, ts) => {
                            thread_summary = ts;
                            newsreader_offset = offset;
                        }
                        ThreadSummaryCursor::Notmuch(offset, ts) => {
                            thread_summary = ts;
                            notmuch_offset = offset;
                        }
                        ThreadSummaryCursor::Tantivy(offset, ts) => {
                            thread_summary = ts;
                            tantivy_offset = offset;
                        }
                    }
                    let cur = OpaqueCursor(SearchCursor {
                        newsreader_offset,
                        notmuch_offset,
                        tantivy_offset,
                    });
                    Edge::new(cur, thread_summary)
                }));
                Ok::<_, async_graphql::Error>(connection)
            },
        )
        .await?)
    }

    /// All tags from newsreader and notmuch combined. Unread counts are only
    /// computed when the client actually selected the `unread` field.
    async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        let needs_unread = ctx.look_ahead().field("unread").exists();
        let mut tags = newsreader::tags(pool, needs_unread).await?;
        tags.append(&mut nm::tags(nm, needs_unread)?);
        Ok(tags)
    }

    /// Loads one full thread, dispatching on whether the id belongs to the
    /// newsreader corpus or to notmuch.
    async fn thread<'ctx>(&self, ctx: &Context<'ctx>, thread_id: String) -> Result<Thread, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        let config = ctx.data_unchecked::<Config>();
        // Only request content-tree debug rendering when the client asked
        // for the `contentTree` field.
        let debug_content_tree = ctx
            .look_ahead()
            .field("messages")
            .field("body")
            .field("contentTree")
            .exists();
        // TODO: look at thread_id and conditionally load newsreader
        if newsreader::is_newsreader_thread(&thread_id) {
            Ok(newsreader::thread(config, pool, thread_id).await?)
        } else {
            Ok(nm::thread(nm, thread_id, debug_content_tree).await?)
        }
    }
}
|
|
|
|
// A (cursor offset, summary) pair tagged with its source corpus, used while
// merging results before edges are built.
#[derive(Debug)]
enum ThreadSummaryCursor {
    Newsreader(i32, ThreadSummary),
    Notmuch(i32, ThreadSummary),
    Tantivy(i32, ThreadSummary),
}
|
|
async fn newsreader_search(
|
|
pool: &PgPool,
|
|
after: Option<i32>,
|
|
before: Option<i32>,
|
|
first: Option<i32>,
|
|
last: Option<i32>,
|
|
query: &Query,
|
|
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
|
|
Ok(newsreader::search(pool, after, before, first, last, &query)
|
|
.await?
|
|
.into_iter()
|
|
.map(|(cur, ts)| ThreadSummaryCursor::Newsreader(cur, ts))
|
|
.collect())
|
|
}
|
|
|
|
async fn notmuch_search(
|
|
nm: &Notmuch,
|
|
after: Option<i32>,
|
|
before: Option<i32>,
|
|
first: Option<i32>,
|
|
last: Option<i32>,
|
|
query: &Query,
|
|
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
|
|
Ok(nm::search(nm, after, before, first, last, &query)
|
|
.await?
|
|
.into_iter()
|
|
.map(|(cur, ts)| ThreadSummaryCursor::Notmuch(cur, ts))
|
|
.collect())
|
|
}
|
|
|
|
async fn tantivy_search(
|
|
tantivy: &TantivyConnection,
|
|
pool: &PgPool,
|
|
after: Option<i32>,
|
|
before: Option<i32>,
|
|
first: Option<i32>,
|
|
last: Option<i32>,
|
|
query: &Query,
|
|
) -> Result<Vec<ThreadSummaryCursor>, async_graphql::Error> {
|
|
Ok(tantivy
|
|
.search(pool, after, before, first, last, &query)
|
|
.await?
|
|
.into_iter()
|
|
.map(|(cur, ts)| ThreadSummaryCursor::Tantivy(cur, ts))
|
|
.collect())
|
|
}
|
|
|
|
/// GraphQL mutation root: read-status updates, tagging, and index
/// maintenance.
pub struct Mutation;

#[Object]
impl Mutation {
    /// Sets read/unread status for everything matching `query` in both
    /// newsreader and notmuch, then reindexes the affected thread in tantivy.
    async fn set_read_status<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        query: String,
        unread: bool,
    ) -> Result<bool, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let pool = ctx.data_unchecked::<PgPool>();
        let tantivy = ctx.data_unchecked::<TantivyConnection>();

        let query: Query = query.parse()?;
        newsreader::set_read_status(pool, &query, unread).await?;
        tantivy.reindex_thread(pool, &query).await?;
        nm::set_read_status(nm, &query, unread).await?;
        Ok(true)
    }

    /// Adds `tag` to all notmuch messages matching `query`.
    async fn tag_add<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        query: String,
        tag: String,
    ) -> Result<bool, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        info!("tag_add({tag}, {query})");
        nm.tag_add(&tag, &query)?;
        Ok(true)
    }

    /// Removes `tag` from all notmuch messages matching `query`.
    async fn tag_remove<'ctx>(
        &self,
        ctx: &Context<'ctx>,
        query: String,
        tag: String,
    ) -> Result<bool, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        info!("tag_remove({tag}, {query})");
        nm.tag_remove(&tag, &query)?;
        Ok(true)
    }

    /// Drop and recreate tantivy index. Warning this is slow
    async fn drop_and_load_index<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
        let tantivy = ctx.data_unchecked::<TantivyConnection>();
        let pool = ctx.data_unchecked::<PgPool>();

        tantivy.drop_and_load_index()?;
        tantivy.reindex_all(pool).await?;

        Ok(true)
    }

    /// Ingests new mail (presumably `notmuch new` — nm.new() returns its
    /// output bytes, which are logged) and refreshes the tantivy index.
    async fn refresh<'ctx>(&self, ctx: &Context<'ctx>) -> Result<bool, Error> {
        let nm = ctx.data_unchecked::<Notmuch>();
        let tantivy = ctx.data_unchecked::<TantivyConnection>();
        let pool = ctx.data_unchecked::<PgPool>();
        // TODO: parallelize
        info!("{}", String::from_utf8_lossy(&nm.new()?));
        tantivy.refresh(pool).await?;
        Ok(true)
    }
}
|
|
|
|
/// The complete GraphQL schema, wired from the query and mutation roots.
pub type GraphqlSchema = Schema<QueryRoot, Mutation, EmptySubscription>;
|