diff --git a/server/sql/count.sql b/server/sql/count.sql
index 3c01d75..69e1941 100644
--- a/server/sql/count.sql
+++ b/server/sql/count.sql
@@ -3,7 +3,7 @@ SELECT
 FROM
     post
 WHERE
-    site = $1
+    ($1::text IS NULL OR site = $1)
     AND (
         NOT $2
        OR NOT is_read
diff --git a/server/sql/threads.sql b/server/sql/threads.sql
index a476710..6d2da8f 100644
--- a/server/sql/threads.sql
+++ b/server/sql/threads.sql
@@ -1,4 +1,5 @@
 SELECT
+    site,
     date,
     is_read,
     title,
@@ -8,7 +9,7 @@ FROM
     post p
     JOIN feed f ON p.site = f.slug
 WHERE
-    site = $1
+    ($1::text IS NULL OR site = $1)
     AND (
         NOT $2
         OR NOT is_read
diff --git a/server/src/graphql.rs b/server/src/graphql.rs
index a4f0c13..6cff256 100644
--- a/server/src/graphql.rs
+++ b/server/src/graphql.rs
@@ -1,9 +1,11 @@
 use async_graphql::{
-    connection::Connection, Context, EmptySubscription, Enum, Error, FieldResult, Object, Schema,
+    connection::{self, Connection, Edge, OpaqueCursor},
+    Context, EmptySubscription, Enum, Error, FieldResult, InputObject, Object, Schema,
     SimpleObject, Union,
 };
 use log::info;
 use notmuch::Notmuch;
+use serde::{Deserialize, Serialize};
 use sqlx::postgres::PgPool;
 
 use crate::{newsreader, nm};
@@ -200,6 +202,12 @@ pub struct Tag {
     pub unread: usize,
 }
 
+#[derive(Serialize, Deserialize, Debug, InputObject)]
+struct SearchCursor {
+    newsreader_offset: i32,
+    notmuch_offset: i32,
+}
+
 pub struct QueryRoot;
 
 #[Object]
 impl QueryRoot {
     async fn count<'ctx>(&self, ctx: &Context<'ctx>, query: String) -> FieldResult<usize> {
         let nm = ctx.data_unchecked::<Notmuch>();
         let pool = ctx.data_unchecked::<PgPool>();
 
-        // TODO: make this search both corpora and merge results
-        if newsreader::is_newsreader_search(&query) {
-            Ok(newsreader::count(pool, &query).await?)
-        } else {
-            Ok(nm::count(nm, &query).await?)
-        }
+        let newsreader_query: newsreader::Query = query.parse()?;
+
+        Ok(newsreader::count(pool, &newsreader_query).await? + nm::count(nm, &query).await?)
     }
 
     async fn search<'ctx>(
         &self,
         ctx: &Context<'ctx>,
         after: Option<String>,
         before: Option<String>,
         first: Option<i32>,
         last: Option<i32>,
         query: String,
-    ) -> Result<Connection<usize, ThreadSummary>, Error> {
-        info!("search({after:?} {before:?} {first:?} {last:?} {query:?})");
+    ) -> Result<Connection<OpaqueCursor<SearchCursor>, ThreadSummary>, Error> {
+        // TODO: add keywords to limit search to one corpus, e.g. is:news or is:mail
+        info!("search({after:?} {before:?} {first:?} {last:?} {query:?})");
         let nm = ctx.data_unchecked::<Notmuch>();
         let pool = ctx.data_unchecked::<PgPool>();
 
-        // TODO: make this search both corpora and merge results
-        if newsreader::is_newsreader_search(&query) {
-            Ok(newsreader::search(pool, after, before, first, last, query).await?)
-        } else {
-            Ok(nm::search(nm, after, before, first, last, query).await?)
+        // Tag each corpus's results so merged edges can rebuild a combined cursor below.
+        enum ThreadSummaryCursor {
+            Newsreader(i32, ThreadSummary),
+            Notmuch(i32, ThreadSummary),
         }
+
+        Ok(connection::query(
+            after,
+            before,
+            first,
+            last,
+            |after: Option<OpaqueCursor<SearchCursor>>,
+             before: Option<OpaqueCursor<SearchCursor>>,
+             first: Option<usize>,
+             last: Option<usize>| async move {
+                info!(
+                    "search({:?} {:?} {first:?} {last:?} {query:?})",
+                    after.as_ref().map(|v| &v.0),
+                    before.as_ref().map(|v| &v.0)
+                );
+                let newsreader_after = after.as_ref().map(|sc| sc.newsreader_offset);
+                let notmuch_after = after.as_ref().map(|sc| sc.notmuch_offset);
+                let newsreader_before = before.as_ref().map(|sc| sc.newsreader_offset);
+                let notmuch_before = before.as_ref().map(|sc| sc.notmuch_offset);
+
+                let newsreader_query: newsreader::Query = query.parse()?;
+                let newsreader_results = newsreader::search(
+                    pool,
+                    newsreader_after,
+                    newsreader_before,
+                    first.map(|v| v as i32),
+                    last.map(|v| v as i32),
+                    &newsreader_query,
+                )
+                .await?
+                .into_iter()
+                .map(|(cur, ts)| ThreadSummaryCursor::Newsreader(cur, ts));
+
+                let notmuch_results = nm::search(
+                    nm,
+                    notmuch_after,
+                    notmuch_before,
+                    first.map(|v| v as i32),
+                    last.map(|v| v as i32),
+                    query,
+                )
+                .await?
+                .into_iter()
+                .map(|(cur, ts)| ThreadSummaryCursor::Notmuch(cur, ts));
+
+                let mut results: Vec<_> = newsreader_results.chain(notmuch_results).collect();
+
+                // Negate the timestamp so the newest posts sort first.
+                results.sort_by_key(|item| match item {
+                    ThreadSummaryCursor::Newsreader(_, ts) => -ts.timestamp,
+                    ThreadSummaryCursor::Notmuch(_, ts) => -ts.timestamp,
+                });
+
+                let mut has_next_page = before.is_some();
+                if let Some(first) = first {
+                    if results.len() > first {
+                        has_next_page = true;
+                        results.truncate(first);
+                    }
+                }
+
+                let mut has_previous_page = after.is_some();
+                if let Some(last) = last {
+                    if results.len() > last {
+                        has_previous_page = true;
+                        // TODO: find better way to do this.
+                        results.reverse();
+                        results.truncate(last);
+                        results.reverse();
+                    }
+                }
+
+                let mut connection = Connection::new(has_previous_page, has_next_page);
+                // Each edge's cursor records how far into *both* corpora the merged
+                // list has been consumed, so seed the carry-forward offsets from the
+                // incoming cursor rather than rewinding to 0.
+                let mut newsreader_offset = newsreader_after.unwrap_or(0);
+                let mut notmuch_offset = notmuch_after.unwrap_or(0);
+
+                connection.edges.extend(results.into_iter().map(|item| {
+                    let thread_summary;
+                    match item {
+                        ThreadSummaryCursor::Newsreader(offset, ts) => {
+                            thread_summary = ts;
+                            newsreader_offset = offset;
+                        }
+                        ThreadSummaryCursor::Notmuch(offset, ts) => {
+                            thread_summary = ts;
+                            notmuch_offset = offset;
+                        }
+                    }
+                    info!(
+                        "item: {} {}",
+                        thread_summary.subject, thread_summary.timestamp
+                    );
+                    let cur = OpaqueCursor(SearchCursor {
+                        newsreader_offset,
+                        notmuch_offset,
+                    });
+                    Edge::new(cur, thread_summary)
+                }));
+                Ok::<_, async_graphql::Error>(connection)
+            },
+        )
+        .await?)
     }
 
     async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
diff --git a/server/src/lib.rs b/server/src/lib.rs
index e457bf8..e8d065a 100644
--- a/server/src/lib.rs
+++ b/server/src/lib.rs
@@ -345,3 +345,39 @@ pub fn sanitize_html(
 
     Ok(clean_html)
 }
+
+fn compute_offset_limit(
+    after: Option<i32>,
+    before: Option<i32>,
+    first: Option<i32>,
+    last: Option<i32>,
+) -> (i32, i32) {
+    let default_page_size = 100;
+    match (after, before, first, last) {
+        // Reasonable defaults
+        (None, None, None, None) => (0, default_page_size),
+        (None, None, Some(first), None) => (0, first),
+        (Some(after), None, None, None) => (after, default_page_size),
+        (Some(after), None, Some(first), None) => (after, first),
+        (None, Some(before), None, None) => (0.max(before - default_page_size), default_page_size),
+        (None, Some(before), None, Some(last)) => (0.max(before - last), last),
+        (None, None, None, Some(_)) => {
+            panic!("specifying last and no before doesn't make sense")
+        }
+        (None, None, Some(_), Some(_)) => {
+            panic!("specifying first and last doesn't make sense")
+        }
+        (None, Some(_), Some(_), _) => {
+            panic!("specifying before and first doesn't make sense")
+        }
+        (Some(_), Some(_), _, _) => {
+            panic!("specifying after and before doesn't make sense")
+        }
+        (Some(_), None, None, Some(_)) => {
+            panic!("specifying after and last doesn't make sense")
+        }
+        (Some(_), None, Some(_), Some(_)) => {
+            panic!("specifying after, first and last doesn't make sense")
+        }
+    }
+}
diff --git a/server/src/newsreader.rs b/server/src/newsreader.rs
index f8457b3..8ec1837 100644
--- a/server/src/newsreader.rs
+++ b/server/src/newsreader.rs
@@ -4,14 +4,16 @@ use std::{
     str::FromStr,
 };
 
-use async_graphql::connection::{self, Connection, Edge};
+use log::info;
 use sqlx::postgres::PgPool;
 use url::Url;
 
 const TAG_PREFIX: &'static str = "News/";
"News/"; const THREAD_PREFIX: &'static str = "news:"; +const NON_EXISTENT_SITE_NAME: &'static str = "NO-SUCH-SITE"; use crate::{ + compute_offset_limit, error::ServerError, graphql::{Body, Email, Html, Message, Tag, Thread, ThreadSummary}, EscapeHtml, InlineStyle, SanitizeHtml, Transformer, @@ -25,10 +27,8 @@ pub fn is_newsreader_thread(query: &str) -> bool { query.starts_with(THREAD_PREFIX) } -pub async fn count(pool: &PgPool, query: &str) -> Result { - let query: Query = query.parse()?; - let site = query.site.expect("search has no site"); - let row = sqlx::query_file!("sql/count.sql", site, query.unread_only) +pub async fn count(pool: &PgPool, query: &Query) -> Result { + let row = sqlx::query_file!("sql/count.sql", query.site, query.unread_only) .fetch_one(pool) .await?; Ok(row.count.unwrap_or(0).try_into().unwrap_or(0)) @@ -36,102 +36,57 @@ pub async fn count(pool: &PgPool, query: &str) -> Result { pub async fn search( pool: &PgPool, - after: Option, - before: Option, + after: Option, + before: Option, first: Option, last: Option, - query: String, -) -> Result, async_graphql::Error> { - let query: Query = query.parse()?; - let site = query.site.expect("search has no site"); - connection::query( - after, - before, - first, - last, - |after: Option, before: Option, first, last| async move { - let default_page_size = 100; - let (offset, limit) = match (after, before, first, last) { - // Reasonable defaults - (None, None, None, None) => (0, default_page_size), - (None, None, Some(first), None) => (0, first), - (Some(after), None, None, None) => (after, default_page_size), - (Some(after), None, Some(first), None) => (after, first), - (None, Some(before), None, None) => { - (before.saturating_sub(default_page_size), default_page_size) - } - (None, Some(before), None, Some(last)) => (before.saturating_sub(last), last), - (None, None, None, Some(_)) => { - panic!("specifying last and no before doesn't make sense") - } - (None, None, Some(_), Some(_)) => { - panic!("specifying first and last doesn't make sense") - } - (None, Some(_), Some(_), _) => { - panic!("specifying before and first doesn't make sense") - } - (Some(_), Some(_), _, _) => { - panic!("specifying after and before doesn't make sense") - } - (Some(_), None, None, Some(_)) => { - panic!("specifying after and last doesn't make sense") - } - (Some(_), None, Some(_), Some(_)) => { - panic!("specifying after, first and last doesn't make sense") - } - }; - // The +1 is to see if there are more pages of data available. - let limit = limit + 1; + query: &Query, +) -> Result, async_graphql::Error> { + info!("search({after:?} {before:?} {first:?} {last:?} {query:?}"); + let (offset, limit) = compute_offset_limit(after, before, first, last); + // The +1 is to see if there are more pages of data available. 
+    let limit = limit + 1;
+    info!("search offset {offset} limit {limit}");
 
-            let rows = sqlx::query_file!(
-                "sql/threads.sql",
-                site,
-                query.unread_only,
-                offset as i64,
-                limit as i64
-            )
-            .fetch_all(pool)
-            .await?;
-
-            let mut slice = rows
-                .into_iter()
-                .map(|r| {
-                    let tags = if r.is_read.unwrap_or(false) {
-                        vec![site.clone()]
-                    } else {
-                        vec!["unread".to_string(), site.clone()]
-                    };
-                    ThreadSummary {
-                        thread: format!("{THREAD_PREFIX}{}", r.uid),
-                        timestamp: r
-                            .date
-                            .expect("post missing date")
-                            .assume_utc()
-                            .unix_timestamp() as isize,
-                        date_relative: "TODO date_relative".to_string(),
-                        matched: 0,
-                        total: 1,
-                        authors: r.name.unwrap_or_else(|| site.clone()),
-                        subject: r.title.unwrap_or("NO TITLE".to_string()),
-                        tags,
-                    }
-                })
-                .collect::<Vec<_>>();
-            let has_more = slice.len() == limit;
-            let mut connection = Connection::new(offset > 0, has_more);
-            if has_more {
-                slice.pop();
-            };
-            connection.edges.extend(
-                slice
-                    .into_iter()
-                    .enumerate()
-                    .map(|(idx, item)| Edge::new(offset + idx, item)),
-            );
-            Ok::<_, async_graphql::Error>(connection)
-        },
+    let rows = sqlx::query_file!(
+        "sql/threads.sql",
+        query.site,
+        query.unread_only,
+        offset as i64,
+        limit as i64
     )
-    .await
+    .fetch_all(pool)
+    .await?;
+
+    Ok(rows
+        .into_iter()
+        .enumerate()
+        .map(|(i, r)| {
+            let site = r.site.unwrap_or("UNKNOWN SITE".to_string());
+            let tags = if r.is_read.unwrap_or(false) {
+                vec![site.clone()]
+            } else {
+                vec!["unread".to_string(), site.clone()]
+            };
+            (
+                i as i32 + offset,
+                ThreadSummary {
+                    thread: format!("{THREAD_PREFIX}{}", r.uid),
+                    timestamp: r
+                        .date
+                        .expect("post missing date")
+                        .assume_utc()
+                        .unix_timestamp() as isize,
+                    date_relative: "TODO date_relative".to_string(),
+                    matched: 0,
+                    total: 1,
+                    authors: r.name.unwrap_or_else(|| site.clone()),
+                    subject: r.title.unwrap_or("NO TITLE".to_string()),
+                    tags,
+                },
+            )
+        })
+        .collect())
 }
 
 pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
@@ -258,11 +213,11 @@ pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerError> {
 
 #[derive(Debug)]
-struct Query {
-    unread_only: bool,
-    site: Option<String>,
-    uid: Option<String>,
-    remainder: Vec<String>,
+pub struct Query {
+    pub unread_only: bool,
+    pub site: Option<String>,
+    pub uid: Option<String>,
+    pub remainder: Vec<String>,
 }
 
@@ -278,6 +233,10 @@ impl FromStr for Query {
             unread_only = true
         } else if word.starts_with(&site_prefix) {
             site = Some(word[site_prefix.len()..].to_string())
+        } else if word.starts_with("tag:") {
+            // Any tag that doesn't match site_prefix should explicitly set the
+            // site to something not in the database.
+            site = Some(NON_EXISTENT_SITE_NAME.to_string());
         } else if word.starts_with(THREAD_PREFIX) {
             uid = Some(word[THREAD_PREFIX.len()..].to_string())
         } else {
diff --git a/server/src/nm.rs b/server/src/nm.rs
index ed3ab16..5b36390 100644
--- a/server/src/nm.rs
+++ b/server/src/nm.rs
@@ -5,13 +5,13 @@ use std::{
     time::Instant,
 };
 
-use async_graphql::connection::{self, Connection, Edge};
 use log::{error, info, warn};
 use mailparse::{parse_mail, MailHeader, MailHeaderMap, ParsedMail};
 use memmap::MmapOptions;
 use notmuch::Notmuch;
 
 use crate::{
+    compute_offset_limit,
     error::ServerError,
     graphql::{
         Attachment, Body, DispositionType, Email, Header, Html, Message, PlainText, Tag, Thread,
@@ -44,41 +44,22 @@ pub async fn count(nm: &Notmuch, query: &str) -> Result<usize, ServerError> {
 
 pub async fn search(
     nm: &Notmuch,
-    after: Option<String>,
-    before: Option<String>,
+    after: Option<i32>,
+    before: Option<i32>,
     first: Option<i32>,
     last: Option<i32>,
     query: String,
-) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> {
-    connection::query(
-        after,
-        before,
-        first,
-        last,
-        |after, before, first, last| async move {
-            let total = nm.count(&query)?;
-            let (first, last) = if let (None, None) = (first, last) {
-                info!("neither first nor last set, defaulting first to 20");
-                (Some(20), None)
-            } else {
-                (first, last)
-            };
-
-            let mut start = after.map(|after| after + 1).unwrap_or(0);
-            let mut end = before.unwrap_or(total);
-            if let Some(first) = first {
-                end = (start + first).min(end);
-            }
-            if let Some(last) = last {
-                start = if last > end - start { end } else { end - last };
-            }
-
-            let count = end - start;
-            let slice: Vec<ThreadSummary> = nm
-                .search(&query, start, count)?
-                .0
-                .into_iter()
-                .map(|ts| ThreadSummary {
+) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
+    let (offset, limit) = compute_offset_limit(after, before, first, last);
+    Ok(nm
+        .search(&query, offset as usize, limit as usize)?
+        .0
+        .into_iter()
+        .enumerate()
+        .map(|(i, ts)| {
+            (
+                offset + i as i32,
+                ThreadSummary {
                     thread: format!("thread:{}", ts.thread),
                     timestamp: ts.timestamp,
                     date_relative: ts.date_relative,
@@ -87,20 +68,10 @@ pub async fn search(
                     authors: ts.authors,
                     subject: ts.subject,
                     tags: ts.tags,
-                })
-                .collect();
-
-            let mut connection = Connection::new(start > 0, end < total);
-            connection.edges.extend(
-                slice
-                    .into_iter()
-                    .enumerate()
-                    .map(|(idx, item)| Edge::new(start + idx, item)),
-            );
-            Ok::<_, async_graphql::Error>(connection)
-        },
-    )
-    .await
+                },
+            )
+        })
+        .collect())
 }
 
 pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
diff --git a/web/index.html b/web/index.html
index dc532fa..e9e7f67 100644
--- a/web/index.html
+++ b/web/index.html
@@ -22,48 +22,6 @@
-
diff --git a/web/src/view/mod.rs b/web/src/view/mod.rs
index 3028c42..b719372 100644
--- a/web/src/view/mod.rs
+++ b/web/src/view/mod.rs
@@ -305,23 +305,11 @@ fn search_toolbar(
     show_bulk_edit: bool,
     show_icon_text: bool,
 ) -> Node<Msg> {
-    let start = pager
-        .start_cursor
-        .as_ref()
-        .map(|i| i.parse().unwrap_or(0))
-        .unwrap_or(0)
-        + 1;
-    let end = pager
-        .end_cursor
-        .as_ref()
-        .map(|i| i.parse().unwrap_or(count))
-        .unwrap_or(count)
-        + 1;
     nav![
         C!["level", "is-mobile"],
-        IF!(show_bulk_edit => div![
+        div![
             C!["level-left"],
-            div![
+            IF!(show_bulk_edit => div![
                 C!["level-item"],
                 div![C!["buttons", "has-addons"],
@@ -340,7 +328,8 @@ fn search_toolbar(
                     ev(Ev::Click, |_| Msg::SelectionMarkAsUnread)
                 ]
             ]
-            ],
-            div![
+            ]),
+            IF!(show_bulk_edit => div![
                 C!["level-item"],
                 div![C!["buttons", "has-addons"],
@@ -357,8 +346,8 @@ fn search_toolbar(
                     )
                 ],
             ],
-            ]
-        ]),
+            ])
+        ],
         div![
             C!["level-right"],
             nav![
@@ -383,10 +372,7 @@ fn search_toolbar(
                     ">",
                     IF!(pager.has_next_page => ev(Ev::Click, |_| Msg::NextPage))
                 ],
-                ul![
-                    C!["pagination-list"],
-                    li![format!("{} - {} of {}", start, end, count)],
-                ],
+                ul![C!["pagination-list"], li![format!("{count} results")]],
             ]
         ]
     ]
diff --git a/web/static/style.css b/web/static/style.css
index 0d1b978..dfad1d6 100644
--- a/web/static/style.css
+++ b/web/static/style.css
@@ -1,3 +1,44 @@
+:root {
+    --active-brightness: 0.85;
+    --border-radius: 5px;
+    --box-shadow: 2px 2px 10px;
+    --color-accent: #118bee15;
+    --color-bg: #fff;
+    --color-bg-secondary: #e9e9e9;
+    --color-link: #118bee;
+    --color-secondary: #920de9;
+    --color-secondary-accent: #920de90b;
+    --color-shadow: #f4f4f4;
+    --color-table: #118bee;
+    --color-text: #000;
+    --color-text-secondary: #999;
+    --color-scrollbar: #cacae8;
+    --font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
+    --hover-brightness: 1.2;
+    --justify-important: center;
+    --justify-normal: left;
+    --line-height: 1.5;
+    --width-card: 285px;
+    --width-card-medium: 460px;
+    --width-card-wide: 800px;
+    --width-content: 1080px;
+}
+
+@media (prefers-color-scheme: dark) {
+    :root[color-mode="user"] {
+        --color-accent: #0097fc4f;
+        --color-bg: #333;
+        --color-bg-secondary: #555;
+        --color-link: #0097fc;
+        --color-secondary: #e20de9;
+        --color-secondary-accent: #e20de94f;
+        --color-shadow: #bbbbbb20;
+        --color-table: #0097fc;
+        --color-text: #f7f7f7;
+        --color-text-secondary: #aaa;
+    }
+}
+
 .message {
     display: inline-block;
     padding: 0.5em;
@@ -168,6 +209,10 @@ input::placeholder,
     padding: 1em;
 }
 
+.search-results>nav {
+    margin: 1.25rem;
+}
+
 .tablet .thread h3,
 .mobile .thread h3 {
     overflow-wrap: break-word;