Merge news and email search results
@@ -4,14 +4,16 @@ use std::{
     str::FromStr,
 };
 
-use async_graphql::connection::{self, Connection, Edge};
+use log::info;
 use sqlx::postgres::PgPool;
 use url::Url;
 
 const TAG_PREFIX: &'static str = "News/";
 const THREAD_PREFIX: &'static str = "news:";
+const NON_EXISTENT_SITE_NAME: &'static str = "NO-SUCH-SITE";
 
 use crate::{
+    compute_offset_limit,
     error::ServerError,
     graphql::{Body, Email, Html, Message, Tag, Thread, ThreadSummary},
     EscapeHtml, InlineStyle, SanitizeHtml, Transformer,
@@ -25,10 +27,8 @@ pub fn is_newsreader_thread(query: &str) -> bool {
     query.starts_with(THREAD_PREFIX)
 }
 
-pub async fn count(pool: &PgPool, query: &str) -> Result<usize, ServerError> {
-    let query: Query = query.parse()?;
-    let site = query.site.expect("search has no site");
-    let row = sqlx::query_file!("sql/count.sql", site, query.unread_only)
+pub async fn count(pool: &PgPool, query: &Query) -> Result<usize, ServerError> {
+    let row = sqlx::query_file!("sql/count.sql", query.site, query.unread_only)
         .fetch_one(pool)
         .await?;
     Ok(row.count.unwrap_or(0).try_into().unwrap_or(0))
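Aside, not part of the diff: `count` no longer parses the query string itself. A hedged sketch of the new calling convention in this module (the wrapper name and `raw` parameter are assumptions for illustration):

```rust
// Hypothetical call site: parse the raw query string once, then hand the
// typed `Query` to `count` (and later to `search`).
async fn thread_count(
    pool: &sqlx::postgres::PgPool,
    raw: &str,
) -> Result<usize, ServerError> {
    let query: Query = raw.parse()?; // `?` converts the parse error, as the old code did
    count(pool, &query).await
}
```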
@@ -36,102 +36,57 @@ pub async fn count(pool: &PgPool, query: &str) -> Result<usize, ServerError> {
 
-pub async fn search(
-    pool: &PgPool,
-    after: Option<String>,
-    before: Option<String>,
-    first: Option<i32>,
-    last: Option<i32>,
-    query: String,
-) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> {
-    let query: Query = query.parse()?;
-    let site = query.site.expect("search has no site");
-    connection::query(
-        after,
-        before,
-        first,
-        last,
-        |after: Option<usize>, before: Option<usize>, first, last| async move {
-            let default_page_size = 100;
-            let (offset, limit) = match (after, before, first, last) {
-                // Reasonable defaults
-                (None, None, None, None) => (0, default_page_size),
-                (None, None, Some(first), None) => (0, first),
-                (Some(after), None, None, None) => (after, default_page_size),
-                (Some(after), None, Some(first), None) => (after, first),
-                (None, Some(before), None, None) => {
-                    (before.saturating_sub(default_page_size), default_page_size)
-                }
-                (None, Some(before), None, Some(last)) => (before.saturating_sub(last), last),
-                (None, None, None, Some(_)) => {
-                    panic!("specifying last and no before doesn't make sense")
-                }
-                (None, None, Some(_), Some(_)) => {
-                    panic!("specifying first and last doesn't make sense")
-                }
-                (None, Some(_), Some(_), _) => {
-                    panic!("specifying before and first doesn't make sense")
-                }
-                (Some(_), Some(_), _, _) => {
-                    panic!("specifying after and before doesn't make sense")
-                }
-                (Some(_), None, None, Some(_)) => {
-                    panic!("specifying after and last doesn't make sense")
-                }
-                (Some(_), None, Some(_), Some(_)) => {
-                    panic!("specifying after, first and last doesn't make sense")
-                }
-            };
-            // The +1 is to see if there are more pages of data available.
-            let limit = limit + 1;
-
-            let rows = sqlx::query_file!(
-                "sql/threads.sql",
-                site,
-                query.unread_only,
-                offset as i64,
-                limit as i64
-            )
-            .fetch_all(pool)
-            .await?;
-
-            let mut slice = rows
-                .into_iter()
-                .map(|r| {
-                    let tags = if r.is_read.unwrap_or(false) {
-                        vec![site.clone()]
-                    } else {
-                        vec!["unread".to_string(), site.clone()]
-                    };
-                    ThreadSummary {
-                        thread: format!("{THREAD_PREFIX}{}", r.uid),
-                        timestamp: r
-                            .date
-                            .expect("post missing date")
-                            .assume_utc()
-                            .unix_timestamp() as isize,
-                        date_relative: "TODO date_relative".to_string(),
-                        matched: 0,
-                        total: 1,
-                        authors: r.name.unwrap_or_else(|| site.clone()),
-                        subject: r.title.unwrap_or("NO TITLE".to_string()),
-                        tags,
-                    }
-                })
-                .collect::<Vec<_>>();
-            let has_more = slice.len() == limit;
-            let mut connection = Connection::new(offset > 0, has_more);
-            if has_more {
-                slice.pop();
-            };
-            connection.edges.extend(
-                slice
-                    .into_iter()
-                    .enumerate()
-                    .map(|(idx, item)| Edge::new(offset + idx, item)),
-            );
-            Ok::<_, async_graphql::Error>(connection)
-        },
-    )
-    .await
+pub async fn search(
+    pool: &PgPool,
+    after: Option<i32>,
+    before: Option<i32>,
+    first: Option<i32>,
+    last: Option<i32>,
+    query: &Query,
+) -> Result<Vec<(i32, ThreadSummary)>, async_graphql::Error> {
+    info!("search({after:?} {before:?} {first:?} {last:?} {query:?})");
+    let (offset, limit) = compute_offset_limit(after, before, first, last);
+    // The +1 is to see if there are more pages of data available.
+    let limit = limit + 1;
+    info!("search offset {offset} limit {limit}");
+
+    let rows = sqlx::query_file!(
+        "sql/threads.sql",
+        query.site,
+        query.unread_only,
+        offset as i64,
+        limit as i64
+    )
+    .fetch_all(pool)
+    .await?;
+
+    Ok(rows
+        .into_iter()
+        .enumerate()
+        .map(|(i, r)| {
+            let site = r.site.unwrap_or("UNKNOWN SITE".to_string());
+            let tags = if r.is_read.unwrap_or(false) {
+                vec![site.clone()]
+            } else {
+                vec!["unread".to_string(), site.clone()]
+            };
+            (
+                i as i32 + offset,
+                ThreadSummary {
+                    thread: format!("{THREAD_PREFIX}{}", r.uid),
+                    timestamp: r
+                        .date
+                        .expect("post missing date")
+                        .assume_utc()
+                        .unix_timestamp() as isize,
+                    date_relative: "TODO date_relative".to_string(),
+                    matched: 0,
+                    total: 1,
+                    authors: r.name.unwrap_or_else(|| site.clone()),
+                    subject: r.title.unwrap_or("NO TITLE".to_string()),
+                    tags,
+                },
+            )
+        })
+        .collect())
 }
 
 pub async fn tags(pool: &PgPool, _needs_unread: bool) -> Result<Vec<Tag>, ServerError> {
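Aside, not part of the diff: the pagination match that the removed closure performed inline now lives behind `compute_offset_limit`, imported from the crate root above. A minimal sketch of that helper, assuming it lifts the deleted match verbatim (the real implementation lives elsewhere in the crate and is not shown in this commit):

```rust
/// Hypothetical sketch of `compute_offset_limit`, assuming it simply hoists
/// the (offset, limit) match that this commit deletes from `search`.
pub fn compute_offset_limit(
    after: Option<i32>,
    before: Option<i32>,
    first: Option<i32>,
    last: Option<i32>,
) -> (i32, i32) {
    let default_page_size = 100;
    match (after, before, first, last) {
        // Reasonable defaults
        (None, None, None, None) => (0, default_page_size),
        (None, None, Some(first), None) => (0, first),
        (Some(after), None, None, None) => (after, default_page_size),
        (Some(after), None, Some(first), None) => (after, first),
        (None, Some(before), None, None) => {
            (before.saturating_sub(default_page_size), default_page_size)
        }
        (None, Some(before), None, Some(last)) => (before.saturating_sub(last), last),
        // The remaining combinations are contradictory; the removed code
        // panicked on them, so presumably the helper still does.
        (None, None, None, Some(_)) => panic!("specifying last and no before doesn't make sense"),
        (None, None, Some(_), Some(_)) => panic!("specifying first and last doesn't make sense"),
        (None, Some(_), Some(_), _) => panic!("specifying before and first doesn't make sense"),
        (Some(_), Some(_), _, _) => panic!("specifying after and before doesn't make sense"),
        (Some(_), None, None, Some(_)) => panic!("specifying after and last doesn't make sense"),
        (Some(_), None, Some(_), Some(_)) => {
            panic!("specifying after, first and last doesn't make sense")
        }
    }
}
```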
@@ -258,11 +213,11 @@ pub async fn thread(pool: &PgPool, thread_id: String) -> Result<Thread, ServerError> {
 }
 
 #[derive(Debug)]
-struct Query {
-    unread_only: bool,
-    site: Option<String>,
-    uid: Option<String>,
-    remainder: Vec<String>,
+pub struct Query {
+    pub unread_only: bool,
+    pub site: Option<String>,
+    pub uid: Option<String>,
+    pub remainder: Vec<String>,
 }
 
 impl FromStr for Query {
@@ -278,6 +233,10 @@ impl FromStr for Query {
             unread_only = true
         } else if word.starts_with(&site_prefix) {
             site = Some(word[site_prefix.len()..].to_string())
+        } else if word.starts_with("tag:") {
+            // Any tag that doesn't match site_prefix should explicitly set the site to something not in the
+            // database
+            site = Some(NON_EXISTENT_SITE_NAME.to_string());
         } else if word.starts_with(THREAD_PREFIX) {
             uid = Some(word[THREAD_PREFIX.len()..].to_string())
         } else {
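Aside, not part of the diff: assuming `site_prefix` is `"tag:News/"` (i.e. `"tag:"` followed by `TAG_PREFIX`), the new `tag:` arm makes any non-news tag deliberately match nothing on the news side, so only email results surface for it:

```rust
// Illustrative only; assumes site_prefix == "tag:News/".
fn demo_query_parsing() {
    // A News tag selects that site for the news half of the merged search.
    if let Ok(q) = "tag:News/lwn".parse::<Query>() {
        assert_eq!(q.site.as_deref(), Some("lwn"));
    }
    // Any other tag pins the site to NON_EXISTENT_SITE_NAME, so the news
    // query matches no rows.
    if let Ok(q) = "tag:inbox".parse::<Query>() {
        assert_eq!(q.site.as_deref(), Some(NON_EXISTENT_SITE_NAME));
    }
}
```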