WIP add search
This commit is contained in:
parent 0bf865fdef
commit dd09bc3168
Cargo.lock (generated): 4 lines changed
@@ -3378,6 +3378,7 @@ dependencies = [
  "smallvec",
  "sqlformat",
  "thiserror",
+ "time",
  "tokio",
  "tokio-stream",
  "tracing",

@@ -3461,6 +3462,7 @@ dependencies = [
  "sqlx-core",
  "stringprep",
  "thiserror",
+ "time",
  "tracing",
  "whoami",
 ]

@@ -3499,6 +3501,7 @@ dependencies = [
  "sqlx-core",
  "stringprep",
  "thiserror",
+ "time",
  "tracing",
  "whoami",
 ]

@@ -3521,6 +3524,7 @@ dependencies = [
  "percent-encoding",
  "serde",
  "sqlx-core",
+ "time",
  "tracing",
  "url",
  "urlencoding",
@@ -28,5 +28,5 @@ css-inline = "0.13.0"
 anyhow = "1.0.79"
 maplit = "1.0.2"
 linkify = "0.10.0"
-sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio"] }
+sqlx = { version = "0.7.4", features = ["postgres", "runtime-tokio", "time"] }
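The new "time" feature is what the Cargo.lock churn above is about: it lets sqlx decode Postgres TIMESTAMP columns into the time crate's types, which the newsreader code further down relies on (`r.date ... .assume_utc().unix_timestamp()`). A minimal sketch of that decoding, assuming a `post` table with a TIMESTAMP `date` column (the table schema itself is not part of this commit):

    use sqlx::postgres::PgPool;
    use sqlx::types::time::PrimitiveDateTime;

    // Hypothetical helper, for illustration only: with the "time" feature
    // enabled, sqlx can decode a Postgres TIMESTAMP into PrimitiveDateTime.
    async fn newest_post_epoch(pool: &PgPool) -> Result<i64, sqlx::Error> {
        let date: PrimitiveDateTime =
            sqlx::query_scalar("SELECT date FROM post ORDER BY date DESC LIMIT 1")
                .fetch_one(pool)
                .await?;
        // PrimitiveDateTime carries no zone, so the caller picks one;
        // the diff below does the same with assume_utc().
        Ok(date.assume_utc().unix_timestamp())
    }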
server/sql/threads.sql (new file): 10 lines added
@@ -0,0 +1,10 @@
+SELECT
+    *
+FROM
+    post
+WHERE
+    site = $1
+ORDER BY
+    date DESC
+LIMIT
+    10
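Note the page size is pinned at 10 here even though the GraphQL layer negotiates first/last, and the Rust side below leaves `start` at 0 (see its TODO). A hypothetical parameterized variant, not part of this commit, would push the window into SQL instead:

    use sqlx::postgres::{PgPool, PgRow};

    // Hypothetical sketch: the same query as sql/threads.sql but with the
    // pagination window bound at runtime rather than a hardcoded LIMIT 10.
    async fn thread_page(
        pool: &PgPool,
        site: &str,
        start: i64,
        count: i64,
    ) -> Result<Vec<PgRow>, sqlx::Error> {
        sqlx::query("SELECT * FROM post WHERE site = $1 ORDER BY date DESC LIMIT $2 OFFSET $3")
            .bind(site)
            .bind(count)
            .bind(start)
            .fetch_all(pool)
            .await
    }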
@@ -201,58 +201,16 @@ impl QueryRoot {
         last: Option<i32>,
         query: String,
     ) -> Result<Connection<usize, ThreadSummary>, Error> {
         info!("search({after:?} {before:?} {first:?} {last:?} {query:?})");
         let nm = ctx.data_unchecked::<Notmuch>();
-        connection::query(
-            after,
-            before,
-            first,
-            last,
-            |after, before, first, last| async move {
-                let total = nm.count(&query)?;
-                let (first, last) = if let (None, None) = (first, last) {
-                    info!("neither first nor last set, defaulting first to 20");
-                    (Some(20), None)
-                } else {
-                    (first, last)
-                };
-
-                let mut start = after.map(|after| after + 1).unwrap_or(0);
-                let mut end = before.unwrap_or(total);
-                if let Some(first) = first {
-                    end = (start + first).min(end);
-                }
-                if let Some(last) = last {
-                    start = if last > end - start { end } else { end - last };
-                }
-
-                let count = end - start;
-                let slice: Vec<ThreadSummary> = nm
-                    .search(&query, start, count)?
-                    .0
-                    .into_iter()
-                    .map(|ts| ThreadSummary {
-                        thread: ts.thread,
-                        timestamp: ts.timestamp,
-                        date_relative: ts.date_relative,
-                        matched: ts.matched,
-                        total: ts.total,
-                        authors: ts.authors,
-                        subject: ts.subject,
-                        tags: ts.tags,
-                    })
-                    .collect();
-
-                let mut connection = Connection::new(start > 0, end < total);
-                connection.edges.extend(
-                    slice
-                        .into_iter()
-                        .enumerate()
-                        .map(|(idx, item)| Edge::new(start + idx, item)),
-                );
-                Ok::<_, Error>(connection)
-            },
-        )
-        .await
+        let pool = ctx.data_unchecked::<PgPool>();
+        // TODO: make this search both corpora and merge results
+        if newsreader::is_newsreader_search(&query) {
+            Ok(newsreader::search(pool, after, before, first, last, query).await?)
+        } else {
+            Ok(nm::search(nm, after, before, first, last, query).await?)
+        }
     }

     async fn tags<'ctx>(&self, ctx: &Context<'ctx>) -> FieldResult<Vec<Tag>> {
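The dispatch above hinges entirely on `is_newsreader_search` (defined in the next hunk), which is a plain substring test for the tag prefix. A standalone illustration, with made-up query strings:

    // Mirrors newsreader::is_newsreader_search from the hunk below;
    // TAG_PREFIX is "News/" after this commit.
    const TAG_PREFIX: &str = "News/";

    fn is_newsreader_search(query: &str) -> bool {
        query.contains(TAG_PREFIX)
    }

    fn main() {
        assert!(is_newsreader_search("tag:News/lobsters is:unread"));
        assert!(!is_newsreader_search("from:alice subject:meeting"));
        // Substring match, not tag-aware: a mail query that merely mentions
        // "News/" in a subject would also be routed to the newsreader backend.
        assert!(is_newsreader_search("subject:News/weekly"));
    }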
@@ -1,15 +1,92 @@
 use std::hash::{DefaultHasher, Hash, Hasher};

+use async_graphql::connection::{self, Connection, Edge};
 use log::info;
 use sqlx::postgres::PgPool;

-const TAG_PREFIX: &'static str = "News";
+const TAG_PREFIX: &'static str = "News/";

-use crate::{error, graphql::Tag};
+use crate::{
+    error,
+    graphql::{Tag, ThreadSummary},
+};
+
+pub fn is_newsreader_search(query: &str) -> bool {
+    query.contains(TAG_PREFIX)
+}
+
+pub async fn search(
+    pool: &PgPool,
+    after: Option<String>,
+    before: Option<String>,
+    first: Option<i32>,
+    last: Option<i32>,
+    query: String,
+) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> {
+    let mut unread_only = false;
+    let mut site = None;
+    let site_prefix = format!("tag:{TAG_PREFIX}");
+    for word in query.split_whitespace() {
+        if word == "is:unread" {
+            unread_only = true
+        };
+        if word.starts_with(&site_prefix) {
+            site = Some(word[site_prefix.len()..].to_string())
+        }
+    }
+    let site = site.expect("search has no site");
+    info!("news search unread_only {unread_only} site {site:?}");
+    connection::query(
+        after,
+        before,
+        first,
+        last,
+        |after, before, first, last| async move {
+            // TODO: handle `unread_only`
+            let rows = sqlx::query_file!("sql/threads.sql", site)
+                .fetch_all(pool)
+                .await?;
+
+            let slice = rows.into_iter().map(|r| {
+                let tags = if r.is_read.unwrap_or(false) {
+                    vec![site.clone()]
+                } else {
+                    vec!["unread".to_string(), site.clone()]
+                };
+                ThreadSummary {
+                    thread: format!("news:{}", r.uid),
+                    timestamp: r
+                        .date
+                        .expect("post missing date")
+                        .assume_utc()
+                        .unix_timestamp() as isize,
+                    date_relative: "TODO date_relative".to_string(),
+                    matched: 0,
+                    total: 1,
+                    // TODO: join with feed table and get pretty name
+                    authors: site.clone(),
+                    subject: r.title.unwrap_or("NO TITLE".to_string()),
+                    tags,
+                }
+            });
+            let mut connection = Connection::new(false, false);
+            // TODO
+            let start = 0;
+            connection.edges.extend(
+                slice
+                    .into_iter()
+                    .enumerate()
+                    .map(|(idx, item)| Edge::new(start + idx, item)),
+            );
+            Ok::<_, async_graphql::Error>(connection)
+        },
+    )
+    .await
+}
+
 pub async fn tags(pool: &PgPool, needs_unread: bool) -> Result<Vec<Tag>, error::ServerError> {
     // TODO: write separate query for needs_unread.
     let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?;
     info!("sqlx tags {tags:#?}");
     let tags = tags
         .into_iter()
         .map(|tag| {

@@ -17,7 +94,7 @@ pub async fn tags(pool: &PgPool, needs_unread: bool) -> Result<Vec<Tag>, error::
             tag.site.hash(&mut hasher);
             let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
             let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
-            let name = format!("{TAG_PREFIX}/{}", tag.site.expect("tag must have site"));
+            let name = format!("{TAG_PREFIX}{}", tag.site.expect("tag must have site"));
             Tag {
                 name,
                 fg_color: "white".to_string(),
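The tag color in `tags()` above is derived deterministically from the site name: hash it with `DefaultHasher`, keep the low 24 bits, and print them as a hex RGB triple. The same trick in isolation:

    use std::hash::{DefaultHasher, Hash, Hasher};

    // Stable pseudo-random color per site, as in newsreader::tags() above.
    // Note std does not guarantee DefaultHasher's output across Rust
    // versions, so colors may shift after a toolchain upgrade.
    fn site_color(site: &str) -> String {
        let mut hasher = DefaultHasher::new();
        site.hash(&mut hasher);
        // finish() yields a u64; % (1 << 24) keeps 24 bits for #rrggbb.
        format!("#{:06x}", hasher.finish() % (1 << 24))
    }

    fn main() {
        println!("{}", site_color("lobsters")); // e.g. "#a1b2c3" (output illustrative)
    }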
@@ -4,11 +4,15 @@ use std::{
     time::Instant,
 };

+use async_graphql::connection::{self, Connection, Edge};
 use log::info;
 use notmuch::Notmuch;
 use shared::Message;

-use crate::{error, graphql::Tag};
+use crate::{
+    error,
+    graphql::{Tag, ThreadSummary},
+};

 // TODO(wathiede): decide good error type
 pub fn threadset_to_messages(

@@ -20,6 +24,67 @@ pub fn threadset_to_messages(
     Ok(Vec::new())
 }

+pub async fn search(
+    nm: &Notmuch,
+    after: Option<String>,
+    before: Option<String>,
+    first: Option<i32>,
+    last: Option<i32>,
+    query: String,
+) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> {
+    connection::query(
+        after,
+        before,
+        first,
+        last,
+        |after, before, first, last| async move {
+            let total = nm.count(&query)?;
+            let (first, last) = if let (None, None) = (first, last) {
+                info!("neither first nor last set, defaulting first to 20");
+                (Some(20), None)
+            } else {
+                (first, last)
+            };
+
+            let mut start = after.map(|after| after + 1).unwrap_or(0);
+            let mut end = before.unwrap_or(total);
+            if let Some(first) = first {
+                end = (start + first).min(end);
+            }
+            if let Some(last) = last {
+                start = if last > end - start { end } else { end - last };
+            }
+
+            let count = end - start;
+            let slice: Vec<ThreadSummary> = nm
+                .search(&query, start, count)?
+                .0
+                .into_iter()
+                .map(|ts| ThreadSummary {
+                    thread: ts.thread,
+                    timestamp: ts.timestamp,
+                    date_relative: ts.date_relative,
+                    matched: ts.matched,
+                    total: ts.total,
+                    authors: ts.authors,
+                    subject: ts.subject,
+                    tags: ts.tags,
+                })
+                .collect();
+
+            let mut connection = Connection::new(start > 0, end < total);
+            connection.edges.extend(
+                slice
+                    .into_iter()
+                    .enumerate()
+                    .map(|(idx, item)| Edge::new(start + idx, item)),
+            );
+            Ok::<_, async_graphql::Error>(connection)
+        },
+    )
+    .await
+}
+
 pub fn tags(nm: &Notmuch, needs_unread: bool) -> Result<Vec<Tag>, error::ServerError> {
     let now = Instant::now();
     let unread_msg_cnt: HashMap<String, usize> = if needs_unread {
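The window arithmetic in `nm::search` deserves a worked example: with total = 100, after = Some(9), and first = Some(20), the cursor after index 9 gives start = 10, then end = min(10 + 20, 100) = 30, so the page covers indices 10..30 and both page-info flags (start > 0, end < total) are true. A standalone rendition of the same math, with the same first = 20 defaulting rule:

    // Mirrors the start/end computation in nm::search above.
    fn window(
        total: usize,
        after: Option<usize>,
        before: Option<usize>,
        first: Option<usize>,
        last: Option<usize>,
    ) -> (usize, usize) {
        let (first, last) = if first.is_none() && last.is_none() {
            (Some(20), None) // neither set: default first to 20
        } else {
            (first, last)
        };
        let mut start = after.map(|a| a + 1).unwrap_or(0);
        let mut end = before.unwrap_or(total);
        if let Some(first) = first {
            end = (start + first).min(end);
        }
        if let Some(last) = last {
            start = if last > end - start { end } else { end - last };
        }
        (start, end)
    }

    fn main() {
        assert_eq!(window(100, Some(9), None, Some(20), None), (10, 30));
        // `last: 5` keeps only the tail of the window.
        assert_eq!(window(100, None, None, None, Some(5)), (95, 100));
    }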