WIP add search
@@ -1,15 +1,92 @@
 use std::hash::{DefaultHasher, Hash, Hasher};

+use async_graphql::connection::{self, Connection, Edge};
 use log::info;
 use sqlx::postgres::PgPool;

-const TAG_PREFIX: &'static str = "News";
+const TAG_PREFIX: &'static str = "News/";

+use crate::{
+    error,
+    graphql::{Tag, ThreadSummary},
+};
+
+pub fn is_newsreader_search(query: &str) -> bool {
+    query.contains(TAG_PREFIX)
+}
+
+pub async fn search(
+    pool: &PgPool,
+    after: Option<String>,
+    before: Option<String>,
+    first: Option<i32>,
+    last: Option<i32>,
+    query: String,
+) -> Result<Connection<usize, ThreadSummary>, async_graphql::Error> {
+    let mut unread_only = false;
+    let mut site = None;
+    let site_prefix = format!("tag:{TAG_PREFIX}");
+    for word in query.split_whitespace() {
+        if word == "is:unread" {
+            unread_only = true
+        };
+        if word.starts_with(&site_prefix) {
+            site = Some(word[site_prefix.len()..].to_string())
+        }
+    }
+    let site = site.expect("search has no site");
+    info!("news search unread_only {unread_only} site {site:?}");
+    connection::query(
+        after,
+        before,
+        first,
+        last,
+        |after, before, first, last| async move {
+            // TODO: handle `unread_only`
+            let rows = sqlx::query_file!("sql/threads.sql", site)
+                .fetch_all(pool)
+                .await?;
+
+            let slice = rows.into_iter().map(|r| {
+                let tags = if r.is_read.unwrap_or(false) {
+                    vec![site.clone()]
+                } else {
+                    vec!["unread".to_string(), site.clone()]
+                };
+                ThreadSummary {
+                    thread: format!("news:{}", r.uid),
+                    timestamp: r
+                        .date
+                        .expect("post missing date")
+                        .assume_utc()
+                        .unix_timestamp() as isize,
+                    date_relative: "TODO date_relative".to_string(),
+                    matched: 0,
+                    total: 1,
+                    // TODO: join with feed table and get pretty name
+                    authors: site.clone(),
+                    subject: r.title.unwrap_or("NO TITLE".to_string()),
+                    tags,
+                }
+            });
+            let mut connection = Connection::new(false, false);
+            // TODO
+            let start = 0;
+            connection.edges.extend(
+                slice
+                    .into_iter()
+                    .enumerate()
+                    .map(|(idx, item)| Edge::new(start + idx, item)),
+            );
+            Ok::<_, async_graphql::Error>(connection)
+        },
+    )
+    .await
+}
+
-use crate::{error, graphql::Tag};
 pub async fn tags(pool: &PgPool, needs_unread: bool) -> Result<Vec<Tag>, error::ServerError> {
     // TODO: write separate query for needs_unread.
     let tags = sqlx::query_file!("sql/tags.sql").fetch_all(pool).await?;
     info!("sqlx tags {tags:#?}");
     let tags = tags
         .into_iter()
         .map(|tag| {
@@ -17,7 +94,7 @@ pub async fn tags(pool: &PgPool, needs_unread: bool) -> Result<Vec<Tag>, error::
             tag.site.hash(&mut hasher);
             let hex = format!("#{:06x}", hasher.finish() % (1 << 24));
             let unread = tag.unread.unwrap_or(0).try_into().unwrap_or(0);
-            let name = format!("{TAG_PREFIX}/{}", tag.site.expect("tag must have site"));
+            let name = format!("{TAG_PREFIX}{}", tag.site.expect("tag must have site"));
             Tag {
                 name,
                 fg_color: "white".to_string(),
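
A note on the query syntax the new `search` accepts: it splits the query on whitespace and only looks for an `is:unread` token and a single `tag:News/<site>` token; anything else is ignored, and a query without a site panics through `expect` (while `is_newsreader_search` only checks that the `News/` prefix appears somewhere). A minimal standalone sketch of that scan, assuming the `TAG_PREFIX` of "News/" from this commit and a made-up site name; it uses `strip_prefix` instead of slicing but is otherwise the same logic:

// Standalone sketch of the token scan in `search` above.
// Assumptions: TAG_PREFIX = "News/" as in this commit; "lobsters" is a
// hypothetical feed/site name used only for illustration.
const TAG_PREFIX: &str = "News/";

fn parse_query(query: &str) -> (Option<String>, bool) {
    let site_prefix = format!("tag:{TAG_PREFIX}");
    let mut site = None;
    let mut unread_only = false;
    for word in query.split_whitespace() {
        if word == "is:unread" {
            unread_only = true;
        }
        if let Some(rest) = word.strip_prefix(&site_prefix) {
            site = Some(rest.to_string());
        }
    }
    (site, unread_only)
}

fn main() {
    let (site, unread_only) = parse_query("tag:News/lobsters is:unread");
    assert_eq!(site.as_deref(), Some("lobsters"));
    assert!(unread_only);
}

The extracted site is what `search` binds into `sql/threads.sql`; the unread flag is only logged for now, since handling `unread_only` is still a TODO in the closure.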
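The per-feed colour in `tags` comes from hashing the site name with `DefaultHasher` and keeping the low 24 bits as a `#rrggbb` string, so each feed gets a stable colour without storing one. A self-contained sketch of that step (the site names here are made up; `DefaultHasher`'s output is deterministic for a given toolchain but not guaranteed across Rust versions):

use std::hash::{DefaultHasher, Hash, Hasher};

// Derive a stable "#rrggbb" colour from a site name, mirroring the
// `hasher.finish() % (1 << 24)` line in `tags` above.
fn site_color(site: &str) -> String {
    let mut hasher = DefaultHasher::new();
    site.hash(&mut hasher);
    format!("#{:06x}", hasher.finish() % (1 << 24))
}

fn main() {
    // Hypothetical site names; the exact colours depend on the hasher output.
    for site in ["lobsters", "hn"] {
        println!("{site} -> {}", site_color(site));
    }
}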
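The `connection::query` closure above still ignores its cursor arguments (`let start = 0;` plus a TODO), so every request returns the whole result set as a single page. One common way to finish that is to treat the `usize` cursors as row offsets and clamp a `[start, end)` window before building edges. The sketch below shows only that window arithmetic; it is an assumption about how the pagination could be wired up, not the author's stated plan:

// Sketch only: compute a [start, end) window over `total` rows from
// relay-style pagination arguments, treating cursors as row offsets.
fn page_window(
    total: usize,
    after: Option<usize>,
    before: Option<usize>,
    first: Option<usize>,
    last: Option<usize>,
) -> (usize, usize) {
    let mut start = after.map(|a| a + 1).unwrap_or(0);
    let mut end = before.unwrap_or(total).min(total);
    if let Some(first) = first {
        end = end.min(start + first);
    }
    if let Some(last) = last {
        start = start.max(end.saturating_sub(last));
    }
    (start.min(end), end)
}

fn main() {
    // 10 rows, client asked for the first 3 after cursor 4 -> rows 5..8.
    assert_eq!(page_window(10, Some(4), None, Some(3), None), (5, 8));
    // Last 2 of everything -> rows 8..10.
    assert_eq!(page_window(10, None, None, None, Some(2)), (8, 10));
}

With a window like this, the two booleans passed to `Connection::new` would fall out as `start > 0` (has previous page) and `end < total` (has next page) instead of the hard-coded `false, false`.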