WIP
parent 1c95f4391f
commit b4a0aadef9
73 changed files with 2993 additions and 1632 deletions
@@ -12,6 +12,7 @@ r2d2 = "0.8.10"
r2d2_sqlite = "0.30.0"
rusqlite = { version = "0.36.0", features = ["bundled", "time"] }
serde = { workspace = true }
snafu = { workspace = true }
tempfile = "3.20.0"
thiserror = { workspace = true }
time = { workspace = true }
@@ -1,17 +1,19 @@
//! Bundle all functions together.

use std::path::{Path, PathBuf};
use std::{
    io,
    path::{Path, PathBuf},
};

use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use tempfile::NamedTempFile;
use tempfile::{NamedTempFile, PersistError};

use crate::{
    data::{
        author::Author, book::Book, error::DataStoreError, pagination::SortOrder, series::Series,
    },
    data::{self, author::Author, book::Book, pagination::SortOrder, series::Series},
    search::search,
};
use snafu::{ResultExt, Snafu};

/// Top level calibre functions, bundling all sub functions in one place and providing secure access to
/// the database.
@@ -21,16 +23,195 @@ pub struct Calibre {
    search_db_path: PathBuf,
}

#[derive(Debug, Snafu)]
pub enum LoadError {
    #[snafu(display("Failed to create database connection pool."))]
    DbPool { source: r2d2::Error },
    #[snafu(display("Failed to create temporary database view."))]
    TmpDb { source: io::Error },
    #[snafu(display("Failed to keep temporary database from deletion."))]
    PersistTmpDb { source: PersistError },
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to search."))]
pub struct SearchError {
    source: crate::search::SearchError,
}

#[derive(Debug, Snafu)]
pub enum BooksError {
    #[snafu(display("Failed to get database connection from pool."))]
    BooksDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch multiple books."))]
    FetchBooks {
        source: data::book::MultipleBooksError,
    },
}

#[derive(Debug, Snafu)]
pub enum AuthorsError {
    #[snafu(display("Failed to get database connection from pool."))]
    AuthorsDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch multiple authors."))]
    FetchAuthors {
        source: data::author::MultipleAuthorsError,
    },
}

#[derive(Debug, Snafu)]
pub enum AuthorBooksError {
    #[snafu(display("Failed to get database connection from pool."))]
    AuthorBooksDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch an author's books."))]
    FetchAuthorBooks {
        source: data::book::AuthorBooksError,
    },
}

#[derive(Debug, Snafu)]
pub enum RecentBooksError {
    #[snafu(display("Failed to get database connection from pool."))]
    RecentBooksDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch recent books."))]
    FetchRecentBooks {
        source: data::book::RecentBooksError,
    },
}

#[derive(Debug, Snafu)]
pub enum ScalarBookError {
    #[snafu(display("Failed to get database connection from pool."))]
    ScalarBookDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch book."))]
    FetchScalarBook { source: data::book::ScalarBookError },
}

#[derive(Debug, Snafu)]
pub enum ScalarAuthorError {
    #[snafu(display("Failed to get database connection from pool."))]
    ScalarAuthorDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch author."))]
    FetchScalarAuthor {
        source: data::author::ScalarAuthorError,
    },
}

#[derive(Debug, Snafu)]
pub enum ScalarSeriesError {
    #[snafu(display("Failed to get database connection from pool."))]
    ScalarSeriesDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch series."))]
    FetchScalarSeries {
        source: data::series::ScalarSeriesError,
    },
}

#[derive(Debug, Snafu)]
pub enum BookAuthorError {
    #[snafu(display("Failed to get database connection from pool."))]
    BookAuthorDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch book's author."))]
    FetchBookAuthor {
        source: data::author::BookAuthorError,
    },
}

#[derive(Debug, Snafu)]
pub enum MultipleSeriesError {
    #[snafu(display("Failed to get database connection from pool."))]
    MultipleSeriesDbPool { source: r2d2::Error },
    #[snafu(display("Failed to fetch multiple series."))]
    FetchMultipleSeries {
        source: data::series::MultipleSeriesError,
    },
}

#[derive(Debug, Snafu)]
pub enum BookSeriesError {
    #[snafu(display("Failed to get database connection from pool."))]
    BookSeriesDbPool { source: r2d2::Error },
    #[snafu(display("Failed to get the series a book belongs to."))]
    FetchBookSeries {
        source: data::series::SeriesBooksError,
    },
}

#[derive(Debug, Snafu)]
pub enum SeriesBooksError {
    #[snafu(display("Failed to get database connection from pool."))]
    SeriesBooksDbPool { source: r2d2::Error },
    #[snafu(display("Failed to get a series' books."))]
    FetchSeriesBooks { source: data::book::SeriesBookError },
}

#[derive(Debug, Snafu)]
pub enum HasPreviousAuthorsError {
    #[snafu(display("Failed to get database connection from pool."))]
    HasPreviousAuthorsDbPool { source: r2d2::Error },
    #[snafu(display("Failed to check if there are previous authors."))]
    FetchHasPreviousAuthors {
        source: data::author::PreviousAuthorsError,
    },
}

#[derive(Debug, Snafu)]
pub enum HasMoreAuthorsError {
    #[snafu(display("Failed to get database connection from pool."))]
    HasMoreAuthorsDbPool { source: r2d2::Error },
    #[snafu(display("Failed to check if there are more authors."))]
    FetchHasMoreAuthors {
        source: data::author::MoreAuthorsError,
    },
}

#[derive(Debug, Snafu)]
pub enum HasPreviousBooksError {
    #[snafu(display("Failed to get database connection from pool."))]
    HasPreviousBooksDbPool { source: r2d2::Error },
    #[snafu(display("Failed to check if there are previous books."))]
    FetchHasPreviousBooks {
        source: data::book::PreviousBooksError,
    },
}

#[derive(Debug, Snafu)]
pub enum HasMoreBooksError {
    #[snafu(display("Failed to get database connection from pool."))]
    HasMoreBooksDbPool { source: r2d2::Error },
    #[snafu(display("Failed to check if there are more books."))]
    FetchHasMoreBooks { source: data::book::MoreBooksError },
}

#[derive(Debug, Snafu)]
pub enum HasPreviousSeriesError {
    #[snafu(display("Failed to get database connection from pool."))]
    HasPreviousSeriesDbPool { source: r2d2::Error },
    #[snafu(display("Failed to check if there are previous series."))]
    FetchHasPreviousSeries {
        source: data::series::PreviousSeriesError,
    },
}

#[derive(Debug, Snafu)]
pub enum HasMoreSeriesError {
    #[snafu(display("Failed to get database connection from pool."))]
    HasMoreSeriesDbPool { source: r2d2::Error },
    #[snafu(display("Failed to check if there are more series."))]
    FetchHasMoreSeries {
        source: data::series::MoreSeriesError,
    },
}
impl Calibre {
    /// Open a connection to the calibre database.
    ///
    /// Fails if the database file cannot be opened or found.
    pub fn load(path: &Path) -> Result<Self, DataStoreError> {
    pub fn load(path: &Path) -> Result<Self, LoadError> {
        let manager = SqliteConnectionManager::file(path);
        let pool = r2d2::Pool::new(manager)?;
        let pool = r2d2::Pool::new(manager).context(DbPoolSnafu)?;

        let tmpfile = NamedTempFile::new()?;
        let (_, search_db_path) = tmpfile.keep()?;
        let tmpfile = NamedTempFile::new().context(TmpDbSnafu)?;
        let (_, search_db_path) = tmpfile.keep().context(PersistTmpDbSnafu)?;

        Ok(Self {
            pool,
@@ -41,8 +222,8 @@ impl Calibre {
    /// Full text search with a query.
    ///
    /// See https://www.sqlite.org/fts5.html#full_text_query_syntax for syntax.
    pub fn search(&self, query: &str) -> Result<Vec<Book>, DataStoreError> {
        search(query, &self.pool, &self.search_db_path)
    pub fn search(&self, query: &str) -> Result<Vec<Book>, SearchError> {
        search(query, &self.pool, &self.search_db_path).context(SearchSnafu)
    }

    /// Fetch book data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -52,9 +233,9 @@ impl Calibre {
        limit: u64,
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Book>, DataStoreError> {
        let conn = self.pool.get()?;
        Book::multiple(&conn, limit, cursor, sort_order)
    ) -> Result<Vec<Book>, BooksError> {
        let conn = self.pool.get().context(BooksDbPoolSnafu)?;
        Book::multiple(&conn, limit, cursor, sort_order).context(FetchBooksSnafu)
    }

    /// Fetch author data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -64,9 +245,9 @@ impl Calibre {
        limit: u64,
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Author>, DataStoreError> {
        let conn = self.pool.get()?;
        Author::multiple(&conn, limit, cursor, sort_order)
    ) -> Result<Vec<Author>, AuthorsError> {
        let conn = self.pool.get().context(AuthorsDbPoolSnafu)?;
        Author::multiple(&conn, limit, cursor, sort_order).context(FetchAuthorsSnafu)
    }

    /// Fetch books for an author specified by `author_id`, paginate the books by starting at `cursor`,
@@ -77,27 +258,28 @@ impl Calibre {
        limit: u64,
        cursor: Option<&str>,
        sort_order: SortOrder,
    ) -> Result<Vec<Book>, DataStoreError> {
        let conn = self.pool.get()?;
    ) -> Result<Vec<Book>, AuthorBooksError> {
        let conn = self.pool.get().context(AuthorBooksDbPoolSnafu)?;
        Book::author_books(&conn, author_id, limit, cursor, sort_order)
            .context(FetchAuthorBooksSnafu)
    }

    /// Get recent books up to a limit of `limit`.
    pub fn recent_books(&self, limit: u64) -> Result<Vec<Book>, DataStoreError> {
        let conn = self.pool.get()?;
        Book::recents(&conn, limit)
    pub fn recent_books(&self, limit: u64) -> Result<Vec<Book>, RecentBooksError> {
        let conn = self.pool.get().context(RecentBooksDbPoolSnafu)?;
        Book::recents(&conn, limit).context(FetchRecentBooksSnafu)
    }

    /// Get a single book, specified `id`.
    pub fn scalar_book(&self, id: u64) -> Result<Book, DataStoreError> {
        let conn = self.pool.get()?;
        Book::scalar_book(&conn, id)
    pub fn scalar_book(&self, id: u64) -> Result<Book, ScalarBookError> {
        let conn = self.pool.get().context(ScalarBookDbPoolSnafu)?;
        Book::scalar_book(&conn, id).context(FetchScalarBookSnafu)
    }

    /// Get the author to a book with id `id`.
    pub fn book_author(&self, id: u64) -> Result<Author, DataStoreError> {
        let conn = self.pool.get()?;
        Author::book_author(&conn, id)
    pub fn book_author(&self, id: u64) -> Result<Author, BookAuthorError> {
        let conn = self.pool.get().context(BookAuthorDbPoolSnafu)?;
        Author::book_author(&conn, id).context(FetchBookAuthorSnafu)
    }

    /// Fetch series data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -107,69 +289,69 @@ impl Calibre {
        limit: u64,
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Series>, DataStoreError> {
        let conn = self.pool.get()?;
        Series::multiple(&conn, limit, cursor, sort_order)
    ) -> Result<Vec<Series>, MultipleSeriesError> {
        let conn = self.pool.get().context(MultipleSeriesDbPoolSnafu)?;
        Series::multiple(&conn, limit, cursor, sort_order).context(FetchMultipleSeriesSnafu)
    }

    /// Get the series a book with id `id` is in, as well as the book's position within the series.
    pub fn book_series(&self, id: u64) -> Result<Option<(Series, f64)>, DataStoreError> {
        let conn = self.pool.get()?;
        Series::book_series(&conn, id)
    pub fn book_series(&self, id: u64) -> Result<Option<(Series, f64)>, BookSeriesError> {
        let conn = self.pool.get().context(BookSeriesDbPoolSnafu)?;
        Series::book_series(&conn, id).context(FetchBookSeriesSnafu)
    }

    /// Get all books belonging to the series with id `id`.
    pub fn series_books(&self, id: u64) -> Result<Vec<Book>, DataStoreError> {
        let conn = self.pool.get()?;
        Book::series_books(&conn, id)
    pub fn series_books(&self, id: u64) -> Result<Vec<Book>, SeriesBooksError> {
        let conn = self.pool.get().context(SeriesBooksDbPoolSnafu)?;
        Book::series_books(&conn, id).context(FetchSeriesBooksSnafu)
    }

    /// Check if there are more authors before the specified cursor.
    pub fn has_previous_authors(&self, author_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get()?;
        Author::has_previous_authors(&conn, author_sort)
    pub fn has_previous_authors(&self, author_sort: &str) -> Result<bool, HasPreviousAuthorsError> {
        let conn = self.pool.get().context(HasPreviousAuthorsDbPoolSnafu)?;
        Author::has_previous_authors(&conn, author_sort).context(FetchHasPreviousAuthorsSnafu)
    }

    /// Check if there are more authors after the specified cursor.
    pub fn has_more_authors(&self, author_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get()?;
        Author::has_more_authors(&conn, author_sort)
    pub fn has_more_authors(&self, author_sort: &str) -> Result<bool, HasMoreAuthorsError> {
        let conn = self.pool.get().context(HasMoreAuthorsDbPoolSnafu)?;
        Author::has_more_authors(&conn, author_sort).context(FetchHasMoreAuthorsSnafu)
    }

    /// Check if there are more books before the specified cursor.
    pub fn has_previous_books(&self, book_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get()?;
        Book::has_previous_books(&conn, book_sort)
    pub fn has_previous_books(&self, book_sort: &str) -> Result<bool, HasPreviousBooksError> {
        let conn = self.pool.get().context(HasPreviousBooksDbPoolSnafu)?;
        Book::has_previous_books(&conn, book_sort).context(FetchHasPreviousBooksSnafu)
    }

    /// Check if there are more books after the specified cursor.
    pub fn has_more_books(&self, book_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get()?;
        Book::has_more_books(&conn, book_sort)
    pub fn has_more_books(&self, book_sort: &str) -> Result<bool, HasMoreBooksError> {
        let conn = self.pool.get().context(HasMoreBooksDbPoolSnafu)?;
        Book::has_more_books(&conn, book_sort).context(FetchHasMoreBooksSnafu)
    }

    /// Check if there are more series before the specified cursor.
    pub fn has_previous_series(&self, series_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get()?;
        Series::has_previous_series(&conn, series_sort)
    pub fn has_previous_series(&self, series_sort: &str) -> Result<bool, HasPreviousSeriesError> {
        let conn = self.pool.get().context(HasPreviousSeriesDbPoolSnafu)?;
        Series::has_previous_series(&conn, series_sort).context(FetchHasPreviousSeriesSnafu)
    }

    /// Check if there are more series after the specified cursor.
    pub fn has_more_series(&self, series_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get()?;
        Series::has_more_series(&conn, series_sort)
    pub fn has_more_series(&self, series_sort: &str) -> Result<bool, HasMoreSeriesError> {
        let conn = self.pool.get().context(HasMoreSeriesDbPoolSnafu)?;
        Series::has_more_series(&conn, series_sort).context(FetchHasMoreSeriesSnafu)
    }

    /// Fetch a single author with id `id`.
    pub fn scalar_author(&self, id: u64) -> Result<Author, DataStoreError> {
        let conn = self.pool.get()?;
        Author::scalar_author(&conn, id)
    pub fn scalar_author(&self, id: u64) -> Result<Author, ScalarAuthorError> {
        let conn = self.pool.get().context(ScalarAuthorDbPoolSnafu)?;
        Author::scalar_author(&conn, id).context(FetchScalarAuthorSnafu)
    }

    /// Fetch a single series with id `id`.
    pub fn scalar_series(&self, id: u64) -> Result<Series, DataStoreError> {
        let conn = self.pool.get()?;
        Series::scalar_series(&conn, id)
    pub fn scalar_series(&self, id: u64) -> Result<Series, ScalarSeriesError> {
        let conn = self.pool.get().context(ScalarSeriesDbPoolSnafu)?;
        Series::scalar_series(&conn, id).context(FetchScalarSeriesSnafu)
    }
}
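With the catch-all `DataStoreError` replaced by per-operation snafu enums, a caller can report the full cause chain through `std::error::Error::source`. A minimal sketch, not part of this commit; the `calibre::Calibre` export path and the database location are assumptions:

use std::error::Error;
use std::path::Path;

use calibre::Calibre; // assumed export path for the struct above

fn report(err: &dyn Error) {
    // Print the top-level message, then every underlying cause.
    eprintln!("error: {err}");
    let mut source = err.source();
    while let Some(cause) = source {
        eprintln!("  caused by: {cause}");
        source = cause.source();
    }
}

fn main() {
    // Hypothetical library path.
    match Calibre::load(Path::new("/srv/calibre/metadata.db")) {
        Ok(calibre) => match calibre.recent_books(10) {
            Ok(books) => println!("fetched {} recent books", books.len()),
            Err(e) => report(&e),
        },
        Err(e) => report(&e),
    }
}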
@@ -1,12 +1,9 @@
//! Author data.

use rusqlite::{named_params, Connection, Row};
use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
use rusqlite::{Connection, Row, named_params};
use serde::Serialize;

use super::{
    error::DataStoreError,
    pagination::{Pagination, SortOrder},
};
use snafu::{ResultExt, Snafu};

/// Author in calibre.
#[derive(Debug, Clone, Serialize)]
@@ -19,6 +16,40 @@ pub struct Author {
    pub sort: String,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to fetch multiple authors."))]
pub struct MultipleAuthorsError {
    source: PaginationError,
}

#[derive(Debug, Snafu)]
pub enum BookAuthorError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareBookAuthor { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteBookAuthor { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
pub enum ScalarAuthorError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareScalarAuthor { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteScalarAuthor { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for previous authors."))]
pub struct PreviousAuthorsError {
    source: HasPrevOrMoreError,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for more authors."))]
pub struct MoreAuthorsError {
    source: HasPrevOrMoreError,
}

impl Author {
    fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self {
@@ -35,44 +66,54 @@ impl Author {
        limit: u64,
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Self>, DataStoreError> {
    ) -> Result<Vec<Self>, MultipleAuthorsError> {
        let pagination = Pagination::new("sort", cursor, limit, *sort_order);
        pagination.paginate(
            conn,
            "SELECT id, name, sort FROM authors",
            &[],
            Self::from_row,
        )
        pagination
            .paginate(
                conn,
                "SELECT id, name, sort FROM authors",
                &[],
                Self::from_row,
            )
            .context(MultipleAuthorsSnafu)
    }

    /// Get the author to a book with id `id`.
    pub fn book_author(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
        let mut stmt = conn.prepare(
            "SELECT authors.id, authors.name, authors.sort FROM authors \
    pub fn book_author(conn: &Connection, id: u64) -> Result<Self, BookAuthorError> {
        let mut stmt = conn
            .prepare(
                "SELECT authors.id, authors.name, authors.sort FROM authors \
                INNER JOIN books_authors_link ON authors.id = books_authors_link.author \
                WHERE books_authors_link.book = (:id)",
        )?;
            )
            .context(PrepareBookAuthorSnafu)?;
        let params = named_params! { ":id": id };
        Ok(stmt.query_row(params, Self::from_row)?)
        stmt.query_row(params, Self::from_row)
            .context(ExecuteBookAuthorSnafu)
    }

    /// Fetch a single author with id `id`.
    pub fn scalar_author(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
        let mut stmt = conn.prepare("SELECT id, name, sort FROM authors WHERE id = (:id)")?;
    pub fn scalar_author(conn: &Connection, id: u64) -> Result<Self, ScalarAuthorError> {
        let mut stmt = conn
            .prepare("SELECT id, name, sort FROM authors WHERE id = (:id)")
            .context(PrepareScalarAuthorSnafu)?;
        let params = named_params! { ":id": id };
        Ok(stmt.query_row(params, Self::from_row)?)
        stmt.query_row(params, Self::from_row)
            .context(ExecuteScalarAuthorSnafu)
    }

    /// Check if there are more authors before the specified cursor.
    pub fn has_previous_authors(
        conn: &Connection,
        sort_name: &str,
    ) -> Result<bool, DataStoreError> {
    ) -> Result<bool, PreviousAuthorsError> {
        Pagination::has_prev_or_more(conn, "authors", sort_name, &SortOrder::DESC)
            .context(PreviousAuthorsSnafu)
    }

    /// Check if there are more authors after the specified cursor.
    pub fn has_more_authors(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
    pub fn has_more_authors(conn: &Connection, sort_name: &str) -> Result<bool, MoreAuthorsError> {
        Pagination::has_prev_or_more(conn, "authors", sort_name, &SortOrder::ASC)
            .context(MoreAuthorsSnafu)
    }
}
@@ -1,13 +1,11 @@
//! Book data.

use rusqlite::{named_params, Connection, Row};
use rusqlite::{Connection, Row, named_params};
use serde::Serialize;
use time::OffsetDateTime;

use super::{
    error::DataStoreError,
    pagination::{Pagination, SortOrder},
};
use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
use snafu::{ResultExt, Snafu};

/// Book in calibre.
#[derive(Debug, Clone, Serialize)]
@@ -28,6 +26,54 @@ pub struct Book {
    pub description: Option<String>,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to fetch multiple books."))]
pub struct MultipleBooksError {
    source: PaginationError,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to fetch author's books."))]
pub struct AuthorBooksError {
    source: PaginationError,
}

#[derive(Debug, Snafu)]
pub enum SeriesBookError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareSeriesBook { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteSeriesBook { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
pub enum RecentBooksError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareRecentBooks { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteRecentBooks { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
pub enum ScalarBookError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareScalarBook { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteScalarBook { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for previous books."))]
pub struct PreviousBooksError {
    source: HasPrevOrMoreError,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for more books."))]
pub struct MoreBooksError {
    source: HasPrevOrMoreError,
}

impl Book {
    fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self {
@@ -48,7 +94,7 @@ impl Book {
        limit: u64,
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Self>, DataStoreError> {
    ) -> Result<Vec<Self>, MultipleBooksError> {
        let pagination = Pagination::new("sort", cursor, limit, *sort_order);
        pagination.paginate(
            conn,
@@ -56,7 +102,7 @@ impl Book {
            FROM books LEFT JOIN comments ON books.id = comments.book",
            &[],
            Self::from_row,
        )
        ).context(MultipleBooksSnafu)
    }

    /// Fetch books for an author specified by `author_id`, paginate the books by starting at `cursor`,
@@ -67,7 +113,7 @@ impl Book {
        limit: u64,
        cursor: Option<&str>,
        sort_order: SortOrder,
    ) -> Result<Vec<Self>, DataStoreError> {
    ) -> Result<Vec<Self>, AuthorBooksError> {
        let pagination = Pagination::new("books.sort", cursor, limit, sort_order);
        pagination.paginate(
            conn,
@@ -77,11 +123,11 @@ impl Book {
            WHERE books_authors_link.author = (:author_id) AND",
            &[(":author_id", &author_id)],
            Self::from_row,
        )
        ).context(AuthorBooksSnafu)
    }

    /// Get all books belonging to the series with id `id`.
    pub fn series_books(conn: &Connection, id: u64) -> Result<Vec<Book>, DataStoreError> {
    pub fn series_books(conn: &Connection, id: u64) -> Result<Vec<Book>, SeriesBookError> {
        let mut stmt = conn.prepare(
            "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text FROM series \
            INNER JOIN books_series_link ON series.id = books_series_link.series \
@@ -89,40 +135,50 @@ impl Book {
            LEFT JOIN comments ON books.id = comments.book \
            WHERE books_series_link.series = (:id) \
            ORDER BY books.series_index",
        )?;
        ).context(PrepareSeriesBookSnafu)?;
        let params = named_params! { ":id": id };
        let iter = stmt.query_map(params, Self::from_row)?;
        let iter = stmt
            .query_map(params, Self::from_row)
            .context(ExecuteSeriesBookSnafu)?;
        Ok(iter.filter_map(Result::ok).collect())
    }

    /// Get recent books up to a limit of `limit`.
    pub fn recents(conn: &Connection, limit: u64) -> Result<Vec<Self>, DataStoreError> {
    pub fn recents(conn: &Connection, limit: u64) -> Result<Vec<Self>, RecentBooksError> {
        let mut stmt = conn.prepare(
            "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
            FROM books LEFT JOIN comments ON books.id = comments.book ORDER BY books.timestamp DESC LIMIT (:limit)"
        )?;
        ).context(PrepareRecentBooksSnafu)?;
        let params = named_params! { ":limit": limit };
        let iter = stmt.query_map(params, Self::from_row)?;
        let iter = stmt
            .query_map(params, Self::from_row)
            .context(ExecuteRecentBooksSnafu)?;
        Ok(iter.filter_map(Result::ok).collect())
    }

    /// Get a single book, specified `id`.
    pub fn scalar_book(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
    pub fn scalar_book(conn: &Connection, id: u64) -> Result<Self, ScalarBookError> {
        let mut stmt = conn.prepare(
            "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
            FROM books LEFT JOIN comments WHERE books.id = (:id)",
        )?;
        ).context(PrepareScalarBookSnafu)?;
        let params = named_params! { ":id": id };
        Ok(stmt.query_row(params, Self::from_row)?)
        stmt.query_row(params, Self::from_row)
            .context(ExecuteScalarBookSnafu)
    }

    /// Check if there are more books before the specified cursor.
    pub fn has_previous_books(conn: &Connection, sort_title: &str) -> Result<bool, DataStoreError> {
    pub fn has_previous_books(
        conn: &Connection,
        sort_title: &str,
    ) -> Result<bool, PreviousBooksError> {
        Pagination::has_prev_or_more(conn, "books", sort_title, &SortOrder::DESC)
            .context(PreviousBooksSnafu)
    }

    /// Check if there are more books after the specified cursor.
    pub fn has_more_books(conn: &Connection, sort_title: &str) -> Result<bool, DataStoreError> {
    pub fn has_more_books(conn: &Connection, sort_title: &str) -> Result<bool, MoreBooksError> {
        Pagination::has_prev_or_more(conn, "books", sort_title, &SortOrder::ASC)
            .context(MoreBooksSnafu)
    }
}
@@ -1,42 +1 @@
//! Error handling for calibre database access.

use std::io;

use tempfile::PersistError;
use thiserror::Error;
use time::error::Parse;

/// Errors from accessing the calibre database.
#[derive(Error, Debug)]
#[error("data store error")]
pub enum DataStoreError {
    /// Found no entries for the query.
    #[error("no results")]
    NoResults(rusqlite::Error),
    /// Error with SQLite.
    #[error("sqlite error")]
    SqliteError(rusqlite::Error),
    /// Error connecting to the database.
    #[error("connection error")]
    ConnectionError(#[from] r2d2::Error),
    /// Error parsing a datetime from the database.
    #[error("failed to parse datetime")]
    DateTimeError(#[from] Parse),
    /// Error creating the search database.
    #[error("failed to create search database")]
    SearchDbError(#[from] io::Error),
    /// Error marking the search database as persistent.
    #[error("failed to persist search database")]
    PersistSearchDbError(#[from] PersistError),
}

/// Convert an SQLite error into a proper NoResults one if the query
/// returned no rows, return others as is.
impl From<rusqlite::Error> for DataStoreError {
    fn from(error: rusqlite::Error) -> Self {
        match error {
            rusqlite::Error::QueryReturnedNoRows => DataStoreError::NoResults(error),
            _ => DataStoreError::SqliteError(error),
        }
    }
}
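The removed thiserror-based `DataStoreError` catch-all is replaced throughout by the pattern above: one error type per operation, with a variant per failure point and a context selector attached via snafu's `ResultExt::context`. A minimal sketch of that pattern in isolation; the `count_authors` helper is hypothetical and not part of the commit:

use rusqlite::Connection;
use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
pub enum CountAuthorsError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareCountAuthors { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteCountAuthors { source: rusqlite::Error },
}

/// Hypothetical helper following the commit's error style.
pub fn count_authors(conn: &Connection) -> Result<u64, CountAuthorsError> {
    // Each fallible step gets its own variant, so callers see where it failed.
    let mut stmt = conn
        .prepare("SELECT COUNT(1) FROM authors")
        .context(PrepareCountAuthorsSnafu)?;
    stmt.query_row([], |row| row.get(0))
        .context(ExecuteCountAuthorsSnafu)
}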
@@ -1,9 +1,8 @@
//! Cursor pagination handling.

use rusqlite::{named_params, Connection, Row, ToSql};
use rusqlite::{Connection, Row, ToSql, named_params};
use serde::{Deserialize, Serialize};

use super::error::DataStoreError;
use snafu::{ResultExt, Snafu};

/// How to sort query results. Signifying whether we are paginating forwards or backwards.
#[derive(Debug, Copy, Clone, PartialEq, Deserialize, Serialize)]
@@ -26,6 +25,22 @@ pub struct Pagination<'a> {
    pub sort_order: SortOrder,
}

#[derive(Debug, Snafu)]
pub enum HasPrevOrMoreError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareHasPrevOrMore { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteHasPrevOrMore { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
pub enum PaginationError {
    #[snafu(display("Failed to prepare statement."))]
    PreparePagination { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecutePagination { source: rusqlite::Error },
}

impl<'a> Pagination<'a> {
    /// Create a new pagination.
    pub fn new(
@@ -57,14 +72,16 @@ impl<'a> Pagination<'a> {
        table: &str,
        sort: &str,
        sort_order: &SortOrder,
    ) -> Result<bool, DataStoreError> {
    ) -> Result<bool, HasPrevOrMoreError> {
        let comparison = Pagination::sort_order_to_sql(sort_order);

        let mut stmt = conn.prepare(&format!(
            "SELECT Count(1) FROM {table} WHERE sort {comparison} (:sort) ORDER BY sort {sort_order:?}"
        ))?;
        )).context(PrepareHasPrevOrMoreSnafu)?;
        let params = named_params! { ":sort": sort};
        let count: u64 = stmt.query_row(params, |x| x.get(0))?;
        let count: u64 = stmt
            .query_row(params, |x| x.get(0))
            .context(ExecuteHasPrevOrMoreSnafu)?;

        Ok(count > 0)
    }
@@ -76,7 +93,7 @@ impl<'a> Pagination<'a> {
        statement: &str,
        params: &[(&str, &dyn ToSql)],
        processor: F,
    ) -> Result<Vec<T>, DataStoreError>
    ) -> Result<Vec<T>, PaginationError>
    where
        F: FnMut(&Row<'_>) -> Result<T, rusqlite::Error>,
    {
@@ -102,7 +119,7 @@ impl<'a> Pagination<'a> {
        // DANGER: vulnerable to SQL injection if statement or sort_col variable is influenced by user input
        let mut stmt = conn.prepare(&format!(
            "SELECT * FROM ({statement} {where_sql} {sort_col} {comparison} (:cursor) ORDER BY {sort_col} {sort_order:?} LIMIT (:limit)) AS t ORDER BY {sort_col_wrapped} ASC"
        ))?;
        )).context(PreparePaginationSnafu)?;
        let params = [
            &[
                (":cursor", &cursor as &dyn ToSql),
@@ -111,7 +128,9 @@ impl<'a> Pagination<'a> {
            params,
        ]
        .concat();
        let iter = stmt.query_map(params.as_slice(), processor)?;
        let iter = stmt
            .query_map(params.as_slice(), processor)
            .context(ExecutePaginationSnafu)?;
        Ok(iter.filter_map(Result::ok).collect())
    }
}
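The keyset pagination above always filters on a sort column relative to the cursor, so a caller pages forward by passing the last row's sort value back in and asking the matching `has_more_*` check whether to continue. A rough usage sketch against the top-level API; the `authors` method name, the `name` field, the module paths, and the `Box<dyn Error>` plumbing are assumptions for illustration only:

use std::error::Error;

use calibre::{data::pagination::SortOrder, Calibre}; // assumed module paths

fn list_all_authors(calibre: &Calibre) -> Result<(), Box<dyn Error>> {
    let mut cursor: Option<String> = None;
    loop {
        // Fetch one page of 25 authors, keyed on the `sort` column.
        let page = calibre.authors(25, cursor.as_deref(), &SortOrder::ASC)?;
        let Some(last) = page.last() else { break };
        for author in &page {
            println!("{}", author.name);
        }
        // Stop once nothing sorts after the last entry of this page.
        if !calibre.has_more_authors(&last.sort)? {
            break;
        }
        cursor = Some(last.sort.clone());
    }
    Ok(())
}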
@@ -1,12 +1,10 @@
//! Series data.

use rusqlite::{named_params, Connection, Row};
use rusqlite::{Connection, Row, named_params};
use serde::Serialize;

use super::{
    error::DataStoreError,
    pagination::{Pagination, SortOrder},
};
use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
use snafu::{ResultExt, Snafu};

/// Series in calibre.
#[derive(Debug, Clone, Serialize)]
@@ -19,6 +17,40 @@ pub struct Series {
    pub sort: String,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to fetch multiple series."))]
pub struct MultipleSeriesError {
    source: PaginationError,
}

#[derive(Debug, Snafu)]
pub enum SeriesBooksError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareSeriesBooks { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteSeriesBooks { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
pub enum ScalarSeriesError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareScalarSeries { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteScalarSeries { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for previous series."))]
pub struct PreviousSeriesError {
    source: HasPrevOrMoreError,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for more series."))]
pub struct MoreSeriesError {
    source: HasPrevOrMoreError,
}

impl Series {
    fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self {
@@ -35,34 +67,41 @@ impl Series {
        limit: u64,
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Self>, DataStoreError> {
    ) -> Result<Vec<Self>, MultipleSeriesError> {
        let pagination = Pagination::new("sort", cursor, limit, *sort_order);
        pagination.paginate(
            conn,
            "SELECT id, name, sort FROM series",
            &[],
            Self::from_row,
        )
        pagination
            .paginate(
                conn,
                "SELECT id, name, sort FROM series",
                &[],
                Self::from_row,
            )
            .context(MultipleSeriesSnafu)
    }

    /// Fetch a single series with id `id`.
    pub fn scalar_series(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
        let mut stmt = conn.prepare("SELECT id, name, sort FROM series WHERE id = (:id)")?;
    pub fn scalar_series(conn: &Connection, id: u64) -> Result<Self, ScalarSeriesError> {
        let mut stmt = conn
            .prepare("SELECT id, name, sort FROM series WHERE id = (:id)")
            .context(PrepareScalarSeriesSnafu)?;
        let params = named_params! { ":id": id };
        Ok(stmt.query_row(params, Self::from_row)?)
        stmt.query_row(params, Self::from_row)
            .context(ExecuteScalarSeriesSnafu)
    }

    /// Get the series a book with id `id` is in, as well as the book's position within the series.
    pub fn book_series(
        conn: &Connection,
        book_id: u64,
    ) -> Result<Option<(Self, f64)>, DataStoreError> {
        let mut stmt = conn.prepare(
            "SELECT series.id, series.name, series.sort, books.series_index FROM series \
    ) -> Result<Option<(Self, f64)>, SeriesBooksError> {
        let mut stmt = conn
            .prepare(
                "SELECT series.id, series.name, series.sort, books.series_index FROM series \
                INNER JOIN books_series_link ON series.id = books_series_link.series \
                INNER JOIN books ON books.id = books_series_link.book \
                WHERE books_series_link.book = (:id)",
        )?;
            )
            .context(PrepareSeriesBooksSnafu)?;
        let params = named_params! { ":id": book_id };

        let from_row = |row: &Row<'_>| {
@@ -74,17 +113,22 @@ impl Series {
        match stmt.query_row(params, from_row) {
            Ok(series) => Ok(Some(series)),
            Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
            Err(e) => Err(DataStoreError::SqliteError(e)),
            Err(e) => Err(e).context(ExecuteSeriesBooksSnafu),
        }
    }

    /// Check if there are more series before the specified cursor.
    pub fn has_previous_series(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
    pub fn has_previous_series(
        conn: &Connection,
        sort_name: &str,
    ) -> Result<bool, PreviousSeriesError> {
        Pagination::has_prev_or_more(conn, "series", sort_name, &SortOrder::DESC)
            .context(PreviousSeriesSnafu)
    }

    /// Check if there are more series after the specified cursor.
    pub fn has_more_series(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
    pub fn has_more_series(conn: &Connection, sort_name: &str) -> Result<bool, MoreSeriesError> {
        Pagination::has_prev_or_more(conn, "series", sort_name, &SortOrder::ASC)
            .context(MoreSeriesSnafu)
    }
}
@@ -10,8 +10,9 @@ use std::path::Path;
use r2d2::{Pool, PooledConnection};
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::named_params;
use snafu::{ResultExt, Snafu};

use crate::data::{book::Book, error::DataStoreError};
use crate::data::book::Book;

/// A lot of joins but only run once at startup.
const SEARCH_INIT_QUERY: &str = "INSERT INTO search.fts(book_id, data)
@@ -33,20 +34,61 @@ const SEARCH_INIT_QUERY: &str = "INSERT INTO search.fts(book_id, data)
    LEFT JOIN main.series AS s ON b2s.series = s.id
    GROUP BY b.id";

#[derive(Debug, Snafu)]
pub enum EnsureSearchDbError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareEnsureSearch { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteEnsureSearch { source: rusqlite::Error },
    #[snafu(display("Failed to attach database."))]
    Attach { source: AttachError },
    #[snafu(display("Failed to initialize database."))]
    Init { source: InitError },
}

#[derive(Debug, Snafu)]
pub enum AttachError {
    #[snafu(display("Failed to execute statement."))]
    ExecuteAttach { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
pub enum InitError {
    #[snafu(display("Failed to prepare statement."))]
    PrepareInit { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteInit { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
pub enum SearchError {
    #[snafu(display("Failed to ensure the search db is initialized."))]
    EnsureDb { source: EnsureSearchDbError },
    #[snafu(display("Failed to get connection from pool."))]
    Connection { source: r2d2::Error },
    #[snafu(display("Failed to prepare statement."))]
    PrepareSearch { source: rusqlite::Error },
    #[snafu(display("Failed to execute statement."))]
    ExecuteSearch { source: rusqlite::Error },
}

/// Ensure the search database is attached to the connection and
/// initialize the data if needed.
fn ensure_search_db(
    conn: &PooledConnection<SqliteConnectionManager>,
    db_path: &Path,
) -> Result<(), DataStoreError> {
    let mut stmt =
        conn.prepare("SELECT COUNT() FROM pragma_database_list WHERE name = 'search'")?;
    let count: u64 = stmt.query_row([], |x| x.get(0))?;
) -> Result<(), EnsureSearchDbError> {
    let mut stmt = conn
        .prepare("SELECT COUNT() FROM pragma_database_list WHERE name = 'search'")
        .context(PrepareEnsureSearchSnafu)?;
    let count: u64 = stmt
        .query_row([], |x| x.get(0))
        .context(ExecuteEnsureSearchSnafu)?;
    let need_attachment = count == 0;

    if need_attachment {
        attach(conn, db_path)?;
        init(conn)?;
        attach(conn, db_path).context(AttachSnafu)?;
        init(conn).context(InitSnafu)?;
    }

    Ok(())
@@ -56,29 +98,32 @@ fn ensure_search_db(
fn attach(
    conn: &PooledConnection<SqliteConnectionManager>,
    db_path: &Path,
) -> Result<(), DataStoreError> {
) -> Result<(), AttachError> {
    conn.execute(
        &format!("ATTACH DATABASE '{}' AS search", db_path.to_string_lossy()),
        [],
    )?;
    init(conn)?;
    )
    .context(ExecuteAttachSnafu)?;

    Ok(())
}

/// Initialise the fts virtual table.
fn init(conn: &PooledConnection<SqliteConnectionManager>) -> Result<(), DataStoreError> {
fn init(conn: &PooledConnection<SqliteConnectionManager>) -> Result<(), InitError> {
    let mut stmt = conn
        .prepare("SELECT COUNT() FROM search.sqlite_master WHERE type='table' AND name = 'fts'")?;
    let count: u64 = stmt.query_row([], |x| x.get(0))?;
        .prepare("SELECT COUNT() FROM search.sqlite_master WHERE type='table' AND name = 'fts'")
        .context(PrepareInitSnafu)?;
    let count: u64 = stmt.query_row([], |x| x.get(0)).context(ExecuteInitSnafu)?;
    let need_init = count == 0;

    if need_init {
        conn.execute(
            "CREATE VIRTUAL TABLE search.fts USING fts5(book_id, data)",
            [],
        )?;
        conn.execute(SEARCH_INIT_QUERY, [])?;
        )
        .context(ExecuteInitSnafu)?;
        conn.execute(SEARCH_INIT_QUERY, [])
            .context(ExecuteInitSnafu)?;
    }

    Ok(())
@@ -89,15 +134,17 @@ pub(crate) fn search(
    query: &str,
    pool: &Pool<SqliteConnectionManager>,
    search_db_path: &Path,
) -> Result<Vec<Book>, DataStoreError> {
    let conn = pool.get()?;
    ensure_search_db(&conn, search_db_path)?;
) -> Result<Vec<Book>, SearchError> {
    let conn = pool.get().context(ConnectionSnafu)?;
    ensure_search_db(&conn, search_db_path).context(EnsureDbSnafu)?;

    let mut stmt =
        conn.prepare("SELECT book_id FROM search.fts WHERE data MATCH (:query) ORDER BY rank")?;
    let mut stmt = conn
        .prepare("SELECT book_id FROM search.fts WHERE data MATCH (:query) ORDER BY rank")
        .context(PrepareSearchSnafu)?;
    let params = named_params! { ":query": query };
    let books = stmt
        .query_map(params, |r| -> Result<u64, rusqlite::Error> { r.get(0) })?
        .query_map(params, |r| -> Result<u64, rusqlite::Error> { r.get(0) })
        .context(ExecuteSearchSnafu)?
        .filter_map(Result::ok)
        .filter_map(|id| Book::scalar_book(&conn, id).ok())
        .collect();
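The search path attaches a temporary FTS5 database on first use and then queries it with SQLite's full-text MATCH syntax; failures now surface as the dedicated `SearchError` instead of `DataStoreError`. A small, hypothetical end-to-end call; the crate path, database location, and the `title` field are assumptions:

use std::error::Error;
use std::path::Path;

use calibre::Calibre; // assumed export path

fn demo() -> Result<(), Box<dyn Error>> {
    let calibre = Calibre::load(Path::new("/srv/calibre/metadata.db"))?;
    // FTS5 query syntax, e.g. "dune OR herbert"; see the SQLite docs linked above.
    for book in calibre.search("dune OR herbert")? {
        println!("{}", book.title);
    }
    Ok(())
}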