From b4a0aadef9ecaa23c4374390fd225ea6e51c2cf0 Mon Sep 17 00:00:00 2001 From: Sebastian Hugentobler Date: Wed, 2 Jul 2025 21:09:37 +0200 Subject: [PATCH] WIP --- Cargo.lock | 664 ++++++++++++++---- Cargo.toml | 1 + calibre-db/Cargo.toml | 1 + calibre-db/src/calibre.rs | 304 ++++++-- calibre-db/src/data/author.rs | 87 ++- calibre-db/src/data/book.rs | 96 ++- calibre-db/src/data/error.rs | 41 -- calibre-db/src/data/pagination.rs | 37 +- calibre-db/src/data/series.rs | 88 ++- calibre-db/src/search.rs | 89 ++- little-hesinde/Cargo.toml | 13 +- little-hesinde/src/api.rs | 86 +++ little-hesinde/src/api/authors.rs | 36 + little-hesinde/src/api/books.rs | 22 + little-hesinde/src/api/download.rs | 38 + little-hesinde/src/api/error.rs | 41 ++ little-hesinde/src/api/html.rs | 7 + little-hesinde/src/api/html/archive.rs | 45 ++ little-hesinde/src/api/html/authors.rs | 134 ++++ little-hesinde/src/api/html/books.rs | 155 ++++ little-hesinde/src/api/html/cover.rs | 125 ++++ little-hesinde/src/api/html/recent.rs | 70 ++ little-hesinde/src/api/html/search.rs | 73 ++ little-hesinde/src/api/html/series.rs | 134 ++++ little-hesinde/src/api/opds.rs | 5 + little-hesinde/src/api/opds/authors.rs | 138 ++++ little-hesinde/src/api/opds/books.rs | 95 +++ little-hesinde/src/api/opds/recent.rs | 76 ++ little-hesinde/src/api/opds/search.rs | 107 +++ little-hesinde/src/api/opds/series.rs | 138 ++++ .../src/{handlers => api}/paginated.rs | 28 +- little-hesinde/src/api/recent.rs | 0 little-hesinde/src/api/routes.rs | 53 ++ little-hesinde/src/api/search.rs | 28 + little-hesinde/src/api/series.rs | 32 + little-hesinde/src/api/static_files.rs | 48 ++ little-hesinde/src/cache.rs | 96 ++- little-hesinde/src/config.rs | 59 +- little-hesinde/src/data.rs | 1 + little-hesinde/src/data/book.rs | 15 +- little-hesinde/src/handlers/author.rs | 38 - little-hesinde/src/handlers/authors.rs | 44 -- little-hesinde/src/handlers/books.rs | 77 -- little-hesinde/src/handlers/cover.rs | 74 -- little-hesinde/src/handlers/download.rs | 23 - little-hesinde/src/handlers/error.rs | 57 -- little-hesinde/src/handlers/html/author.rs | 21 - little-hesinde/src/handlers/html/authors.rs | 21 - little-hesinde/src/handlers/html/books.rs | 26 - little-hesinde/src/handlers/html/recent.rs | 20 - little-hesinde/src/handlers/html/search.rs | 20 - little-hesinde/src/handlers/html/series.rs | 21 - .../src/handlers/html/series_single.rs | 21 - little-hesinde/src/handlers/opds/author.rs | 39 - little-hesinde/src/handlers/opds/authors.rs | 49 -- little-hesinde/src/handlers/opds/books.rs | 54 -- little-hesinde/src/handlers/opds/feed.rs | 110 --- little-hesinde/src/handlers/opds/recent.rs | 38 - little-hesinde/src/handlers/opds/search.rs | 12 - .../src/handlers/opds/search_info.rs | 27 - little-hesinde/src/handlers/opds/series.rs | 46 -- .../src/handlers/opds/series_single.rs | 39 - little-hesinde/src/handlers/recent.rs | 28 - little-hesinde/src/handlers/search.rs | 38 - little-hesinde/src/handlers/series.rs | 48 -- little-hesinde/src/handlers/series_single.rs | 37 - little-hesinde/src/handlers/source_archive.rs | 11 - little-hesinde/src/lib.rs | 148 +--- little-hesinde/src/main.rs | 37 +- little-hesinde/src/opds.rs | 9 + little-hesinde/src/opds/error.rs | 32 +- little-hesinde/src/opds/feed.rs | 27 +- little-hesinde/src/opds/search.rs | 27 +- 73 files changed, 2993 insertions(+), 1632 deletions(-) create mode 100644 little-hesinde/src/api.rs create mode 100644 little-hesinde/src/api/authors.rs create mode 100644 little-hesinde/src/api/books.rs create mode 100644 
little-hesinde/src/api/download.rs create mode 100644 little-hesinde/src/api/error.rs create mode 100644 little-hesinde/src/api/html.rs create mode 100644 little-hesinde/src/api/html/archive.rs create mode 100644 little-hesinde/src/api/html/authors.rs create mode 100644 little-hesinde/src/api/html/books.rs create mode 100644 little-hesinde/src/api/html/cover.rs create mode 100644 little-hesinde/src/api/html/recent.rs create mode 100644 little-hesinde/src/api/html/search.rs create mode 100644 little-hesinde/src/api/html/series.rs create mode 100644 little-hesinde/src/api/opds.rs create mode 100644 little-hesinde/src/api/opds/authors.rs create mode 100644 little-hesinde/src/api/opds/books.rs create mode 100644 little-hesinde/src/api/opds/recent.rs create mode 100644 little-hesinde/src/api/opds/search.rs create mode 100644 little-hesinde/src/api/opds/series.rs rename little-hesinde/src/{handlers => api}/paginated.rs (62%) create mode 100644 little-hesinde/src/api/recent.rs create mode 100644 little-hesinde/src/api/routes.rs create mode 100644 little-hesinde/src/api/search.rs create mode 100644 little-hesinde/src/api/series.rs create mode 100644 little-hesinde/src/api/static_files.rs create mode 100644 little-hesinde/src/data.rs delete mode 100644 little-hesinde/src/handlers/author.rs delete mode 100644 little-hesinde/src/handlers/authors.rs delete mode 100644 little-hesinde/src/handlers/books.rs delete mode 100644 little-hesinde/src/handlers/cover.rs delete mode 100644 little-hesinde/src/handlers/download.rs delete mode 100644 little-hesinde/src/handlers/error.rs delete mode 100644 little-hesinde/src/handlers/html/author.rs delete mode 100644 little-hesinde/src/handlers/html/authors.rs delete mode 100644 little-hesinde/src/handlers/html/books.rs delete mode 100644 little-hesinde/src/handlers/html/recent.rs delete mode 100644 little-hesinde/src/handlers/html/search.rs delete mode 100644 little-hesinde/src/handlers/html/series.rs delete mode 100644 little-hesinde/src/handlers/html/series_single.rs delete mode 100644 little-hesinde/src/handlers/opds/author.rs delete mode 100644 little-hesinde/src/handlers/opds/authors.rs delete mode 100644 little-hesinde/src/handlers/opds/books.rs delete mode 100644 little-hesinde/src/handlers/opds/feed.rs delete mode 100644 little-hesinde/src/handlers/opds/recent.rs delete mode 100644 little-hesinde/src/handlers/opds/search.rs delete mode 100644 little-hesinde/src/handlers/opds/search_info.rs delete mode 100644 little-hesinde/src/handlers/opds/series.rs delete mode 100644 little-hesinde/src/handlers/opds/series_single.rs delete mode 100644 little-hesinde/src/handlers/search.rs delete mode 100644 little-hesinde/src/handlers/series.rs delete mode 100644 little-hesinde/src/handlers/series_single.rs delete mode 100644 little-hesinde/src/handlers/source_archive.rs create mode 100644 little-hesinde/src/opds.rs diff --git a/Cargo.lock b/Cargo.lock index cd2fff0..1c2b17c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -167,6 +167,60 @@ dependencies = [ "arrayvec", ] +[[package]] +name = "axum" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" +dependencies = [ + "axum-core", + "bytes", + "form_urlencoded", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + 
"serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "backtrace" version = "0.3.75" @@ -257,6 +311,7 @@ dependencies = [ "r2d2_sqlite", "rusqlite", "serde", + "snafu", "tempfile", "thiserror 1.0.69", "time", @@ -289,12 +344,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - [[package]] name = "chrono" version = "0.4.41" @@ -507,6 +556,17 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "dyn-clone" version = "1.0.19" @@ -620,17 +680,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" -[[package]] -name = "futures-macro" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "futures-sink" version = "0.3.31" @@ -650,12 +699,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", - "futures-macro", - "futures-sink", "futures-task", "pin-project-lite", "pin-utils", - "slab", ] [[package]] @@ -706,8 +752,8 @@ dependencies = [ "aho-corasick", "bstr", "log", - "regex-automata", - "regex-syntax", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] @@ -764,30 +810,6 @@ dependencies = [ "hashbrown 0.15.4", ] -[[package]] -name = "headers" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" -dependencies = [ - "base64", - "bytes", - "headers-core", - "http", - "httpdate", - "mime", - "sha1", -] - -[[package]] -name = "headers-core" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" -dependencies = [ - "http", -] - [[package]] name = "heck" version = "0.5.0" @@ -888,6 +910,7 @@ dependencies = [ "hyper", "pin-project-lite", "tokio", + "tower-service", ] [[package]] @@ -914,12 +937,119 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + 
"zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + [[package]] name = "ident_case" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + [[package]] name = "ignore" version = "0.4.23" @@ -930,7 +1060,7 @@ dependencies = [ "globset", "log", "memchr", - "regex-automata", + "regex-automata 0.4.9", "same-file", "walkdir", "winapi-util", @@ -1085,31 +1215,42 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + [[package]] name = "little-hesinde" version = "0.3.1" dependencies = [ + "axum", "calibre-db", "clap", "ignore", "image", + "mime_guess", "once_cell", - "poem", "quick-xml", "rust-embed", "serde", "serde_json", "serde_with", "sha2", + "snafu", "tera", - "thiserror 1.0.69", "time", "tokio", "tokio-util", + "tower-http", "tracing", "tracing-subscriber", + "utoipa", + "utoipa-axum", + "utoipa-swagger-ui", "uuid", - "zip", + "zip 4.2.0", ] 
[[package]] @@ -1137,6 +1278,21 @@ dependencies = [ "imgref", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "maybe-rayon" version = "0.1.1" @@ -1201,18 +1357,6 @@ version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" -[[package]] -name = "nix" -version = "0.30.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" -dependencies = [ - "bitflags", - "cfg-if", - "cfg_aliases", - "libc", -] - [[package]] name = "nom" version = "7.1.3" @@ -1467,52 +1611,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] -name = "poem" -version = "3.1.11" +name = "potential_utf" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ea63e9302279b1ca262d15342760f8d08f04fb974d4997e8baed7d034b94121" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" dependencies = [ - "bytes", - "futures-util", - "headers", - "hex", - "http", - "http-body-util", - "httpdate", - "hyper", - "hyper-util", - "mime", - "mime_guess", - "nix", - "parking_lot", - "percent-encoding", - "pin-project-lite", - "poem-derive", - "regex", - "rfc7239", - "rust-embed", - "serde", - "serde_json", - "serde_urlencoded", - "smallvec", - "sync_wrapper", - "thiserror 2.0.12", - "tokio", - "tokio-util", - "tracing", - "wildmatch", -] - -[[package]] -name = "poem-derive" -version = "3.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "824e7fe35343b7fe354e5d4ac444ddbe674676ebba4b4e48565835661033d338" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn", + "zerovec", ] [[package]] @@ -1530,15 +1634,6 @@ dependencies = [ "zerocopy", ] -[[package]] -name = "proc-macro-crate" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" -dependencies = [ - "toml_edit", -] - [[package]] name = "proc-macro2" version = "1.0.95" @@ -1786,8 +1881,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata", - "regex-syntax", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", ] [[package]] @@ -1798,24 +1902,21 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.5", ] +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = 
"regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" -[[package]] -name = "rfc7239" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a82f1d1e38e9a85bb58ffcfadf22ed6f2c94e8cd8581ec2b0f80a2a6858350f" -dependencies = [ - "uncased", -] - [[package]] name = "rgb" version = "0.8.50" @@ -1982,6 +2083,16 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a" +dependencies = [ + "itoa", + "serde", +] + [[package]] name = "serde_spanned" version = "0.6.9" @@ -2035,17 +2146,6 @@ dependencies = [ "syn", ] -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - [[package]] name = "sha2" version = "0.10.9" @@ -2124,6 +2224,27 @@ version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +[[package]] +name = "snafu" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320b01e011bf8d5d7a4a4a4be966d9160968935849c83b918827f6a435e7f627" +dependencies = [ + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1961e2ef424c1424204d3a5d6975f934f56b6d50ff5732382d84ebf460e147f7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "socket2" version = "0.5.10" @@ -2134,6 +2255,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "strsim" version = "0.11.1" @@ -2156,8 +2283,16 @@ name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ - "futures-core", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -2294,6 +2429,16 @@ dependencies = [ "time-core", ] +[[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tokio" version = "1.45.1" @@ -2369,12 +2514,57 @@ dependencies = [ "winnow", ] +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags", + "bytes", + "http", + "http-body", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + [[package]] name = "tracing" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -2418,10 +2608,14 @@ version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ + "matchers", "nu-ansi-term", + "once_cell", + "regex", "sharded-slab", "smallvec", "thread_local", + "tracing", "tracing-core", "tracing-log", ] @@ -2438,15 +2632,6 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" -[[package]] -name = "uncased" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" -dependencies = [ - "version_check", -] - [[package]] name = "unic-char-property" version = "0.9.0" @@ -2509,12 +2694,91 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "utoipa" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fcc29c80c21c31608227e0912b2d7fddba57ad76b606890627ba8ee7964e993" +dependencies = [ + "indexmap 2.10.0", + "serde", + "serde_json", + "utoipa-gen", +] + +[[package]] +name = "utoipa-axum" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c25bae5bccc842449ec0c5ddc5cbb6a3a1eaeac4503895dc105a1138f8234a0" +dependencies = [ + "axum", + "paste", + "tower-layer", + "tower-service", + "utoipa", +] + +[[package]] +name = "utoipa-gen" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d79d08d92ab8af4c5e8a6da20c47ae3f61a0f1dabc1997cdf2d082b757ca08b" +dependencies = [ + "proc-macro2", + "quote", + "regex", + "syn", +] + +[[package]] +name = "utoipa-swagger-ui" +version = "9.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d047458f1b5b65237c2f6dc6db136945667f40a7668627b3490b9513a3d43a55" +dependencies = [ + "axum", + "base64", + "mime_guess", + "regex", + "rust-embed", + "serde", + "serde_json", + "url", + "utoipa", + "utoipa-swagger-ui-vendored", + "zip 3.0.0", +] + +[[package]] +name = "utoipa-swagger-ui-vendored" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2eebbbfe4093922c2b6734d7c679ebfebd704a0d7e56dfcb0d05818ce28977d" + [[package]] name = "uuid" version = "1.17.0" @@ -2645,12 +2909,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "wildmatch" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ce1ab1f8c62655ebe1350f589c61e505cf94d385bc6a12899442d9081e71fd" - [[package]] name = "winapi" version = "0.3.9" @@ -2914,6 +3172,36 @@ dependencies = [ "bitflags", ] +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.8.26" @@ -2934,6 +3222,74 @@ dependencies = [ "syn", ] +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zip" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12598812502ed0105f607f941c386f43d441e00148fce9dec3ca5ffb0bde9308" +dependencies = [ + "arbitrary", + "crc32fast", + "flate2", + "indexmap 2.10.0", + "memchr", + "zopfli", +] + [[package]] name = "zip" version = "4.2.0" diff --git a/Cargo.toml b/Cargo.toml index 88b8c10..328ae1f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 
+6,7 @@ members = [
 [workspace.dependencies]
 serde = "1.0.219"
+snafu = { version = "0.8.6", features = ["rust_1_81"] }
 thiserror = "1.0.61"
 time = { version = "0.3.41", features = ["macros", "serde", "formatting", "parsing" ] }

diff --git a/calibre-db/Cargo.toml b/calibre-db/Cargo.toml
index d5da03d..0655e8a 100644
--- a/calibre-db/Cargo.toml
+++ b/calibre-db/Cargo.toml
@@ -12,6 +12,7 @@
 r2d2 = "0.8.10"
 r2d2_sqlite = "0.30.0"
 rusqlite = { version = "0.36.0", features = ["bundled", "time"] }
 serde = { workspace = true }
+snafu = { workspace = true }
 tempfile = "3.20.0"
 thiserror = { workspace = true }
 time = { workspace = true }

diff --git a/calibre-db/src/calibre.rs b/calibre-db/src/calibre.rs
index 1dd1164..83b5c7b 100644
--- a/calibre-db/src/calibre.rs
+++ b/calibre-db/src/calibre.rs
@@ -1,17 +1,19 @@
 //! Bundle all functions together.

-use std::path::{Path, PathBuf};
+use std::{
+    io,
+    path::{Path, PathBuf},
+};

 use r2d2::Pool;
 use r2d2_sqlite::SqliteConnectionManager;
-use tempfile::NamedTempFile;
+use tempfile::{NamedTempFile, PersistError};

 use crate::{
-    data::{
-        author::Author, book::Book, error::DataStoreError, pagination::SortOrder, series::Series,
-    },
+    data::{self, author::Author, book::Book, pagination::SortOrder, series::Series},
     search::search,
 };
+use snafu::{ResultExt, Snafu};

 /// Top level calibre functions, bundling all sub functions in one place and providing secure access to
 /// the database.
@@ -21,16 +23,195 @@ pub struct Calibre {
     search_db_path: PathBuf,
 }

+#[derive(Debug, Snafu)]
+pub enum LoadError {
+    #[snafu(display("Failed to create database connection pool."))]
+    DbPool { source: r2d2::Error },
+    #[snafu(display("Failed to create temporary database file."))]
+    TmpDb { source: io::Error },
+    #[snafu(display("Failed to keep temporary database from deletion."))]
+    PersistTmpDb { source: PersistError },
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to search."))]
+pub struct SearchError {
+    source: crate::search::SearchError,
+}
+
+#[derive(Debug, Snafu)]
+pub enum BooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    BooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch multiple books."))]
+    FetchBooks {
+        source: data::book::MultipleBooksError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum AuthorsError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    AuthorsDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch multiple authors."))]
+    FetchAuthors {
+        source: data::author::MultipleAuthorsError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum AuthorBooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    AuthorBooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch an author's books."))]
+    FetchAuthorBooks {
+        source: data::book::AuthorBooksError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum RecentBooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    RecentBooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch recent books."))]
+    FetchRecentBooks {
+        source: data::book::RecentBooksError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarBookError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    ScalarBookDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch book."))]
+    FetchScalarBook { source: data::book::ScalarBookError },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarAuthorError {
+    #[snafu(display("Failed to get database connection from pool."))]
pool."))] + ScalarAuthorDbPool { source: r2d2::Error }, + #[snafu(display("Failed to fetch author."))] + FetchScalarAuthor { + source: data::author::ScalarAuthorError, + }, +} + +#[derive(Debug, Snafu)] +pub enum ScalarSeriesError { + #[snafu(display("Failed to get database connection from pool."))] + ScalarSeriesDbPool { source: r2d2::Error }, + #[snafu(display("Failed to fetch series."))] + FetchScalarSeries { + source: data::series::ScalarSeriesError, + }, +} + +#[derive(Debug, Snafu)] +pub enum BookAuthorError { + #[snafu(display("Failed to get database connection from pool."))] + BookAuthorDbPool { source: r2d2::Error }, + #[snafu(display("Failed to fetch book's author."))] + FetchBookAuthor { + source: data::author::BookAuthorError, + }, +} + +#[derive(Debug, Snafu)] +pub enum MultipleSeriesError { + #[snafu(display("Failed to get database connection from pool."))] + MultipleSeriesDbPool { source: r2d2::Error }, + #[snafu(display("Failed to fetch multiple series."))] + FetchMultipleSeries { + source: data::series::MultiplSeriesError, + }, +} + +#[derive(Debug, Snafu)] +pub enum BookSeriesError { + #[snafu(display("Failed to get database connection from pool."))] + BookSeriesDbPool { source: r2d2::Error }, + #[snafu(display("Failed to get the series a book belongs to."))] + FetchBookSeries { + source: data::series::SeriesBooksError, + }, +} + +#[derive(Debug, Snafu)] +pub enum SeriesBooksError { + #[snafu(display("Failed to get database connection from pool."))] + SeriesBooksDbPool { source: r2d2::Error }, + #[snafu(display("Failed to get a series' books."))] + FetchSeriesBooks { source: data::book::SeriesBookError }, +} + +#[derive(Debug, Snafu)] +pub enum HasPreviousAuthorsError { + #[snafu(display("Failed to get database connection from pool."))] + HasPreviousAuthorsDbPool { source: r2d2::Error }, + #[snafu(display("Failed to check if there are previous authors."))] + FetchHasPreviousAuthors { + source: data::author::PreviousAuthorsError, + }, +} + +#[derive(Debug, Snafu)] +pub enum HasMoreAuthorsError { + #[snafu(display("Failed to get database connection from pool."))] + HasMoreAuthorsDbPool { source: r2d2::Error }, + #[snafu(display("Failed to check if there are previous authors."))] + FetchHasMoreAuthors { + source: data::author::MoreAuthorsError, + }, +} + +#[derive(Debug, Snafu)] +pub enum HasPreviousBooksError { + #[snafu(display("Failed to get database connection from pool."))] + HasPreviousBooksDbPool { source: r2d2::Error }, + #[snafu(display("Failed to check if there are previous books."))] + FetchHasPreviousBooks { + source: data::book::PreviousBooksError, + }, +} + +#[derive(Debug, Snafu)] +pub enum HasMoreBooksError { + #[snafu(display("Failed to get database connection from pool."))] + HasMoreBooksDbPool { source: r2d2::Error }, + #[snafu(display("Failed to check if there are previous books."))] + FetchHasMoreBooks { source: data::book::MoreBooksError }, +} + +#[derive(Debug, Snafu)] +pub enum HasPreviousSeriesError { + #[snafu(display("Failed to get database connection from pool."))] + HasPreviousSeriesDbPool { source: r2d2::Error }, + #[snafu(display("Failed to check if there are previous series."))] + FetchHasPreviousSeries { + source: data::series::PreviousSeriesError, + }, +} + +#[derive(Debug, Snafu)] +pub enum HasMoreSeriesError { + #[snafu(display("Failed to get database connection from pool."))] + HasMoreSeriesDbPool { source: r2d2::Error }, + #[snafu(display("Failed to check if there are previous series."))] + FetchHasMoreSeries { + source: 

 impl Calibre {
     /// Open a connection to the calibre database.
     ///
     /// Fail if the database file can not be opened or not be found.
-    pub fn load(path: &Path) -> Result<Calibre, DataStoreError> {
+    pub fn load(path: &Path) -> Result<Calibre, LoadError> {
         let manager = SqliteConnectionManager::file(path);
-        let pool = r2d2::Pool::new(manager)?;
+        let pool = r2d2::Pool::new(manager).context(DbPoolSnafu)?;

-        let tmpfile = NamedTempFile::new()?;
-        let (_, search_db_path) = tmpfile.keep()?;
+        let tmpfile = NamedTempFile::new().context(TmpDbSnafu)?;
+        let (_, search_db_path) = tmpfile.keep().context(PersistTmpDbSnafu)?;

         Ok(Self {
             pool,
@@ -41,8 +222,8 @@ impl Calibre {
     /// Full text search with a query.
     ///
     /// See https://www.sqlite.org/fts5.html#full_text_query_syntax for syntax.
-    pub fn search(&self, query: &str) -> Result<Vec<Book>, DataStoreError> {
-        search(query, &self.pool, &self.search_db_path)
+    pub fn search(&self, query: &str) -> Result<Vec<Book>, SearchError> {
+        search(query, &self.pool, &self.search_db_path).context(SearchSnafu)
     }

     /// Fetch book data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -52,9 +233,9 @@
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Book>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::multiple(&conn, limit, cursor, sort_order)
+    ) -> Result<Vec<Book>, BooksError> {
+        let conn = self.pool.get().context(BooksDbPoolSnafu)?;
+        Book::multiple(&conn, limit, cursor, sort_order).context(FetchBooksSnafu)
     }

     /// Fetch author data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -64,9 +245,9 @@
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Author>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::multiple(&conn, limit, cursor, sort_order)
+    ) -> Result<Vec<Author>, AuthorsError> {
+        let conn = self.pool.get().context(AuthorsDbPoolSnafu)?;
+        Author::multiple(&conn, limit, cursor, sort_order).context(FetchAuthorsSnafu)
     }

     /// Fetch books for an author specified by `author_id`, paginate the books by starting at `cursor`,
@@ -77,27 +258,28 @@
         limit: u64,
         cursor: Option<&str>,
         sort_order: SortOrder,
-    ) -> Result<Vec<Book>, DataStoreError> {
-        let conn = self.pool.get()?;
+    ) -> Result<Vec<Book>, AuthorBooksError> {
+        let conn = self.pool.get().context(AuthorBooksDbPoolSnafu)?;
         Book::author_books(&conn, author_id, limit, cursor, sort_order)
+            .context(FetchAuthorBooksSnafu)
     }

     /// Get recent books up to a limit of `limit`.
-    pub fn recent_books(&self, limit: u64) -> Result<Vec<Book>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::recents(&conn, limit)
+    pub fn recent_books(&self, limit: u64) -> Result<Vec<Book>, RecentBooksError> {
+        let conn = self.pool.get().context(RecentBooksDbPoolSnafu)?;
+        Book::recents(&conn, limit).context(FetchRecentBooksSnafu)
     }

     /// Get a single book, specified `id`.
-    pub fn scalar_book(&self, id: u64) -> Result<Book, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::scalar_book(&conn, id)
+    pub fn scalar_book(&self, id: u64) -> Result<Book, ScalarBookError> {
+        let conn = self.pool.get().context(ScalarBookDbPoolSnafu)?;
+        Book::scalar_book(&conn, id).context(FetchScalarBookSnafu)
     }

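// --- Editor's note ----------------------------------------------------------
// Illustration only, not part of this patch. Every accessor above follows the
// same shape: `#[derive(Snafu)]` generates one context selector per variant
// (`BooksDbPoolSnafu` for `BooksDbPool`, and so on), and
// `snafu::ResultExt::context` wraps the underlying error as that variant's
// `source`. A minimal, self-contained sketch of the pattern, with a
// hypothetical `ConfigError`/`read_config`:

use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
enum ConfigError {
    #[snafu(display("Failed to read config file."))]
    ReadConfig { source: std::io::Error },
}

fn read_config(path: &str) -> Result<String, ConfigError> {
    // `ReadConfigSnafu` is the selector snafu derives from the `ReadConfig`
    // variant; `.context` attaches the io::Error as its `source`.
    std::fs::read_to_string(path).context(ReadConfigSnafu)
}
// ----------------------------------------------------------------------------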
     /// Get the author to a book with id `id`.
-    pub fn book_author(&self, id: u64) -> Result<Author, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::book_author(&conn, id)
+    pub fn book_author(&self, id: u64) -> Result<Author, BookAuthorError> {
+        let conn = self.pool.get().context(BookAuthorDbPoolSnafu)?;
+        Author::book_author(&conn, id).context(FetchBookAuthorSnafu)
     }

     /// Fetch series data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -107,69 +289,69 @@
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Series>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::multiple(&conn, limit, cursor, sort_order)
+    ) -> Result<Vec<Series>, MultipleSeriesError> {
+        let conn = self.pool.get().context(MultipleSeriesDbPoolSnafu)?;
+        Series::multiple(&conn, limit, cursor, sort_order).context(FetchMultipleSeriesSnafu)
     }

     /// Get the series a book with id `id` is in, as well as the book's position within the series.
-    pub fn book_series(&self, id: u64) -> Result<Option<(Series, f64)>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::book_series(&conn, id)
+    pub fn book_series(&self, id: u64) -> Result<Option<(Series, f64)>, BookSeriesError> {
+        let conn = self.pool.get().context(BookSeriesDbPoolSnafu)?;
+        Series::book_series(&conn, id).context(FetchBookSeriesSnafu)
     }

     /// Get all books belonging to the series with id `id`.
-    pub fn series_books(&self, id: u64) -> Result<Vec<Book>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::series_books(&conn, id)
+    pub fn series_books(&self, id: u64) -> Result<Vec<Book>, SeriesBooksError> {
+        let conn = self.pool.get().context(SeriesBooksDbPoolSnafu)?;
+        Book::series_books(&conn, id).context(FetchSeriesBooksSnafu)
     }

     /// Check if there are more authors before the specified cursor.
-    pub fn has_previous_authors(&self, author_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::has_previous_authors(&conn, author_sort)
+    pub fn has_previous_authors(&self, author_sort: &str) -> Result<bool, HasPreviousAuthorsError> {
+        let conn = self.pool.get().context(HasPreviousAuthorsDbPoolSnafu)?;
+        Author::has_previous_authors(&conn, author_sort).context(FetchHasPreviousAuthorsSnafu)
     }

     /// Check if there are more authors after the specified cursor.
-    pub fn has_more_authors(&self, author_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::has_more_authors(&conn, author_sort)
+    pub fn has_more_authors(&self, author_sort: &str) -> Result<bool, HasMoreAuthorsError> {
+        let conn = self.pool.get().context(HasMoreAuthorsDbPoolSnafu)?;
+        Author::has_more_authors(&conn, author_sort).context(FetchHasMoreAuthorsSnafu)
     }

     /// Check if there are more books before the specified cursor.
-    pub fn has_previous_books(&self, book_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::has_previous_books(&conn, book_sort)
+    pub fn has_previous_books(&self, book_sort: &str) -> Result<bool, HasPreviousBooksError> {
+        let conn = self.pool.get().context(HasPreviousBooksDbPoolSnafu)?;
+        Book::has_previous_books(&conn, book_sort).context(FetchHasPreviousBooksSnafu)
     }

     /// Check if there are more books after the specified cursor.
-    pub fn has_more_books(&self, book_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::has_more_books(&conn, book_sort)
+    pub fn has_more_books(&self, book_sort: &str) -> Result<bool, HasMoreBooksError> {
+        let conn = self.pool.get().context(HasMoreBooksDbPoolSnafu)?;
+        Book::has_more_books(&conn, book_sort).context(FetchHasMoreBooksSnafu)
     }

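// --- Editor's note ----------------------------------------------------------
// Illustration only, not part of this patch. Unlike the old catch-all
// `DataStoreError`, the per-method enums let callers distinguish pool failures
// from query failures. A hypothetical caller (`print_book` is not in the
// codebase):

fn print_book(calibre: &Calibre) {
    match calibre.scalar_book(7) {
        Ok(book) => println!("{}", book.title),
        Err(ScalarBookError::ScalarBookDbPool { source }) => {
            eprintln!("no database connection: {source}")
        }
        Err(ScalarBookError::FetchScalarBook { source }) => {
            eprintln!("query failed: {source}")
        }
    }
}
// ----------------------------------------------------------------------------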
     /// Check if there are more series before the specified cursor.
-    pub fn has_previous_series(&self, series_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::has_previous_series(&conn, series_sort)
+    pub fn has_previous_series(&self, series_sort: &str) -> Result<bool, HasPreviousSeriesError> {
+        let conn = self.pool.get().context(HasPreviousSeriesDbPoolSnafu)?;
+        Series::has_previous_series(&conn, series_sort).context(FetchHasPreviousSeriesSnafu)
     }

     /// Check if there are more series after the specified cursor.
-    pub fn has_more_series(&self, series_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::has_more_series(&conn, series_sort)
+    pub fn has_more_series(&self, series_sort: &str) -> Result<bool, HasMoreSeriesError> {
+        let conn = self.pool.get().context(HasMoreSeriesDbPoolSnafu)?;
+        Series::has_more_series(&conn, series_sort).context(FetchHasMoreSeriesSnafu)
     }

     /// Fetch a single author with id `id`.
-    pub fn scalar_author(&self, id: u64) -> Result<Author, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::scalar_author(&conn, id)
+    pub fn scalar_author(&self, id: u64) -> Result<Author, ScalarAuthorError> {
+        let conn = self.pool.get().context(ScalarAuthorDbPoolSnafu)?;
+        Author::scalar_author(&conn, id).context(FetchScalarAuthorSnafu)
     }

     /// Fetch a single series with id `id`.
-    pub fn scalar_series(&self, id: u64) -> Result<Series, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::scalar_series(&conn, id)
+    pub fn scalar_series(&self, id: u64) -> Result<Series, ScalarSeriesError> {
+        let conn = self.pool.get().context(ScalarSeriesDbPoolSnafu)?;
+        Series::scalar_series(&conn, id).context(FetchScalarSeriesSnafu)
     }
 }

diff --git a/calibre-db/src/data/author.rs b/calibre-db/src/data/author.rs
index e79da5a..615f704 100644
--- a/calibre-db/src/data/author.rs
+++ b/calibre-db/src/data/author.rs
@@ -1,12 +1,9 @@
 //! Author data.

-use rusqlite::{named_params, Connection, Row};
+use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
+use rusqlite::{Connection, Row, named_params};
 use serde::Serialize;
-
-use super::{
-    error::DataStoreError,
-    pagination::{Pagination, SortOrder},
-};
+use snafu::{ResultExt, Snafu};

 /// Author in calibre.
 #[derive(Debug, Clone, Serialize)]
@@ -19,6 +16,40 @@ pub struct Author {
     pub sort: String,
 }

+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to fetch multiple authors."))]
+pub struct MultipleAuthorsError {
+    source: PaginationError,
+}
+
+#[derive(Debug, Snafu)]
+pub enum BookAuthorError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareBookAuthor { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteBookAuthor { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarAuthorError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareScalarAuthor { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteScalarAuthor { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for previous authors."))]
+pub struct PreviousAuthorsError {
+    source: HasPrevOrMoreError,
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for more authors."))]
+pub struct MoreAuthorsError {
+    source: HasPrevOrMoreError,
+}
+
 impl Author {
     fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
         Ok(Self {
@@ -35,44 +66,54 @@
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Author>, DataStoreError> {
+    ) -> Result<Vec<Author>, MultipleAuthorsError> {
         let pagination = Pagination::new("sort", cursor, limit, *sort_order);
-        pagination.paginate(
-            conn,
-            "SELECT id, name, sort FROM authors",
-            &[],
-            Self::from_row,
-        )
+        pagination
+            .paginate(
+                conn,
+                "SELECT id, name, sort FROM authors",
+                &[],
+                Self::from_row,
+            )
+            .context(MultipleAuthorsSnafu)
     }

     /// Get the author to a book with id `id`.
-    pub fn book_author(conn: &Connection, id: u64) -> Result<Author, DataStoreError> {
-        let mut stmt = conn.prepare(
-            "SELECT authors.id, authors.name, authors.sort FROM authors \
+    pub fn book_author(conn: &Connection, id: u64) -> Result<Author, BookAuthorError> {
+        let mut stmt = conn
+            .prepare(
+                "SELECT authors.id, authors.name, authors.sort FROM authors \
                 INNER JOIN books_authors_link ON authors.id = books_authors_link.author \
                 WHERE books_authors_link.book = (:id)",
-        )?;
+            )
+            .context(PrepareBookAuthorSnafu)?;

         let params = named_params! { ":id": id };
-        Ok(stmt.query_row(params, Self::from_row)?)
+        stmt.query_row(params, Self::from_row)
+            .context(ExecuteBookAuthorSnafu)
     }

     /// Fetch a single author with id `id`.
-    pub fn scalar_author(conn: &Connection, id: u64) -> Result<Author, DataStoreError> {
-        let mut stmt = conn.prepare("SELECT id, name, sort FROM authors WHERE id = (:id)")?;
+    pub fn scalar_author(conn: &Connection, id: u64) -> Result<Author, ScalarAuthorError> {
+        let mut stmt = conn
+            .prepare("SELECT id, name, sort FROM authors WHERE id = (:id)")
+            .context(PrepareScalarAuthorSnafu)?;
         let params = named_params! { ":id": id };
-        Ok(stmt.query_row(params, Self::from_row)?)
+        stmt.query_row(params, Self::from_row)
+            .context(ExecuteScalarAuthorSnafu)
     }

     /// Check if there are more authors before the specified cursor.
     pub fn has_previous_authors(
         conn: &Connection,
         sort_name: &str,
-    ) -> Result<bool, DataStoreError> {
+    ) -> Result<bool, PreviousAuthorsError> {
         Pagination::has_prev_or_more(conn, "authors", sort_name, &SortOrder::DESC)
+            .context(PreviousAuthorsSnafu)
     }

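// --- Editor's note ----------------------------------------------------------
// Illustration only, not part of this patch. For *struct* errors such as
// `MultipleAuthorsError` above, snafu strips the `Error` suffix to name the
// context selector, hence the `.context(MultipleAuthorsSnafu)` call above.
// The same shape with hypothetical names:

use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
#[snafu(display("Failed to parse the item count."))]
struct ItemCountError {
    source: std::num::ParseIntError,
}

fn item_count(raw: &str) -> Result<u32, ItemCountError> {
    raw.parse().context(ItemCountSnafu) // selector derived from the struct name
}
// ----------------------------------------------------------------------------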
     /// Check if there are more authors after the specified cursor.
-    pub fn has_more_authors(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
+    pub fn has_more_authors(conn: &Connection, sort_name: &str) -> Result<bool, MoreAuthorsError> {
         Pagination::has_prev_or_more(conn, "authors", sort_name, &SortOrder::ASC)
+            .context(MoreAuthorsSnafu)
     }
 }

diff --git a/calibre-db/src/data/book.rs b/calibre-db/src/data/book.rs
index 4547008..c9a9efc 100644
--- a/calibre-db/src/data/book.rs
+++ b/calibre-db/src/data/book.rs
@@ -1,13 +1,11 @@
 //! Book data.

-use rusqlite::{named_params, Connection, Row};
+use rusqlite::{Connection, Row, named_params};
 use serde::Serialize;
 use time::OffsetDateTime;

-use super::{
-    error::DataStoreError,
-    pagination::{Pagination, SortOrder},
-};
+use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
+use snafu::{ResultExt, Snafu};

 /// Book in calibre.
 #[derive(Debug, Clone, Serialize)]
@@ -28,6 +26,54 @@ pub struct Book {
     pub description: Option<String>,
 }

+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to fetch multiple books."))]
+pub struct MultipleBooksError {
+    source: PaginationError,
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to fetch author's books."))]
+pub struct AuthorBooksError {
+    source: PaginationError,
+}
+
+#[derive(Debug, Snafu)]
+pub enum SeriesBookError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareSeriesBook { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteSeriesBook { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum RecentBooksError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareRecentBooks { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteRecentBooks { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarBookError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareScalarBook { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteScalarBook { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for previous books."))]
+pub struct PreviousBooksError {
+    source: HasPrevOrMoreError,
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for more books."))]
+pub struct MoreBooksError {
+    source: HasPrevOrMoreError,
+}
+
 impl Book {
     fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
         Ok(Self {
@@ -48,7 +94,7 @@
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Book>, DataStoreError> {
+    ) -> Result<Vec<Book>, MultipleBooksError> {
         let pagination = Pagination::new("sort", cursor, limit, *sort_order);
         pagination.paginate(
             conn,
             "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
                 FROM books LEFT JOIN comments ON books.id = comments.book",
             &[],
             Self::from_row,
-        )
+        ).context(MultipleBooksSnafu)
     }

     /// Fetch books for an author specified by `author_id`, paginate the books by starting at `cursor`,
@@ -67,7 +113,7 @@
         limit: u64,
         cursor: Option<&str>,
         sort_order: SortOrder,
-    ) -> Result<Vec<Book>, DataStoreError> {
+    ) -> Result<Vec<Book>, AuthorBooksError> {
         let pagination = Pagination::new("books.sort", cursor, limit, sort_order);
         pagination.paginate(
             conn,
             "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
                 FROM books \
                 INNER JOIN books_authors_link ON books.id = books_authors_link.book \
                 LEFT JOIN comments ON books.id = comments.book \
                 WHERE books_authors_link.author = (:author_id) AND",
             &[(":author_id", &author_id)],
             Self::from_row,
-        )
+        ).context(AuthorBooksSnafu)
     }

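// --- Editor's note ----------------------------------------------------------
// Observation, not a change: `series_books` and `recents` below (and
// `Pagination::paginate`) finish with `iter.filter_map(Result::ok).collect()`,
// which silently drops any row that fails to map. If a failing row should be
// an error instead, collecting into a `Result` surfaces the first failure:
//
//     let iter = stmt
//         .query_map(params, Self::from_row)
//         .context(ExecuteSeriesBookSnafu)?;
//     let books: Result<Vec<Book>, rusqlite::Error> = iter.collect();
// ----------------------------------------------------------------------------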
     /// Get all books belonging to the series with id `id`.
-    pub fn series_books(conn: &Connection, id: u64) -> Result<Vec<Book>, DataStoreError> {
+    pub fn series_books(conn: &Connection, id: u64) -> Result<Vec<Book>, SeriesBookError> {
         let mut stmt = conn.prepare(
             "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text FROM series \
                 INNER JOIN books_series_link ON series.id = books_series_link.series \
                 INNER JOIN books ON books.id = books_series_link.book \
                 LEFT JOIN comments ON books.id = comments.book \
                 WHERE books_series_link.series = (:id) \
                 ORDER BY books.series_index",
-        )?;
+        ).context(PrepareSeriesBookSnafu)?;

         let params = named_params! { ":id": id };
-        let iter = stmt.query_map(params, Self::from_row)?;
+        let iter = stmt
+            .query_map(params, Self::from_row)
+            .context(ExecuteSeriesBookSnafu)?;

         Ok(iter.filter_map(Result::ok).collect())
     }

     /// Get recent books up to a limit of `limit`.
-    pub fn recents(conn: &Connection, limit: u64) -> Result<Vec<Book>, DataStoreError> {
+    pub fn recents(conn: &Connection, limit: u64) -> Result<Vec<Book>, RecentBooksError> {
         let mut stmt = conn.prepare(
             "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
                 FROM books LEFT JOIN comments ON books.id = comments.book ORDER BY books.timestamp DESC LIMIT (:limit)"
-        )?;
+        ).context(PrepareRecentBooksSnafu)?;
         let params = named_params! { ":limit": limit };
-        let iter = stmt.query_map(params, Self::from_row)?;
+        let iter = stmt
+            .query_map(params, Self::from_row)
+            .context(ExecuteRecentBooksSnafu)?;

         Ok(iter.filter_map(Result::ok).collect())
     }

     /// Get a single book, specified `id`.
-    pub fn scalar_book(conn: &Connection, id: u64) -> Result<Book, DataStoreError> {
+    pub fn scalar_book(conn: &Connection, id: u64) -> Result<Book, ScalarBookError> {
         let mut stmt = conn.prepare(
             "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
                 FROM books LEFT JOIN comments WHERE books.id = (:id)",
-        )?;
+        ).context(PrepareScalarBookSnafu)?;
         let params = named_params! { ":id": id };
-        Ok(stmt.query_row(params, Self::from_row)?)
+        stmt.query_row(params, Self::from_row)
+            .context(ExecuteScalarBookSnafu)
     }

     /// Check if there are more books before the specified cursor.
-    pub fn has_previous_books(conn: &Connection, sort_title: &str) -> Result<bool, DataStoreError> {
+    pub fn has_previous_books(
+        conn: &Connection,
+        sort_title: &str,
+    ) -> Result<bool, PreviousBooksError> {
         Pagination::has_prev_or_more(conn, "books", sort_title, &SortOrder::DESC)
+            .context(PreviousBooksSnafu)
     }

     /// Check if there are more books after the specified cursor.
-    pub fn has_more_books(conn: &Connection, sort_title: &str) -> Result<bool, DataStoreError> {
+    pub fn has_more_books(conn: &Connection, sort_title: &str) -> Result<bool, MoreBooksError> {
         Pagination::has_prev_or_more(conn, "books", sort_title, &SortOrder::ASC)
+            .context(MoreBooksSnafu)
     }
 }

diff --git a/calibre-db/src/data/error.rs b/calibre-db/src/data/error.rs
index 681b141..8b13789 100644
--- a/calibre-db/src/data/error.rs
+++ b/calibre-db/src/data/error.rs
@@ -1,42 +1 @@
-//! Error handling for calibre database access.
-use std::io;
-
-use tempfile::PersistError;
-use thiserror::Error;
-use time::error::Parse;
-
-/// Errors from accessing the calibre database.
-#[derive(Error, Debug)]
-#[error("data store error")]
-pub enum DataStoreError {
-    /// Found no entries for the query.
-    #[error("no results")]
-    NoResults(rusqlite::Error),
-    /// Error with SQLite.
-    #[error("sqlite error")]
-    SqliteError(rusqlite::Error),
-    /// Error connecting to the database.
-    #[error("connection error")]
-    ConnectionError(#[from] r2d2::Error),
-    /// Error parsing a datetime from the database.
- #[error("failed to parse datetime")] - DateTimeError(#[from] Parse), - /// Error creating the search database. - #[error("failed to create search database")] - SearchDbError(#[from] io::Error), - /// Error marking the search database as persistent. - #[error("failed to persist search database")] - PersistSearchDbError(#[from] PersistError), -} - -/// Convert an SQLite error into a proper NoResults one if the query -/// returned no rows, return others as is. -impl From for DataStoreError { - fn from(error: rusqlite::Error) -> Self { - match error { - rusqlite::Error::QueryReturnedNoRows => DataStoreError::NoResults(error), - _ => DataStoreError::SqliteError(error), - } - } -} diff --git a/calibre-db/src/data/pagination.rs b/calibre-db/src/data/pagination.rs index c2f6064..8b44ebb 100644 --- a/calibre-db/src/data/pagination.rs +++ b/calibre-db/src/data/pagination.rs @@ -1,9 +1,8 @@ //! Cursor pagination handling. -use rusqlite::{named_params, Connection, Row, ToSql}; +use rusqlite::{Connection, Row, ToSql, named_params}; use serde::{Deserialize, Serialize}; - -use super::error::DataStoreError; +use snafu::{ResultExt, Snafu}; /// How to sort query results. Signifying whether we are paginating forwards or backwards. #[derive(Debug, Copy, Clone, PartialEq, Deserialize, Serialize)] @@ -26,6 +25,22 @@ pub struct Pagination<'a> { pub sort_order: SortOrder, } +#[derive(Debug, Snafu)] +pub enum HasPrevOrMoreError { + #[snafu(display("Failed to prepare statement."))] + PrepareHasPrevOrMore { source: rusqlite::Error }, + #[snafu(display("Failed to execute statement."))] + ExecuteHasPrevOrMore { source: rusqlite::Error }, +} + +#[derive(Debug, Snafu)] +pub enum PaginationError { + #[snafu(display("Failed to prepare statement."))] + PreparePagination { source: rusqlite::Error }, + #[snafu(display("Failed to execute statement."))] + ExecutePagination { source: rusqlite::Error }, +} + impl<'a> Pagination<'a> { /// Create a new pagination. pub fn new( @@ -57,14 +72,16 @@ impl<'a> Pagination<'a> { table: &str, sort: &str, sort_order: &SortOrder, - ) -> Result { + ) -> Result { let comparison = Pagination::sort_order_to_sql(sort_order); let mut stmt = conn.prepare(&format!( "SELECT Count(1) FROM {table} WHERE sort {comparison} (:sort) ORDER BY sort {sort_order:?}" - ))?; + )).context(PrepareHasPrevOrMoreSnafu)?; let params = named_params! 
{ ":sort": sort}; - let count: u64 = stmt.query_row(params, |x| x.get(0))?; + let count: u64 = stmt + .query_row(params, |x| x.get(0)) + .context(ExecuteHasPrevOrMoreSnafu)?; Ok(count > 0) } @@ -76,7 +93,7 @@ impl<'a> Pagination<'a> { statement: &str, params: &[(&str, &dyn ToSql)], processor: F, - ) -> Result, DataStoreError> + ) -> Result, PaginationError> where F: FnMut(&Row<'_>) -> Result, { @@ -102,7 +119,7 @@ impl<'a> Pagination<'a> { // DANGER: vulnerable to SQL injection if statement or sort_col variable is influenced by user input let mut stmt = conn.prepare(&format!( "SELECT * FROM ({statement} {where_sql} {sort_col} {comparison} (:cursor) ORDER BY {sort_col} {sort_order:?} LIMIT (:limit)) AS t ORDER BY {sort_col_wrapped} ASC" - ))?; + )).context(PreparePaginationSnafu)?; let params = [ &[ (":cursor", &cursor as &dyn ToSql), @@ -111,7 +128,9 @@ impl<'a> Pagination<'a> { params, ] .concat(); - let iter = stmt.query_map(params.as_slice(), processor)?; + let iter = stmt + .query_map(params.as_slice(), processor) + .context(ExecutePaginationSnafu)?; Ok(iter.filter_map(Result::ok).collect()) } } diff --git a/calibre-db/src/data/series.rs b/calibre-db/src/data/series.rs index b467fde..bf607c7 100644 --- a/calibre-db/src/data/series.rs +++ b/calibre-db/src/data/series.rs @@ -1,12 +1,10 @@ //! Series data. -use rusqlite::{named_params, Connection, Row}; +use rusqlite::{Connection, Row, named_params}; use serde::Serialize; -use super::{ - error::DataStoreError, - pagination::{Pagination, SortOrder}, -}; +use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder}; +use snafu::{ResultExt, Snafu}; /// Series in calibre. #[derive(Debug, Clone, Serialize)] @@ -19,6 +17,40 @@ pub struct Series { pub sort: String, } +#[derive(Debug, Snafu)] +#[snafu(display("Failed to fetch multiple series."))] +pub struct MultiplSeriesError { + source: PaginationError, +} + +#[derive(Debug, Snafu)] +pub enum SeriesBooksError { + #[snafu(display("Failed to prepare statement."))] + PrepareSeriesBooks { source: rusqlite::Error }, + #[snafu(display("Failed to execute statement."))] + ExecuteSeriesBooks { source: rusqlite::Error }, +} + +#[derive(Debug, Snafu)] +pub enum ScalarSeriesError { + #[snafu(display("Failed to prepare statement."))] + PrepareScalarSeries { source: rusqlite::Error }, + #[snafu(display("Failed to execute statement."))] + ExecuteScalarSeries { source: rusqlite::Error }, +} + +#[derive(Debug, Snafu)] +#[snafu(display("Failed to check for previous series."))] +pub struct PreviousSeriesError { + source: HasPrevOrMoreError, +} + +#[derive(Debug, Snafu)] +#[snafu(display("Failed to check for more series."))] +pub struct MoreSeriesError { + source: HasPrevOrMoreError, +} + impl Series { fn from_row(row: &Row<'_>) -> Result { Ok(Self { @@ -35,34 +67,41 @@ impl Series { limit: u64, cursor: Option<&str>, sort_order: &SortOrder, - ) -> Result, DataStoreError> { + ) -> Result, MultiplSeriesError> { let pagination = Pagination::new("sort", cursor, limit, *sort_order); - pagination.paginate( - conn, - "SELECT id, name, sort FROM series", - &[], - Self::from_row, - ) + pagination + .paginate( + conn, + "SELECT id, name, sort FROM series", + &[], + Self::from_row, + ) + .context(MultiplSeriesSnafu) } /// Fetch a single series with id `id`. 
-    pub fn scalar_series(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
-        let mut stmt = conn.prepare("SELECT id, name, sort FROM series WHERE id = (:id)")?;
+    pub fn scalar_series(conn: &Connection, id: u64) -> Result<Self, ScalarSeriesError> {
+        let mut stmt = conn
+            .prepare("SELECT id, name, sort FROM series WHERE id = (:id)")
+            .context(PrepareScalarSeriesSnafu)?;
         let params = named_params! { ":id": id };
-        Ok(stmt.query_row(params, Self::from_row)?)
+        stmt.query_row(params, Self::from_row)
+            .context(ExecuteScalarSeriesSnafu)
     }
 
     /// Get the series a book with id `id` is in, as well as the book's position within the series.
     pub fn book_series(
         conn: &Connection,
         book_id: u64,
-    ) -> Result<Option<(Self, f64)>, DataStoreError> {
-        let mut stmt = conn.prepare(
-            "SELECT series.id, series.name, series.sort, books.series_index FROM series \
+    ) -> Result<Option<(Self, f64)>, SeriesBooksError> {
+        let mut stmt = conn
+            .prepare(
+                "SELECT series.id, series.name, series.sort, books.series_index FROM series \
                 INNER JOIN books_series_link ON series.id = books_series_link.series \
                 INNER JOIN books ON books.id = books_series_link.book \
                 WHERE books_series_link.book = (:id)",
-        )?;
+            )
+            .context(PrepareSeriesBooksSnafu)?;
         let params = named_params! { ":id": book_id };
 
         let from_row = |row: &Row<'_>| {
@@ -74,17 +113,22 @@ impl Series {
         match stmt.query_row(params, from_row) {
             Ok(series) => Ok(Some(series)),
             Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
-            Err(e) => Err(DataStoreError::SqliteError(e)),
+            Err(e) => Err(e).context(ExecuteSeriesBooksSnafu),
         }
     }
 
     /// Check if there are more series before the specified cursor.
-    pub fn has_previous_series(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
+    pub fn has_previous_series(
+        conn: &Connection,
+        sort_name: &str,
+    ) -> Result<bool, PreviousSeriesError> {
         Pagination::has_prev_or_more(conn, "series", sort_name, &SortOrder::DESC)
+            .context(PreviousSeriesSnafu)
     }
 
     /// Check if there are more series after the specified cursor.
-    pub fn has_more_series(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
+    pub fn has_more_series(conn: &Connection, sort_name: &str) -> Result<bool, MoreSeriesError> {
         Pagination::has_prev_or_more(conn, "series", sort_name, &SortOrder::ASC)
+            .context(MoreSeriesSnafu)
     }
 }
diff --git a/calibre-db/src/search.rs b/calibre-db/src/search.rs
index 8c6ae9a..01f7d21 100644
--- a/calibre-db/src/search.rs
+++ b/calibre-db/src/search.rs
@@ -10,8 +10,9 @@
 use std::path::Path;
 
 use r2d2::{Pool, PooledConnection};
 use r2d2_sqlite::SqliteConnectionManager;
 use rusqlite::named_params;
+use snafu::{ResultExt, Snafu};
 
-use crate::data::{book::Book, error::DataStoreError};
+use crate::data::book::Book;
 
 /// A lot of joins but only run once at startup.
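+///
+/// Builds one fts row per book: the selected metadata is aggregated into the
+/// `data` column, keyed by `book_id`, so full-text queries can match against
+/// it.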
 const SEARCH_INIT_QUERY: &str = "INSERT INTO search.fts(book_id, data)
@@ -33,20 +34,61 @@ const SEARCH_INIT_QUERY: &str = "INSERT INTO search.fts(book_id, data)
     LEFT JOIN main.series AS s ON b2s.series = s.id
     GROUP BY b.id";
 
+#[derive(Debug, Snafu)]
+pub enum EnsureSearchDbError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareEnsureSearch { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteEnsureSearch { source: rusqlite::Error },
+    #[snafu(display("Failed to attach database."))]
+    Attach { source: AttachError },
+    #[snafu(display("Failed to initialize database."))]
+    Init { source: InitError },
+}
+
+#[derive(Debug, Snafu)]
+pub enum AttachError {
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteAttach { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum InitError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareInit { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteInit { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum SearchError {
+    #[snafu(display("Failed to ensure the search db is initialized."))]
+    EnsureDb { source: EnsureSearchDbError },
+    #[snafu(display("Failed to get connection from pool."))]
+    Connection { source: r2d2::Error },
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareSearch { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteSearch { source: rusqlite::Error },
+}
+
 /// Ensure the search database is attached to the connection and
 /// initializes the data if needed.
 fn ensure_search_db(
     conn: &PooledConnection<SqliteConnectionManager>,
     db_path: &Path,
-) -> Result<(), DataStoreError> {
-    let mut stmt =
-        conn.prepare("SELECT COUNT() FROM pragma_database_list WHERE name = 'search'")?;
-    let count: u64 = stmt.query_row([], |x| x.get(0))?;
+) -> Result<(), EnsureSearchDbError> {
+    let mut stmt = conn
+        .prepare("SELECT COUNT() FROM pragma_database_list WHERE name = 'search'")
+        .context(PrepareEnsureSearchSnafu)?;
+    let count: u64 = stmt
+        .query_row([], |x| x.get(0))
+        .context(ExecuteEnsureSearchSnafu)?;
 
     let need_attachment = count == 0;
     if need_attachment {
-        attach(conn, db_path)?;
-        init(conn)?;
+        attach(conn, db_path).context(AttachSnafu)?;
+        init(conn).context(InitSnafu)?;
     }
 
     Ok(())
@@ -56,29 +98,32 @@ fn ensure_search_db(
 fn attach(
     conn: &PooledConnection<SqliteConnectionManager>,
     db_path: &Path,
-) -> Result<(), DataStoreError> {
+) -> Result<(), AttachError> {
     conn.execute(
         &format!("ATTACH DATABASE '{}' AS search", db_path.to_string_lossy()),
         [],
-    )?;
-    init(conn)?;
+    )
+    .context(ExecuteAttachSnafu)?;
 
     Ok(())
 }
 
 /// Initialise the fts virtual table.
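+///
+/// Creates the `fts` fts5 table on first use and fills it with
+/// `SEARCH_INIT_QUERY`; if the table already exists this is a no-op.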
-fn init(conn: &PooledConnection<SqliteConnectionManager>) -> Result<(), DataStoreError> {
+fn init(conn: &PooledConnection<SqliteConnectionManager>) -> Result<(), InitError> {
     let mut stmt = conn
-        .prepare("SELECT COUNT() FROM search.sqlite_master WHERE type='table' AND name = 'fts'")?;
-    let count: u64 = stmt.query_row([], |x| x.get(0))?;
+        .prepare("SELECT COUNT() FROM search.sqlite_master WHERE type='table' AND name = 'fts'")
+        .context(PrepareInitSnafu)?;
+    let count: u64 = stmt.query_row([], |x| x.get(0)).context(ExecuteInitSnafu)?;
 
     let need_init = count == 0;
     if need_init {
         conn.execute(
             "CREATE VIRTUAL TABLE search.fts USING fts5(book_id, data)",
             [],
-        )?;
-        conn.execute(SEARCH_INIT_QUERY, [])?;
+        )
+        .context(ExecuteInitSnafu)?;
+        conn.execute(SEARCH_INIT_QUERY, [])
+            .context(ExecuteInitSnafu)?;
     }
 
     Ok(())
@@ -89,15 +134,17 @@
 pub(crate) fn search(
     query: &str,
     pool: &Pool<SqliteConnectionManager>,
     search_db_path: &Path,
-) -> Result<Vec<Book>, DataStoreError> {
-    let conn = pool.get()?;
-    ensure_search_db(&conn, search_db_path)?;
+) -> Result<Vec<Book>, SearchError> {
+    let conn = pool.get().context(ConnectionSnafu)?;
+    ensure_search_db(&conn, search_db_path).context(EnsureDbSnafu)?;
 
-    let mut stmt =
-        conn.prepare("SELECT book_id FROM search.fts WHERE data MATCH (:query) ORDER BY rank")?;
+    let mut stmt = conn
+        .prepare("SELECT book_id FROM search.fts WHERE data MATCH (:query) ORDER BY rank")
+        .context(PrepareSearchSnafu)?;
     let params = named_params! { ":query": query };
     let books = stmt
-        .query_map(params, |r| -> Result<u64, rusqlite::Error> { r.get(0) })?
+        .query_map(params, |r| -> Result<u64, rusqlite::Error> { r.get(0) })
+        .context(ExecuteSearchSnafu)?
         .filter_map(Result::ok)
         .filter_map(|id| Book::scalar_book(&conn, id).ok())
         .collect();
diff --git a/little-hesinde/Cargo.toml b/little-hesinde/Cargo.toml
index 90b91eb..71cfa6e 100644
--- a/little-hesinde/Cargo.toml
+++ b/little-hesinde/Cargo.toml
@@ -8,23 +8,28 @@ repository = { workspace = true }
 description = "A very simple ebook server for a calibre library, providing a html interface as well as an OPDS feed."
[dependencies] +axum = { version = "0.8.4", features = ["http2", "tracing"] } calibre-db = { path = "../calibre-db/", version = "0.1.0" } clap = { version = "4.5.40", features = ["derive", "env"] } image = { version = "0.25.6", default-features = false, features = ["jpeg", "rayon"] } +mime_guess = "2.0.5" once_cell = "1.21.3" -poem = { version = "3.0.1", features = ["embed", "static-files"] } rust-embed = "8.7.2" sha2 = "0.10.9" serde = { workspace = true } serde_json = "1.0.140" serde_with = "3.14.0" +snafu = { workspace = true } tera = "1.20.0" -thiserror = { workspace = true } time = { workspace = true } -tokio = { version = "1.45.1", features = ["signal", "rt-multi-thread", "macros"] } +tokio = { version = "1.45.1", features = ["signal", "fs", "rt-multi-thread", "macros"] } tokio-util = "0.7.15" +tower-http = { version = "0.6.6", features = ["trace"] } tracing = "0.1.41" -tracing-subscriber = "0.3.19" +tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } +utoipa = { version = "5.4.0", features = ["axum_extras"] } +utoipa-axum = "0.2.0" +utoipa-swagger-ui = { version = "9.0.2", features = ["axum", "vendored"] } uuid = { version = "1.17.0", features = ["v4", "fast-rng"] } quick-xml = { version = "0.38.0", features = ["serialize"] } diff --git a/little-hesinde/src/api.rs b/little-hesinde/src/api.rs new file mode 100644 index 0000000..7521dc9 --- /dev/null +++ b/little-hesinde/src/api.rs @@ -0,0 +1,86 @@ +use std::{io, net::SocketAddr}; + +use serde::Deserialize; +use snafu::{ResultExt, Snafu}; +use tokio::net::TcpListener; +use utoipa::{OpenApi, ToSchema}; +use utoipa_axum::router::OpenApiRouter; +use utoipa_swagger_ui::SwaggerUi; + +use crate::app_state::AppState; + +pub mod authors; +pub mod books; +pub mod download; +pub mod error; +pub mod html; +pub mod opds; +pub mod paginated; +pub mod routes; +pub mod search; +pub mod series; +pub mod static_files; + +/// How to sort query results. +#[derive(Debug, Copy, Clone, PartialEq, Deserialize, ToSchema)] +#[serde(rename_all = "UPPERCASE")] +pub enum SortOrder { + ASC, + DESC, +} + +impl From for calibre_db::data::pagination::SortOrder { + fn from(val: SortOrder) -> Self { + match val { + SortOrder::ASC => calibre_db::data::pagination::SortOrder::ASC, + SortOrder::DESC => calibre_db::data::pagination::SortOrder::DESC, + } + } +} + +/// OpenAPI tag for all endpoints. +const TAG: &str = "little-hesinde"; + +/// OpenAPI documentation configuration. +#[derive(OpenApi)] +#[openapi( + components( + schemas( + SortOrder + ) + ), + tags( + (name = TAG, description = "Browser and OPDS access to a calibre library.") + ) + )] +struct ApiDoc; + +/// Errors that occur when starting the HTTP server. +#[derive(Debug, Snafu)] +pub enum ServeError { + #[snafu(display("Failed to bind to {address}."))] + Bind { + source: io::Error, + address: SocketAddr, + }, + #[snafu(display("Failed to run http server."))] + Serve { source: io::Error }, +} + +/// Start the HTTP API server with the given configuration. 
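+///
+/// A minimal sketch of a call site (assumes an async context and a fully
+/// constructed `AppState`; the address is illustrative):
+///
+/// ```ignore
+/// use std::net::SocketAddr;
+///
+/// let address: SocketAddr = "127.0.0.1:3000".parse().unwrap();
+/// serve(address, state).await?;
+/// ```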
+pub async fn serve(address: SocketAddr, state: AppState) -> Result<(), ServeError> {
+    let (router, api) = OpenApiRouter::with_openapi(ApiDoc::openapi())
+        .merge(routes::router(state))
+        .split_for_parts();
+
+    let router =
+        router.merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", api.clone()));
+
+    let listener = TcpListener::bind(&address)
+        .await
+        .context(BindSnafu { address })?;
+
+    axum::serve(listener, router.into_make_service())
+        .await
+        .context(ServeSnafu)
+}
diff --git a/little-hesinde/src/api/authors.rs b/little-hesinde/src/api/authors.rs
new file mode 100644
index 0000000..7ef06a0
--- /dev/null
+++ b/little-hesinde/src/api/authors.rs
@@ -0,0 +1,36 @@
+use std::path::Path;
+
+use calibre_db::{
+    calibre::{AuthorBooksError, Calibre, ScalarAuthorError},
+    data::author::Author,
+};
+
+use crate::data::book::Book;
+use snafu::{ResultExt, Snafu};
+
+use super::SortOrder;
+
+#[derive(Debug, Snafu)]
+pub enum SingleAuthorError {
+    #[snafu(display("Failed to fetch author data."))]
+    AuthorData { source: ScalarAuthorError },
+    #[snafu(display("Failed to fetch books from author."))]
+    BookData { source: AuthorBooksError },
+}
+
+pub async fn single(
+    id: u64,
+    calibre: &Calibre,
+    library_path: &Path,
+) -> Result<(Author, Vec<Book>), SingleAuthorError> {
+    let author = calibre.scalar_author(id).context(AuthorDataSnafu)?;
+    let books = calibre
+        .author_books(id, u32::MAX.into(), None, SortOrder::ASC.into())
+        .context(BookDataSnafu)?;
+    let books = books
+        .iter()
+        .filter_map(|x| Book::full_book(x, calibre, library_path))
+        .collect::<Vec<_>>();
+
+    Ok((author, books))
+}
diff --git a/little-hesinde/src/api/books.rs b/little-hesinde/src/api/books.rs
new file mode 100644
index 0000000..8c59dab
--- /dev/null
+++ b/little-hesinde/src/api/books.rs
@@ -0,0 +1,22 @@
+use std::path::Path;
+
+use calibre_db::calibre::{Calibre, RecentBooksError as CalibreRecentBooksError};
+use snafu::{ResultExt, Snafu};
+
+use crate::data::book::Book;
+
+#[derive(Debug, Snafu)]
+pub enum RecentBooksError {
+    #[snafu(display("Failed to fetch recent books."))]
+    RecentBooks { source: CalibreRecentBooksError },
+}
+
+pub async fn recent(calibre: &Calibre, library_path: &Path) -> Result<Vec<Book>, RecentBooksError> {
+    let recent_books = calibre.recent_books(25).context(RecentBooksSnafu)?;
+    let recent_books = recent_books
+        .iter()
+        .filter_map(|x| Book::full_book(x, calibre, library_path))
+        .collect::<Vec<_>>();
+
+    Ok(recent_books)
+}
diff --git a/little-hesinde/src/api/download.rs b/little-hesinde/src/api/download.rs
new file mode 100644
index 0000000..56156d4
--- /dev/null
+++ b/little-hesinde/src/api/download.rs
@@ -0,0 +1,38 @@
+use axum::{
+    body::Body,
+    http::{self, StatusCode, header},
+    response::Response,
+};
+use snafu::{ResultExt, Snafu};
+use tokio::io::AsyncRead;
+use tokio_util::io::ReaderStream;
+
+#[derive(Debug, Snafu)]
+pub enum DownloadError {
+    #[snafu(display("Failed to build response body."))]
+    Body { source: http::Error },
+}
+
+/// Handle a request for a file.
+///
+/// Must not be used directly from a route as that makes it vulnerable to path traversal attacks.
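+///
+/// A sketch of an internal call site (file name and content type are
+/// illustrative; any `AsyncRead` source works):
+///
+/// ```ignore
+/// let file = tokio::fs::File::open(&file_path).await?;
+/// let response = handler("book.epub", file, "application/epub+zip").await?;
+/// ```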
+pub async fn handler<A: AsyncRead + Send + 'static>(
+    file_name: &str,
+    reader: A,
+    content_type: &str,
+) -> Result<Response, DownloadError> {
+    let stream = ReaderStream::new(reader);
+    let body = Body::from_stream(stream);
+
+    let response = Response::builder()
+        .status(StatusCode::OK)
+        .header(
+            header::CONTENT_DISPOSITION,
+            format!("filename=\"{file_name}\""),
+        )
+        .header(header::CONTENT_TYPE, content_type)
+        .body(body)
+        .context(BodySnafu)?;
+
+    Ok(response)
+}
diff --git a/little-hesinde/src/api/error.rs b/little-hesinde/src/api/error.rs
new file mode 100644
index 0000000..1d5a943
--- /dev/null
+++ b/little-hesinde/src/api/error.rs
@@ -0,0 +1,41 @@
+//! HTTP error handling and response formatting.
+
+use axum::http::StatusCode;
+use serde::Serialize;
+use utoipa::ToSchema;
+
+/// Standard error response format for API endpoints.
+#[derive(Serialize, ToSchema)]
+pub struct ErrorResponse {
+    /// Unique identifier for tracking this error instance.
+    pub id: String,
+    /// Human-readable error message.
+    pub error: String,
+}
+
+/// Map error types to HTTP status codes.
+pub trait HttpStatus {
+    /// Return the appropriate HTTP status code for this error.
+    fn status_code(&self) -> StatusCode;
+}
+
+/// Generate IntoResponse implementation for error types with JSON formatting.
+#[macro_export]
+macro_rules! http_error {
+    ($error_type:ty) => {
+        impl axum::response::IntoResponse for $error_type {
+            fn into_response(self) -> axum::response::Response {
+                let status = self.status_code();
+                let id = uuid::Uuid::new_v4().to_string();
+                tracing::error!("{}: {}", &id, snafu::Report::from_error(&self));
+
+                let error_response = $crate::api::error::ErrorResponse {
+                    id,
+                    error: self.to_string(),
+                };
+
+                (status, axum::Json(error_response)).into_response()
+            }
+        }
+    };
+}
diff --git a/little-hesinde/src/api/html.rs b/little-hesinde/src/api/html.rs
new file mode 100644
index 0000000..db2d9ce
--- /dev/null
+++ b/little-hesinde/src/api/html.rs
@@ -0,0 +1,7 @@
+pub mod archive;
+pub mod authors;
+pub mod books;
+pub mod cover;
+pub mod recent;
+pub mod search;
+pub mod series;
diff --git a/little-hesinde/src/api/html/archive.rs b/little-hesinde/src/api/html/archive.rs
new file mode 100644
index 0000000..46cf1c0
--- /dev/null
+++ b/little-hesinde/src/api/html/archive.rs
@@ -0,0 +1,45 @@
+use axum::{http::StatusCode, response::Response};
+use snafu::{ResultExt, Snafu};
+
+use crate::{
+    APP_NAME, APP_VERSION,
+    api::{
+        TAG,
+        download::{self, DownloadError},
+        error::{ErrorResponse, HttpStatus},
+    },
+    http_error,
+};
+
+const SOURCE_ARCHIVE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/archive.zip"));
+
+#[derive(Debug, Snafu)]
+pub enum ArchiveError {
+    #[snafu(display("Failed to stream source code archive."))]
+    Download { source: DownloadError },
+}
+impl HttpStatus for ArchiveError {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            ArchiveError::Download { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+        }
+    }
+}
+http_error!(ArchiveError);
+
+/// Handle a request for the source code of the server.
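+///
+/// The zip archive is embedded into the binary at build time (see
+/// `SOURCE_ARCHIVE` above), so serving it requires no filesystem access.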
+#[utoipa::path( + get, + path = "/archive", + tag = TAG, + responses( + (status = OK, content_type = "application/zip"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn handler() -> Result { + let file_name = format!("{APP_NAME}-{APP_VERSION}.zip"); + download::handler(&file_name, SOURCE_ARCHIVE, "application/zip") + .await + .context(DownloadSnafu) +} diff --git a/little-hesinde/src/api/html/authors.rs b/little-hesinde/src/api/html/authors.rs new file mode 100644 index 0000000..8f0533c --- /dev/null +++ b/little-hesinde/src/api/html/authors.rs @@ -0,0 +1,134 @@ +use std::sync::Arc; + +use crate::{ + api::{ + SortOrder, TAG, + authors::{self, SingleAuthorError}, + error::{ErrorResponse, HttpStatus}, + paginated::{self, PaginationError}, + }, + app_state::AppState, + http_error, + templates::TEMPLATES, +}; +use axum::{ + extract::{Path, State}, + http::StatusCode, + response::{Html, IntoResponse, Response}, +}; +use snafu::{ResultExt, Snafu}; +use tera::Context; + +#[derive(Debug, Snafu)] +pub enum RetrieveError { + #[snafu(display("Failed to fetch pagination data."))] + Authors { source: AuthorError }, +} +impl HttpStatus for RetrieveError { + fn status_code(&self) -> StatusCode { + match self { + RetrieveError::Authors { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(RetrieveError); + +#[utoipa::path( + get, + path = "/authors", + tag = TAG, + responses( + (status = OK, content_type = "text/html"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn handler_init(State(state): State>) -> Result { + authors(&state, None, SortOrder::ASC) + .await + .context(AuthorsSnafu) +} + +#[utoipa::path( + get, + path = "/authors/{cursor}/{sort_order}", + tag = TAG, + responses( + (status = OK, content_type = "text/html"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn handler( + Path((cursor, sort_order)): Path<(String, SortOrder)>, + State(state): State>, +) -> Result { + authors(&state, Some(&cursor), sort_order) + .await + .context(AuthorsSnafu) +} + +#[derive(Debug, Snafu)] +pub enum AuthorError { + #[snafu(display("Failed to fetch pagination data."))] + Pagination { source: PaginationError }, +} + +async fn authors( + state: &Arc, + cursor: Option<&str>, + sort_order: SortOrder, +) -> Result { + paginated::render( + "authors", + || state.calibre.authors(25, cursor, &sort_order.into()), + |author| author.sort.clone(), + |cursor| state.calibre.has_previous_authors(cursor), + |cursor| state.calibre.has_more_authors(cursor), + ) + .context(PaginationSnafu) +} + +#[derive(Debug, Snafu)] +pub enum SingleError { + #[snafu(display("Failed to fetch author data."))] + Data { source: SingleAuthorError }, + #[snafu(display("Failed to render template."))] + Render { source: tera::Error }, +} +impl HttpStatus for SingleError { + fn status_code(&self) -> StatusCode { + match self { + SingleError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + SingleError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(SingleError); + +#[utoipa::path( + get, + path = "/authors/{id}", + tag = TAG, + responses( + (status = OK, content_type = "text/html"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn single( + Path(id): Path, + State(state): State>, +) -> Result { + let (author, books) = authors::single(id, &state.calibre, &state.config.library_path) + .await + 
.context(DataSnafu)?; + + let mut context = Context::new(); + context.insert("title", &author.name); + context.insert("nav", "authors"); + context.insert("books", &books); + + Ok(TEMPLATES + .render("book_list", &context) + .context(RenderSnafu) + .map(Html)? + .into_response()) +} diff --git a/little-hesinde/src/api/html/books.rs b/little-hesinde/src/api/html/books.rs new file mode 100644 index 0000000..0fe59d7 --- /dev/null +++ b/little-hesinde/src/api/html/books.rs @@ -0,0 +1,155 @@ +use std::{io, sync::Arc}; + +use crate::{ + api::{ + SortOrder, TAG, download, + error::{ErrorResponse, HttpStatus}, + paginated::{self, PaginationError}, + }, + app_state::AppState, + data::book::{Book, Format}, + http_error, + opds::media_type::MediaType, +}; +use axum::{ + extract::{Path, State}, + http::StatusCode, + response::Response, +}; +use calibre_db::calibre::ScalarBookError; +use snafu::{ResultExt, Snafu}; +use tokio::fs::File; + +#[derive(Debug, Snafu)] +pub enum RetrieveError { + #[snafu(display("Failed to fetch pagination data."))] + Books { source: BookError }, +} +impl HttpStatus for RetrieveError { + fn status_code(&self) -> StatusCode { + match self { + RetrieveError::Books { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(RetrieveError); + +#[utoipa::path( + get, + path = "/books", + tag = TAG, + responses( + (status = OK, content_type = "text/html"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn handler_init(State(state): State>) -> Result { + books(&state, None, SortOrder::ASC) + .await + .context(BooksSnafu) +} + +#[utoipa::path( + get, + path = "/books/{cursor}/{sort_order}", + tag = TAG, + responses( + (status = OK, content_type = "text/html"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn handler( + Path((cursor, sort_order)): Path<(String, SortOrder)>, + State(state): State>, +) -> Result { + books(&state, Some(&cursor), sort_order) + .await + .context(BooksSnafu) +} + +#[derive(Debug, Snafu)] +pub enum BookError { + #[snafu(display("Failed to fetch pagination data."))] + Pagination { source: PaginationError }, +} + +async fn books( + state: &Arc, + cursor: Option<&str>, + sort_order: SortOrder, +) -> Result { + paginated::render( + "books", + || { + state + .calibre + .books(25, cursor, &sort_order.into()) + .map(|x| { + x.iter() + .filter_map(|y| { + Book::full_book(y, &state.calibre, &state.config.library_path) + }) + .collect() + }) + }, + |book| book.data.sort.clone(), + |cursor| state.calibre.has_previous_books(cursor), + |cursor| state.calibre.has_more_books(cursor), + ) + .context(PaginationSnafu) +} + +#[derive(Debug, Snafu)] +pub enum DownloadError { + #[snafu(display("Failed to fetch book data."))] + BookData { source: ScalarBookError }, + #[snafu(display("No such book."))] + NotFound, + #[snafu(display("No such book."))] + FileNotFound { source: io::Error }, + #[snafu(display("Failed to stream book file."))] + Stream { source: download::DownloadError }, +} +impl HttpStatus for DownloadError { + fn status_code(&self) -> StatusCode { + match self { + DownloadError::BookData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + DownloadError::NotFound => StatusCode::NOT_FOUND, + DownloadError::FileNotFound { source: _ } => StatusCode::NOT_FOUND, + DownloadError::Stream { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(DownloadError); + +#[utoipa::path( + get, + path = "/book/{id}/{format}", + tag = TAG, + responses( + (status 
= OK, content_type = "application/*"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn download( + Path((id, format)): Path<(u64, String)>, + State(state): State>, +) -> Result { + let book = state.calibre.scalar_book(id).context(BookDataSnafu)?; + let book = Book::full_book(&book, &state.calibre, &state.config.library_path) + .ok_or(NotFoundSnafu.build())?; + let format = Format(format); + let file_name = book.formats.get(&format).ok_or(NotFoundSnafu.build())?; + let file_path = state + .config + .library_path + .join(book.data.path) + .join(file_name); + let file = File::open(file_path).await.context(FileNotFoundSnafu)?; + let content_type: MediaType = format.into(); + let content_type = format!("{content_type}"); + + download::handler(file_name, file, &content_type) + .await + .context(StreamSnafu) +} diff --git a/little-hesinde/src/api/html/cover.rs b/little-hesinde/src/api/html/cover.rs new file mode 100644 index 0000000..a6d79ae --- /dev/null +++ b/little-hesinde/src/api/html/cover.rs @@ -0,0 +1,125 @@ +use axum::{ + extract::{Path, State}, + http::StatusCode, + response::Response, +}; +use calibre_db::calibre::{Calibre, ScalarBookError}; +use snafu::{ResultExt, Snafu}; +use std::{fs::File, io, path::Path as FilePath, sync::Arc}; +use tokio::fs::File as AsyncFile; + +use crate::{ + api::{ + TAG, + download::{self, DownloadError}, + error::{ErrorResponse, HttpStatus}, + }, + app_state::AppState, + cache::{self, RetrieveThumbnailError}, + http_error, +}; + +#[derive(Debug, Snafu)] +pub enum RetrieveError { + #[snafu(display("Failed to fetch cover."))] + Cover { source: CoverError }, + #[snafu(display("Failed to open cover."))] + CoverOpen { source: io::Error }, +} +impl HttpStatus for RetrieveError { + fn status_code(&self) -> StatusCode { + match self { + RetrieveError::Cover { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + RetrieveError::CoverOpen { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(RetrieveError); + +#[utoipa::path( + get, + path = "/cover/{id}/thumbnail", + tag = TAG, + responses( + (status = OK, content_type = "image/jpeg"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn thumbnail( + Path(id): Path, + State(state): State>, +) -> Result { + cover( + &state.calibre, + &state.config.library_path, + &state.config.cache_path, + id, + |cover_path, cache_path| { + cache::get_thumbnail(cover_path, cache_path).context(ThumbnailSnafu) + }, + ) + .await + .context(CoverSnafu) +} + +#[utoipa::path( + get, + path = "/cover/{id}", + tag = TAG, + responses( + (status = OK, content_type = "image/jpeg"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn full( + Path(id): Path, + State(state): State>, +) -> Result { + cover( + &state.calibre, + &state.config.library_path, + &state.config.cache_path, + id, + |cover_path, _| File::open(cover_path).context(FileOpenSnafu), + ) + .await + .context(CoverSnafu) +} + +#[derive(Debug, Snafu)] +pub enum CoverError { + #[snafu(display("Failed to fetch book data."))] + BookData { source: ScalarBookError }, + #[snafu(display("No such cover"))] + NotFound { source: CoverFetchError }, + #[snafu(display("Failed to fetch cover thumbnail."))] + StreamCover { source: DownloadError }, +} + +#[derive(Debug, Snafu)] +pub enum CoverFetchError { + #[snafu(display("Failed to fetch cover thumbnail."))] + Thumbnail { source: RetrieveThumbnailError }, + #[snafu(display("Failed to open cover 
file."))] + FileOpen { source: io::Error }, +} + +async fn cover( + calibre: &Calibre, + library_path: &FilePath, + cache_path: &FilePath, + id: u64, + f: F, +) -> Result +where + F: Fn(&FilePath, &FilePath) -> Result, +{ + let book = calibre.scalar_book(id).context(BookDataSnafu)?; + let cover_path = library_path.join(book.path).join("cover.jpg"); + + let cover = f(&cover_path, cache_path).context(NotFoundSnafu)?; + let cover = AsyncFile::from_std(cover); + download::handler("cover.jpg", cover, "image/jpeg") + .await + .context(StreamCoverSnafu) +} diff --git a/little-hesinde/src/api/html/recent.rs b/little-hesinde/src/api/html/recent.rs new file mode 100644 index 0000000..11877b0 --- /dev/null +++ b/little-hesinde/src/api/html/recent.rs @@ -0,0 +1,70 @@ +//! Handle requests for recent books. + +use std::sync::Arc; + +use axum::{ + extract::State, + http::StatusCode, + response::{Html, IntoResponse, Response}, +}; +use snafu::{ResultExt, Snafu}; +use tera::Context; + +use crate::{ + api::{ + TAG, + books::{self, RecentBooksError}, + error::{ErrorResponse, HttpStatus}, + }, + app_state::AppState, + http_error, + templates::TEMPLATES, +}; + +/// Errors that occur during query processing. +#[derive(Debug, Snafu)] +pub enum RecentError { + #[snafu(display("Failed to fetch recent books."))] + RecentBooks { source: RecentBooksError }, + #[snafu(display("Failed to render template."))] + Template { source: tera::Error }, +} +impl HttpStatus for RecentError { + fn status_code(&self) -> StatusCode { + match self { + RecentError::RecentBooks { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + RecentError::Template { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(RecentError); + +/// Get recently added books +/// +/// Provides a list of the 25 most recently added books. +/// The format can be either HTML or an OPDS feed, depending on the `Accept` header. +#[utoipa::path( + get, + path = "/recent", + tag = TAG, + responses( + (status = 200, description = "List of recent books", content_type = "text/html"), + (status = 500, description = "Error retrieving books from database", body = ErrorResponse) + ) +)] +pub async fn handler(State(state): State>) -> Result { + let recent_books = books::recent(&state.calibre, &state.config.library_path) + .await + .context(RecentBooksSnafu)?; + + let mut context = Context::new(); + context.insert("title", ""); + context.insert("nav", "recent"); + context.insert("books", &recent_books); + + Ok(TEMPLATES + .render("book_list", &context) + .map(Html) + .context(TemplateSnafu)? + .into_response()) +} diff --git a/little-hesinde/src/api/html/search.rs b/little-hesinde/src/api/html/search.rs new file mode 100644 index 0000000..8666259 --- /dev/null +++ b/little-hesinde/src/api/html/search.rs @@ -0,0 +1,73 @@ +use std::sync::Arc; + +use axum::{ + extract::{Query, State}, + http::StatusCode, + response::{Html, IntoResponse, Response}, +}; +use serde::Deserialize; +use snafu::{ResultExt, Snafu}; +use tera::Context; + +use crate::{ + api::{ + TAG, + error::{ErrorResponse, HttpStatus}, + search::{self, SearchQueryError}, + }, + app_state::AppState, + http_error, + templates::TEMPLATES, +}; + +/// Errors that occur during query processing. 
+#[derive(Debug, Snafu)] +pub enum SearchError { + #[snafu(display("Failed to search for books."))] + Query { source: SearchQueryError }, + #[snafu(display("Failed to render template."))] + Template { source: tera::Error }, +} +impl HttpStatus for SearchError { + fn status_code(&self) -> StatusCode { + match self { + SearchError::Query { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + SearchError::Template { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(SearchError); + +#[derive(Deserialize)] +pub struct Params { + /// Query for a search request. + query: String, +} +#[utoipa::path( + get, + path = "/search", + tag = TAG, + responses( + (status = 200, content_type = "text/html"), + (status = 500, description = "Error retrieving books from database", body = ErrorResponse) + ) +)] +pub async fn handler( + Query(params): Query, + State(state): State>, +) -> Result { + let books = search::query(¶ms.query, &state.calibre, &state.config.library_path) + .await + .context(QuerySnafu)?; + + let mut context = Context::new(); + context.insert("title", "Search Results"); + context.insert("nav", "search"); + context.insert("books", &books); + + Ok(TEMPLATES + .render("book_list", &context) + .context(TemplateSnafu) + .map(Html)? + .into_response()) +} diff --git a/little-hesinde/src/api/html/series.rs b/little-hesinde/src/api/html/series.rs new file mode 100644 index 0000000..791710b --- /dev/null +++ b/little-hesinde/src/api/html/series.rs @@ -0,0 +1,134 @@ +use std::sync::Arc; + +use crate::{ + api::{ + SortOrder, TAG, + error::{ErrorResponse, HttpStatus}, + paginated::{self, PaginationError}, + series::{self, SingleSeriesError}, + }, + app_state::AppState, + http_error, + templates::TEMPLATES, +}; +use axum::{ + extract::{Path, State}, + http::StatusCode, + response::{Html, IntoResponse, Response}, +}; +use snafu::{ResultExt, Snafu}; +use tera::Context; + +#[derive(Debug, Snafu)] +pub enum RetrieveError { + #[snafu(display("Failed to fetch series data."))] + Series { source: SeriesError }, +} +impl HttpStatus for RetrieveError { + fn status_code(&self) -> StatusCode { + match self { + RetrieveError::Series { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(RetrieveError); + +#[utoipa::path( + get, + path = "/series", + tag = TAG, + responses( + (status = OK, content_type = "text/html"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn handler_init(State(state): State>) -> Result { + series(&state, None, SortOrder::ASC) + .await + .context(SeriesSnafu) +} + +#[utoipa::path( + get, + path = "/series/{cursor}/{sort_order}", + tag = TAG, + responses( + (status = OK, content_type = "text/html"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn handler( + Path((cursor, sort_order)): Path<(String, SortOrder)>, + State(state): State>, +) -> Result { + series(&state, Some(&cursor), sort_order) + .await + .context(SeriesSnafu) +} + +#[derive(Debug, Snafu)] +pub enum SeriesError { + #[snafu(display("Failed to fetch pagination data."))] + Pagination { source: PaginationError }, +} + +async fn series( + state: &Arc, + cursor: Option<&str>, + sort_order: SortOrder, +) -> Result { + paginated::render( + "series", + || state.calibre.series(25, cursor, &sort_order.into()), + |series| series.sort.clone(), + |cursor| state.calibre.has_previous_series(cursor), + |cursor| state.calibre.has_more_series(cursor), + ) + .context(PaginationSnafu) +} + +#[derive(Debug, Snafu)] +pub 
enum SingleError {
+    #[snafu(display("Failed to fetch series data."))]
+    Data { source: SingleSeriesError },
+    #[snafu(display("Failed to render template."))]
+    Render { source: tera::Error },
+}
+impl HttpStatus for SingleError {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            SingleError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+            SingleError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+        }
+    }
+}
+http_error!(SingleError);
+
+#[utoipa::path(
+    get,
+    path = "/series/{id}",
+    tag = TAG,
+    responses(
+        (status = OK, content_type = "text/html"),
+        (status = 500, description = "Server failure.", body = ErrorResponse)
+    )
+)]
+pub async fn single(
+    Path(id): Path<u64>,
+    State(state): State<Arc<AppState>>,
+) -> Result<Response, SingleError> {
+    let (series, books) = series::single(id, &state.calibre, &state.config.library_path)
+        .await
+        .context(DataSnafu)?;
+
+    let mut context = Context::new();
+    context.insert("title", &series.name);
+    context.insert("nav", "series");
+    context.insert("books", &books);
+
+    Ok(TEMPLATES
+        .render("book_list", &context)
+        .context(RenderSnafu)
+        .map(Html)?
+        .into_response())
+}
diff --git a/little-hesinde/src/api/opds.rs b/little-hesinde/src/api/opds.rs
new file mode 100644
index 0000000..28e3782
--- /dev/null
+++ b/little-hesinde/src/api/opds.rs
@@ -0,0 +1,5 @@
+pub mod authors;
+pub mod books;
+pub mod recent;
+pub mod search;
+pub mod series;
diff --git a/little-hesinde/src/api/opds/authors.rs b/little-hesinde/src/api/opds/authors.rs
new file mode 100644
index 0000000..998d5e7
--- /dev/null
+++ b/little-hesinde/src/api/opds/authors.rs
@@ -0,0 +1,138 @@
+use std::sync::Arc;
+
+use crate::{
+    APP_NAME,
+    api::{
+        SortOrder, TAG,
+        authors::{self, SingleAuthorError},
+        error::{ErrorResponse, HttpStatus},
+    },
+    app_state::AppState,
+    http_error,
+    opds::{
+        entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
+        relation::Relation,
+    },
+};
+use axum::{
+    extract::{Path, State},
+    http::{StatusCode, header},
+    response::{IntoResponse, Response},
+};
+use calibre_db::{calibre::AuthorsError as CalibreAuthorsError, data::author::Author as DbAuthor};
+use snafu::{ResultExt, Snafu};
+use time::OffsetDateTime;
+
+#[derive(Debug, Snafu)]
+pub enum AuthorsError {
+    #[snafu(display("Failed to fetch author data."))]
+    Data { source: CalibreAuthorsError },
+    #[snafu(display("Failed to render author data."))]
+    Render { source: AsXmlError },
+}
+impl HttpStatus for AuthorsError {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            AuthorsError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+            AuthorsError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+        }
+    }
+}
+http_error!(AuthorsError);
+
+/// Render all authors as OPDS entries embedded in a feed.
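+///
+/// The response is served as `application/atom+xml`: every author becomes
+/// one Atom entry and the feed carries a self link to `/opds/authors`.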
+#[utoipa::path(
+    get,
+    path = "/authors",
+    tag = TAG,
+    responses(
+        (status = OK, content_type = "application/atom+xml"),
+        (status = 500, description = "Server failure.", body = ErrorResponse)
+    )
+)]
+pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, AuthorsError> {
+    let authors: Vec<DbAuthor> = state
+        .calibre
+        .authors(u32::MAX.into(), None, &SortOrder::ASC.into())
+        .context(DataSnafu)?;
+
+    let entries: Vec<Entry> = authors.into_iter().map(Entry::from).collect();
+    let now = OffsetDateTime::now_utc();
+
+    let self_link = Link {
+        href: "/opds/authors".to_string(),
+        media_type: MediaType::Navigation,
+        rel: Relation::Myself,
+        title: None,
+        count: None,
+    };
+    let feed = Feed::create(
+        now,
+        &format!("{APP_NAME}:authors"),
+        "All Authors",
+        self_link,
+        vec![],
+        entries,
+    );
+    let xml = feed.as_xml().context(RenderSnafu)?;
+
+    Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
+}
+
+#[derive(Debug, Snafu)]
+pub enum SingleError {
+    #[snafu(display("Failed to fetch author data."))]
+    AuthorData { source: SingleAuthorError },
+    #[snafu(display("Failed to render feed."))]
+    FeedRender { source: AsXmlError },
+}
+impl HttpStatus for SingleError {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            SingleError::AuthorData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+            SingleError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+        }
+    }
+}
+http_error!(SingleError);
+
+/// Render a single author as an OPDS entry embedded in a feed.
+#[utoipa::path(
+    get,
+    path = "/authors/{id}",
+    tag = TAG,
+    responses(
+        (status = OK, content_type = "application/atom+xml"),
+        (status = 500, description = "Server failure.", body = ErrorResponse)
+    )
+)]
+pub async fn single(
+    Path(id): Path<u64>,
+    State(state): State<Arc<AppState>>,
+) -> Result<Response, SingleError> {
+    let (author, books) = authors::single(id, &state.calibre, &state.config.library_path)
+        .await
+        .context(AuthorDataSnafu)?;
+
+    let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
+    let now = OffsetDateTime::now_utc();
+
+    let self_link = Link {
+        href: format!("/opds/authors/{}", author.id),
+        media_type: MediaType::Navigation,
+        rel: Relation::Myself,
+        title: None,
+        count: None,
+    };
+    let feed = Feed::create(
+        now,
+        &format!("{APP_NAME}:author:{}", author.id),
+        &author.name,
+        self_link,
+        vec![],
+        entries,
+    );
+    let xml = feed.as_xml().context(FeedRenderSnafu)?;
+
+    Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
+}
diff --git a/little-hesinde/src/api/opds/books.rs b/little-hesinde/src/api/opds/books.rs
new file mode 100644
index 0000000..acbe5ba
--- /dev/null
+++ b/little-hesinde/src/api/opds/books.rs
@@ -0,0 +1,95 @@
+use std::sync::Arc;
+
+use crate::{
+    APP_NAME,
+    api::{
+        SortOrder, TAG,
+        error::{ErrorResponse, HttpStatus},
+    },
+    app_state::AppState,
+    data::book::Book,
+    http_error,
+    opds::{
+        entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
+        relation::Relation,
+    },
+};
+use axum::{
+    extract::State,
+    http::{StatusCode, header},
+    response::{IntoResponse, Response},
+};
+use calibre_db::calibre::BooksError;
+use snafu::{ResultExt, Snafu};
+use time::OffsetDateTime;
+
+#[derive(Debug, Snafu)]
+pub enum OpdsBooksError {
+    #[snafu(display("Failed to fetch book data."))]
+    Data { source: BooksError },
+    #[snafu(display("Failed to render book data."))]
+    Render { source: RenderError },
+}
+impl HttpStatus for OpdsBooksError {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            OpdsBooksError::Data { source: _ } =>
StatusCode::INTERNAL_SERVER_ERROR,
+            OpdsBooksError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+        }
+    }
+}
+http_error!(OpdsBooksError);
+
+/// Render all books as OPDS entries embedded in a feed.
+#[utoipa::path(
+    get,
+    path = "/books",
+    tag = TAG,
+    responses(
+        (status = OK, content_type = "application/atom+xml"),
+        (status = 500, description = "Server failure.", body = ErrorResponse)
+    )
+)]
+pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, OpdsBooksError> {
+    let books: Vec<Book> = state
+        .calibre
+        .books(u32::MAX.into(), None, &SortOrder::ASC.into())
+        .map(|x| {
+            x.iter()
+                .filter_map(|y| Book::full_book(y, &state.calibre, &state.config.library_path))
+                .collect()
+        })
+        .context(DataSnafu)?;
+
+    render_books(books).await.context(RenderSnafu)
+}
+
+#[derive(Debug, Snafu)]
+pub enum RenderError {
+    #[snafu(display("Failed to create opds feed."))]
+    Feed { source: AsXmlError },
+}
+
+/// Render a list of books as OPDS entries in a feed.
+pub(crate) async fn render_books(books: Vec<Book>) -> Result<Response, RenderError> {
+    let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
+    let now = OffsetDateTime::now_utc();
+
+    let self_link = Link {
+        href: "/opds/books".to_string(),
+        media_type: MediaType::Navigation,
+        rel: Relation::Myself,
+        title: None,
+        count: None,
+    };
+    let feed = Feed::create(
+        now,
+        &format!("{APP_NAME}:books"),
+        "All Books",
+        self_link,
+        vec![],
+        entries,
+    );
+    let xml = feed.as_xml().context(FeedSnafu)?;
+
+    Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
+}
diff --git a/little-hesinde/src/api/opds/recent.rs b/little-hesinde/src/api/opds/recent.rs
new file mode 100644
index 0000000..74ed88a
--- /dev/null
+++ b/little-hesinde/src/api/opds/recent.rs
@@ -0,0 +1,76 @@
+use std::sync::Arc;
+
+use crate::{
+    APP_NAME,
+    api::{
+        TAG,
+        books::{self, RecentBooksError},
+        error::{ErrorResponse, HttpStatus},
+    },
+    app_state::AppState,
+    http_error,
+    opds::{
+        entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
+        relation::Relation,
+    },
+};
+use axum::{
+    extract::State,
+    http::{StatusCode, header},
+    response::{IntoResponse, Response},
+};
+use snafu::{ResultExt, Snafu};
+use time::OffsetDateTime;
+
+#[derive(Debug, Snafu)]
+pub enum RecentError {
+    #[snafu(display("Failed to fetch recent books."))]
+    Data { source: RecentBooksError },
+    #[snafu(display("Failed to render feed."))]
+    Render { source: AsXmlError },
+}
+impl HttpStatus for RecentError {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            RecentError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+            RecentError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
+        }
+    }
+}
+http_error!(RecentError);
+
+#[utoipa::path(
+    get,
+    path = "/recent",
+    tag = TAG,
+    responses(
+        (status = OK, content_type = "application/atom+xml"),
+        (status = 500, description = "Server failure.", body = ErrorResponse)
+    )
+)]
+pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, RecentError> {
+    let recent_books = books::recent(&state.calibre, &state.config.library_path)
+        .await
+        .context(DataSnafu)?;
+    let entries: Vec<Entry> = recent_books.into_iter().map(Entry::from).collect();
+    let now = OffsetDateTime::now_utc();
+
+    let self_link = Link {
+        href: "/opds/recent".to_string(),
+        media_type: MediaType::Navigation,
+        rel: Relation::Myself,
+        title: None,
+        count: None,
+    };
+    let feed = Feed::create(
+        now,
+        &format!("{APP_NAME}:recentbooks"),
+        "Recent Books",
+        self_link,
+        vec![],
+        entries,
+    );
+    let xml = feed.as_xml().context(RenderSnafu)?;
+
+
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response()) +} diff --git a/little-hesinde/src/api/opds/search.rs b/little-hesinde/src/api/opds/search.rs new file mode 100644 index 0000000..198f5e6 --- /dev/null +++ b/little-hesinde/src/api/opds/search.rs @@ -0,0 +1,107 @@ +use std::sync::Arc; + +use crate::{ + APP_NAME, + api::{ + TAG, + error::{ErrorResponse, HttpStatus}, + search::{self, SearchQueryError}, + }, + app_state::AppState, + http_error, + opds::{ + error::AsXmlError, + search::{OpenSearchDescription, Url}, + }, +}; +use axum::{ + extract::{Query, State}, + http::{StatusCode, header}, + response::{IntoResponse, Response}, +}; +use serde::Deserialize; +use snafu::{ResultExt, Snafu}; + +use super::books::{RenderError, render_books}; + +#[derive(Debug, Snafu)] +pub enum SearchError { + #[snafu(display("Failed to query books."))] + Query { source: SearchQueryError }, + #[snafu(display("Failed to render feed."))] + Render { source: RenderError }, +} +impl HttpStatus for SearchError { + fn status_code(&self) -> StatusCode { + match self { + SearchError::Query { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + SearchError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(SearchError); + +#[derive(Deserialize)] +pub struct Params { + /// Query for a search request. + query: String, +} +#[utoipa::path( + get, + path = "/search", + tag = TAG, + responses( + (status = 200, content_type = "application/atom+xml"), + (status = 500, description = "Error retrieving books from database", body = ErrorResponse) + ) +)] +pub async fn handler( + Query(params): Query, + State(state): State>, +) -> Result { + let books = search::query(¶ms.query, &state.calibre, &state.config.library_path) + .await + .context(QuerySnafu)?; + + render_books(books).await.context(RenderSnafu) +} + +#[derive(Debug, Snafu)] +pub enum InfoError { + #[snafu(display("Failed to render feed."))] + FeedRender { source: AsXmlError }, +} +impl HttpStatus for InfoError { + fn status_code(&self) -> StatusCode { + match self { + InfoError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +http_error!(InfoError); + +#[utoipa::path( + get, + path = "/search/info", + tag = TAG, + responses( + (status = 200, content_type = "application/atom+xml"), + (status = 500, description = "Internal error", body = ErrorResponse) + ) +)] +pub async fn info() -> Result { + let search = OpenSearchDescription { + short_name: APP_NAME.to_string(), + description: "Search for ebooks".to_string(), + input_encoding: "UTF-8".to_string(), + output_encoding: "UTF-8".to_string(), + url: Url { + type_name: "application/atom+xml".to_string(), + template: "/opds/search?query={searchTerms}".to_string(), + }, + }; + let xml = search.as_xml().context(FeedRenderSnafu)?; + + Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response()) +} diff --git a/little-hesinde/src/api/opds/series.rs b/little-hesinde/src/api/opds/series.rs new file mode 100644 index 0000000..ca42127 --- /dev/null +++ b/little-hesinde/src/api/opds/series.rs @@ -0,0 +1,138 @@ +use std::sync::Arc; + +use crate::{ + APP_NAME, + api::{ + SortOrder, TAG, + error::{ErrorResponse, HttpStatus}, + series::{self, SingleSeriesError}, + }, + app_state::AppState, + http_error, + opds::{ + entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType, + relation::Relation, + }, +}; +use axum::{ + extract::{Path, State}, + http::{StatusCode, header}, + response::{IntoResponse, Response}, +}; +use 
calibre_db::calibre::MultipleSeriesError; +use snafu::{ResultExt, Snafu}; +use time::OffsetDateTime; + +#[derive(Debug, Snafu)] +pub enum SeriesError { + #[snafu(display("Failed to fetch series data."))] + Data { source: MultipleSeriesError }, + #[snafu(display("Failed to render series data."))] + Render { source: AsXmlError }, +} +impl HttpStatus for SeriesError { + fn status_code(&self) -> StatusCode { + match self { + SeriesError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + SeriesError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(SeriesError); + +/// Render all series as OPDS entries embedded in a feed. +#[utoipa::path( + get, + path = "/series", + tag = TAG, + responses( + (status = OK, content_type = "application/atom+xml"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn handler(State(state): State>) -> Result { + let series = state + .calibre + .series(u32::MAX.into(), None, &SortOrder::ASC.into()) + .context(DataSnafu)?; + + let entries: Vec = series.into_iter().map(Entry::from).collect(); + let now = OffsetDateTime::now_utc(); + + let self_link = Link { + href: "/opds/series".to_string(), + media_type: MediaType::Navigation, + rel: Relation::Myself, + title: None, + count: None, + }; + let feed = Feed::create( + now, + &format!("{APP_NAME}:series"), + "All Series", + self_link, + vec![], + entries, + ); + let xml = feed.as_xml().context(RenderSnafu)?; + + Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response()) +} + +#[derive(Debug, Snafu)] +pub enum SingleError { + #[snafu(display("Failed to fetch series data."))] + SeriesData { source: SingleSeriesError }, + #[snafu(display("Failed to render feed."))] + FeedRender { source: AsXmlError }, +} +impl HttpStatus for SingleError { + fn status_code(&self) -> StatusCode { + match self { + SingleError::SeriesData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + SingleError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} +http_error!(SingleError); + +/// Render a single series as an OPDS entry embedded in a feed. +#[utoipa::path( + get, + path = "/series/{id}", + tag = TAG, + responses( + (status = OK, content_type = "application/atom+xml"), + (status = 500, description = "Server failure.", body = ErrorResponse) + ) +)] +pub async fn single( + Path(id): Path, + State(state): State>, +) -> Result { + let (series, books) = series::single(id, &state.calibre, &state.config.library_path) + .await + .context(SeriesDataSnafu)?; + + let entries: Vec = books.into_iter().map(Entry::from).collect(); + let now = OffsetDateTime::now_utc(); + + let self_link = Link { + href: format!("/opds/series/{}", series.id), + media_type: MediaType::Navigation, + rel: Relation::Myself, + title: None, + count: None, + }; + let feed = Feed::create( + now, + &format!("{APP_NAME}:series:{}", series.id), + &series.name, + self_link, + vec![], + entries, + ); + let xml = feed.as_xml().context(FeedRenderSnafu)?; + + Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response()) +} diff --git a/little-hesinde/src/handlers/paginated.rs b/little-hesinde/src/api/paginated.rs similarity index 62% rename from little-hesinde/src/handlers/paginated.rs rename to little-hesinde/src/api/paginated.rs index deb972e..6e48329 100644 --- a/little-hesinde/src/handlers/paginated.rs +++ b/little-hesinde/src/api/paginated.rs @@ -1,13 +1,25 @@ //! Deal with cursor pagination. 
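+//!
+//! Instead of numeric offsets, a page is addressed by the sort key of the
+//! first or last item currently shown; `render` fetches one page and then
+//! probes in both directions to decide whether "previous" and "next" links
+//! are needed.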
-use super::error::HandlerError;
 use crate::templates::TEMPLATES;
+use axum::response::{Html, IntoResponse, Response};
 use calibre_db::data::error::DataStoreError;
-use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
 use serde::Serialize;
+use snafu::{ResultExt, Snafu};
 use std::fmt::Debug;
 use tera::Context;
 
+#[derive(Debug, Snafu)]
+pub enum PaginationError {
+    #[snafu(display("Failed to fetch pagination data."))]
+    Fetch { source: DataStoreError },
+    #[snafu(display("Failed to render template."))]
+    Template { source: tera::Error },
+    #[snafu(display("Failed to fetch previous items."))]
+    Previous { source: DataStoreError },
+    #[snafu(display("Failed to fetch more items."))]
+    More { source: DataStoreError },
+}
+
 /// Render a tera template with paginated items and generate back and forth links.
 pub fn render<T, F, S, P, M>(
     template: &str,
@@ -15,7 +27,7 @@
     sort_field: S,
     has_previous: P,
     has_more: M,
-) -> Result<Response, poem::Error>
+) -> Result<Response, PaginationError>
 where
     F: Fn() -> Result<Vec<T>, DataStoreError>,
     S: Fn(&T) -> String,
@@ -25,11 +37,11 @@ where
     let mut context = Context::new();
     context.insert("nav", template);
 
-    let items = fetcher().map_err(HandlerError::DataError)?;
+    let items = fetcher().context(FetchSnafu)?;
     if items.is_empty() {
         return Ok(TEMPLATES
             .render("empty", &context)
-            .map_err(InternalServerError)
+            .context(TemplateSnafu)
             .map(Html)?
             .into_response());
     }
@@ -39,8 +51,8 @@ where
 
     let (backward_cursor, forward_cursor) = (sort_field(first_item), sort_field(last_item));
 
-    let has_previous = has_previous(&backward_cursor).map_err(HandlerError::DataError)?;
-    let has_more = has_more(&forward_cursor).map_err(HandlerError::DataError)?;
+    let has_previous = has_previous(&backward_cursor).context(PreviousSnafu)?;
+    let has_more = has_more(&forward_cursor).context(MoreSnafu)?;
     context.insert("has_previous", &has_previous);
     context.insert("has_more", &has_more);
 
@@ -50,7 +62,7 @@ where
 
     Ok(TEMPLATES
         .render(template, &context)
-        .map_err(InternalServerError)
+        .context(TemplateSnafu)
         .map(Html)?
         .into_response())
 }
diff --git a/little-hesinde/src/api/recent.rs b/little-hesinde/src/api/recent.rs
new file mode 100644
index 0000000..e69de29
diff --git a/little-hesinde/src/api/routes.rs b/little-hesinde/src/api/routes.rs
new file mode 100644
index 0000000..3b648ce
--- /dev/null
+++ b/little-hesinde/src/api/routes.rs
@@ -0,0 +1,53 @@
+//! HTTP route definitions and router configuration.
+
+use std::sync::Arc;
+
+use axum::{response::Redirect, routing::get};
+use tower_http::trace::TraceLayer;
+use utoipa_axum::{router::OpenApiRouter, routes};
+
+use crate::{
+    api::{html, opds, static_files},
+    app_state::AppState,
+};
+
+/// Create the main API router with all endpoints and middleware.
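+///
+/// HTML routes are mounted at the root, the OPDS routes are nested under
+/// `/opds`, and both groups share the same `Arc<AppState>` and an HTTP
+/// tracing layer.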
+pub fn router(state: AppState) -> OpenApiRouter { + let store = Arc::new(state); + + let opds_routes = OpenApiRouter::new() + .routes(routes!(opds::books::handler)) + .routes(routes!(opds::recent::handler)) + .routes(routes!(opds::series::handler)) + .routes(routes!(opds::series::single)) + .routes(routes!(opds::authors::handler)) + .routes(routes!(opds::authors::single)) + .routes(routes!(opds::search::handler)) + .routes(routes!(opds::search::info)) + .layer(TraceLayer::new_for_http()) + .with_state(store.clone()); + + let html_routes = OpenApiRouter::new() + .route("/", get(|| async { Redirect::permanent("/recent") })) + .routes(routes!(html::recent::handler)) + .routes(routes!(html::books::handler_init)) + .routes(routes!(html::books::handler)) + .routes(routes!(html::books::download)) + .routes(routes!(html::series::handler_init)) + .routes(routes!(html::series::handler)) + .routes(routes!(html::series::single)) + .routes(routes!(html::authors::handler_init)) + .routes(routes!(html::authors::handler)) + .routes(routes!(html::authors::single)) + .routes(routes!(html::cover::thumbnail)) + .routes(routes!(html::cover::full)) + .routes(routes!(html::search::handler)) + .routes(routes!(html::archive::handler)) + .routes(routes!(static_files::handler)) + .layer(TraceLayer::new_for_http()) + .with_state(store.clone()); + + OpenApiRouter::new() + .merge(html_routes) + .nest("/opds", opds_routes) +} diff --git a/little-hesinde/src/api/search.rs b/little-hesinde/src/api/search.rs new file mode 100644 index 0000000..d7c8370 --- /dev/null +++ b/little-hesinde/src/api/search.rs @@ -0,0 +1,28 @@ +use std::path::Path; + +use calibre_db::{calibre::Calibre, data::error::DataStoreError}; + +use snafu::{ResultExt, Snafu}; + +use crate::data::book::Book; + +#[derive(Debug, Snafu)] +pub enum SearchQueryError { + #[snafu(display("Failed to search for books."))] + Db { source: DataStoreError }, +} + +pub async fn query( + query: &str, + calibre: &Calibre, + library_path: &Path, +) -> Result, SearchQueryError> { + let books = calibre + .search(query) + .context(DbSnafu)? 
.iter()
+        .filter_map(|book| Book::full_book(book, calibre, library_path))
+        .collect();
+
+    Ok(books)
+}
diff --git a/little-hesinde/src/api/series.rs b/little-hesinde/src/api/series.rs
new file mode 100644
index 0000000..95ee746
--- /dev/null
+++ b/little-hesinde/src/api/series.rs
@@ -0,0 +1,32 @@
+use std::path::Path;
+
+use calibre_db::{
+    calibre::Calibre,
+    data::{error::DataStoreError, series::Series},
+};
+use snafu::{ResultExt, Snafu};
+
+use crate::data::book::Book;
+
+#[derive(Debug, Snafu)]
+pub enum SingleSeriesError {
+    #[snafu(display("Failed to fetch series data."))]
+    SeriesData { source: DataStoreError },
+    #[snafu(display("Failed to fetch books in series."))]
+    BookData { source: DataStoreError },
+}
+
+pub async fn single(
+    id: u64,
+    calibre: &Calibre,
+    library_path: &Path,
+) -> Result<(Series, Vec<Book>), SingleSeriesError> {
+    let series = calibre.scalar_series(id).context(SeriesDataSnafu)?;
+    let books = calibre.series_books(id).context(BookDataSnafu)?;
+    let books = books
+        .iter()
+        .filter_map(|x| Book::full_book(x, calibre, library_path))
+        .collect::<Vec<_>>();
+
+    Ok((series, books))
+}
diff --git a/little-hesinde/src/api/static_files.rs b/little-hesinde/src/api/static_files.rs
new file mode 100644
index 0000000..8733d67
--- /dev/null
+++ b/little-hesinde/src/api/static_files.rs
@@ -0,0 +1,48 @@
+use axum::{
+    http::{StatusCode, Uri, header},
+    response::{IntoResponse, Response},
+};
+use rust_embed::RustEmbed;
+
+/// Embed static files.
+#[derive(RustEmbed)]
+#[folder = "static"]
+pub struct Files;
+
+/// Get a static file from the 'static' folder.
+#[utoipa::path(
+    get,
+    path = "/static/{*file}",
+    responses(
+        (status = 200, description = "Static file"),
+        (status = 404, description = "No such file within 'static'", body = String)
+    )
+)]
+pub async fn handler(uri: Uri) -> impl IntoResponse {
+    let mut path = uri.path().trim_start_matches('/').to_string();
+
+    if path.starts_with("static/") {
+        path = path.replace("static/", "");
+    }
+
+    StaticFile(path)
+}
+
+pub struct StaticFile<T>(pub T);
+
+impl<T> IntoResponse for StaticFile<T>
+where
+    T: Into<String>,
+{
+    fn into_response(self) -> Response {
+        let path = self.0.into();
+
+        match Files::get(path.as_str()) {
+            Some(content) => {
+                let mime = mime_guess::from_path(path).first_or_octet_stream();
+                ([(header::CONTENT_TYPE, mime.as_ref())], content.data).into_response()
+            }
+            None => (StatusCode::NOT_FOUND, "404 Not Found").into_response(),
+        }
+    }
+}
diff --git a/little-hesinde/src/cache.rs b/little-hesinde/src/cache.rs
index 91965aa..b405baf 100644
--- a/little-hesinde/src/cache.rs
+++ b/little-hesinde/src/cache.rs
@@ -1,43 +1,38 @@
 //! Handle caching of files, specifically book covers.
 
 use std::{
+    fmt,
     fs::{self, File},
+    io,
     path::{Path, PathBuf},
 };
 
 use sha2::{
-    digest::{generic_array::GenericArray, typenum::U32},
     Digest, Sha256,
+    digest::{generic_array::GenericArray, typenum::U32},
 };
+use snafu::{ResultExt, Snafu};
 use std::fmt::Write;
-use thiserror::Error;
 use tracing::debug;
 
-/// Errors from dealing with file caching.
-#[derive(Error, Debug)]
-pub enum CacheError {
-    /// Error converting a hash to its string representation.
-    #[error("failed to access thumbnail")]
-    HashError(#[from] std::fmt::Error),
-    /// Error creating a thumbnail for an image..
-    #[error("failed to create thumbnail")]
-    ImageError(#[from] image::ImageError),
-    /// Error accessing a thumbnail.
-    #[error("failed to access thumbnail")]
-    ThumbnailAccessError(#[from] std::io::Error),
-    /// Error accessing thumbnail directories.
- #[error("failed to access thumbnail directory")] - ThumbnailPathError(PathBuf), +/// Errors from converting a hash to its string representation. +#[derive(Debug, Snafu)] +pub enum HashToPathError { + #[snafu(display("Failed to generate string representation of hash."))] + ToString { source: fmt::Error }, } /// Convert a hash into its path representation inside the cache directory. /// /// First hash character is the top folder, second character the second level folder and the rest /// is the filename. -fn hash_to_path(hash: GenericArray, cache_path: &Path) -> Result { +fn hash_to_path( + hash: GenericArray, + cache_path: &Path, +) -> Result { let mut hash_string = String::new(); for byte in hash { - write!(&mut hash_string, "{:02x}", byte)?; + write!(&mut hash_string, "{:02x}", byte).context(ToStringSnafu)?; } let hash = hash_string; @@ -51,37 +46,78 @@ fn hash_to_path(hash: GenericArray, cache_path: &Path) -> Result Result<(), CacheError> { +fn create_thumbnail(cover_path: &Path, thumbnail_path: &Path) -> Result<(), CreateThumbnailError> { debug!("creating thumbnail for {}", cover_path.to_string_lossy()); - let folders = thumbnail_path - .parent() - .ok_or_else(|| CacheError::ThumbnailPathError(thumbnail_path.to_path_buf()))?; - fs::create_dir_all(folders)?; + let folders = thumbnail_path.parent().ok_or_else(|| { + ParentDirSnafu { + path: thumbnail_path.to_string_lossy(), + } + .build() + })?; + fs::create_dir_all(folders).context(ThumbnailDirSnafu { + path: folders.to_string_lossy(), + })?; const THUMBNAIL_SIZE: u32 = 512; - let img = image::open(cover_path)?; + let img = image::open(cover_path).context(ImageOpenSnafu { + path: cover_path.to_string_lossy(), + })?; let thumbnail = img.thumbnail(THUMBNAIL_SIZE, THUMBNAIL_SIZE); - thumbnail.save_with_format(thumbnail_path, image::ImageFormat::Jpeg)?; + thumbnail + .save_with_format(thumbnail_path, image::ImageFormat::Jpeg) + .context(ImageSaveSnafu { + path: thumbnail_path.to_string_lossy(), + })?; debug!("saved thumbnail to {}", thumbnail_path.to_string_lossy()); Ok(()) } +/// Errors from retrieving a thumbnail. +#[derive(Debug, Snafu)] +pub enum RetrieveThumbnailError { + #[snafu(display("Failed to convert hash to string."))] + HashToPath { source: HashToPathError }, + #[snafu(display("Failed to create not yet existing thumbnail."))] + CreateThumbnail { source: CreateThumbnailError }, + #[snafu(display("Failed to open thumbnail."))] + OpenThumbnail { source: io::Error }, +} + /// Get the thumbnail for a book cover. /// /// If a thumbnail does not yet exist, create it. -pub fn get_thumbnail(cover_path: &Path, cache_path: &Path) -> Result { +pub fn get_thumbnail(cover_path: &Path, cache_path: &Path) -> Result { let path_str = cover_path.to_string_lossy(); let mut hasher = Sha256::new(); hasher.update(path_str.as_bytes()); let hash = hasher.finalize(); - let thumbnail_path = hash_to_path(hash, cache_path)?; + let thumbnail_path = hash_to_path(hash, cache_path).context(HashToPathSnafu)?; if !thumbnail_path.exists() { - create_thumbnail(cover_path, &thumbnail_path)?; + create_thumbnail(cover_path, &thumbnail_path).context(CreateThumbnailSnafu)?; } - Ok(File::open(thumbnail_path)?) 
diff --git a/little-hesinde/src/config.rs b/little-hesinde/src/config.rs
index 19f94a3..fa0f0a4 100644
--- a/little-hesinde/src/config.rs
+++ b/little-hesinde/src/config.rs
@@ -7,26 +7,27 @@ use std::{
     path::{Path, PathBuf},
 };
 
-use thiserror::Error;
 use tracing::info;
 
 use crate::cli::Cli;
+use snafu::{ResultExt, Snafu};
 
-/// Errors when dealing with application configuration.
-#[derive(Error, Debug)]
-pub enum ConfigError {
-    /// Calibre library path does not exist.
-    #[error("no folder at {0}")]
-    LibraryPathNotFound(String),
-    /// Calibre database does not exist.
-    #[error("no metadata.db in {0}")]
-    MetadataNotFound(String),
-    /// Error converting a string to a listening address.
-    #[error("failed to convert into listening address")]
-    ListeningAddressError(String),
-    /// Error accessing the configured cache path.
-    #[error("failed to access cache path")]
-    CachePathError(#[from] io::Error),
+/// Errors from loading application configuration.
+#[derive(Debug, Snafu)]
+pub enum LoadError {
+    #[snafu(display("{path} is not a calibre library."))]
+    LibraryPath { path: String },
+    #[snafu(display("Could not find calibre metadata at {path}."))]
+    MetadataPath { path: String },
+    #[snafu(display("Invalid listening address {listen_address}."))]
+    ListeningAddressParse {
+        source: io::Error,
+        listen_address: String,
+    },
+    #[snafu(display("Invalid listening address {listen_address}."))]
+    ListeningAddress { listen_address: String },
+    #[snafu(display("Failed to create cache directory at {path}."))]
+    CacheDir { source: io::Error, path: String },
 }
 
 /// Application configuration.
@@ -44,7 +45,7 @@ pub struct Config {
 
 impl Config {
     /// Check if the calibre library from `args` exists and if the calibre database can be found.
-    pub fn load(args: &Cli) -> Result<Self, ConfigError> {
+    pub fn load(args: &Cli) -> Result<Self, LoadError> {
         let library_path = Path::new(&args.library_path).to_path_buf();
 
         if !library_path.exists() {
@@ -53,7 +54,7 @@ impl Config {
                 .to_str()
                 .unwrap_or("")
                 .to_string();
-            return Err(ConfigError::LibraryPathNotFound(library_path));
+            return LibraryPathSnafu { path: library_path }.fail();
         }
 
         let metadata_path = library_path.join("metadata.db");
@@ -63,18 +64,24 @@ impl Config {
                 .to_str()
                 .unwrap_or("")
                 .to_string();
-            return Err(ConfigError::MetadataNotFound(metadata_path));
+            return MetadataPathSnafu {
+                path: metadata_path,
+            }
+            .fail();
         }
 
         let listen_address = args
             .listen_address
             .to_socket_addrs()
-            .map_err(|e| {
-                ConfigError::ListeningAddressError(format!("{}: {e:?}", args.listen_address))
+            .context(ListeningAddressParseSnafu {
+                listen_address: args.listen_address.clone(),
             })?
             .next()
-            .ok_or(ConfigError::ListeningAddressError(
-                args.listen_address.clone(),
-            ))?;
+            .ok_or(
+                ListeningAddressSnafu {
+                    listen_address: args.listen_address.clone(),
+                }
+                .build(),
+            )?;
 
         let cache_path = if args.cache_path.starts_with("$TMP") {
             let cache_base = env::var("XDG_CACHE_HOME")
@@ -83,7 +90,9 @@ impl Config {
         } else {
             PathBuf::from(&args.cache_path)
         };
-        fs::create_dir_all(&cache_path)?;
+        fs::create_dir_all(&cache_path).context(CacheDirSnafu {
+            path: cache_path.to_string_lossy(),
+        })?;
 
         info!("Using {} for cache", cache_path.to_string_lossy());
 
         Ok(Self {
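The listening-address handling above leans on `ToSocketAddrs` and takes the first resolved address; a small illustration of what that accepts, using only the standard library.

// Sketch only, not part of the patch.
use std::net::{SocketAddr, ToSocketAddrs};

fn first_addr(spec: &str) -> Option<SocketAddr> {
    spec.to_socket_addrs().ok()?.next()
}

// first_addr("127.0.0.1:3000") resolves to that address directly; a spec
// such as "localhost:3000" goes through the system resolver, so whether an
// IPv4 or IPv6 address comes first is platform-dependent.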
diff --git a/little-hesinde/src/data.rs b/little-hesinde/src/data.rs
new file mode 100644
index 0000000..5bc8fb3
--- /dev/null
+++ b/little-hesinde/src/data.rs
@@ -0,0 +1 @@
+pub mod book;
diff --git a/little-hesinde/src/data/book.rs b/little-hesinde/src/data/book.rs
index 849144e..cd51560 100644
--- a/little-hesinde/src/data/book.rs
+++ b/little-hesinde/src/data/book.rs
@@ -2,13 +2,12 @@
 
 use std::{collections::HashMap, fmt::Display, path::Path};
 
-use calibre_db::data::{
-    author::Author as DbAuthor, book::Book as DbBook, series::Series as DbSeries,
+use calibre_db::{
+    calibre::Calibre,
+    data::{author::Author as DbAuthor, book::Book as DbBook, series::Series as DbSeries},
 };
 use serde::Serialize;
 
-use crate::app_state::AppState;
-
 /// Wrapper type for a file format string (must be a struct in order to implement traits).
 #[derive(Debug, Clone, Serialize, Eq, PartialEq, Hash)]
 pub struct Format(pub String);
@@ -92,10 +91,10 @@ impl Book {
 
     /// Wrap a [`DbBook`](struct@calibre_db::data::book::Book) in a [`Book`](struct@Book) by
     /// fetching additional information about author, formats and series.
-    pub fn full_book(book: &DbBook, state: &AppState) -> Option<Book> {
-        let formats = Book::formats(book, &state.config.library_path);
-        let author = state.calibre.book_author(book.id).ok()?;
-        let series = state.calibre.book_series(book.id).ok()?;
+    pub fn full_book(book: &DbBook, calibre: &Calibre, library_path: &Path) -> Option<Book> {
+        let formats = Book::formats(book, library_path);
+        let author = calibre.book_author(book.id).ok()?;
+        let series = calibre.book_series(book.id).ok()?;
         Some(Book::from_db_book(book, series, author, formats))
     }
 }
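The signature change decouples `full_book` from `AppState`; a sketch of the call pattern the rest of the patch now uses. The helper name is hypothetical.

// Sketch only, not part of the patch.
fn hydrate_books(db_books: &[DbBook], calibre: &Calibre, library_path: &Path) -> Vec<Book> {
    db_books
        .iter()
        // Books whose author or series lookup fails are silently skipped,
        // mirroring the filter_map calls elsewhere in this patch.
        .filter_map(|b| Book::full_book(b, calibre, library_path))
        .collect()
}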
diff --git a/little-hesinde/src/handlers/author.rs b/little-hesinde/src/handlers/author.rs
deleted file mode 100644
index 60f7e20..0000000
--- a/little-hesinde/src/handlers/author.rs
+++ /dev/null
@@ -1,38 +0,0 @@
-//! Handle requests for a single author.
-
-use std::sync::Arc;
-
-use calibre_db::data::pagination::SortOrder;
-use poem::{
-    handler,
-    web::{Data, Path},
-    Response,
-};
-
-use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};
-
-/// Handle a request for an author with `id` and decide whether to render to html or OPDS.
-#[handler]
-pub async fn handler(
-    id: Path<u64>,
-    accept: Data<&Accept>,
-    state: Data<&Arc<AppState>>,
-) -> Result<Response, poem::Error> {
-    let author = state
-        .calibre
-        .scalar_author(*id)
-        .map_err(HandlerError::DataError)?;
-    let books = state
-        .calibre
-        .author_books(*id, u32::MAX.into(), None, SortOrder::ASC)
-        .map_err(HandlerError::DataError)?;
-    let books = books
-        .iter()
-        .filter_map(|x| Book::full_book(x, &state))
-        .collect::<Vec<Book>>();
-
-    match accept.0 {
-        Accept::Html => crate::handlers::html::author::handler(author, books).await,
-        Accept::Opds => crate::handlers::opds::author::handler(author, books).await,
-    }
-}
diff --git a/little-hesinde/src/handlers/authors.rs b/little-hesinde/src/handlers/authors.rs
deleted file mode 100644
index f6d4ba1..0000000
--- a/little-hesinde/src/handlers/authors.rs
+++ /dev/null
@@ -1,44 +0,0 @@
-//! Handle requests for multiple authors.
-
-use std::sync::Arc;
-
-use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
-use poem::{
-    handler,
-    web::{Data, Path},
-    Response,
-};
-
-use crate::{app_state::AppState, Accept};
-
-/// Handle a request for multiple authors, starting at the first.
-#[handler]
-pub async fn handler_init(
-    accept: Data<&Accept>,
-    state: Data<&Arc<AppState>>,
-) -> Result<Response, poem::Error> {
-    authors(&accept, &state.calibre, None, &SortOrder::ASC).await
-}
-
-/// Handle a request for multiple authors, starting at the `cursor` and going in the direction of
-/// `sort_order`.
-#[handler]
-pub async fn handler(
-    Path((cursor, sort_order)): Path<(String, SortOrder)>,
-    accept: Data<&Accept>,
-    state: Data<&Arc<AppState>>,
-) -> Result<Response, poem::Error> {
-    authors(&accept, &state.calibre, Some(&cursor), &sort_order).await
-}
-
-async fn authors(
-    acccept: &Accept,
-    calibre: &Calibre,
-    cursor: Option<&str>,
-    sort_order: &SortOrder,
-) -> Result<Response, poem::Error> {
-    match acccept {
-        Accept::Html => crate::handlers::html::authors::handler(calibre, cursor, sort_order).await,
-        Accept::Opds => crate::handlers::opds::authors::handler(calibre, cursor, sort_order).await,
-    }
-}
diff --git a/little-hesinde/src/handlers/books.rs b/little-hesinde/src/handlers/books.rs
deleted file mode 100644
index eace2ab..0000000
--- a/little-hesinde/src/handlers/books.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-//! Handle requests for multiple books.
-
-use std::sync::Arc;
-
-use calibre_db::data::pagination::SortOrder;
-use poem::{
-    error::NotFoundError,
-    handler,
-    web::{Data, Path},
-    Response,
-};
-use tokio::fs::File;
-
-use crate::{
-    app_state::AppState,
-    data::book::{Book, Format},
-    handlers::error::HandlerError,
-    opds::media_type::MediaType,
-    Accept,
-};
-
-/// Handle a request for multiple books, starting at the first.
-#[handler]
-pub async fn handler_init(
-    accept: Data<&Accept>,
-    state: Data<&Arc<AppState>>,
-) -> Result<Response, poem::Error> {
-    books(&accept, &state, None, &SortOrder::ASC).await
-}
-
-/// Handle a request for multiple books, starting at the `cursor` and going in the direction of
-/// `sort_order`.
-#[handler]
-pub async fn handler(
-    Path((cursor, sort_order)): Path<(String, SortOrder)>,
-    accept: Data<&Accept>,
-    state: Data<&Arc<AppState>>,
-) -> Result<Response, poem::Error> {
-    books(&accept, &state, Some(&cursor), &sort_order).await
-}
-
-/// Handle a request for a book with id `id` in format `format`.
-#[handler] -pub async fn handler_download( - Path((id, format)): Path<(u64, String)>, - state: Data<&Arc>, -) -> Result { - let book = state - .calibre - .scalar_book(id) - .map_err(HandlerError::DataError)?; - let book = Book::full_book(&book, &state).ok_or(NotFoundError)?; - let format = Format(format); - let file_name = book.formats.get(&format).ok_or(NotFoundError)?; - let file_path = state - .config - .library_path - .join(book.data.path) - .join(file_name); - let mut file = File::open(file_path).await.map_err(|_| NotFoundError)?; - let content_type: MediaType = format.into(); - let content_type = format!("{content_type}"); - - crate::handlers::download::handler(file_name, file, &content_type).await -} - -async fn books( - accept: &Accept, - state: &Arc, - cursor: Option<&str>, - sort_order: &SortOrder, -) -> Result { - match accept { - Accept::Html => crate::handlers::html::books::handler(state, cursor, sort_order).await, - Accept::Opds => crate::handlers::opds::books::handler(state, cursor, sort_order).await, - } -} diff --git a/little-hesinde/src/handlers/cover.rs b/little-hesinde/src/handlers/cover.rs deleted file mode 100644 index 53c7c0c..0000000 --- a/little-hesinde/src/handlers/cover.rs +++ /dev/null @@ -1,74 +0,0 @@ -//! Handle requests for cover images. - -use std::{fs::File, path::Path as FilePath, sync::Arc}; - -use crate::{ - app_state::AppState, - cache::{self, CacheError}, - config::Config, - handlers::error::HandlerError, -}; -use calibre_db::calibre::Calibre; -use poem::{ - error::NotFoundError, - handler, - web::{headers::ContentType, Data, Path}, - Response, -}; -use thiserror::Error; -use tokio::fs::File as AsyncFile; - -/// Errors from fetching cover images. -#[derive(Error, Debug)] -pub enum CoverError { - /// Error fetching a cover thumbnail. - #[error("failed to access thumbnail")] - ThumbnailError(#[from] CacheError), - /// Error fetching a full cover. - #[error("failed access cover")] - FullCoverError(#[from] std::io::Error), -} - -/// Handle a request for the cover thumbnail of book with id `id`. -#[handler] -pub async fn handler_thumbnail( - id: Path, - state: Data<&Arc>, -) -> Result { - cover( - &state.calibre, - &state.config, - *id, - |cover_path, cache_path| Ok(cache::get_thumbnail(cover_path, cache_path)?), - ) - .await -} - -/// Handle a request for the cover image of book with id `id`. -#[handler] -pub async fn handler_full( - id: Path, - state: Data<&Arc>, -) -> Result { - cover(&state.calibre, &state.config, *id, |cover_path, _| { - Ok(File::open(cover_path)?) - }) - .await -} - -async fn cover( - calibre: &Calibre, - config: &Config, - id: u64, - f: F, -) -> Result -where - F: Fn(&FilePath, &FilePath) -> Result, -{ - let book = calibre.scalar_book(id).map_err(HandlerError::DataError)?; - let cover_path = config.library_path.join(book.path).join("cover.jpg"); - - let cover = f(&cover_path, &config.cache_path).map_err(|_| NotFoundError)?; - let cover = AsyncFile::from_std(cover); - crate::handlers::download::handler("cover.jpg", cover, &ContentType::jpeg().to_string()).await -} diff --git a/little-hesinde/src/handlers/download.rs b/little-hesinde/src/handlers/download.rs deleted file mode 100644 index ef9290a..0000000 --- a/little-hesinde/src/handlers/download.rs +++ /dev/null @@ -1,23 +0,0 @@ -//! Handle requests for specific formats of a book. - -use tokio::io::AsyncRead; - -use poem::{Body, IntoResponse, Response}; -use tokio_util::io::ReaderStream; - -/// Handle a request for file. 
-/// -/// Must not be used directly from a route as that makes it vulnerable to path traversal attacks. -pub async fn handler( - file_name: &str, - reader: A, - content_type: &str, -) -> Result { - let stream = ReaderStream::new(reader); - let body = Body::from_bytes_stream(stream); - - Ok(body - .with_content_type(content_type) - .with_header("Content-Disposition", format!("filename=\"{file_name}\"")) - .into_response()) -} diff --git a/little-hesinde/src/handlers/error.rs b/little-hesinde/src/handlers/error.rs deleted file mode 100644 index 176b431..0000000 --- a/little-hesinde/src/handlers/error.rs +++ /dev/null @@ -1,57 +0,0 @@ -//! Error handling for requests handlers. - -use calibre_db::data::error::DataStoreError; -use poem::{error::ResponseError, http::StatusCode, Body, Response}; -use thiserror::Error; -use tracing::error; -use uuid::Uuid; - -use crate::opds::error::OpdsError; - -/// Errors happening during handling of requests. -#[derive(Error, Debug)] -#[error("opds error")] -pub enum HandlerError { - /// Error rendering OPDS. - #[error("opds error")] - OpdsError(#[from] OpdsError), - /// Error fetching data from calibre. - #[error("data error")] - DataError(#[from] DataStoreError), -} - -/// Convert a [`HandlerError`](enum@HandlerError) into a suitable response error. -/// -/// Log the real error (internal) with an uuid and send a suitable error message to the user with -/// the same uuid (for correlation purposes). -impl ResponseError for HandlerError { - fn status(&self) -> StatusCode { - match &self { - HandlerError::OpdsError(_) => StatusCode::INTERNAL_SERVER_ERROR, - HandlerError::DataError(e) => match e { - DataStoreError::NoResults(_) => StatusCode::NOT_FOUND, - _ => StatusCode::INTERNAL_SERVER_ERROR, - }, - } - } - - fn as_response(&self) -> Response { - let id = Uuid::new_v4(); - let internal_msg = format!("{:?}", self); - let external_msg = match &self { - HandlerError::OpdsError(_) => "internal server error", - HandlerError::DataError(e) => match e { - DataStoreError::NoResults(_) => "item not found", - _ => "internal server error", - }, - }; - error!("{id}: {internal_msg}"); - - let body = Body::from_json(serde_json::json!({ - "id": id.to_string(), - "message": external_msg, - })) - .unwrap(); - Response::builder().status(self.status()).body(body) - } -} diff --git a/little-hesinde/src/handlers/html/author.rs b/little-hesinde/src/handlers/html/author.rs deleted file mode 100644 index d27c6d3..0000000 --- a/little-hesinde/src/handlers/html/author.rs +++ /dev/null @@ -1,21 +0,0 @@ -//! Handle a single author for html. - -use calibre_db::data::author::Author; -use poem::{error::InternalServerError, web::Html, IntoResponse, Response}; -use tera::Context; - -use crate::{data::book::Book, templates::TEMPLATES}; - -/// Render a single author in html. -pub async fn handler(author: Author, books: Vec) -> Result { - let mut context = Context::new(); - context.insert("title", &author.name); - context.insert("nav", "authors"); - context.insert("books", &books); - - Ok(TEMPLATES - .render("book_list", &context) - .map_err(InternalServerError) - .map(Html)? - .into_response()) -} diff --git a/little-hesinde/src/handlers/html/authors.rs b/little-hesinde/src/handlers/html/authors.rs deleted file mode 100644 index 6db29d4..0000000 --- a/little-hesinde/src/handlers/html/authors.rs +++ /dev/null @@ -1,21 +0,0 @@ -//! Handle multiple authors in html. 
- -use calibre_db::{calibre::Calibre, data::pagination::SortOrder}; -use poem::Response; - -use crate::handlers::paginated; - -/// Render all authors paginated by cursor in html. -pub async fn handler( - calibre: &Calibre, - cursor: Option<&str>, - sort_order: &SortOrder, -) -> Result { - paginated::render( - "authors", - || calibre.authors(25, cursor, sort_order), - |author| author.sort.clone(), - |cursor| calibre.has_previous_authors(cursor), - |cursor| calibre.has_more_authors(cursor), - ) -} diff --git a/little-hesinde/src/handlers/html/books.rs b/little-hesinde/src/handlers/html/books.rs deleted file mode 100644 index 14700ae..0000000 --- a/little-hesinde/src/handlers/html/books.rs +++ /dev/null @@ -1,26 +0,0 @@ -//! Handle multiple books in html. - -use calibre_db::data::pagination::SortOrder; -use poem::Response; - -use crate::{app_state::AppState, data::book::Book, handlers::paginated}; - -/// Render all books paginated by cursor in html. -pub async fn handler( - state: &AppState, - cursor: Option<&str>, - sort_order: &SortOrder, -) -> Result { - paginated::render( - "books", - || { - state - .calibre - .books(25, cursor, sort_order) - .map(|x| x.iter().filter_map(|y| Book::full_book(y, state)).collect()) - }, - |book| book.data.sort.clone(), - |cursor| state.calibre.has_previous_books(cursor), - |cursor| state.calibre.has_more_books(cursor), - ) -} diff --git a/little-hesinde/src/handlers/html/recent.rs b/little-hesinde/src/handlers/html/recent.rs deleted file mode 100644 index d9ab06f..0000000 --- a/little-hesinde/src/handlers/html/recent.rs +++ /dev/null @@ -1,20 +0,0 @@ -//! Handle recent books in html. - -use poem::{error::InternalServerError, web::Html, IntoResponse, Response}; -use tera::Context; - -use crate::{data::book::Book, templates::TEMPLATES}; - -/// Render recent books as html. -pub async fn handler(recent_books: Vec) -> Result { - let mut context = Context::new(); - context.insert("title", ""); - context.insert("nav", "recent"); - context.insert("books", &recent_books); - - Ok(TEMPLATES - .render("book_list", &context) - .map_err(InternalServerError) - .map(Html)? - .into_response()) -} diff --git a/little-hesinde/src/handlers/html/search.rs b/little-hesinde/src/handlers/html/search.rs deleted file mode 100644 index fc1b547..0000000 --- a/little-hesinde/src/handlers/html/search.rs +++ /dev/null @@ -1,20 +0,0 @@ -//! Handle search results in html. - -use poem::{error::InternalServerError, web::Html, IntoResponse, Response}; -use tera::Context; - -use crate::{data::book::Book, templates::TEMPLATES}; - -/// Render all search results as html. -pub async fn handler(books: Vec) -> Result { - let mut context = Context::new(); - context.insert("title", "Search Results"); - context.insert("nav", "search"); - context.insert("books", &books); - - Ok(TEMPLATES - .render("book_list", &context) - .map_err(InternalServerError) - .map(Html)? - .into_response()) -} diff --git a/little-hesinde/src/handlers/html/series.rs b/little-hesinde/src/handlers/html/series.rs deleted file mode 100644 index 856939d..0000000 --- a/little-hesinde/src/handlers/html/series.rs +++ /dev/null @@ -1,21 +0,0 @@ -//! Handle multiple series in html. - -use calibre_db::{calibre::Calibre, data::pagination::SortOrder}; -use poem::Response; - -use crate::handlers::paginated; - -/// Render all series paginated by cursor as html. 
-pub async fn handler( - calibre: &Calibre, - cursor: Option<&str>, - sort_order: &SortOrder, -) -> Result { - paginated::render( - "series", - || calibre.series(25, cursor, sort_order), - |series| series.sort.clone(), - |cursor| calibre.has_previous_series(cursor), - |cursor| calibre.has_more_series(cursor), - ) -} diff --git a/little-hesinde/src/handlers/html/series_single.rs b/little-hesinde/src/handlers/html/series_single.rs deleted file mode 100644 index 148aad4..0000000 --- a/little-hesinde/src/handlers/html/series_single.rs +++ /dev/null @@ -1,21 +0,0 @@ -//! Handle a single series in html. - -use calibre_db::data::series::Series; -use poem::{error::InternalServerError, web::Html, IntoResponse, Response}; -use tera::Context; - -use crate::{data::book::Book, templates::TEMPLATES}; - -/// Render a single series as html. -pub async fn handler(series: Series, books: Vec) -> Result { - let mut context = Context::new(); - context.insert("title", &series.name); - context.insert("nav", "series"); - context.insert("books", &books); - - Ok(TEMPLATES - .render("book_list", &context) - .map_err(InternalServerError) - .map(Html)? - .into_response()) -} diff --git a/little-hesinde/src/handlers/opds/author.rs b/little-hesinde/src/handlers/opds/author.rs deleted file mode 100644 index 922bd22..0000000 --- a/little-hesinde/src/handlers/opds/author.rs +++ /dev/null @@ -1,39 +0,0 @@ -//! Handle a single author for opds. - -use calibre_db::data::author::Author; -use poem::{IntoResponse, Response}; -use time::OffsetDateTime; - -use crate::{ - data::book::Book, - handlers::error::HandlerError, - opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation}, - APP_NAME, -}; - -/// Render a single author as an OPDS entry embedded in a feed. -pub async fn handler(author: Author, books: Vec) -> Result { - let entries: Vec = books.into_iter().map(Entry::from).collect(); - let now = OffsetDateTime::now_utc(); - - let self_link = Link { - href: format!("/opds/authors/{}", author.id), - media_type: MediaType::Navigation, - rel: Relation::Myself, - title: None, - count: None, - }; - let feed = Feed::create( - now, - &format!("{APP_NAME}author:{}", author.id), - &author.name, - self_link, - vec![], - entries, - ); - let xml = feed.as_xml().map_err(HandlerError::OpdsError)?; - - Ok(xml - .with_content_type("application/atom+xml") - .into_response()) -} diff --git a/little-hesinde/src/handlers/opds/authors.rs b/little-hesinde/src/handlers/opds/authors.rs deleted file mode 100644 index 84a152c..0000000 --- a/little-hesinde/src/handlers/opds/authors.rs +++ /dev/null @@ -1,49 +0,0 @@ -//! Handle multiple authors for opds. - -use calibre_db::{ - calibre::Calibre, - data::{author::Author as DbAuthor, pagination::SortOrder}, -}; -use poem::{IntoResponse, Response}; -use time::OffsetDateTime; - -use crate::{ - handlers::error::HandlerError, - opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation}, - APP_NAME, -}; - -/// Render all authors as OPDS entries embedded in a feed. 
-pub async fn handler( - calibre: &Calibre, - _cursor: Option<&str>, - _sort_order: &SortOrder, -) -> Result { - let authors: Vec = calibre - .authors(u32::MAX.into(), None, &SortOrder::ASC) - .map_err(HandlerError::DataError)?; - - let entries: Vec = authors.into_iter().map(Entry::from).collect(); - let now = OffsetDateTime::now_utc(); - - let self_link = Link { - href: "/opds/authors".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Myself, - title: None, - count: None, - }; - let feed = Feed::create( - now, - &format!("{APP_NAME}:authors"), - "All Authors", - self_link, - vec![], - entries, - ); - let xml = feed.as_xml().map_err(HandlerError::OpdsError)?; - - Ok(xml - .with_content_type("application/atom+xml") - .into_response()) -} diff --git a/little-hesinde/src/handlers/opds/books.rs b/little-hesinde/src/handlers/opds/books.rs deleted file mode 100644 index 90f2a60..0000000 --- a/little-hesinde/src/handlers/opds/books.rs +++ /dev/null @@ -1,54 +0,0 @@ -//! Handle multiple books for opds. - -use calibre_db::data::pagination::SortOrder; -use poem::{IntoResponse, Response}; -use time::OffsetDateTime; - -use crate::{ - app_state::AppState, - data::book::Book, - handlers::error::HandlerError, - opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation}, - APP_NAME, -}; - -/// Render all books as OPDS entries embedded in a feed. -pub async fn handler( - state: &AppState, - _cursor: Option<&str>, - _sort_order: &SortOrder, -) -> Result { - let books: Vec = state - .calibre - .books(u32::MAX.into(), None, &SortOrder::ASC) - .map(|x| x.iter().filter_map(|y| Book::full_book(y, state)).collect()) - .map_err(HandlerError::DataError)?; - render_books(books).await -} - -/// Render a list of books as OPDS entries in a feed. -pub(crate) async fn render_books(books: Vec) -> Result { - let entries: Vec = books.into_iter().map(Entry::from).collect(); - let now = OffsetDateTime::now_utc(); - - let self_link = Link { - href: "/opds/books".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Myself, - title: None, - count: None, - }; - let feed = Feed::create( - now, - &format!("{APP_NAME}:books"), - "All Books", - self_link, - vec![], - entries, - ); - let xml = feed.as_xml().map_err(HandlerError::OpdsError)?; - - Ok(xml - .with_content_type("application/atom+xml") - .into_response()) -} diff --git a/little-hesinde/src/handlers/opds/feed.rs b/little-hesinde/src/handlers/opds/feed.rs deleted file mode 100644 index d7fe543..0000000 --- a/little-hesinde/src/handlers/opds/feed.rs +++ /dev/null @@ -1,110 +0,0 @@ -//! Handle the OPDS root feed. - -use poem::{handler, web::WithContentType, IntoResponse}; -use time::OffsetDateTime; - -use crate::{ - handlers::error::HandlerError, - opds::{ - content::Content, entry::Entry, feed::Feed, link::Link, media_type::MediaType, - relation::Relation, - }, - APP_NAME, -}; - -/// Render a root OPDS feed with links to the subsections (authors, books, series and recent). 
-#[handler] -pub async fn handler() -> Result, poem::Error> { - let now = OffsetDateTime::now_utc(); - - let self_link = Link { - href: "/opds".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Myself, - title: None, - count: None, - }; - let books_entry = Entry { - title: "Books".to_string(), - id: format!("{APP_NAME}:books"), - updated: now, - content: Some(Content { - media_type: MediaType::Text, - content: "Index of all books".to_string(), - }), - author: None, - links: vec![Link { - href: "/opds/books".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Subsection, - title: None, - count: None, - }], - }; - - let authors_entry = Entry { - title: "Authors".to_string(), - id: format!("{APP_NAME}:authors"), - updated: now, - content: Some(Content { - media_type: MediaType::Text, - content: "Index of all authors".to_string(), - }), - author: None, - links: vec![Link { - href: "/opds/authors".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Subsection, - title: None, - count: None, - }], - }; - - let series_entry = Entry { - title: "Series".to_string(), - id: format!("{APP_NAME}:series"), - updated: now, - content: Some(Content { - media_type: MediaType::Text, - content: "Index of all series".to_string(), - }), - author: None, - links: vec![Link { - href: "/opds/series".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Subsection, - title: None, - count: None, - }], - }; - - let recents_entry = Entry { - title: "Recent Additions".to_string(), - id: format!("{APP_NAME}:recentbooks"), - updated: now, - content: Some(Content { - media_type: MediaType::Text, - content: "Recently added books".to_string(), - }), - author: None, - links: vec![Link { - href: "/opds/recent".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Subsection, - title: None, - count: None, - }], - }; - - let feed = Feed::create( - now, - &format!("{APP_NAME}:catalog"), - "Little Hesinde", - self_link, - vec![], - vec![authors_entry, series_entry, books_entry, recents_entry], - ); - let xml = feed.as_xml().map_err(HandlerError::OpdsError)?; - - Ok(xml.with_content_type("application/atom+xml")) -} diff --git a/little-hesinde/src/handlers/opds/recent.rs b/little-hesinde/src/handlers/opds/recent.rs deleted file mode 100644 index 9f337d6..0000000 --- a/little-hesinde/src/handlers/opds/recent.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! Handle recent books for OPDS. - -use poem::{IntoResponse, Response}; -use time::OffsetDateTime; - -use crate::{ - data::book::Book, - handlers::error::HandlerError, - opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation}, - APP_NAME, -}; - -/// Render recent books as OPDS entries embedded in a feed. 
-pub async fn handler(recent_books: Vec) -> Result { - let entries: Vec = recent_books.into_iter().map(Entry::from).collect(); - let now = OffsetDateTime::now_utc(); - - let self_link = Link { - href: "/opds/recent".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Myself, - title: None, - count: None, - }; - let feed = Feed::create( - now, - &format!("{APP_NAME}:recentbooks"), - "Recent Books", - self_link, - vec![], - entries, - ); - let xml = feed.as_xml().map_err(HandlerError::OpdsError)?; - - Ok(xml - .with_content_type("application/atom+xml") - .into_response()) -} diff --git a/little-hesinde/src/handlers/opds/search.rs b/little-hesinde/src/handlers/opds/search.rs deleted file mode 100644 index efbf3f0..0000000 --- a/little-hesinde/src/handlers/opds/search.rs +++ /dev/null @@ -1,12 +0,0 @@ -//! Handle search results in opds. - -use poem::Response; - -use crate::data::book::Book; - -use super::books::render_books; - -/// Render search results as OPDS entries in a feed. -pub async fn handler(books: Vec) -> Result { - render_books(books).await -} diff --git a/little-hesinde/src/handlers/opds/search_info.rs b/little-hesinde/src/handlers/opds/search_info.rs deleted file mode 100644 index 92cee0d..0000000 --- a/little-hesinde/src/handlers/opds/search_info.rs +++ /dev/null @@ -1,27 +0,0 @@ -//! Handle open search description.. - -use crate::{ - handlers::error::HandlerError, - opds::search::{OpenSearchDescription, Url}, - APP_NAME, -}; -use poem::{handler, IntoResponse, Response}; - -/// Render search information as open search description. -#[handler] -pub async fn handler() -> Result { - let search = OpenSearchDescription { - short_name: APP_NAME.to_string(), - description: "Search for ebooks".to_string(), - input_encoding: "UTF-8".to_string(), - output_encoding: "UTF-8".to_string(), - url: Url { - type_name: "application/atom+xml".to_string(), - template: "/opds/search?query={searchTerms}".to_string(), - }, - }; - let xml = search.as_xml().map_err(HandlerError::OpdsError)?; - Ok(xml - .with_content_type("application/atom+xml") - .into_response()) -} diff --git a/little-hesinde/src/handlers/opds/series.rs b/little-hesinde/src/handlers/opds/series.rs deleted file mode 100644 index 8a90360..0000000 --- a/little-hesinde/src/handlers/opds/series.rs +++ /dev/null @@ -1,46 +0,0 @@ -//! Handle multiple series for OPDS. - -use calibre_db::{calibre::Calibre, data::pagination::SortOrder}; -use poem::{IntoResponse, Response}; -use time::OffsetDateTime; - -use crate::{ - handlers::error::HandlerError, - opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation}, - APP_NAME, -}; - -/// Render all series as OPDS entries embedded in a feed. 
-pub async fn handler( - calibre: &Calibre, - _cursor: Option<&str>, - _sort_order: &SortOrder, -) -> Result { - let series = calibre - .series(u32::MAX.into(), None, &SortOrder::ASC) - .map_err(HandlerError::DataError)?; - - let entries: Vec = series.into_iter().map(Entry::from).collect(); - let now = OffsetDateTime::now_utc(); - - let self_link = Link { - href: "/opds/series".to_string(), - media_type: MediaType::Navigation, - rel: Relation::Myself, - title: None, - count: None, - }; - let feed = Feed::create( - now, - &format!("{APP_NAME}:series"), - "All Series", - self_link, - vec![], - entries, - ); - let xml = feed.as_xml().map_err(HandlerError::OpdsError)?; - - Ok(xml - .with_content_type("application/atom+xml") - .into_response()) -} diff --git a/little-hesinde/src/handlers/opds/series_single.rs b/little-hesinde/src/handlers/opds/series_single.rs deleted file mode 100644 index 42377e6..0000000 --- a/little-hesinde/src/handlers/opds/series_single.rs +++ /dev/null @@ -1,39 +0,0 @@ -//! Handle a single series for opds. - -use calibre_db::data::series::Series; -use poem::{IntoResponse, Response}; -use time::OffsetDateTime; - -use crate::{ - data::book::Book, - handlers::error::HandlerError, - opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation}, - APP_NAME, -}; - -/// Render a single series as an OPDS entry embedded in a feed. -pub async fn handler(series: Series, books: Vec) -> Result { - let entries: Vec = books.into_iter().map(Entry::from).collect(); - let now = OffsetDateTime::now_utc(); - - let self_link = Link { - href: format!("/opds/series/{}", series.id), - media_type: MediaType::Navigation, - rel: Relation::Myself, - title: None, - count: None, - }; - let feed = Feed::create( - now, - &format!("{APP_NAME}:series:{}", series.id), - &series.name, - self_link, - vec![], - entries, - ); - let xml = feed.as_xml().map_err(HandlerError::OpdsError)?; - - Ok(xml - .with_content_type("application/atom+xml") - .into_response()) -} diff --git a/little-hesinde/src/handlers/recent.rs b/little-hesinde/src/handlers/recent.rs index 44f268c..e69de29 100644 --- a/little-hesinde/src/handlers/recent.rs +++ b/little-hesinde/src/handlers/recent.rs @@ -1,28 +0,0 @@ -//! Handle requests for recent books. - -use std::sync::Arc; - -use poem::{handler, web::Data, Response}; - -use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept}; - -/// Handle a request recent books and decide whether to render to html or OPDS. -#[handler] -pub async fn handler( - accept: Data<&Accept>, - state: Data<&Arc>, -) -> Result { - let recent_books = state - .calibre - .recent_books(25) - .map_err(HandlerError::DataError)?; - let recent_books = recent_books - .iter() - .filter_map(|x| Book::full_book(x, &state)) - .collect::>(); - - match accept.0 { - Accept::Html => crate::handlers::html::recent::handler(recent_books).await, - Accept::Opds => crate::handlers::opds::recent::handler(recent_books).await, - } -} diff --git a/little-hesinde/src/handlers/search.rs b/little-hesinde/src/handlers/search.rs deleted file mode 100644 index 416043d..0000000 --- a/little-hesinde/src/handlers/search.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! Handle search requests. - -use std::sync::Arc; - -use poem::{ - handler, - web::{Data, Query}, - Response, -}; -use serde::Deserialize; - -use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept}; - -#[derive(Deserialize)] -struct Params { - /// Query for a search request. 
- query: String, -} -/// Handle a search request with query parameter `query`. -#[handler] -pub async fn handler( - accept: Data<&Accept>, - state: Data<&Arc>, - Query(params): Query, -) -> Result { - let books = state - .calibre - .search(¶ms.query) - .map_err(HandlerError::DataError)? - .iter() - .filter_map(|book| Book::full_book(book, *state)) - .collect(); - - match *accept { - Accept::Html => crate::handlers::html::search::handler(books).await, - Accept::Opds => crate::handlers::opds::search::handler(books).await, - } -} diff --git a/little-hesinde/src/handlers/series.rs b/little-hesinde/src/handlers/series.rs deleted file mode 100644 index 9c701e9..0000000 --- a/little-hesinde/src/handlers/series.rs +++ /dev/null @@ -1,48 +0,0 @@ -//! Handle requests for multiple series. - -use std::sync::Arc; - -use calibre_db::data::pagination::SortOrder; -use poem::{ - handler, - web::{Data, Path}, - Response, -}; - -use crate::{app_state::AppState, Accept}; - -/// Handle a request for multiple series, starting at the first. -#[handler] -pub async fn handler_init( - accept: Data<&Accept>, - state: Data<&Arc>, -) -> Result { - series(&accept, &state, None, &SortOrder::ASC).await -} - -/// Handle a request for multiple series, starting at the `cursor` and going in the direction of -/// `sort_order`. -#[handler] -pub async fn handler( - Path((cursor, sort_order)): Path<(String, SortOrder)>, - accept: Data<&Accept>, - state: Data<&Arc>, -) -> Result { - series(&accept, &state, Some(&cursor), &sort_order).await -} - -async fn series( - accept: &Accept, - state: &Arc, - cursor: Option<&str>, - sort_order: &SortOrder, -) -> Result { - match accept { - Accept::Html => { - crate::handlers::html::series::handler(&state.calibre, cursor, sort_order).await - } - Accept::Opds => { - crate::handlers::opds::series::handler(&state.calibre, cursor, sort_order).await - } - } -} diff --git a/little-hesinde/src/handlers/series_single.rs b/little-hesinde/src/handlers/series_single.rs deleted file mode 100644 index 7b41b7a..0000000 --- a/little-hesinde/src/handlers/series_single.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! Handle requests for a single series. - -use std::sync::Arc; - -use poem::{ - handler, - web::{Data, Path}, - Response, -}; - -use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept}; - -/// Handle a request for a series with `id` and decide whether to render to html or OPDS. -#[handler] -pub async fn handler( - id: Path, - accept: Data<&Accept>, - state: Data<&Arc>, -) -> Result { - let series = state - .calibre - .scalar_series(*id) - .map_err(HandlerError::DataError)?; - let books = state - .calibre - .series_books(*id) - .map_err(HandlerError::DataError)?; - let books = books - .iter() - .filter_map(|x| Book::full_book(x, &state)) - .collect::>(); - - match accept.0 { - Accept::Html => crate::handlers::html::series_single::handler(series, books).await, - Accept::Opds => crate::handlers::opds::series_single::handler(series, books).await, - } -} diff --git a/little-hesinde/src/handlers/source_archive.rs b/little-hesinde/src/handlers/source_archive.rs deleted file mode 100644 index 7cbffa4..0000000 --- a/little-hesinde/src/handlers/source_archive.rs +++ /dev/null @@ -1,11 +0,0 @@ -use crate::{APP_NAME, VERSION}; -use poem::{handler, Response}; - -const SOURCE_ARCHIVE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/archive.zip")); - -/// Handle a request for source code of the server.. 
-#[handler] -pub async fn handler() -> Result { - let file_name = format!("{APP_NAME}-{VERSION}.zip"); - crate::handlers::download::handler(&file_name, SOURCE_ARCHIVE, "application/zip").await -} diff --git a/little-hesinde/src/lib.rs b/little-hesinde/src/lib.rs index 0d670b6..994ed4f 100644 --- a/little-hesinde/src/lib.rs +++ b/little-hesinde/src/lib.rs @@ -2,82 +2,29 @@ //! //! Shamelessly written to scratch my own itches. -use std::sync::Arc; - +use api::ServeError; use app_state::AppState; -use calibre_db::calibre::Calibre; +use calibre_db::calibre::{Calibre, LoadError}; use config::Config; -use poem::{ - endpoint::EmbeddedFilesEndpoint, get, listener::TcpListener, middleware::Tracing, EndpointExt, - Route, Server, -}; -use rust_embed::RustEmbed; +use snafu::{ResultExt, Snafu}; use tokio::signal; use tracing::info; +pub mod api; pub mod app_state; pub mod cache; pub mod cli; pub mod config; /// Data structs and their functions. -pub mod data { - pub mod book; -} -/// Request handlers. Because it can not be guaranteed that a proper accept header is sent, the -/// routes are doubled and the decision on whether to render html or OPDS is made with internal -/// data on the respective routes. -pub mod handlers { - /// Handle requests for html. - pub mod html { - pub mod author; - pub mod authors; - pub mod books; - pub mod recent; - pub mod search; - pub mod series; - pub mod series_single; - } - /// Handle requests for OPDS. - pub mod opds { - pub mod author; - pub mod authors; - pub mod books; - pub mod feed; - pub mod recent; - pub mod search; - pub mod search_info; - pub mod series; - pub mod series_single; - } - pub mod author; - pub mod authors; - pub mod books; - pub mod cover; - pub mod download; - pub mod error; - pub mod paginated; - pub mod recent; - pub mod search; - pub mod series; - pub mod series_single; - pub mod source_archive; -} +pub mod data; /// OPDS data structs. -pub mod opds { - pub mod author; - pub mod content; - pub mod entry; - pub mod error; - pub mod feed; - pub mod link; - pub mod media_type; - pub mod relation; - pub mod search; -} +pub mod opds; pub mod templates; -pub const APP_NAME: &str = "little-hesinde"; -pub const VERSION: &str = "0.3.1"; +// App name from Cargo.toml +const APP_NAME: &str = env!("CARGO_PKG_NAME"); +// Version from Cargo.toml +const APP_VERSION: &str = env!("CARGO_PKG_VERSION"); /// Internal marker data in lieu of a proper `Accept` header. #[derive(Debug, Clone, Copy)] @@ -88,73 +35,32 @@ pub enum Accept { Opds, } -/// Embedd static files. -#[derive(RustEmbed)] -#[folder = "static"] -pub struct Files; +/// Errors from running little-hesinde. +#[derive(Debug, Snafu)] +pub enum RunError { + #[snafu(display("Failed to load calibre database."))] + LoadCalibre { source: LoadError }, + #[snafu(display("Failed to run http server."))] + Serve { source: ServeError }, +} /// Main entry point to run the ebook server with a calibre library specified in `config`. 
-pub async fn run(config: Config) -> Result<(), std::io::Error> { - let calibre = Calibre::load(&config.metadata_path).expect("failed to load calibre database"); - let app_state = Arc::new(AppState { +pub async fn run(config: Config) -> Result<(), RunError> { + let calibre = Calibre::load(&config.metadata_path).context(LoadCalibreSnafu)?; + let app_state = AppState { calibre, config: config.clone(), - }); + }; - let html_routes = Route::new() - .at("/", get(handlers::recent::handler)) - .at("/books", get(handlers::books::handler_init)) - .at("/books/:cursor/:sort_order", get(handlers::books::handler)) - .at("/series", get(handlers::series::handler_init)) - .at( - "/series/:cursor/:sort_order", - get(handlers::series::handler), - ) - .at("/series/:id", get(handlers::series_single::handler)) - .at("/authors", get(handlers::authors::handler_init)) - .at("/authors/:id", get(handlers::author::handler)) - .at( - "/authors/:cursor/:sort_order", - get(handlers::authors::handler), - ) - .at("/cover/:id", get(handlers::cover::handler_full)) - .at( - "/cover/:id/thumbnail", - get(handlers::cover::handler_thumbnail), - ) - .at("/book/:id/:format", get(handlers::books::handler_download)) - .at("/archive", get(handlers::source_archive::handler)) - .at("/search", get(handlers::search::handler)) - .nest("/static", EmbeddedFilesEndpoint::::new()) - .data(Accept::Html); - - let opds_routes = Route::new() - .at("/", get(handlers::opds::feed::handler)) - .at("/recent", get(handlers::recent::handler)) - .at("/books", get(handlers::books::handler_init)) - .at("/authors", get(handlers::authors::handler_init)) - .at("/authors/:id", get(handlers::author::handler)) - .at("/series", get(handlers::series::handler_init)) - .at("/series/:id", get(handlers::series_single::handler)) - .at("/search/info", get(handlers::opds::search_info::handler)) - .at("/search", get(handlers::search::handler)) - .data(Accept::Opds); - - let app = Route::new() - .nest("/", html_routes) - .nest("/opds", opds_routes) - .data(app_state) - .with(Tracing); - - let server = Server::new(TcpListener::bind(config.listen_address)) - .name("little-hesinde") - .run(app); + let server = api::serve(config.listen_address, app_state); tokio::select! { - _ = server => {}, + res = server => { + res.context(ServeSnafu) + }, _ = signal::ctrl_c() => { info!("Received Ctrl+C, shutting down..."); + Ok(()) }, } - Ok(()) } diff --git a/little-hesinde/src/main.rs b/little-hesinde/src/main.rs index 596f3a1..0513a80 100644 --- a/little-hesinde/src/main.rs +++ b/little-hesinde/src/main.rs @@ -1,12 +1,39 @@ use clap::Parser; -use little_hesinde::{cli::Cli, config::Config}; +use little_hesinde::{ + RunError, + cli::Cli, + config::{Config, LoadError}, +}; +use snafu::{ResultExt, Snafu}; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; + +/// Top-level application errors. 
+#[derive(Debug, Snafu)] +pub enum Error { + #[snafu(display("Failed to load config."))] + Config { source: LoadError }, + #[snafu(display("Failed to run little-hesinde."))] + Run { source: RunError }, +} #[tokio::main] -async fn main() -> Result<(), std::io::Error> { - tracing_subscriber::fmt::init(); +#[snafu::report] +async fn main() -> Result<(), Error> { + tracing_subscriber::registry() + .with( + tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| { + format!( + "{}=debug,tower_http=debug,axum::rejection=trace", + env!("CARGO_CRATE_NAME") + ) + .into() + }), + ) + .with(tracing_subscriber::fmt::layer()) + .init(); let args = Cli::parse(); - let config = Config::load(&args).expect("failed to load configuration"); + let config = Config::load(&args).context(ConfigSnafu)?; - little_hesinde::run(config).await + little_hesinde::run(config).await.context(RunSnafu) } diff --git a/little-hesinde/src/opds.rs b/little-hesinde/src/opds.rs new file mode 100644 index 0000000..1ea31b4 --- /dev/null +++ b/little-hesinde/src/opds.rs @@ -0,0 +1,9 @@ +pub mod author; +pub mod content; +pub mod entry; +pub mod error; +pub mod feed; +pub mod link; +pub mod media_type; +pub mod relation; +pub mod search; diff --git a/little-hesinde/src/opds/error.rs b/little-hesinde/src/opds/error.rs index 30e65ba..204c6f6 100644 --- a/little-hesinde/src/opds/error.rs +++ b/little-hesinde/src/opds/error.rs @@ -2,26 +2,18 @@ use std::{io, string::FromUtf8Error}; -use quick_xml::DeError; -use thiserror::Error; +use quick_xml::SeError; +use snafu::Snafu; /// Errors happening during handling OPDS data. -#[derive(Error, Debug)] -#[error("opds error")] -pub enum OpdsError { - /// Error serializing OPDS data. - #[error("failed to serialize struct")] - SerializingError(#[from] DeError), - /// Error parsing OPDS xml structure. - #[error("xml failure")] - XmlError(#[from] quick_xml::Error), - /// Error decoding xml as UTF-8. - #[error("failed to decode as utf-8")] - Utf8Error(#[from] FromUtf8Error), - /// Error parsing OPDS xml structure. - #[error("xml serialization failure")] - XmlSerializationError(#[from] quick_xml::SeError), - /// Error parsing OPDS xml structure. - #[error("xml io failure")] - XmlIoError(#[from] io::Error), +#[derive(Debug, Snafu)] +pub enum AsXmlError { + #[snafu(display("Failed to serialize object."), visibility(pub))] + ToString { source: SeError }, + #[snafu(display("Failed to write xml event."), visibility(pub))] + WriteXmlEvent { source: io::Error }, + #[snafu(display("Failed to read xml event."), visibility(pub))] + ReadXmlEvent { source: quick_xml::Error }, + #[snafu(display("Failed to read bytes as utf8 string."), visibility(pub))] + BytesToUtf8 { source: FromUtf8Error }, } diff --git a/little-hesinde/src/opds/feed.rs b/little-hesinde/src/opds/feed.rs index be248c7..f0348ad 100644 --- a/little-hesinde/src/opds/feed.rs +++ b/little-hesinde/src/opds/feed.rs @@ -8,10 +8,15 @@ use quick_xml::{ se::to_string, }; use serde::Serialize; +use snafu::ResultExt; use time::OffsetDateTime; use super::{ - author::Author, entry::Entry, error::OpdsError, link::Link, media_type::MediaType, + author::Author, + entry::Entry, + error::{AsXmlError, BytesToUtf8Snafu, ReadXmlEventSnafu, ToStringSnafu, WriteXmlEventSnafu}, + link::Link, + media_type::MediaType, relation::Relation, }; @@ -84,14 +89,16 @@ impl Feed { } /// Serialize a feed to OPDS xml. 
-    pub fn as_xml(&self) -> Result<String, OpdsError> {
-        let xml = to_string(&self)?;
+    pub fn as_xml(&self) -> Result<String, AsXmlError> {
+        let xml = to_string(&self).context(ToStringSnafu)?;
         let mut reader = Reader::from_str(&xml);
         reader.config_mut().trim_text(true);
 
         let declaration = BytesDecl::new("1.0", Some("UTF-8"), None);
         let mut writer = Writer::new(Cursor::new(Vec::new()));
-        writer.write_event(Event::Decl(declaration))?;
+        writer
+            .write_event(Event::Decl(declaration))
+            .context(WriteXmlEventSnafu)?;
 
         let mut feed_start = BytesStart::new("feed");
         feed_start.push_attribute(("xmlns", "http://www.w3.org/2005/Atom"));
@@ -103,15 +110,15 @@ impl Feed {
 
         loop {
             match reader.read_event() {
-                Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => {
-                    writer.write_event(Event::Start(feed_start.clone()))?
-                }
+                Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => writer
+                    .write_event(Event::Start(feed_start.clone()))
+                    .context(WriteXmlEventSnafu)?,
                 Ok(Event::Eof) => break,
-                Ok(e) => writer.write_event(e)?,
-                Err(e) => return Err(e)?,
+                Ok(e) => writer.write_event(e).context(WriteXmlEventSnafu)?,
+                Err(e) => return Err(e).context(ReadXmlEventSnafu)?,
             }
         }
 
         let result = writer.into_inner().into_inner();
-        Ok(String::from_utf8(result)?)
+        String::from_utf8(result).context(BytesToUtf8Snafu)
     }
 }
diff --git a/little-hesinde/src/opds/search.rs b/little-hesinde/src/opds/search.rs
index 42b1cb0..a15df9b 100644
--- a/little-hesinde/src/opds/search.rs
+++ b/little-hesinde/src/opds/search.rs
@@ -3,13 +3,16 @@
 use std::io::Cursor;
 
 use quick_xml::{
+    Reader, Writer,
     events::{BytesDecl, BytesStart, Event},
     se::to_string,
-    Reader, Writer,
 };
 use serde::Serialize;
 
-use super::error::OpdsError;
+use super::error::{
+    AsXmlError, BytesToUtf8Snafu, ReadXmlEventSnafu, ToStringSnafu, WriteXmlEventSnafu,
+};
+use snafu::ResultExt;
 
 /// Url pointing to a location.
 #[derive(Debug, Serialize)]
@@ -37,29 +40,31 @@ pub struct OpenSearchDescription {
 
 impl OpenSearchDescription {
     /// Serialize search information to an open search description xml.
-    pub fn as_xml(&self) -> Result<String, OpdsError> {
-        let xml = to_string(&self)?;
+    pub fn as_xml(&self) -> Result<String, AsXmlError> {
+        let xml = to_string(&self).context(ToStringSnafu)?;
         let mut reader = Reader::from_str(&xml);
         reader.config_mut().trim_text(true);
 
         let declaration = BytesDecl::new("1.0", Some("UTF-8"), None);
         let mut writer = Writer::new(Cursor::new(Vec::new()));
-        writer.write_event(Event::Decl(declaration))?;
+        writer
+            .write_event(Event::Decl(declaration))
+            .context(WriteXmlEventSnafu)?;
 
         let mut search_start = BytesStart::new("OpenSearchDescription");
         search_start.push_attribute(("xmlns", "http://a9.com/-/spec/opensearch/1.1/"));
 
         loop {
             match reader.read_event() {
-                Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => {
-                    writer.write_event(Event::Start(search_start.clone()))?
-                }
+                Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => writer
+                    .write_event(Event::Start(search_start.clone()))
+                    .context(WriteXmlEventSnafu)?,
                 Ok(Event::Eof) => break,
-                Ok(e) => writer.write_event(e)?,
-                Err(e) => return Err(e)?,
+                Ok(e) => writer.write_event(e).context(WriteXmlEventSnafu)?,
+                Err(e) => return Err(e).context(ReadXmlEventSnafu)?,
             }
         }
 
         let result = writer.into_inner().into_inner();
-        Ok(String::from_utf8(result)?)
+        String::from_utf8(result).context(BytesToUtf8Snafu)
    }
}
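Finally, a sketch of the reworked `as_xml` in use; the identifiers mirror the patch, while the feed values and function name are placeholders.

// Sketch only, not part of the patch.
use time::OffsetDateTime;

fn demo_feed_xml(self_link: Link) -> Result<String, AsXmlError> {
    let feed = Feed::create(
        OffsetDateTime::now_utc(),
        "little-hesinde:demo",
        "Demo Feed",
        self_link,
        vec![],
        vec![],
    );
    // Each failure mode is now distinguishable instead of one opaque
    // OpdsError: serde serialization, XML event rewriting, or UTF-8
    // decoding of the writer buffer.
    feed.as_xml()
}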